Camera and editor improvements

This commit is contained in:
Ilya Laktyushin 2023-05-19 18:12:22 +04:00
parent 2749d3a2fe
commit 8408e4dda6
33 changed files with 1417 additions and 279 deletions

View File

@ -874,7 +874,7 @@ public protocol SharedAccountContext: AnyObject {
func makeStickerPackScreen(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?, mainStickerPack: StickerPackReference, stickerPacks: [StickerPackReference], loadedStickerPacks: [LoadedStickerPack], parentNavigationController: NavigationController?, sendSticker: ((FileMediaReference, UIView, CGRect) -> Bool)?) -> ViewController
func makeMediaPickerScreen(context: AccountContext, completion: @escaping (PHAsset) -> Void) -> ViewController
func makeMediaPickerScreen(context: AccountContext, completion: @escaping (Any) -> Void) -> ViewController
func makeProxySettingsController(sharedContext: SharedAccountContext, account: UnauthorizedAccount) -> ViewController

View File

@ -5,6 +5,7 @@ import AsyncDisplayKit
final class AlertControllerNode: ASDisplayNode {
var existingAlertControllerNode: AlertControllerNode?
private let dimContainerView: UIView
private let centerDimView: UIImageView
private let topDimView: UIView
private let bottomDimView: UIView
@ -26,6 +27,8 @@ final class AlertControllerNode: ASDisplayNode {
let dimColor = UIColor(white: 0.0, alpha: 0.5)
self.dimContainerView = UIView()
self.centerDimView = UIImageView()
self.centerDimView.backgroundColor = dimColor
@ -56,11 +59,12 @@ final class AlertControllerNode: ASDisplayNode {
super.init()
self.view.addSubview(self.centerDimView)
self.view.addSubview(self.topDimView)
self.view.addSubview(self.bottomDimView)
self.view.addSubview(self.leftDimView)
self.view.addSubview(self.rightDimView)
self.view.addSubview(self.dimContainerView)
self.dimContainerView.addSubview(self.centerDimView)
self.dimContainerView.addSubview(self.topDimView)
self.dimContainerView.addSubview(self.bottomDimView)
self.dimContainerView.addSubview(self.leftDimView)
self.dimContainerView.addSubview(self.rightDimView)
self.containerNode.addSubnode(self.effectNode)
self.containerNode.addSubnode(self.backgroundNode)
@ -135,6 +139,7 @@ final class AlertControllerNode: ASDisplayNode {
}
})*/
self.containerNode.layer.animateSpring(from: 0.8 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.5, initialVelocity: 0.0, removeOnCompletion: true, additive: false, completion: nil)
self.dimContainerView.layer.animateSpring(from: 0.8 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.5, initialVelocity: 0.0, removeOnCompletion: true, additive: false, completion: nil)
}
}
@ -152,6 +157,7 @@ final class AlertControllerNode: ASDisplayNode {
self.containerNode.layer.animateScale(from: 1.0, to: 0.8, duration: 0.4, removeOnCompletion: false, completion: { _ in
completion()
})
self.dimContainerView.layer.animateScale(from: 1.0, to: 0.8, duration: 0.4, removeOnCompletion: false)
}
func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) {
@ -170,11 +176,14 @@ final class AlertControllerNode: ASDisplayNode {
let containerSize = CGSize(width: contentSize.width, height: contentSize.height)
let containerFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - containerSize.width) / 2.0), y: contentAvailableFrame.minY + floor((contentAvailableFrame.size.height - containerSize.height) / 2.0)), size: containerSize)
let outerEdge: CGFloat = 100.0
transition.updateFrame(view: self.dimContainerView, frame: CGRect(origin: .zero, size: layout.size))
transition.updateFrame(view: self.centerDimView, frame: containerFrame)
transition.updateFrame(view: self.topDimView, frame: CGRect(origin: CGPoint(), size: CGSize(width: layout.size.width, height: containerFrame.minY)))
transition.updateFrame(view: self.bottomDimView, frame: CGRect(origin: CGPoint(x: 0.0, y: containerFrame.maxY), size: CGSize(width: layout.size.width, height: layout.size.height - containerFrame.maxY)))
transition.updateFrame(view: self.leftDimView, frame: CGRect(origin: CGPoint(x: 0.0, y: containerFrame.minY), size: CGSize(width: containerFrame.minX, height: containerFrame.height)))
transition.updateFrame(view: self.rightDimView, frame: CGRect(origin: CGPoint(x: containerFrame.maxX, y: containerFrame.minY), size: CGSize(width: layout.size.width - containerFrame.maxX, height: containerFrame.height)))
transition.updateFrame(view: self.topDimView, frame: CGRect(origin: CGPoint(x: -outerEdge, y: -outerEdge), size: CGSize(width: layout.size.width + outerEdge * 2.0, height: containerFrame.minY + outerEdge)))
transition.updateFrame(view: self.bottomDimView, frame: CGRect(origin: CGPoint(x: -outerEdge, y: containerFrame.maxY), size: CGSize(width: layout.size.width + outerEdge * 2.0, height: layout.size.height - containerFrame.maxY + outerEdge)))
transition.updateFrame(view: self.leftDimView, frame: CGRect(origin: CGPoint(x: -outerEdge, y: containerFrame.minY), size: CGSize(width: containerFrame.minX + outerEdge, height: containerFrame.height)))
transition.updateFrame(view: self.rightDimView, frame: CGRect(origin: CGPoint(x: containerFrame.maxX, y: containerFrame.minY), size: CGSize(width: layout.size.width - containerFrame.maxX + outerEdge, height: containerFrame.height)))
transition.updateFrame(node: self.containerNode, frame: containerFrame)
transition.updateFrame(node: self.effectNode, frame: CGRect(origin: CGPoint(), size: containerFrame.size))

View File

@ -14,6 +14,7 @@
- (instancetype)initWithImage:(UIImage *)image metadata:(PGCameraShotMetadata *)metadata;
- (instancetype)initWithExistingImage:(UIImage *)image;
- (instancetype)initWithExistingImage:(UIImage *)image identifier:(NSString *)identifier;
- (instancetype)initWithImage:(UIImage *)image rectangle:(PGRectangle *)rectangle;

View File

@ -75,6 +75,33 @@
return self;
}
// Initializes an editable-photo wrapper around an already-existing UIImage,
// reusing a caller-supplied stable identifier instead of generating a new one.
// Used when the image originates outside the photo library (e.g. a draft).
- (instancetype)initWithExistingImage:(UIImage *)image identifier:(NSString *)identifier
{
self = [super init];
if (self != nil)
{
_identifier = identifier;
_dimensions = CGSizeMake(image.size.width, image.size.height);
_thumbnail = [[SVariable alloc] init];
_existingImage = image;
// The thumbnail is produced lazily on a background queue and published
// through the SVariable, so consumers may subscribe before it is ready.
SSignal *thumbnailSignal = [[[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
{
// Target side length in pixels (points scaled by the screen scale).
CGFloat thumbnailImageSide = TGPhotoThumbnailSizeForCurrentScreen().width * TGScreenScaling();
CGSize thumbnailSize = TGScaleToSize(image.size, CGSizeMake(thumbnailImageSide, thumbnailImageSide));
UIImage *thumbnailImage = TGScaleImageToPixelSize(image, thumbnailSize);
[subscriber putNext:thumbnailImage];
[subscriber putCompletion];
return nil;
}] startOn:[SQueue concurrentDefaultQueue]];
[_thumbnail set:thumbnailSignal];
}
return self;
}
- (void)_cleanUp
{
[[NSFileManager defaultManager] removeItemAtPath:[self filePath] error:nil];

View File

@ -42,7 +42,8 @@ swift_library(
"//submodules/UndoUI:UndoUI",
"//submodules/MoreButtonNode:MoreButtonNode",
"//submodules/InvisibleInkDustNode:InvisibleInkDustNode",
"//submodules/TelegramUI/Components/CameraScreen",
"//submodules/TelegramUI/Components/CameraScreen",
"//submodules/TelegramUI/Components/MediaEditor",
],
visibility = [
"//visibility:public",

View File

@ -15,10 +15,12 @@ import PhotoResources
import InvisibleInkDustNode
import ImageBlur
import FastBlur
import MediaEditor
// Content backing a single media-picker grid cell; the Int is the item's
// index within its source collection.
enum MediaPickerGridItemContent: Equatable {
// A photo-library asset taken from the given fetch result.
case asset(PHFetchResult<PHAsset>, Int)
// A non-library media item supplied directly by the picker's subject.
case media(MediaPickerScreen.Subject.Media, Int)
// A locally saved media-editor draft (shown ahead of library assets).
case draft(MediaEditorDraft, Int)
}
final class MediaPickerGridItem: GridItem {
@ -48,23 +50,25 @@ final class MediaPickerGridItem: GridItem {
let node = MediaPickerGridItemNode()
node.setup(interaction: self.interaction, media: media, index: index, theme: self.theme, selectable: self.selectable, enableAnimations: self.enableAnimations)
return node
case let .draft(draft, index):
let node = MediaPickerGridItemNode()
node.setup(interaction: self.interaction, draft: draft, index: index, theme: self.theme, selectable: self.selectable, enableAnimations: self.enableAnimations)
return node
}
}
/// Reconfigures an already-created grid node for this item's current content.
/// The node must be a `MediaPickerGridItemNode`; any other type is a
/// programmer error, reported via assertion, and the update is skipped.
///
/// Fix: the downcast guard was hoisted to the top of the method, but stale
/// per-case copies of the same guard were left behind inside the `.asset`
/// and `.media` cases, redundantly shadowing `node`. They are dead weight
/// and have been removed; behavior is unchanged.
func update(node: GridItemNode) {
guard let node = node as? MediaPickerGridItemNode else {
assertionFailure()
return
}
switch self.content {
case let .asset(fetchResult, index):
node.setup(interaction: self.interaction, fetchResult: fetchResult, index: index, theme: self.theme, selectable: self.selectable, enableAnimations: self.enableAnimations)
case let .media(media, index):
node.setup(interaction: self.interaction, media: media, index: index, theme: self.theme, selectable: self.selectable, enableAnimations: self.enableAnimations)
case let .draft(draft, index):
node.setup(interaction: self.interaction, draft: draft, index: index, theme: self.theme, selectable: self.selectable, enableAnimations: self.enableAnimations)
}
}
}
@ -85,6 +89,7 @@ private let maskImage = generateImage(CGSize(width: 1.0, height: 24.0), opaque:
final class MediaPickerGridItemNode: GridItemNode {
var currentMediaState: (TGMediaSelectableItem, Int)?
var currentState: (PHFetchResult<PHAsset>, Int)?
var currentDraftState: (MediaEditorDraft, Int)?
var enableAnimations: Bool = true
private var selectable: Bool = false
@ -93,6 +98,7 @@ final class MediaPickerGridItemNode: GridItemNode {
private let gradientNode: ASImageNode
private let typeIconNode: ASImageNode
private let durationNode: ImmediateTextNode
private let draftNode: ImmediateTextNode
private let activateAreaNode: AccessibilityAreaNode
@ -123,6 +129,7 @@ final class MediaPickerGridItemNode: GridItemNode {
self.typeIconNode.displayWithoutProcessing = true
self.durationNode = ImmediateTextNode()
self.draftNode = ImmediateTextNode()
self.activateAreaNode = AccessibilityAreaNode()
self.activateAreaNode.accessibilityTraits = [.image]
@ -228,7 +235,7 @@ final class MediaPickerGridItemNode: GridItemNode {
self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.imageNodeTap(_:))))
}
func setup(interaction: MediaPickerInteraction, media: MediaPickerScreen.Subject.Media, index: Int, theme: PresentationTheme, selectable: Bool, enableAnimations: Bool) {
func setup(interaction: MediaPickerInteraction, draft: MediaEditorDraft, index: Int, theme: PresentationTheme, selectable: Bool, enableAnimations: Bool) {
self.interaction = interaction
self.theme = theme
self.selectable = selectable
@ -236,7 +243,34 @@ final class MediaPickerGridItemNode: GridItemNode {
self.backgroundColor = theme.list.mediaPlaceholderColor
if self.currentMediaState == nil || self.currentMediaState!.0.uniqueIdentifier != media.identifier || self.currentState!.1 != index {
if self.currentDraftState == nil || self.currentDraftState?.0.path != draft.path || self.currentDraftState!.1 != index {
let imageSignal: Signal<UIImage?, NoError> = .single(draft.thumbnail)
self.imageNode.setSignal(imageSignal)
self.currentDraftState = (draft, index)
self.setNeedsLayout()
if self.typeIconNode.supernode == nil {
self.draftNode.attributedText = NSAttributedString(string: "Draft", font: Font.semibold(12.0), textColor: .white)
self.addSubnode(self.draftNode)
self.setNeedsLayout()
}
}
self.updateSelectionState()
self.updateHiddenMedia()
}
func setup(interaction: MediaPickerInteraction, media: MediaPickerScreen.Subject.Media, index: Int, theme: PresentationTheme, selectable: Bool, enableAnimations: Bool) {
self.interaction = interaction
self.theme = theme
self.selectable = selectable
self.enableAnimations = enableAnimations
self.backgroundColor = theme.list.mediaPlaceholderColor
if self.currentMediaState == nil || self.currentMediaState!.0.uniqueIdentifier != media.identifier || self.currentMediaState!.1 != index {
self.currentMediaState = (media.asset, index)
self.setNeedsLayout()
}
@ -408,6 +442,11 @@ final class MediaPickerGridItemNode: GridItemNode {
self.durationNode.frame = CGRect(origin: CGPoint(x: self.bounds.size.width - durationSize.width - 7.0, y: self.bounds.height - durationSize.height - 5.0), size: durationSize)
}
if self.draftNode.supernode != nil {
let draftSize = self.draftNode.updateLayout(self.bounds.size)
self.draftNode.frame = CGRect(origin: CGPoint(x: 7.0, y: 5.0), size: draftSize)
}
let checkSize = CGSize(width: 29.0, height: 29.0)
self.checkNode?.frame = CGRect(origin: CGPoint(x: self.bounds.width - checkSize.width - 3.0, y: 3.0), size: checkSize)
@ -424,6 +463,10 @@ final class MediaPickerGridItemNode: GridItemNode {
}
@objc func imageNodeTap(_ recognizer: UITapGestureRecognizer) {
if let (draft, _) = self.currentDraftState {
self.interaction?.openDraft(draft, self.imageNode.image)
return
}
guard let (fetchResult, index) = self.currentState else {
return
}

View File

@ -22,10 +22,12 @@ import UndoUI
import PresentationDataUtils
import MoreButtonNode
import CameraScreen
import MediaEditor
final class MediaPickerInteraction {
let openMedia: (PHFetchResult<PHAsset>, Int, UIImage?) -> Void
let openSelectedMedia: (TGMediaSelectableItem, UIImage?) -> Void
let openDraft: (MediaEditorDraft, UIImage?) -> Void
let toggleSelection: (TGMediaSelectableItem, Bool, Bool) -> Bool
let sendSelected: (TGMediaSelectableItem?, Bool, Int32?, Bool, @escaping () -> Void) -> Void
let schedule: () -> Void
@ -34,9 +36,10 @@ final class MediaPickerInteraction {
let editingState: TGMediaEditingContext
var hiddenMediaId: String?
init(openMedia: @escaping (PHFetchResult<PHAsset>, Int, UIImage?) -> Void, openSelectedMedia: @escaping (TGMediaSelectableItem, UIImage?) -> Void, toggleSelection: @escaping (TGMediaSelectableItem, Bool, Bool) -> Bool, sendSelected: @escaping (TGMediaSelectableItem?, Bool, Int32?, Bool, @escaping () -> Void) -> Void, schedule: @escaping () -> Void, dismissInput: @escaping () -> Void, selectionState: TGMediaSelectionContext?, editingState: TGMediaEditingContext) {
init(openMedia: @escaping (PHFetchResult<PHAsset>, Int, UIImage?) -> Void, openSelectedMedia: @escaping (TGMediaSelectableItem, UIImage?) -> Void, openDraft: @escaping (MediaEditorDraft, UIImage?) -> Void, toggleSelection: @escaping (TGMediaSelectableItem, Bool, Bool) -> Bool, sendSelected: @escaping (TGMediaSelectableItem?, Bool, Int32?, Bool, @escaping () -> Void) -> Void, schedule: @escaping () -> Void, dismissInput: @escaping () -> Void, selectionState: TGMediaSelectionContext?, editingState: TGMediaEditingContext) {
self.openMedia = openMedia
self.openSelectedMedia = openSelectedMedia
self.openDraft = openDraft
self.toggleSelection = toggleSelection
self.sendSelected = sendSelected
self.schedule = schedule
@ -165,7 +168,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
public var presentWebSearch: (MediaGroupsScreen, Bool) -> Void = { _, _ in }
public var getCaptionPanelView: () -> TGCaptionPanelView? = { return nil }
public var customSelection: ((PHAsset) -> Void)? = nil
public var customSelection: ((Any) -> Void)? = nil
private var completed = false
public var legacyCompletion: (_ signals: [Any], _ silently: Bool, _ scheduleTime: Int32?, @escaping (String) -> UIView?, @escaping () -> Void) -> Void = { _, _, _, _, _ in }
@ -187,7 +190,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
enum State {
case noAccess(cameraAccess: AVAuthorizationStatus?)
case assets(fetchResult: PHFetchResult<PHAsset>?, preload: Bool, mediaAccess: PHAuthorizationStatus, cameraAccess: AVAuthorizationStatus?)
case assets(fetchResult: PHFetchResult<PHAsset>?, preload: Bool, drafts: [MediaEditorDraft], mediaAccess: PHAuthorizationStatus, cameraAccess: AVAuthorizationStatus?)
case media([Subject.Media])
}
@ -277,23 +280,29 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
let preloadPromise = self.preloadPromise
let updatedState: Signal<State, NoError>
switch controller.subject {
case let .assets(collection, _):
case let .assets(collection, mode):
let drafts: Signal<[MediaEditorDraft], NoError>
if mode == .story {
drafts = storyDrafts(engine: controller.context.engine)
} else {
drafts = .single([])
}
updatedState = combineLatest(mediaAssetsContext.mediaAccess(), mediaAssetsContext.cameraAccess())
|> mapToSignal { mediaAccess, cameraAccess -> Signal<State, NoError> in
if case .notDetermined = mediaAccess {
return .single(.assets(fetchResult: nil, preload: false, mediaAccess: mediaAccess, cameraAccess: cameraAccess))
return .single(.assets(fetchResult: nil, preload: false, drafts: [], mediaAccess: mediaAccess, cameraAccess: cameraAccess))
} else if [.restricted, .denied].contains(mediaAccess) {
return .single(.noAccess(cameraAccess: cameraAccess))
} else {
if let collection = collection {
return combineLatest(mediaAssetsContext.fetchAssets(collection), preloadPromise.get())
|> map { fetchResult, preload in
return .assets(fetchResult: fetchResult, preload: preload, mediaAccess: mediaAccess, cameraAccess: cameraAccess)
return .assets(fetchResult: fetchResult, preload: preload, drafts: [], mediaAccess: mediaAccess, cameraAccess: cameraAccess)
}
} else {
return combineLatest(mediaAssetsContext.recentAssets(), preloadPromise.get())
|> map { fetchResult, preload in
return .assets(fetchResult: fetchResult, preload: preload, mediaAccess: mediaAccess, cameraAccess: cameraAccess)
return combineLatest(mediaAssetsContext.recentAssets(), preloadPromise.get(), drafts)
|> map { fetchResult, preload, drafts in
return .assets(fetchResult: fetchResult, preload: preload, drafts: drafts, mediaAccess: mediaAccess, cameraAccess: cameraAccess)
}
}
}
@ -590,11 +599,18 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
self.requestedCameraAccess = true
self.mediaAssetsContext.requestCameraAccess()
}
case let .assets(fetchResult, preload, mediaAccess, cameraAccess):
case let .assets(fetchResult, preload, drafts, mediaAccess, cameraAccess):
if let fetchResult = fetchResult {
let totalCount = fetchResult.count
let count = preload ? min(13, totalCount) : totalCount
var draftIndex = 0
for draft in drafts {
entries.append(MediaPickerGridEntry(stableId: stableId, content: .draft(draft, draftIndex), selectable: selectable))
stableId += 1
draftIndex += 1
}
for i in 0 ..< count {
let index: Int
if case let .assets(collection, _) = controller.subject, let _ = collection {
@ -606,7 +622,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
stableId += 1
}
if case let .assets(previousFetchResult, _, _, previousCameraAccess) = previousState, previousFetchResult == nil || previousCameraAccess != cameraAccess {
if case let .assets(previousFetchResult, _, _, _, previousCameraAccess) = previousState, previousFetchResult == nil || previousCameraAccess != cameraAccess {
updateLayout = true
}
@ -848,6 +864,23 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
})
}
// Opens a previously saved editor draft via the controller's `customSelection`
// handler (the same route assets take). `openingMedia` debounces repeated
// activations while the hand-off is in flight.
// NOTE(review): `immediateThumbnail` is currently unused here — confirm
// whether the opening transition is meant to use it.
fileprivate func openDraft(draft: MediaEditorDraft, immediateThumbnail: UIImage?) {
guard let controller = self.controller, !self.openingMedia else {
return
}
// Dismiss the keyboard on the next run-loop pass so it doesn't animate
// alongside the presentation.
Queue.mainQueue().justDispatch {
self.dismissInput()
}
if let customSelection = controller.customSelection {
self.openingMedia = true
customSelection(draft)
// Re-arm after a short delay — presumably long enough for the
// transition to complete; TODO confirm the 0.3s matches the animation.
Queue.mainQueue().after(0.3) {
self.openingMedia = false
}
}
}
fileprivate func send(asFile: Bool = false, silently: Bool, scheduleTime: Int32?, animated: Bool, completion: @escaping () -> Void) {
guard let controller = self.controller, !controller.completed else {
return
@ -1064,7 +1097,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
}
var manageHeight: CGFloat = 0.0
if case let .assets(_, _, mediaAccess, cameraAccess) = self.state {
if case let .assets(_, _, _, mediaAccess, cameraAccess) = self.state {
if cameraAccess == nil {
cameraRect = nil
}
@ -1387,6 +1420,12 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
if case let .assets(_, mode) = self.subject, mode != .default {
self.navigationItem.leftBarButtonItem = UIBarButtonItem(title: self.presentationData.strings.Common_Cancel, style: .plain, target: self, action: #selector(self.cancelPressed))
if mode == .story {
self.navigationItem.rightBarButtonItem = UIBarButtonItem(customDisplayNode: self.moreButtonNode)
self.navigationItem.rightBarButtonItem?.action = #selector(self.rightButtonPressed)
self.navigationItem.rightBarButtonItem?.target = self
}
} else {
if case let .assets(collection, _) = self.subject, collection != nil {
self.navigationItem.leftBarButtonItem = UIBarButtonItem(backButtonAppearanceWithTitle: self.presentationData.strings.Common_Back, target: self, action: #selector(self.backPressed))
@ -1432,6 +1471,8 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
self?.controllerNode.openMedia(fetchResult: fetchResult, index: index, immediateThumbnail: immediateThumbnail)
}, openSelectedMedia: { [weak self] item, immediateThumbnail in
self?.controllerNode.openSelectedMedia(item: item, immediateThumbnail: immediateThumbnail)
}, openDraft: { [weak self] draft, immediateThumbnail in
self?.controllerNode.openDraft(draft: draft, immediateThumbnail: immediateThumbnail)
}, toggleSelection: { [weak self] item, value, suggestUndo in
if let self = self, let selectionState = self.interaction?.selectionState {
if let _ = item as? TGMediaPickerGalleryPhotoItem {
@ -1751,7 +1792,8 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
return
}
self.requestAttachmentMenuExpansion()
self.presentWebSearch(MediaGroupsScreen(context: self.context, updatedPresentationData: self.updatedPresentationData, mediaAssetsContext: self.controllerNode.mediaAssetsContext, openGroup: { [weak self] collection in
let groupsController = MediaGroupsScreen(context: self.context, updatedPresentationData: self.updatedPresentationData, mediaAssetsContext: self.controllerNode.mediaAssetsContext, openGroup: { [weak self] collection in
if let strongSelf = self {
let mediaPicker = MediaPickerScreen(context: strongSelf.context, updatedPresentationData: strongSelf.updatedPresentationData, peer: strongSelf.peer, threadTitle: strongSelf.threadTitle, chatLocation: strongSelf.chatLocation, bannedSendPhotos: strongSelf.bannedSendPhotos, bannedSendVideos: strongSelf.bannedSendVideos, subject: .assets(collection, mode), editingContext: strongSelf.interaction?.editingState, selectionContext: strongSelf.interaction?.selectionState)
@ -1767,7 +1809,12 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
mediaPicker.updateNavigationStack = strongSelf.updateNavigationStack
strongSelf.updateNavigationStack({ _ in return ([strongSelf, mediaPicker], strongSelf.mediaPickerContext)})
}
}), activateOnDisplay)
})
if case .story = mode {
self.present(groupsController, in: .current)
} else {
self.presentWebSearch(groupsController, activateOnDisplay)
}
}
@objc private func searchOrMorePressed(node: ContextReferenceContentNode, gesture: ContextGesture?) {
@ -2043,7 +2090,7 @@ public func wallpaperMediaPickerController(
updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil,
peer: EnginePeer,
animateAppearance: Bool,
completion: @escaping (PHAsset) -> Void = { _ in },
completion: @escaping (Any) -> Void = { _ in },
openColors: @escaping () -> Void
) -> ViewController {
let controller = AttachmentController(context: context, updatedPresentationData: updatedPresentationData, chatLocation: nil, buttons: [.standalone], initialButton: .standalone, fromMenu: false, hasTextInput: false, makeEntityInputView: {
@ -2065,7 +2112,7 @@ public func wallpaperMediaPickerController(
public func storyMediaPickerController(
context: AccountContext,
completion: @escaping (PHAsset) -> Void = { _ in }
completion: @escaping (Any) -> Void = { _ in }
) -> ViewController {
let presentationData = context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkColorPresentationTheme)
let updatedPresentationData: (PresentationData, Signal<PresentationData, NoError>) = (presentationData, .single(presentationData))

View File

@ -156,7 +156,7 @@ public final class ThemeGridController: ViewController {
let controller = MediaPickerScreen(context: strongSelf.context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper))
controller.customSelection = { [weak self] asset in
guard let strongSelf = self else {
guard let strongSelf = self, let asset = asset as? PHAsset else {
return
}
let controller = WallpaperGalleryController(context: strongSelf.context, source: .asset(asset))

View File

@ -70,6 +70,8 @@ swift_library(
"//submodules/Components/MultilineTextComponent",
"//submodules/Components/BlurredBackgroundComponent",
"//submodules/Components/LottieAnimationComponent:LottieAnimationComponent",
"//submodules/TooltipUI",
"//submodules/TelegramUI/Components/MediaEditor",
],
visibility = [
"//visibility:public",

View File

@ -15,6 +15,8 @@ import MultilineTextComponent
import BlurredBackgroundComponent
import Photos
import LottieAnimationComponent
import TooltipUI
import MediaEditor
let videoRedColor = UIColor(rgb: 0xff3b30)
@ -61,6 +63,7 @@ private let flashButtonTag = GenericComponentViewTag()
private let zoomControlTag = GenericComponentViewTag()
private let captureControlsTag = GenericComponentViewTag()
private let modeControlTag = GenericComponentViewTag()
private let galleryButtonTag = GenericComponentViewTag()
private final class CameraScreenComponent: CombinedComponent {
typealias EnvironmentType = ViewControllerComponentContainer.Environment
@ -211,6 +214,9 @@ private final class CameraScreenComponent: CombinedComponent {
if let self {
self.cameraState = self.cameraState.updatedDuration(duration)
self.updated(transition: .easeInOut(duration: 0.1))
if duration > 59.0 {
self.stopVideoRecording()
}
}
}))
self.updated(transition: .spring(duration: 0.4))
@ -231,6 +237,10 @@ private final class CameraScreenComponent: CombinedComponent {
self.cameraState = self.cameraState.updatedRecording(.handsFree)
self.updated(transition: .spring(duration: 0.4))
}
// Forwards a zoom value from the capture-controls gesture to the camera.
// NOTE(review): despite the name, `fraction` is passed straight to
// `setZoomLevel` (callers supply values >= 1.0) — confirm intended units.
func updateZoom(fraction: CGFloat) {
self.camera.setZoomLevel(fraction)
}
}
func makeState() -> State {
@ -348,7 +358,7 @@ private final class CameraScreenComponent: CombinedComponent {
// transition: context.transition
// )
// context.add(zoomControl
// .position(CGPoint(x: context.availableSize.width / 2.0, y: availableSize.height - zoomControl.size.height / 2.0 - 187.0 - environment.safeInsets.bottom))
// .position(CGPoint(x: context.availableSize.width / 2.0, y: availableSize.height - zoomControl.size.height / 2.0 - 114.0 - environment.safeInsets.bottom))
// .appear(.default(alpha: true))
// .disappear(.default(alpha: true))
// )
@ -374,6 +384,7 @@ private final class CameraScreenComponent: CombinedComponent {
shutterState: shutterState,
lastGalleryAsset: state.lastGalleryAsset,
tag: captureControlsTag,
galleryButtonTag: galleryButtonTag,
shutterTapped: { [weak state] in
guard let state else {
return
@ -420,6 +431,9 @@ private final class CameraScreenComponent: CombinedComponent {
},
swipeHintUpdated: { hint in
state.updateSwipeHint(hint)
},
zoomUpdated: { fraction in
state.updateZoom(fraction: fraction)
}
),
availableSize: availableSize,
@ -492,7 +506,7 @@ private final class CameraScreenComponent: CombinedComponent {
transition: .immediate
)
context.add(hintLabel
.position(CGPoint(x: availableSize.width / 2.0, y: availableSize.height - environment.safeInsets.bottom + 14.0 + hintLabel.size.height / 2.0))
.position(CGPoint(x: availableSize.width / 2.0, y: availableSize.height - environment.safeInsets.bottom - 136.0))
.appear(.default(alpha: true))
.disappear(.default(alpha: true))
)
@ -584,6 +598,7 @@ public class CameraScreen: ViewController {
case image(UIImage)
case video(String, PixelDimensions)
case asset(PHAsset)
case draft(MediaEditorDraft)
}
public final class TransitionIn {
@ -976,8 +991,13 @@ public class CameraScreen: ViewController {
}
}
// Hides the camera preview once the hand-off to the media editor is
// committed; the alpha is restored when returning (see animateInFromEditor).
func commitTransitionToEditor() {
self.previewContainerView.alpha = 0.0
}
private var previewSnapshotView: UIView?
func animateInFromEditor() {
self.previewContainerView.alpha = 1.0
if let snapshot = self.simplePreviewView?.snapshotView(afterScreenUpdates: false) {
self.simplePreviewView?.addSubview(snapshot)
self.previewSnapshotView = snapshot
@ -1021,6 +1041,21 @@ public class CameraScreen: ViewController {
view.animateInFromEditor(transition: transition)
}
}
// Shows a "Draft Saved" tooltip pointing down at the gallery button.
func presentDraftTooltip() {
guard let sourceView = self.componentHost.findTaggedView(tag: galleryButtonTag) else {
return
}
// Convert the gallery button's frame into window coordinates, then shift
// it into this view's horizontal space.
let parentFrame = self.view.convert(self.bounds, to: nil)
let absoluteFrame = sourceView.convert(sourceView.bounds, to: nil).offsetBy(dx: -parentFrame.minX, dy: 0.0)
// Zero-size rect: the tooltip anchors to this point rather than framing a region.
let location = CGRect(origin: CGPoint(x: absoluteFrame.midX, y: absoluteFrame.minY - 3.0), size: CGSize())
let controller = TooltipScreen(account: self.context.account, sharedContext: self.context.sharedContext, text: "Draft Saved", location: .point(location, .bottom), displayDuration: .default, inset: 16.0, shouldDismissOnTouch: { _ in
return .ignore
})
self.controller?.present(controller, in: .current)
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
let result = super.hitTest(point, with: event)
@ -1168,13 +1203,21 @@ public class CameraScreen: ViewController {
self.node.animateInFromEditor()
}
// Public wrapper: forwards the editor hand-off commit to the node.
public func commitTransitionToEditor() {
self.node.commitTransitionToEditor()
}
func presentGallery() {
var dismissGalleryControllerImpl: (() -> Void)?
let controller = self.context.sharedContext.makeMediaPickerScreen(context: self.context, completion: { [weak self] asset in
let controller = self.context.sharedContext.makeMediaPickerScreen(context: self.context, completion: { [weak self] result in
dismissGalleryControllerImpl?()
if let self {
self.node.animateOutToEditor()
self.completion(.single(.asset(asset)))
if let asset = result as? PHAsset {
self.completion(.single(.asset(asset)))
} else if let draft = result as? MediaEditorDraft {
self.completion(.single(.draft(draft)))
}
}
})
dismissGalleryControllerImpl = { [weak controller] in
@ -1182,6 +1225,10 @@ public class CameraScreen: ViewController {
}
push(controller)
}
// Public wrapper: forwards the "Draft Saved" tooltip presentation to the node.
public func presentDraftTooltip() {
self.node.presentDraftTooltip()
}
private var isDismissed = false
fileprivate func requestDismiss(animated: Bool) {

View File

@ -273,6 +273,7 @@ final class CaptureControlsComponent: Component {
let shutterState: ShutterButtonState
let lastGalleryAsset: PHAsset?
let tag: AnyObject?
let galleryButtonTag: AnyObject?
let shutterTapped: () -> Void
let shutterPressed: () -> Void
let shutterReleased: () -> Void
@ -280,22 +281,26 @@ final class CaptureControlsComponent: Component {
let flipTapped: () -> Void
let galleryTapped: () -> Void
let swipeHintUpdated: (SwipeHint) -> Void
let zoomUpdated: (CGFloat) -> Void
init(
shutterState: ShutterButtonState,
lastGalleryAsset: PHAsset?,
tag: AnyObject?,
galleryButtonTag: AnyObject?,
shutterTapped: @escaping () -> Void,
shutterPressed: @escaping () -> Void,
shutterReleased: @escaping () -> Void,
lockRecording: @escaping () -> Void,
flipTapped: @escaping () -> Void,
galleryTapped: @escaping () -> Void,
swipeHintUpdated: @escaping (SwipeHint) -> Void
swipeHintUpdated: @escaping (SwipeHint) -> Void,
zoomUpdated: @escaping (CGFloat) -> Void
) {
self.shutterState = shutterState
self.lastGalleryAsset = lastGalleryAsset
self.tag = tag
self.galleryButtonTag = galleryButtonTag
self.shutterTapped = shutterTapped
self.shutterPressed = shutterPressed
self.shutterReleased = shutterReleased
@ -303,6 +308,7 @@ final class CaptureControlsComponent: Component {
self.flipTapped = flipTapped
self.galleryTapped = galleryTapped
self.swipeHintUpdated = swipeHintUpdated
self.zoomUpdated = zoomUpdated
}
static func ==(lhs: CaptureControlsComponent, rhs: CaptureControlsComponent) -> Bool {
@ -437,6 +443,13 @@ final class CaptureControlsComponent: Component {
}
blobOffset -= self.frame.width / 2.0
var isBanding = false
if location.y < -10.0 {
let fraction = 1.0 + min(8.0, ((abs(location.y) - 10.0) / 60.0))
self.component?.zoomUpdated(fraction)
} else {
self.component?.zoomUpdated(1.0)
}
if location.x < self.frame.width / 2.0 - 20.0 {
if location.x < self.frame.width / 2.0 - 60.0 {
self.component?.swipeHintUpdated(.releaseLock)
@ -568,6 +581,7 @@ final class CaptureControlsComponent: Component {
contentMode: .scaleAspectFill
)
),
tag: component.galleryButtonTag,
action: {
component.galleryTapped()
}

View File

@ -45,7 +45,7 @@ final class ZoomComponent: Component {
}
func update(value: String, selected: Bool) {
self.setAttributedTitle(NSAttributedString(string: value, font: Font.with(size: 13.0, design: .round, weight: selected ? .semibold : .regular), textColor: selected ? UIColor(rgb: 0xf8d74a) : .white, paragraphAlignment: .center), for: .normal)
self.setAttributedTitle(NSAttributedString(string: value, font: Font.with(size: 13.0, design: .round, weight: .semibold), textColor: selected ? UIColor(rgb: 0xf8d74a) : .white, paragraphAlignment: .center), for: .normal)
}
}

View File

@ -59,6 +59,7 @@ swift_library(
"//submodules/TelegramCore:TelegramCore",
"//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
"//submodules/TelegramPresentationData:TelegramPresentationData",
"//submodules/TelegramUIPreferences:TelegramUIPreferences",
"//submodules/AccountContext:AccountContext",
"//submodules/AppBundle:AppBundle",
"//submodules/TextFormat:TextFormat",
@ -66,6 +67,7 @@ swift_library(
"//submodules/TelegramAnimatedStickerNode:TelegramAnimatedStickerNode",
"//submodules/StickerResources:StickerResources",
"//submodules/YuvConversion:YuvConversion",
"//submodules/FastBlur:FastBlur",
],
visibility = [
"//visibility:public",

View File

@ -8,12 +8,23 @@ import SwiftSignalKit
import Display
import TelegramCore
import TelegramPresentationData
import FastBlur
/// Snapshot of the editor's video playback state, used to drive the
/// trim/scrubber UI.
public struct MediaEditorPlayerState {
    /// Total duration of the current video, in seconds.
    public let duration: Double
    /// Selected trim range in seconds, or `nil` when the video is untrimmed.
    public let timeRange: Range<Double>?
    /// Current playback position, in seconds.
    public let position: Double
    /// Thumbnail frames for the scrubber strip (placeholders until generated).
    public let frames: [UIImage]
    /// Number of frames that were requested for the strip.
    public let framesCount: Int
    /// `CACurrentMediaTime()` of the last frames emission, so consumers can
    /// detect when the strip content changed.
    public let framesUpdateTimestamp: Double
}
public final class MediaEditor {
public enum Subject {
case image(UIImage, PixelDimensions)
case video(String, PixelDimensions)
case asset(PHAsset)
case draft(MediaEditorDraft)
var dimensions: PixelDimensions {
switch self {
@ -21,12 +32,15 @@ public final class MediaEditor {
return dimensions
case let .asset(asset):
return PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight))
case let .draft(draft):
return draft.dimensions
}
}
}
private let subject: Subject
private var player: AVPlayer?
private var timeObserver: Any?
private var didPlayToEndTimeObserver: NSObjectProtocol?
private weak var previewView: MediaEditorPreviewView?
@ -36,8 +50,10 @@ public final class MediaEditor {
if !self.skipRendering {
self.updateRenderChain()
}
self.valuesPromise.set(.single(self.values))
}
}
private var valuesPromise = Promise<MediaEditorValues>()
private let renderer = MediaEditorRenderer()
private let renderChain = MediaEditorRenderChain()
@ -74,6 +90,119 @@ public final class MediaEditor {
return self.renderer.finalRenderedImage()
}
private let playerPromise = Promise<AVPlayer?>()
/// Latest `(duration, position)` of the player, in seconds; every write is
/// mirrored into `playerPositionPromise` so reactive observers see updates.
private var playerPosition: (Double, Double) = (0.0, 0.0) {
    didSet {
        self.playerPositionPromise.set(.single(self.playerPosition))
    }
}
/// Reactive view of `playerPosition` for composition into signals.
private let playerPositionPromise = Promise<(Double, Double)>((0.0, 0.0))
/// Emits the current playback state (duration, position, trim range and
/// scrubber frames), or `nil` while no playable item is attached.
///
/// - Parameter framesCount: Number of thumbnail frames to generate for the
///   scrubber strip.
public func playerState(framesCount: Int) -> Signal<MediaEditorPlayerState?, NoError> {
    return self.playerPromise.get()
    |> mapToSignal { [weak self] player -> Signal<MediaEditorPlayerState?, NoError> in
        guard let self, let asset = player?.currentItem?.asset else {
            // No editor or no playable item — report an absent state.
            return .single(nil)
        }
        return combineLatest(
            self.valuesPromise.get(),
            self.playerPositionPromise.get(),
            self.videoFrames(asset: asset, count: framesCount)
        )
        |> map { values, durationAndPosition, framesAndUpdateTimestamp -> MediaEditorPlayerState? in
            let (duration, position) = durationAndPosition
            let (frames, framesUpdateTimestamp) = framesAndUpdateTimestamp
            return MediaEditorPlayerState(
                duration: duration,
                timeRange: values.videoTrimRange,
                position: position,
                frames: frames,
                framesCount: framesCount,
                framesUpdateTimestamp: framesUpdateTimestamp
            )
        }
    }
}
/// Produces a strip of `count` thumbnail images for `asset`, for the trim
/// scrubber.
///
/// Emits immediately with `count` copies of a placeholder (a blurred first
/// frame, or solid black if the first frame cannot be extracted), then emits
/// again as real frames arrive asynchronously, padding the tail with the
/// placeholder until all frames are in. Each emission carries a
/// `CACurrentMediaTime()` timestamp so consumers can detect changes.
///
/// - Parameters:
///   - asset: The video asset to sample.
///   - count: Number of evenly spaced frames; if not positive, the signal
///     completes without emitting.
/// - Returns: A signal of `(frames, updateTimestamp)` pairs.
public func videoFrames(asset: AVAsset, count: Int) -> Signal<([UIImage], Double), NoError> {
    // Cheap placeholder: downsample aggressively (to ~20px, then ~100px) and
    // blur at each step.
    func blurredImage(_ image: UIImage) -> UIImage? {
        guard let image = image.cgImage else {
            return nil
        }
        let thumbnailSize = CGSize(width: image.width, height: image.height)
        let thumbnailContextSize = thumbnailSize.aspectFilled(CGSize(width: 20.0, height: 20.0))
        if let thumbnailContext = DrawingContext(size: thumbnailContextSize, scale: 1.0) {
            thumbnailContext.withFlippedContext { c in
                c.interpolationQuality = .none
                c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContextSize))
            }
            imageFastBlur(Int32(thumbnailContextSize.width), Int32(thumbnailContextSize.height), Int32(thumbnailContext.bytesPerRow), thumbnailContext.bytes)
            let thumbnailContext2Size = thumbnailSize.aspectFitted(CGSize(width: 100.0, height: 100.0))
            if let thumbnailContext2 = DrawingContext(size: thumbnailContext2Size, scale: 1.0) {
                thumbnailContext2.withFlippedContext { c in
                    c.interpolationQuality = .none
                    if let image = thumbnailContext.generateImage()?.cgImage {
                        c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContext2Size))
                    }
                }
                imageFastBlur(Int32(thumbnailContext2Size.width), Int32(thumbnailContext2Size.height), Int32(thumbnailContext2.bytesPerRow), thumbnailContext2.bytes)
                return thumbnailContext2.generateImage()
            }
        }
        return nil
    }
    guard count > 0 else {
        return .complete()
    }
    let scale = UIScreen.main.scale
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    // Keep generated frames small — the scrubber cells are ~48x36 points.
    imageGenerator.maximumSize = CGSize(width: 48.0 * scale, height: 36.0 * scale)
    imageGenerator.appliesPreferredTrackTransform = true
    imageGenerator.requestedTimeToleranceBefore = .zero
    imageGenerator.requestedTimeToleranceAfter = .zero
    // Synchronously grab (and blur) the first frame to use as a placeholder;
    // fall back to a solid black image if extraction fails.
    var firstFrame: UIImage
    if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) {
        firstFrame = UIImage(cgImage: cgImage)
        if let blurred = blurredImage(firstFrame) {
            firstFrame = blurred
        }
    } else {
        firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)!
    }
    return Signal { subscriber in
        // Immediate placeholder emission so the UI has something to show.
        subscriber.putNext((Array(repeating: firstFrame, count: count), CACurrentMediaTime()))
        // Evenly spaced sample times across the asset's duration.
        var timestamps: [NSValue] = []
        let duration = asset.duration.seconds
        let interval = duration / Double(count)
        for i in 0 ..< count {
            timestamps.append(NSValue(time: CMTime(seconds: Double(i) * interval, preferredTimescale: CMTimeScale(60.0))))
        }
        var updatedFrames: [UIImage] = []
        // NOTE(review): failed generations are ignored — if any frame fails,
        // `updatedFrames.count` never reaches `count` and the signal never
        // completes. Also assumes callbacks arrive in request order on a
        // single queue (appending without synchronization) — confirm.
        imageGenerator.generateCGImagesAsynchronously(forTimes: timestamps) { _, image, _, _, _ in
            if let image {
                updatedFrames.append(UIImage(cgImage: image))
                if updatedFrames.count == count {
                    subscriber.putNext((updatedFrames, CACurrentMediaTime()))
                    subscriber.putCompletion()
                } else {
                    // Partial progress: pad the tail with the placeholder.
                    var tempFrames = updatedFrames
                    for _ in 0 ..< count - updatedFrames.count {
                        tempFrames.append(firstFrame)
                    }
                    subscriber.putNext((tempFrames, CACurrentMediaTime()))
                }
            }
        }
        return ActionDisposable {
            imageGenerator.cancelAllCGImageGeneration()
        }
    }
}
public init(subject: Subject, values: MediaEditorValues? = nil, hasHistogram: Bool = false) {
self.subject = subject
if let values {
@ -94,6 +223,7 @@ public final class MediaEditor {
toolValues: [:]
)
}
self.valuesPromise.set(.single(self.values))
self.renderer.addRenderChain(self.renderChain)
if hasHistogram {
@ -110,6 +240,9 @@ public final class MediaEditor {
deinit {
self.textureSourceDisposable?.dispose()
if let timeObserver = self.timeObserver {
self.player?.removeTimeObserver(timeObserver)
}
if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver {
NotificationCenter.default.removeObserver(didPlayToEndTimeObserver)
}
@ -139,6 +272,17 @@ public final class MediaEditor {
case let .image(image, _):
let colors = gradientColors(from: image)
textureSource = .single((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, colors.0, colors.1))
case let .draft(draft):
guard let image = UIImage(contentsOfFile: draft.path) else {
return
}
let colors: (UIColor, UIColor)
if let gradientColors = draft.values.gradientColors {
colors = (gradientColors.first!, gradientColors.last!)
} else {
colors = gradientColors(from: image)
}
textureSource = .single((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, colors.0, colors.1))
case let .video(path, _):
textureSource = Signal { subscriber in
let url = URL(fileURLWithPath: path)
@ -221,20 +365,27 @@ public final class MediaEditor {
let (source, image, player, topColor, bottomColor) = sourceAndColors
self.renderer.textureSource = source
self.player = player
self.playerPromise.set(.single(player))
self.gradientColorsValue = (topColor, bottomColor)
self.setGradientColors([topColor, bottomColor])
self.maybeGeneratePersonSegmentation(image)
if let player {
self.timeObserver = player.addPeriodicTimeObserver(forInterval: CMTimeMake(value: 1, timescale: 10), queue: DispatchQueue.main) { [weak self] time in
guard let self, let duration = player.currentItem?.duration.seconds else {
return
}
self.playerPosition = (duration, time.seconds)
}
self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: nil, using: { [weak self] notification in
if let strongSelf = self {
strongSelf.player?.seek(to: CMTime(seconds: 0.0, preferredTimescale: 30))
strongSelf.player?.play()
if let self {
let start = self.values.videoTrimRange?.lowerBound ?? 0.0
self.player?.seek(to: CMTime(seconds: start, preferredTimescale: 60))
self.player?.play()
}
})
} else {
self.didPlayToEndTimeObserver = nil
self.player?.play()
}
}
})
@ -272,6 +423,28 @@ public final class MediaEditor {
self.values = self.values.withUpdatedVideoIsMuted(videoIsMuted)
}
/// Seeks the player to `position` (seconds) with zero tolerance.
///
/// - Parameters:
///   - position: Target time in seconds.
///   - andPlay: When `true`, playback resumes after the seek is issued;
///     when `false`, the player is paused first.
public func seek(_ position: Double, andPlay: Bool) {
    guard let player = self.player else {
        return
    }
    if !andPlay {
        player.pause()
    }
    let targetTime = CMTime(seconds: position, preferredTimescale: CMTimeScale(60.0))
    player.seek(to: targetTime, toleranceBefore: .zero, toleranceAfter: .zero) { _ in }
    if andPlay {
        player.play()
    }
}
/// Updates the lower bound of the video trim range.
///
/// The upper bound is preserved from the current values, falling back to the
/// known duration (`playerPosition.0`) when no range is set yet. The start is
/// clamped to the end so an out-of-order pair coming from the scrubber cannot
/// form an invalid `Range` (lower > upper traps at runtime).
public func setVideoTrimStart(_ trimStart: Double) {
    let trimEnd = self.values.videoTrimRange?.upperBound ?? self.playerPosition.0
    let clampedStart = min(trimStart, trimEnd)
    self.values = self.values.withUpdatedVideoTrimRange(clampedStart ..< trimEnd)
}
/// Updates the upper bound of the video trim range.
///
/// The lower bound is preserved from the current values (0.0 when no range is
/// set yet). The end is clamped to the start so an out-of-order pair coming
/// from the scrubber cannot form an invalid `Range` (lower > upper traps at
/// runtime).
public func setVideoTrimEnd(_ trimEnd: Double) {
    let trimStart = self.values.videoTrimRange?.lowerBound ?? 0.0
    let clampedEnd = max(trimEnd, trimStart)
    self.values = self.values.withUpdatedVideoTrimRange(trimStart ..< clampedEnd)
}
/// Replaces the drawing overlay image and the list of drawing entities in the
/// current editor values.
/// NOTE(review): the `data` parameter is not used by this body — presumably it
/// is persisted by the caller; confirm whether it can be dropped here.
public func setDrawingAndEntities(data: Data?, image: UIImage?, entities: [CodableDrawingEntity]) {
    self.values = self.values.withUpdatedDrawingAndEntities(drawing: image, entities: entities)
}

View File

@ -17,6 +17,7 @@ final class MediaEditorComposer {
private let values: MediaEditorValues
private let dimensions: CGSize
private let outputDimensions: CGSize
private let ciContext: CIContext?
private var textureCache: CVMetalTextureCache?
@ -28,9 +29,10 @@ final class MediaEditorComposer {
private let drawingImage: CIImage?
private var entities: [MediaEditorComposerEntity]
init(account: Account, values: MediaEditorValues, dimensions: CGSize) {
init(account: Account, values: MediaEditorValues, dimensions: CGSize, outputDimensions: CGSize) {
self.values = values
self.dimensions = dimensions
self.outputDimensions = outputDimensions
self.renderer.addRenderChain(self.renderChain)
self.renderer.addRenderPass(ComposerRenderPass())
@ -91,7 +93,10 @@ final class MediaEditorComposer {
if let pixelBuffer {
processImage(inputImage: ciImage, time: time, completion: { compositedImage in
if let compositedImage {
if var compositedImage {
let scale = self.outputDimensions.width / self.dimensions.width
compositedImage = compositedImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
self.ciContext?.render(compositedImage, to: pixelBuffer)
completion(pixelBuffer)
} else {
@ -130,7 +135,10 @@ final class MediaEditorComposer {
if let pixelBuffer {
makeEditorImageFrameComposition(inputImage: image, gradientImage: self.gradientImage, drawingImage: self.drawingImage, dimensions: self.dimensions, values: self.values, entities: self.entities, time: time, completion: { compositedImage in
if let compositedImage {
if var compositedImage {
let scale = self.outputDimensions.width / self.dimensions.width
compositedImage = compositedImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
self.ciContext?.render(compositedImage, to: pixelBuffer)
completion(pixelBuffer, time)
} else {

View File

@ -0,0 +1,133 @@
import Foundation
import UIKit
import SwiftSignalKit
import TelegramCore
import TelegramUIPreferences
import PersistentStringHash
import Postbox
/// A persisted story draft: a media file on disk plus the editor values needed
/// to restore the editing session.
///
/// Equality is based on `path` only. Serialized via `Codable`, with the
/// thumbnail stored as JPEG data and the editor values as a nested JSON blob.
public final class MediaEditorDraft: Codable, Equatable {
    public static func == (lhs: MediaEditorDraft, rhs: MediaEditorDraft) -> Bool {
        return lhs.path == rhs.path
    }
    
    private enum CodingKeys: String, CodingKey {
        case path
        case isVideo
        case thumbnail
        case dimensionsWidth
        case dimensionsHeight
        case values
    }
    
    /// Path of the draft's media file on disk.
    public let path: String
    /// Whether the draft's media is a video (as opposed to a still image).
    public let isVideo: Bool
    /// Preview thumbnail; serialized as JPEG (quality 0.8).
    public let thumbnail: UIImage
    /// Pixel dimensions of the media.
    public let dimensions: PixelDimensions
    /// Editor state to restore when the draft is reopened.
    public let values: MediaEditorValues
    
    public init(path: String, isVideo: Bool, thumbnail: UIImage, dimensions: PixelDimensions, values: MediaEditorValues) {
        self.path = path
        self.isVideo = isVideo
        self.thumbnail = thumbnail
        self.dimensions = dimensions
        self.values = values
    }
    
    /// Decodes a draft, throwing `DecodingError` on malformed payloads instead
    /// of crashing the app (the previous implementation called `fatalError()`
    /// for corrupt thumbnail or values data).
    public init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        self.path = try container.decode(String.self, forKey: .path)
        self.isVideo = try container.decode(Bool.self, forKey: .isVideo)
        
        let thumbnailData = try container.decode(Data.self, forKey: .thumbnail)
        guard let thumbnail = UIImage(data: thumbnailData) else {
            throw DecodingError.dataCorruptedError(forKey: .thumbnail, in: container, debugDescription: "Invalid thumbnail image data")
        }
        self.thumbnail = thumbnail
        
        self.dimensions = PixelDimensions(
            width: try container.decode(Int32.self, forKey: .dimensionsWidth),
            height: try container.decode(Int32.self, forKey: .dimensionsHeight)
        )
        
        // The editor values are stored as a nested JSON blob; propagate any
        // decoding failure to the caller rather than crashing.
        let valuesData = try container.decode(Data.self, forKey: .values)
        self.values = try JSONDecoder().decode(MediaEditorValues.self, from: valuesData)
    }
    
    /// Encodes the draft. Throws if the thumbnail cannot be converted to JPEG
    /// (the previous implementation silently omitted the key, producing a
    /// payload the decoder could never read back).
    public func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        try container.encode(self.path, forKey: .path)
        try container.encode(self.isVideo, forKey: .isVideo)
        
        guard let thumbnailData = self.thumbnail.jpegData(compressionQuality: 0.8) else {
            throw EncodingError.invalidValue(self.thumbnail, EncodingError.Context(codingPath: container.codingPath + [CodingKeys.thumbnail], debugDescription: "Could not encode thumbnail as JPEG"))
        }
        try container.encode(thumbnailData, forKey: .thumbnail)
        
        try container.encode(self.dimensions.width, forKey: .dimensionsWidth)
        try container.encode(self.dimensions.height, forKey: .dimensionsHeight)
        
        let valuesData = try JSONEncoder().encode(self.values)
        try container.encode(valuesData, forKey: .values)
    }
}
/// Wraps the raw ordered-list item id (a `MemoryBuffer`) for a story draft,
/// built from the 64-bit persistent hash of the draft's path.
private struct MediaEditorDraftItemId {
    /// Raw id bytes as stored in the ordered item list.
    public let rawValue: MemoryBuffer
    
    /// The buffer's bytes reinterpreted as an `Int64` (host byte order);
    /// 0 when the buffer is empty.
    var value: Int64 {
        let data = self.rawValue.makeData()
        return data.withUnsafeBytes { raw -> Int64 in
            if let base = raw.baseAddress {
                return base.assumingMemoryBound(to: Int64.self).pointee
            }
            return 0
        }
    }
    
    init(_ rawValue: MemoryBuffer) {
        self.rawValue = rawValue
    }
    
    init(_ value: Int64) {
        var bits = value
        self.rawValue = MemoryBuffer(data: Data(bytes: &bits, count: MemoryLayout.size(ofValue: bits)))
    }
    
    init(_ value: UInt64) {
        // Reuse the Int64 path via a bit-pattern conversion.
        self.init(Int64(bitPattern: value))
    }
}
/// Inserts `item` at the front of the stored story-drafts list (or moves it
/// there if a draft with the same path already exists), keeping at most 50
/// entries.
public func addStoryDraft(engine: TelegramEngine, item: MediaEditorDraft) {
    let id = MediaEditorDraftItemId(item.path.persistentHashValue)
    let _ = engine.orderedLists.addOrMoveToFirstPosition(
        collectionId: ApplicationSpecificOrderedItemListCollectionId.storyDrafts,
        id: id.rawValue,
        item: item,
        removeTailIfCountExceeds: 50
    ).start()
}
/// Removes the draft identified by `path` from the stored list, optionally
/// deleting its media file from disk first.
public func removeStoryDraft(engine: TelegramEngine, path: String, delete: Bool) {
    if delete {
        // Best effort: a missing file is not an error worth surfacing here.
        try? FileManager.default.removeItem(atPath: path)
    }
    let id = MediaEditorDraftItemId(path.persistentHashValue)
    let _ = engine.orderedLists.removeItem(
        collectionId: ApplicationSpecificOrderedItemListCollectionId.storyDrafts,
        id: id.rawValue
    ).start()
}
/// Removes every story-draft entry from the ordered list.
/// NOTE(review): this clears the index only — unlike `removeStoryDraft`, the
/// underlying draft files on disk are not deleted here; confirm callers clean
/// those up separately.
public func clearStoryDrafts(engine: TelegramEngine) {
    let _ = engine.orderedLists.clear(collectionId: ApplicationSpecificOrderedItemListCollectionId.storyDrafts).start()
}
/// Subscribes to the stored story drafts, in list order (most recently added
/// or moved drafts first — see `addOrMoveToFirstPosition` in `addStoryDraft`).
///
/// Items whose payload cannot be decoded as `MediaEditorDraft` are skipped.
public func storyDrafts(engine: TelegramEngine) -> Signal<[MediaEditorDraft], NoError> {
    return engine.data.subscribe(TelegramEngine.EngineData.Item.OrderedLists.ListItems(collectionId: ApplicationSpecificOrderedItemListCollectionId.storyDrafts))
    |> map { items -> [MediaEditorDraft] in
        // compactMap replaces the manual accumulate loop: decode each item's
        // contents, dropping any that fail.
        return items.compactMap { $0.contents.get(MediaEditorDraft.self) }
    }
}

View File

@ -190,6 +190,10 @@ public final class MediaEditorValues: Codable {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
}
/// Returns a copy of these values with only `videoTrimRange` replaced; all
/// other fields are carried over unchanged.
func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>) -> MediaEditorValues {
    return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
}
/// Returns a copy of these values with only `drawing` and `entities` replaced;
/// all other fields are carried over unchanged.
func withUpdatedDrawingAndEntities(drawing: UIImage?, entities: [CodableDrawingEntity]) -> MediaEditorValues {
    return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, drawing: drawing, entities: entities, toolValues: self.toolValues)
}
@ -919,14 +923,14 @@ extension CodableToolValue: Codable {
public func recommendedVideoExportConfiguration(values: MediaEditorValues) -> MediaEditorVideoExport.Configuration {
let compressionProperties: [String: Any] = [
AVVideoAverageBitRateKey: 2000000,
//AVVideoProfileLevelKey: kVTProfileLevel_HEVC_Main_AutoLevel
AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
AVVideoProfileLevelKey: kVTProfileLevel_HEVC_Main_AutoLevel
//AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
//AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
]
let videoSettings: [String: Any] = [
AVVideoCodecKey: AVVideoCodecType.h264,
//AVVideoCodecKey: AVVideoCodecType.hevc,
//AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoCodecKey: AVVideoCodecType.hevc,
AVVideoCompressionPropertiesKey: compressionProperties,
AVVideoWidthKey: 720,
AVVideoHeightKey: 1280

View File

@ -191,6 +191,10 @@ public final class MediaEditorVideoExport {
}
}
/// Fixed working size the composer renders at (1080x1920 portrait); the
/// composer then scales its output toward the export `dimensions`.
/// NOTE(review): hardcoded regardless of source aspect ratio — confirm all
/// story exports are 9:16.
var composerDimensions: CGSize {
    return CGSize(width: 1080.0, height: 1920.0)
}
var dimensions: CGSize {
if let width = self.videoSettings[AVVideoWidthKey] as? Int, let height = self.videoSettings[AVVideoHeightKey] as? Int {
return CGSize(width: width, height: height)
@ -286,7 +290,7 @@ public final class MediaEditorVideoExport {
guard self.composer == nil else {
return
}
self.composer = MediaEditorComposer(account: self.account, values: self.configuration.values, dimensions: self.configuration.dimensions)
self.composer = MediaEditorComposer(account: self.account, values: self.configuration.values, dimensions: self.configuration.composerDimensions, outputDimensions: self.configuration.dimensions)
}
private func setupWithAsset(_ asset: AVAsset) {

View File

@ -210,6 +210,5 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD
/// `AVPlayerItemOutputPullDelegate`: new media data is about to become
/// available, so resume the display link and playback.
public func outputMediaDataWillChange(_ sender: AVPlayerItemOutput) {
    self.displayLink?.isPaused = false
    self.player.play()
}
}

View File

@ -20,6 +20,7 @@ import TooltipUI
import BlurredBackgroundComponent
import AvatarNode
import ShareWithPeersScreen
import PresentationDataUtils
enum DrawingScreenType {
case drawing
@ -37,6 +38,7 @@ final class MediaEditorScreenComponent: Component {
let context: AccountContext
let mediaEditor: MediaEditor?
let privacy: EngineStoryPrivacy
let timeout: Bool
let openDrawing: (DrawingScreenType) -> Void
let openTools: () -> Void
@ -44,12 +46,14 @@ final class MediaEditorScreenComponent: Component {
context: AccountContext,
mediaEditor: MediaEditor?,
privacy: EngineStoryPrivacy,
timeout: Bool,
openDrawing: @escaping (DrawingScreenType) -> Void,
openTools: @escaping () -> Void
) {
self.context = context
self.mediaEditor = mediaEditor
self.privacy = privacy
self.timeout = timeout
self.openDrawing = openDrawing
self.openTools = openTools
}
@ -61,6 +65,9 @@ final class MediaEditorScreenComponent: Component {
if lhs.privacy != rhs.privacy {
return false
}
if lhs.timeout != rhs.timeout {
return false
}
return true
}
@ -118,22 +125,34 @@ final class MediaEditorScreenComponent: Component {
}
let context: AccountContext
init(context: AccountContext) {
var playerStateDisposable: Disposable?
var playerState: MediaEditorPlayerState?
init(context: AccountContext, mediaEditor: MediaEditor?) {
self.context = context
super.init()
if let mediaEditor {
self.playerStateDisposable = (mediaEditor.playerState(framesCount: 16)
|> deliverOnMainQueue).start(next: { [weak self] playerState in
if let self {
self.playerState = playerState
self.updated()
}
})
}
}
deinit {
self.playerStateDisposable?.dispose()
}
}
func makeState() -> State {
return State(
context: self.context
context: self.context,
mediaEditor: self.mediaEditor
)
}
@ -265,6 +284,12 @@ final class MediaEditorScreenComponent: Component {
transition.setAlpha(view: view, alpha: 0.0)
transition.setScale(view: view, scale: 0.1)
}
if let view = self.scrubber.view {
view.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: 44.0), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true)
view.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2)
}
}
func animateOutToTool() {
@ -312,6 +337,11 @@ final class MediaEditorScreenComponent: Component {
transition.setAlpha(view: view, alpha: 0.0)
transition.setScale(view: view, scale: 0.1)
}
if let view = self.scrubber.view {
transition.setAlpha(view: view, alpha: 0.0)
view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2)
}
}
func animateInFromTool() {
@ -359,6 +389,11 @@ final class MediaEditorScreenComponent: Component {
transition.setAlpha(view: view, alpha: 1.0)
transition.setScale(view: view, scale: 1.0)
}
if let view = self.scrubber.view {
transition.setAlpha(view: view, alpha: 1.0)
view.layer.animateScale(from: 0.0, to: 1.0, duration: 0.2)
}
}
func update(component: MediaEditorScreenComponent, availableSize: CGSize, state: State, environment: Environment<ViewControllerComponentContainer.Environment>, transition: Transition) -> CGSize {
@ -392,7 +427,7 @@ final class MediaEditorScreenComponent: Component {
guard let controller = environment.controller() as? MediaEditorScreen else {
return
}
controller.requestDismiss(animated: true)
controller.maybePresentDiscardAlert()
}
)),
environment: {},
@ -548,16 +583,43 @@ final class MediaEditorScreenComponent: Component {
transition.setFrame(view: toolsButtonView, frame: toolsButtonFrame)
}
let mediaEditor = component.mediaEditor
var scrubberBottomInset: CGFloat = 0.0
if !"".isEmpty {
if let playerState = state.playerState {
let scrubberInset: CGFloat = 9.0
let scrubberSize = self.scrubber.update(
transition: transition,
component: AnyComponent(VideoScrubberComponent(
context: component.context,
duration: 1.0,
startPosition: 0.0,
endPosition: 1.0
duration: playerState.duration,
startPosition: playerState.timeRange?.lowerBound ?? 0.0,
endPosition: playerState.timeRange?.upperBound ?? playerState.duration,
position: playerState.position,
frames: playerState.frames,
framesUpdateTimestamp: playerState.framesUpdateTimestamp,
startPositionUpdated: { [weak mediaEditor] position, done in
if let mediaEditor {
mediaEditor.setVideoTrimStart(position)
mediaEditor.seek(position, andPlay: done)
}
},
endPositionUpdated: { [weak mediaEditor] position, done in
if let mediaEditor {
mediaEditor.setVideoTrimEnd(position)
if done {
let start = mediaEditor.values.videoTrimRange?.lowerBound ?? 0.0
mediaEditor.seek(start, andPlay: true)
} else {
mediaEditor.seek(position, andPlay: false)
}
}
},
positionUpdated: { position, done in
if let mediaEditor {
mediaEditor.seek(position, andPlay: done)
}
}
)),
environment: {},
containerSize: CGSize(width: availableSize.width - scrubberInset * 2.0, height: availableSize.height)
@ -593,16 +655,21 @@ final class MediaEditorScreenComponent: Component {
//component.presentController(c)
},
sendMessageAction: { [weak self] in
guard let _ = self else {
guard let self else {
return
}
//self.performSendMessageAction()
self.endEditing(true)
},
setMediaRecordingActive: nil,
attachmentAction: nil,
reactionAction: nil,
timeoutAction: { view in
},
audioRecorder: nil,
videoRecordingStatus: nil,
timeoutValue: 24,
timeoutSelected: component.timeout,
displayGradient: false,//component.inputHeight != 0.0,
bottomInset: 0.0 //component.inputHeight != 0.0 ? 0.0 : bottomContentInset
)),
@ -713,48 +780,49 @@ final class MediaEditorScreenComponent: Component {
transition.setAlpha(view: saveButtonView, alpha: self.inputPanelExternalState.isEditing ? 0.0 : 1.0)
}
let isVideoMuted = component.mediaEditor?.values.videoIsMuted ?? false
let muteButtonSize = self.muteButton.update(
transition: transition,
component: AnyComponent(Button(
content: AnyComponent(
LottieAnimationComponent(
animation: LottieAnimationComponent.AnimationItem(
name: "anim_storymute",
mode: .animating(loop: false),
range: isVideoMuted ? (0.0, 0.5) : (0.5, 1.0)
),
colors: ["__allcolors__": .white],
size: CGSize(width: 33.0, height: 33.0)
).tagged(muteButtonTag)
),
action: { [weak self, weak state] in
if let self, let mediaEditor = self.component?.mediaEditor {
mediaEditor.setVideoIsMuted(!mediaEditor.values.videoIsMuted)
state?.updated()
if let _ = state.playerState {
let isVideoMuted = component.mediaEditor?.values.videoIsMuted ?? false
let muteButtonSize = self.muteButton.update(
transition: transition,
component: AnyComponent(Button(
content: AnyComponent(
LottieAnimationComponent(
animation: LottieAnimationComponent.AnimationItem(
name: "anim_storymute",
mode: .animating(loop: false),
range: isVideoMuted ? (0.0, 0.5) : (0.5, 1.0)
),
colors: ["__allcolors__": .white],
size: CGSize(width: 33.0, height: 33.0)
).tagged(muteButtonTag)
),
action: { [weak self, weak state] in
if let self, let mediaEditor = self.component?.mediaEditor {
mediaEditor.setVideoIsMuted(!mediaEditor.values.videoIsMuted)
state?.updated()
}
}
)),
environment: {},
containerSize: CGSize(width: 44.0, height: 44.0)
)
let muteButtonFrame = CGRect(
origin: CGPoint(x: availableSize.width - 20.0 - muteButtonSize.width - 50.0, y: environment.safeInsets.top + 20.0 - inputPanelOffset),
size: muteButtonSize
)
if let muteButtonView = self.muteButton.view {
if muteButtonView.superview == nil {
muteButtonView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0)
muteButtonView.layer.shadowRadius = 4.0
muteButtonView.layer.shadowColor = UIColor.black.cgColor
muteButtonView.layer.shadowOpacity = 0.2
self.addSubview(muteButtonView)
}
)),
environment: {},
containerSize: CGSize(width: 44.0, height: 44.0)
)
let muteButtonFrame = CGRect(
origin: CGPoint(x: availableSize.width - 20.0 - muteButtonSize.width - 50.0, y: environment.safeInsets.top + 20.0 - inputPanelOffset),
size: muteButtonSize
)
if let muteButtonView = self.muteButton.view {
if muteButtonView.superview == nil {
muteButtonView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0)
muteButtonView.layer.shadowRadius = 4.0
muteButtonView.layer.shadowColor = UIColor.black.cgColor
muteButtonView.layer.shadowOpacity = 0.2
//self.addSubview(muteButtonView)
transition.setPosition(view: muteButtonView, position: muteButtonFrame.center)
transition.setBounds(view: muteButtonView, bounds: CGRect(origin: .zero, size: muteButtonFrame.size))
transition.setScale(view: muteButtonView, scale: self.inputPanelExternalState.isEditing ? 0.01 : 1.0)
transition.setAlpha(view: muteButtonView, alpha: self.inputPanelExternalState.isEditing ? 0.0 : 1.0)
}
transition.setPosition(view: muteButtonView, position: muteButtonFrame.center)
transition.setBounds(view: muteButtonView, bounds: CGRect(origin: .zero, size: muteButtonFrame.size))
transition.setScale(view: muteButtonView, scale: self.inputPanelExternalState.isEditing ? 0.01 : 1.0)
transition.setAlpha(view: muteButtonView, alpha: self.inputPanelExternalState.isEditing ? 0.0 : 1.0)
}
return availableSize
@ -813,6 +881,7 @@ public final class MediaEditorScreen: ViewController {
fileprivate var subject: MediaEditorScreen.Subject?
private var subjectDisposable: Disposable?
fileprivate var storyPrivacy: EngineStoryPrivacy = EngineStoryPrivacy(base: .everyone, additionallyIncludePeers: [])
fileprivate var timeout: Bool = true
private let backgroundDimView: UIView
fileprivate let componentHost: ComponentView<ViewControllerComponentContainer.Environment>
@ -983,7 +1052,17 @@ public final class MediaEditorScreen: ViewController {
}
}
let mediaEditor = MediaEditor(subject: subject.editorSubject, hasHistogram: true)
let initialValues: MediaEditorValues?
if case let .draft(draft) = subject {
initialValues = draft.values
for entity in draft.values.entities {
entitiesView.add(entity.entity, announce: false)
}
} else {
initialValues = nil
}
let mediaEditor = MediaEditor(subject: subject.editorSubject, values: initialValues, hasHistogram: true)
mediaEditor.attachPreviewView(self.previewView)
self.gradientColorsDisposable = mediaEditor.gradientColors.start(next: { [weak self] colors in
@ -998,7 +1077,10 @@ public final class MediaEditorScreen: ViewController {
self.previewContainerView.layer.allowsGroupOpacity = true
self.previewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25, completion: { _ in
self.previewContainerView.layer.allowsGroupOpacity = false
self.controller?.onReady()
})
} else {
self.controller?.onReady()
}
}
}
@ -1053,9 +1135,9 @@ public final class MediaEditorScreen: ViewController {
}
}
Queue.mainQueue().after(0.5) {
self.presentPrivacyTooltip()
}
// Queue.mainQueue().after(0.5) {
// self.presentPrivacyTooltip()
// }
}
func animateOut(finished: Bool, completion: @escaping () -> Void) {
@ -1138,6 +1220,29 @@ public final class MediaEditorScreen: ViewController {
self.controller?.present(controller, in: .current)
}
/// Shows a transient "saved to Photos" tooltip anchored just below the save
/// button.
func presentSaveTooltip() {
    // Without the tagged save button there is nothing to anchor to.
    guard let sourceView = self.componentHost.findTaggedView(tag: saveButtonTag) else {
        return
    }
    
    // Convert the button frame into window coordinates, adjusted for this
    // view's horizontal offset; the tooltip points at a zero-size rect 3pt
    // below the button.
    let parentFrame = self.view.convert(self.bounds, to: nil)
    let absoluteFrame = sourceView.convert(sourceView.bounds, to: nil).offsetBy(dx: -parentFrame.minX, dy: 0.0)
    let location = CGRect(origin: CGPoint(x: absoluteFrame.midX, y: absoluteFrame.maxY + 3.0), size: CGSize())
    
    // NOTE(review): user-facing strings are hardcoded in English rather than
    // going through presentation strings — confirm localization plan.
    let text: String
    let isVideo = self.mediaEditor?.resultIsVideo ?? false
    if isVideo {
        text = "Video saved to Photos"
    } else {
        text = "Image saved to Photos"
    }
    let controller = TooltipScreen(account: self.context.account, sharedContext: self.context.sharedContext, text: text, location: .point(location, .top), displayDuration: .default, inset: 16.0, shouldDismissOnTouch: { _ in
        return .ignore
    })
    self.controller?.present(controller, in: .current)
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
let result = super.hitTest(point, with: event)
if result == self.componentHost.view {
@ -1194,6 +1299,7 @@ public final class MediaEditorScreen: ViewController {
context: self.context,
mediaEditor: self.mediaEditor,
privacy: self.storyPrivacy,
timeout: self.timeout,
openDrawing: { [weak self] mode in
if let self {
let controller = DrawingScreen(context: self.context, sourceHint: .storyEditor, size: self.previewContainerView.frame.size, originalSize: storyDimensions, isVideo: false, isAvatar: false, drawingView: self.drawingView, entitiesView: self.entitiesView, existingStickerPickerInputData: self.stickerPickerInputData)
@ -1277,9 +1383,9 @@ public final class MediaEditorScreen: ViewController {
var bottomInputOffset: CGFloat = 0.0
if let inputHeight = layout.inputHeight, inputHeight > 0.0 {
bottomInputOffset = inputHeight - topInset
bottomInputOffset = inputHeight - topInset - 17.0
}
transition.setFrame(view: self.backgroundDimView, frame: CGRect(origin: .zero, size: layout.size))
var previewFrame = CGRect(origin: CGPoint(x: 0.0, y: topInset - bottomInputOffset), size: previewSize)
@ -1308,6 +1414,7 @@ public final class MediaEditorScreen: ViewController {
case image(UIImage, PixelDimensions)
case video(String, PixelDimensions)
case asset(PHAsset)
case draft(MediaEditorDraft)
var dimensions: PixelDimensions {
switch self {
@ -1315,6 +1422,8 @@ public final class MediaEditorScreen: ViewController {
return dimensions
case let .asset(asset):
return PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight))
case let .draft(draft):
return draft.dimensions
}
}
@ -1326,6 +1435,8 @@ public final class MediaEditorScreen: ViewController {
return .video(videoPath, dimensions)
case let .asset(asset):
return .asset(asset)
case let .draft(draft):
return .draft(draft)
}
}
@ -1337,6 +1448,8 @@ public final class MediaEditorScreen: ViewController {
return .video(videoPath, dimensions)
case let .asset(asset):
return .asset(asset)
case let .draft(draft):
return .image(draft.thumbnail, draft.dimensions)
}
}
}
@ -1361,8 +1474,9 @@ public final class MediaEditorScreen: ViewController {
}
public var sourceHint: SourceHint?
public var cancelled: () -> Void = {}
public var cancelled: (Bool) -> Void = { _ in }
public var completion: (MediaEditorScreen.Result, @escaping () -> Void, EngineStoryPrivacy) -> Void = { _, _, _ in }
public var onReady: () -> Void = {}
public init(
context: AccountContext,
@ -1378,6 +1492,7 @@ public final class MediaEditorScreen: ViewController {
self.completion = completion
super.init(navigationBarPresentationData: nil)
self.navigationPresentation = .flatModal
self.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait)
@ -1410,83 +1525,76 @@ public final class MediaEditorScreen: ViewController {
self.node.requestUpdate()
}))
})
/*enum AdditionalCategoryId: Int {
case everyone
case contacts
case closeFriends
}
let presentationData = self.context.sharedContext.currentPresentationData.with({ $0 })
let additionalCategories: [ChatListNodeAdditionalCategory] = [
ChatListNodeAdditionalCategory(
id: AdditionalCategoryId.everyone.rawValue,
icon: generateAvatarImage(size: CGSize(width: 40.0, height: 40.0), icon: generateTintedImage(image: UIImage(bundleImageName: "Chat List/Filters/Channel"), color: .white), cornerRadius: nil, color: .blue),
smallIcon: generateAvatarImage(size: CGSize(width: 22.0, height: 22.0), icon: generateTintedImage(image: UIImage(bundleImageName: "Chat List/Filters/Channel"), color: .white), iconScale: 0.6, cornerRadius: 6.0, circleCorners: true, color: .blue),
title: "Everyone",
appearance: .option(sectionTitle: "WHO CAN VIEW FOR 24 HOURS")
),
ChatListNodeAdditionalCategory(
id: AdditionalCategoryId.contacts.rawValue,
icon: generateAvatarImage(size: CGSize(width: 40.0, height: 40.0), icon: generateTintedImage(image: UIImage(bundleImageName: "Chat List/Tabs/IconContacts"), color: .white), iconScale: 1.0 * 0.8, cornerRadius: nil, color: .yellow),
smallIcon: generateAvatarImage(size: CGSize(width: 22.0, height: 22.0), icon: generateTintedImage(image: UIImage(bundleImageName: "Chat List/Tabs/IconContacts"), color: .white), iconScale: 0.6 * 0.8, cornerRadius: 6.0, circleCorners: true, color: .yellow),
title: presentationData.strings.ChatListFolder_CategoryContacts,
appearance: .option(sectionTitle: "WHO CAN VIEW FOR 24 HOURS")
),
ChatListNodeAdditionalCategory(
id: AdditionalCategoryId.closeFriends.rawValue,
icon: generateAvatarImage(size: CGSize(width: 40.0, height: 40.0), icon: generateTintedImage(image: UIImage(bundleImageName: "Call/StarHighlighted"), color: .white), iconScale: 1.0 * 0.6, cornerRadius: nil, color: .green),
smallIcon: generateAvatarImage(size: CGSize(width: 22.0, height: 22.0), icon: generateTintedImage(image: UIImage(bundleImageName: "Call/StarHighlighted"), color: .white), iconScale: 0.6 * 0.6, cornerRadius: 6.0, circleCorners: true, color: .green),
title: "Close Friends",
appearance: .option(sectionTitle: "WHO CAN VIEW FOR 24 HOURS")
)
]
let updatedPresentationData = presentationData.withUpdated(theme: defaultDarkColorPresentationTheme)
let selectionController = self.context.sharedContext.makeContactMultiselectionController(ContactMultiselectionControllerParams(context: self.context, updatedPresentationData: (initial: updatedPresentationData, signal: .single(updatedPresentationData)), mode: .chatSelection(ContactMultiselectionControllerMode.ChatSelection(
title: "Share Story",
searchPlaceholder: "Search contacts",
selectedChats: Set(),
additionalCategories: ContactMultiselectionControllerAdditionalCategories(categories: additionalCategories, selectedCategories: Set([AdditionalCategoryId.everyone.rawValue])),
chatListFilters: nil,
displayPresence: true
)), options: [], filters: [.excludeSelf], alwaysEnabled: true, limit: 1000, reachedLimit: { _ in
}))
selectionController.navigationPresentation = .modal
self.push(selectionController)
let _ = (selectionController.result
|> take(1)
|> deliverOnMainQueue).start(next: { [weak selectionController, weak self] result in
selectionController?.dismiss()
guard case let .result(peerIds, additionalCategoryIds) = result else {
return
}
var privacy = EngineStoryPrivacy(base: .everyone, additionallyIncludePeers: [])
if additionalCategoryIds.contains(AdditionalCategoryId.everyone.rawValue) {
privacy.base = .everyone
} else if additionalCategoryIds.contains(AdditionalCategoryId.contacts.rawValue) {
privacy.base = .contacts
} else if additionalCategoryIds.contains(AdditionalCategoryId.closeFriends.rawValue) {
privacy.base = .closeFriends
}
privacy.additionallyIncludePeers = peerIds.compactMap { id -> EnginePeer.Id? in
switch id {
case let .peer(peerId):
return peerId
default:
return nil
}
}
self?.node.storyPrivacy = privacy
self?.node.requestUpdate()
})*/
}
func requestDismiss(animated: Bool) {
self.cancelled()
func maybePresentDiscardAlert() {
    // A plain library asset carries no edits worth persisting: dismiss
    // immediately without prompting or saving a draft.
    if let subject = self.node.subject, case .asset = subject {
        self.requestDismiss(saveDraft: false, animated: true)
        return
    }
    // Alert wording differs when the user is editing an existing draft.
    let editingDraft: Bool
    if case .draft = self.node.subject {
        editingDraft = true
    } else {
        editingDraft = false
    }
    let title = editingDraft ? "Discard Draft?" : "Discard Media?"
    let save = editingDraft ? "Keep Draft" : "Save Draft"
    let theme = defaultDarkPresentationTheme
    let actions: [TextAlertAction] = [
        TextAlertAction(type: .destructiveAction, title: "Discard", action: { [weak self] in
            if let self {
                self.requestDismiss(saveDraft: false, animated: true)
            }
        }),
        TextAlertAction(type: .genericAction, title: save, action: { [weak self] in
            if let self {
                self.requestDismiss(saveDraft: true, animated: true)
            }
        }),
        TextAlertAction(type: .genericAction, title: "Cancel", action: {
        })
    ]
    let alertController = textAlertController(
        context: self.context,
        forceTheme: theme,
        title: title,
        text: "If you go back now, you will lose any changes that you've made.",
        actions: actions,
        actionLayout: .vertical
    )
    self.present(alertController, in: .window(.root))
}
func requestDismiss(saveDraft: Bool, animated: Bool) {
if saveDraft, let subject = self.node.subject, let values = self.node.mediaEditor?.values {
if let resultImage = self.node.mediaEditor?.resultImage {
let fittedSize = resultImage.size.aspectFitted(CGSize(width: 128.0, height: 128.0))
if case let .image(image, dimensions) = subject {
if let thumbnailImage = generateScaledImage(image: resultImage, size: fittedSize) {
let path = NSTemporaryDirectory() + "\(Int64.random(in: .min ... .max)).jpg"
if let data = image.jpegData(compressionQuality: 0.87) {
try? data.write(to: URL(fileURLWithPath: path))
let draft = MediaEditorDraft(path: path, isVideo: false, thumbnail: thumbnailImage, dimensions: dimensions, values: values)
addStoryDraft(engine: self.context.engine, item: draft)
}
}
} else if case let .draft(draft) = subject {
if let thumbnailImage = generateScaledImage(image: resultImage, size: fittedSize) {
removeStoryDraft(engine: self.context.engine, path: draft.path, delete: false)
let draft = MediaEditorDraft(path: draft.path, isVideo: draft.isVideo, thumbnail: thumbnailImage, dimensions: draft.dimensions, values: values)
addStoryDraft(engine: self.context.engine, item: draft)
}
}
}
} else {
if case let .draft(draft) = self.node.subject {
removeStoryDraft(engine: self.context.engine, path: draft.path, delete: true)
}
}
self.cancelled(saveDraft)
self.node.animateOut(finished: false, completion: { [weak self] in
self?.dismiss()
@ -1497,7 +1605,7 @@ public final class MediaEditorScreen: ViewController {
guard let mediaEditor = self.node.mediaEditor, let subject = self.node.subject else {
return
}
if mediaEditor.resultIsVideo {
let videoResult: Result.VideoResult
let duration: Double
@ -1527,12 +1635,28 @@ public final class MediaEditorScreen: ViewController {
} else {
duration = 5.0
}
case let .draft(draft):
if draft.isVideo {
videoResult = .videoFile(path: draft.path)
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = 5.0
}
} else {
videoResult = .imageFile(path: draft.path)
duration = 5.0
}
}
self.completion(.video(video: videoResult, coverImage: nil, values: mediaEditor.values, duration: duration, dimensions: PixelDimensions(width: 720, height: 1280), caption: caption), { [weak self] in
self?.node.animateOut(finished: true, completion: { [weak self] in
self?.dismiss()
})
}, self.node.storyPrivacy)
if case let .draft(draft) = subject {
removeStoryDraft(engine: self.context.engine, path: draft.path, delete: true)
}
} else {
if let image = mediaEditor.resultImage {
makeEditorImageComposition(account: self.context.account, inputImage: image, dimensions: storyDimensions, values: mediaEditor.values, time: .zero, completion: { resultImage in
@ -1542,6 +1666,9 @@ public final class MediaEditorScreen: ViewController {
self?.dismiss()
})
}, self.node.storyPrivacy)
if case let .draft(draft) = subject {
removeStoryDraft(engine: self.context.engine, path: draft.path, delete: true)
}
}
})
}
@ -1605,6 +1732,17 @@ public final class MediaEditorScreen: ViewController {
}
return EmptyDisposable
}
case let .draft(draft):
if draft.isVideo {
let asset = AVURLAsset(url: NSURL(fileURLWithPath: draft.path) as URL)
exportSubject = .single(.video(asset))
} else {
if let image = UIImage(contentsOfFile: draft.path) {
exportSubject = .single(.image(image))
} else {
fatalError()
}
}
}
let _ = exportSubject.start(next: { [weak self] exportSubject in
@ -1624,6 +1762,7 @@ public final class MediaEditorScreen: ViewController {
if case .completed = status {
self.videoExport = nil
saveToPhotos(outputPath, true)
self.node.presentSaveTooltip()
}
}
})
@ -1637,6 +1776,7 @@ public final class MediaEditorScreen: ViewController {
saveToPhotos(outputPath, false)
}
})
self.node.presentSaveTooltip()
}
}
}

View File

@ -13,6 +13,7 @@ private let handleWidth: CGFloat = 14.0
private let scrubberHeight: CGFloat = 39.0
private let borderHeight: CGFloat = 1.0 + UIScreenPixel
private let frameWidth: CGFloat = 24.0
private let minumumDuration: CGFloat = 1.0
final class VideoScrubberComponent: Component {
typealias EnvironmentType = Empty
@ -21,33 +22,71 @@ final class VideoScrubberComponent: Component {
let duration: Double
let startPosition: Double
let endPosition: Double
let position: Double
let frames: [UIImage]
let framesUpdateTimestamp: Double
let startPositionUpdated: (Double, Bool) -> Void
let endPositionUpdated: (Double, Bool) -> Void
let positionUpdated: (Double, Bool) -> Void
/// Creates the scrubber's immutable component model; plain member-wise storage,
/// no validation is performed here.
/// NOTE(review): `endPosition: Double` appears twice below (once without a
/// trailing comma) — this looks like diff/merge residue; only the
/// comma-terminated declaration should remain. Confirm against the repository.
init(
context: AccountContext,
duration: Double,
startPosition: Double,
endPosition: Double
endPosition: Double,
position: Double,
frames: [UIImage],
framesUpdateTimestamp: Double,
startPositionUpdated: @escaping (Double, Bool) -> Void,
endPositionUpdated: @escaping (Double, Bool) -> Void,
positionUpdated: @escaping (Double, Bool) -> Void
) {
self.context = context
self.duration = duration
self.startPosition = startPosition
self.endPosition = endPosition
self.position = position
self.frames = frames
self.framesUpdateTimestamp = framesUpdateTimestamp
self.startPositionUpdated = startPositionUpdated
self.endPositionUpdated = endPositionUpdated
self.positionUpdated = positionUpdated
}
static func ==(lhs: VideoScrubberComponent, rhs: VideoScrubberComponent) -> Bool {
    // `frames` is not compared element-wise; `framesUpdateTimestamp` appears to
    // act as its cheap change marker. The update closures are never compared.
    guard lhs.context === rhs.context else {
        return false
    }
    guard lhs.duration == rhs.duration else {
        return false
    }
    guard lhs.startPosition == rhs.startPosition else {
        return false
    }
    guard lhs.endPosition == rhs.endPosition else {
        return false
    }
    guard lhs.position == rhs.position else {
        return false
    }
    guard lhs.framesUpdateTimestamp == rhs.framesUpdateTimestamp else {
        return false
    }
    return true
}
final class View: UIView, UITextFieldDelegate {
private let containerView = UIView()
private let leftHandleView = UIImageView()
private let rightHandleView = UIImageView()
private let borderView = UIImageView()
private let cursorView = UIImageView()
private let transparentFramesContainer = UIView()
private let opaqueFramesContainer = UIView()
private var transparentFrameLayers: [CALayer] = []
private var opaqueFrameLayers: [CALayer] = []
private var component: VideoScrubberComponent?
private weak var state: EmptyComponentState?
@ -74,48 +113,193 @@ final class VideoScrubberComponent: Component {
let holePath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: 5.0 - UIScreenPixel, y: (size.height - holeSize.height) / 2.0), size: holeSize), cornerRadius: holeSize.width / 2.0)
context.addPath(holePath.cgPath)
context.fillPath()
})
})?.withRenderingMode(.alwaysTemplate)
self.leftHandleView.image = handleImage
self.leftHandleView.isUserInteractionEnabled = true
self.leftHandleView.tintColor = .white
self.rightHandleView.image = handleImage
self.rightHandleView.transform = CGAffineTransform(scaleX: -1.0, y: 1.0)
self.rightHandleView.isUserInteractionEnabled = true
self.rightHandleView.tintColor = .white
self.borderView.image = generateImage(CGSize(width: 1.0, height: scrubberHeight), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
context.fill(CGRect(origin: .zero, size: CGSize(width: size.width, height: borderHeight)))
context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - borderHeight), size: CGSize(width: size.width, height: scrubberHeight)))
})
})?.withRenderingMode(.alwaysTemplate)
self.borderView.tintColor = .white
self.addSubview(self.containerView)
self.transparentFramesContainer.alpha = 0.5
self.transparentFramesContainer.clipsToBounds = true
self.transparentFramesContainer.layer.cornerRadius = 9.0
self.opaqueFramesContainer.clipsToBounds = true
self.opaqueFramesContainer.layer.cornerRadius = 9.0
self.addSubview(self.transparentFramesContainer)
self.addSubview(self.opaqueFramesContainer)
self.addSubview(self.leftHandleView)
self.addSubview(self.rightHandleView)
self.addSubview(self.borderView)
self.addSubview(self.cursorView)
self.leftHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleLeftHandlePan(_:))))
self.rightHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleRightHandlePan(_:))))
//self.rightHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handlePositionHandlePan(_:))))
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private var isPanningHandle = false
@objc private func handleLeftHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
    guard let component = self.component else {
        return
    }
    // Map the touch's x coordinate into a fraction of the trimmable track.
    let touchX = gestureRecognizer.location(in: self).x
    let trackStart = handleWidth / 2.0
    let trackLength = (self.frame.width - handleWidth) - trackStart
    let timelineFraction = (touchX - trackStart) / trackLength
    // Clamp so the trim start never comes within `minumumDuration` (sic,
    // file-scope constant) of the trim end, and never goes below zero.
    var newStart = max(0.0, component.duration * timelineFraction)
    newStart = min(newStart, max(0.0, component.endPosition - minumumDuration))
    var transition: Transition = .immediate
    switch gestureRecognizer.state {
    case .began, .changed:
        self.isPanningHandle = true
        component.startPositionUpdated(newStart, false)
        // Animate the highlight in only on the first touch of the gesture.
        if case .began = gestureRecognizer.state {
            transition = .easeInOut(duration: 0.25)
        }
    case .ended, .cancelled:
        self.isPanningHandle = false
        component.startPositionUpdated(newStart, true)
        transition = .easeInOut(duration: 0.25)
    default:
        break
    }
    self.state?.updated(transition: transition)
}
@objc private func handleRightHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
    guard let component = self.component else {
        return
    }
    // Map the touch's x coordinate into a fraction of the trimmable track.
    let touchX = gestureRecognizer.location(in: self).x
    let trackStart = handleWidth / 2.0
    let trackLength = (self.frame.width - handleWidth) - trackStart
    let timelineFraction = (touchX - trackStart) / trackLength
    // Clamp so the trim end stays at least `minumumDuration` past the trim
    // start, and never exceeds the total duration.
    var newEnd = min(component.duration, component.duration * timelineFraction)
    newEnd = max(newEnd, min(component.duration, component.startPosition + minumumDuration))
    var transition: Transition = .immediate
    switch gestureRecognizer.state {
    case .began, .changed:
        self.isPanningHandle = true
        component.endPositionUpdated(newEnd, false)
        // Animate the highlight in only on the first touch of the gesture.
        if case .began = gestureRecognizer.state {
            transition = .easeInOut(duration: 0.25)
        }
    case .ended, .cancelled:
        self.isPanningHandle = false
        component.endPositionUpdated(newEnd, true)
        transition = .easeInOut(duration: 0.25)
    default:
        break
    }
    self.state?.updated(transition: transition)
}
/// Lays out the scrubber: the thumbnail film-strip layers, the left/right trim
/// handles, the border and the frames containers. Returns the scrubber size.
/// NOTE(review): several statements below appear in old/new duplicate pairs
/// (`leftHandleFrame`, `rightHandleFrame`, and the opaqueFramesContainer
/// setFrame/setBounds) — this looks like diff/merge residue that would not
/// compile as-is; confirm which of each pair is intended.
func update(component: VideoScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: Transition) -> CGSize {
let previousFramesUpdateTimestamp = self.component?.framesUpdateTimestamp
self.component = component
self.state = state
let scrubberSize = CGSize(width: availableSize.width, height: scrubberHeight)
let bounds = CGRect(origin: .zero, size: scrubberSize)
transition.setFrame(view: self.containerView, frame: bounds)
// Rebuild/update the thumbnail layers only when the frames actually changed.
if component.framesUpdateTimestamp != previousFramesUpdateTimestamp {
for i in 0 ..< component.frames.count {
let transparentFrameLayer: CALayer
let opaqueFrameLayer: CALayer
// Reuse already-created layers where possible; append new ones on demand.
if i >= self.transparentFrameLayers.count {
transparentFrameLayer = SimpleLayer()
transparentFrameLayer.masksToBounds = true
transparentFrameLayer.contentsGravity = .resizeAspectFill
self.transparentFramesContainer.layer.addSublayer(transparentFrameLayer)
self.transparentFrameLayers.append(transparentFrameLayer)
opaqueFrameLayer = SimpleLayer()
opaqueFrameLayer.masksToBounds = true
opaqueFrameLayer.contentsGravity = .resizeAspectFill
self.opaqueFramesContainer.layer.addSublayer(opaqueFrameLayer)
self.opaqueFrameLayers.append(opaqueFrameLayer)
} else {
transparentFrameLayer = self.transparentFrameLayers[i]
opaqueFrameLayer = self.opaqueFrameLayers[i]
}
transparentFrameLayer.contents = component.frames[i].cgImage
// Cross-fade the opaque layer's contents when the image changed, unless a
// contents animation is already in flight.
if let contents = opaqueFrameLayer.contents, (contents as! CGImage) !== component.frames[i].cgImage, opaqueFrameLayer.animation(forKey: "contents") == nil {
opaqueFrameLayer.contents = component.frames[i].cgImage
opaqueFrameLayer.animate(from: contents as AnyObject, to: component.frames[i].cgImage! as AnyObject, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.2)
} else {
opaqueFrameLayer.contents = component.frames[i].cgImage
}
}
}
// NOTE(review): duplicate declaration — superseded by the position-derived
// `leftHandleFrame` a few lines below; likely diff residue.
let leftHandleFrame = CGRect(origin: .zero, size: CGSize(width: handleWidth, height: scrubberSize.height))
// Handles and border are highlighted while a trim handle is being dragged.
let trimColor = self.isPanningHandle ? UIColor(rgb: 0xf8d74a) : .white
transition.setTintColor(view: self.leftHandleView, color: trimColor)
transition.setTintColor(view: self.rightHandleView, color: trimColor)
transition.setTintColor(view: self.borderView, color: trimColor)
let totalWidth = scrubberSize.width - handleWidth
let leftHandlePositionFraction = component.duration > 0.0 ? component.startPosition / component.duration : 0.0
let leftHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalWidth * leftHandlePositionFraction)
let leftHandleFrame = CGRect(origin: CGPoint(x: leftHandlePosition - handleWidth / 2.0, y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
transition.setFrame(view: self.leftHandleView, frame: leftHandleFrame)
let rightHandlePositionFraction = component.duration > 0.0 ? component.endPosition / component.duration : 1.0
let rightHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalWidth * rightHandlePositionFraction)
// NOTE(review): duplicate declaration — the clamped version on the next line
// supersedes this one; likely diff residue.
let rightHandleFrame = CGRect(origin: CGPoint(x: scrubberSize.width - handleWidth, y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
let rightHandleFrame = CGRect(origin: CGPoint(x: max(leftHandleFrame.maxX, rightHandlePosition - handleWidth / 2.0), y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
transition.setFrame(view: self.rightHandleView, frame: rightHandleFrame)
let borderFrame = CGRect(origin: CGPoint(x: leftHandleFrame.maxX, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX, height: scrubberSize.height))
transition.setFrame(view: self.borderView, frame: borderFrame)
let handleInset: CGFloat = 7.0
transition.setFrame(view: self.transparentFramesContainer, frame: bounds)
// NOTE(review): setFrame and setBounds below target the same container with
// the same rect — one of the two is likely superseded diff residue.
transition.setFrame(view: self.opaqueFramesContainer, frame: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: bounds.height)))
transition.setBounds(view: self.opaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: bounds.height)))
// Thumbnail aspect ratio is taken from the first frame, floored at 0.66.
var frameAspectRatio = 0.66
if let image = component.frames.first, image.size.height > 0.0 {
frameAspectRatio = max(0.66, image.size.width / image.size.height)
}
let frameSize = CGSize(width: 39.0 * frameAspectRatio, height: 39.0)
var frameOffset: CGFloat = 0.0
// Lay the frame thumbnails out edge-to-edge in both containers.
for i in 0 ..< component.frames.count {
if i < self.transparentFrameLayers.count {
let transparentFrameLayer = self.transparentFrameLayers[i]
let opaqueFrameLayer = self.opaqueFrameLayers[i]
let frame = CGRect(origin: CGPoint(x: frameOffset, y: 0.0), size: frameSize)
transparentFrameLayer.frame = frame
opaqueFrameLayer.frame = frame
}
frameOffset += frameSize.width
}
return scrubberSize
}
}

View File

@ -33,8 +33,11 @@ public final class MessageInputPanelComponent: Component {
public let setMediaRecordingActive: ((Bool, Bool, Bool) -> Void)?
public let attachmentAction: (() -> Void)?
public let reactionAction: ((UIView) -> Void)?
public let timeoutAction: ((UIView) -> Void)?
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public let timeoutValue: Int32?
public let timeoutSelected: Bool
public let displayGradient: Bool
public let bottomInset: CGFloat
@ -50,8 +53,11 @@ public final class MessageInputPanelComponent: Component {
setMediaRecordingActive: ((Bool, Bool, Bool) -> Void)?,
attachmentAction: (() -> Void)?,
reactionAction: ((UIView) -> Void)?,
timeoutAction: ((UIView) -> Void)?,
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?,
timeoutValue: Int32?,
timeoutSelected: Bool,
displayGradient: Bool,
bottomInset: CGFloat
) {
@ -66,8 +72,11 @@ public final class MessageInputPanelComponent: Component {
self.setMediaRecordingActive = setMediaRecordingActive
self.attachmentAction = attachmentAction
self.reactionAction = reactionAction
self.timeoutAction = timeoutAction
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
self.timeoutValue = timeoutValue
self.timeoutSelected = timeoutSelected
self.displayGradient = displayGradient
self.bottomInset = bottomInset
}
@ -97,6 +106,12 @@ public final class MessageInputPanelComponent: Component {
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
if lhs.timeoutValue != rhs.timeoutValue {
return false
}
if lhs.timeoutSelected != rhs.timeoutSelected {
return false
}
if lhs.displayGradient != rhs.displayGradient {
return false
}
@ -126,6 +141,7 @@ public final class MessageInputPanelComponent: Component {
private let inputActionButton = ComponentView<Empty>()
private let stickerButton = ComponentView<Empty>()
private let reactionButton = ComponentView<Empty>()
private let timeoutButton = ComponentView<Empty>()
private var mediaRecordingPanel: ComponentView<Empty>?
private weak var dismissingMediaRecordingPanel: UIView?
@ -473,6 +489,58 @@ public final class MessageInputPanelComponent: Component {
}
}
if let timeoutAction = component.timeoutAction, let timeoutValue = component.timeoutValue {
func generateIcon(value: Int32) -> UIImage? {
    // Base timeout glyph; the numeric value is drawn on top of it.
    let baseImage = UIImage(bundleImageName: "Media Editor/Timeout")!
    let text = "\(value)"
    // Single digits use a slightly larger font than multi-digit values.
    let attributedValue = NSAttributedString(string: text, font: Font.with(size: text.count == 1 ? 12.0 : 10.0, design: .round, weight: .semibold), textColor: .white, paragraphAlignment: .center)
    return generateImage(baseImage.size, contextGenerator: { size, context in
        let bounds = CGRect(origin: CGPoint(), size: size)
        context.clear(bounds)
        if let cgImage = baseImage.cgImage {
            context.draw(cgImage, in: CGRect(origin: .zero, size: size))
        }
        // The rect is nudged vertically so the digits sit inside the glyph.
        let textPath = CGMutablePath()
        textPath.addRect(bounds.offsetBy(dx: 0.0, dy: -3.0 - UIScreenPixel))
        let framesetter = CTFramesetterCreateWithAttributedString(attributedValue as CFAttributedString)
        let textFrame = CTFramesetterCreateFrame(framesetter, CFRangeMake(0, attributedValue.length), textPath, nil)
        CTFrameDraw(textFrame, context)
    })?.withRenderingMode(.alwaysTemplate)
}
let icon = generateIcon(value: timeoutValue)
let timeoutButtonSize = self.timeoutButton.update(
transition: transition,
component: AnyComponent(Button(
content: AnyComponent(Image(image: icon, tintColor: component.timeoutSelected ? UIColor(rgb: 0x007aff) : UIColor(white: 1.0, alpha: 0.5), size: CGSize(width: 20.0, height: 20.0))),
action: { [weak self] in
guard let self, let timeoutButtonView = self.timeoutButton.view else {
return
}
timeoutAction(timeoutButtonView)
}
).minSize(CGSize(width: 32.0, height: 32.0))),
environment: {},
containerSize: CGSize(width: 32.0, height: 32.0)
)
if let timeoutButtonView = self.timeoutButton.view {
if timeoutButtonView.superview == nil {
self.addSubview(timeoutButtonView)
}
let timeoutIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - timeoutButtonSize.width, y: fieldFrame.minY + 1.0 + floor((fieldFrame.height - timeoutButtonSize.height) * 0.5)), size: timeoutButtonSize)
transition.setPosition(view: timeoutButtonView, position: timeoutIconFrame.center)
transition.setBounds(view: timeoutButtonView, bounds: CGRect(origin: CGPoint(), size: timeoutIconFrame.size))
transition.setAlpha(view: timeoutButtonView, alpha: self.textFieldExternalState.isEditing ? 0.0 : 1.0)
transition.setScale(view: timeoutButtonView, scale: self.textFieldExternalState.isEditing ? 0.1 : 1.0)
fieldIconNextX -= timeoutButtonSize.width + 2.0
}
}
self.fieldBackgroundView.updateColor(color: self.textFieldExternalState.isEditing || component.style == .editor ? UIColor(white: 0.0, alpha: 0.5) : UIColor(white: 1.0, alpha: 0.09), transition: transition.containedViewLayoutTransition)
transition.setAlpha(view: self.fieldBackgroundView, alpha: hasMediaRecording ? 0.0 : 1.0)
if let placeholder = self.placeholder.view, let vibrancyPlaceholderView = self.vibrancyPlaceholder.view {

View File

@ -841,8 +841,11 @@ public final class StoryItemSetContainerComponent: Component {
self.state?.updated(transition: Transition(animation: .curve(duration: 0.25, curve: .easeInOut)))
})
},
timeoutAction: nil,
audioRecorder: self.sendMessageContext.audioRecorderValue,
videoRecordingStatus: self.sendMessageContext.videoRecorderValue?.audioStatus,
timeoutValue: nil,
timeoutSelected: false,
displayGradient: component.inputHeight != 0.0,
bottomInset: component.inputHeight != 0.0 ? 0.0 : bottomContentInset
)),

View File

@ -327,7 +327,7 @@ public final class StoryPeerListItemComponent: Component {
transition.setScale(view: self.avatarContainer, scale: scaledAvatarSize / avatarSize.width)
if component.peer.id == component.context.account.peerId && !component.hasItems {
if component.peer.id == component.context.account.peerId && !component.hasItems && component.progress == nil {
self.indicatorColorLayer.isHidden = true
let avatarAddBadgeView: UIImageView
@ -408,7 +408,7 @@ public final class StoryPeerListItemComponent: Component {
let avatarPath = CGMutablePath()
avatarPath.addEllipse(in: CGRect(origin: CGPoint(), size: avatarSize).insetBy(dx: -1.0, dy: -1.0))
if component.peer.id == component.context.account.peerId && !component.hasItems {
if component.peer.id == component.context.account.peerId && !component.hasItems && component.progress == nil {
let cutoutSize: CGFloat = 18.0 + UIScreenPixel * 2.0
avatarPath.addEllipse(in: CGRect(origin: CGPoint(x: avatarSize.width - cutoutSize + UIScreenPixel, y: avatarSize.height - cutoutSize + UIScreenPixel), size: CGSize(width: cutoutSize, height: cutoutSize)))
} else if let mappedRightCenter {

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "time.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,185 @@
%PDF-1.7
1 0 obj
<< /Type /XObject
/Length 2 0 R
/Group << /Type /Group
/S /Transparency
/I true
>>
/Subtype /Form
/Resources << >>
/BBox [ 0.000000 0.000000 20.000000 20.000000 ]
>>
stream
q
1.000000 0.000000 -0.000000 1.000000 0.170000 0.170000 cm
1.000000 1.000000 1.000000 scn
9.619128 17.997332 m
9.689203 17.999107 9.759498 18.000000 9.830000 18.000000 c
9.998494 18.000000 10.165698 17.994911 10.331472 17.984888 c
10.789033 17.957226 11.182384 18.305727 11.210047 18.763288 c
11.237709 19.220850 10.889208 19.614201 10.431647 19.641863 c
10.232541 19.653900 10.031932 19.660000 9.830000 19.660000 c
4.401041 19.660000 0.000000 15.258959 0.000000 9.830000 c
0.000000 4.549489 4.163650 0.241449 9.387031 0.009802 c
9.533873 0.003290 9.681552 0.000000 9.830000 0.000000 c
10.031932 0.000000 10.232541 0.006100 10.431646 0.018137 c
10.889208 0.045799 11.237709 0.439150 11.210047 0.896711 c
11.182384 1.354273 10.789033 1.702774 10.331472 1.675112 c
10.165698 1.665089 9.998494 1.660000 9.830000 1.660000 c
9.710889 1.660000 9.592374 1.662548 9.474505 1.667595 c
5.197033 1.850725 1.771516 5.322806 1.662668 9.619128 c
1.660893 9.689203 1.660000 9.759498 1.660000 9.830000 c
1.660000 9.912314 1.661217 9.994345 1.663635 10.076074 c
1.791672 14.404406 5.286234 17.887556 9.619128 17.997332 c
h
14.215949 18.629501 m
13.805835 18.834278 13.307367 18.667820 13.102591 18.257706 c
12.897814 17.847591 13.064272 17.349125 13.474386 17.144348 c
13.773925 16.994783 14.063067 16.827311 14.340406 16.643307 c
14.722379 16.389881 15.237470 16.494089 15.490895 16.876060 c
15.744320 17.258034 15.640112 17.773125 15.258140 18.026550 c
14.924556 18.247871 14.576603 18.449421 14.215949 18.629501 c
h
18.026550 15.258140 m
17.773125 15.640112 17.258034 15.744320 16.876060 15.490895 c
16.494089 15.237471 16.389881 14.722379 16.643307 14.340406 c
16.827311 14.063068 16.994783 13.773926 17.144346 13.474386 c
17.349125 13.064272 17.847591 12.897814 18.257706 13.102591 c
18.667820 13.307367 18.834278 13.805836 18.629501 14.215949 c
18.449421 14.576603 18.247871 14.924557 18.026550 15.258140 c
h
19.641863 10.431646 m
19.614201 10.889208 19.220850 11.237709 18.763288 11.210047 c
18.305727 11.182384 17.957226 10.789033 17.984888 10.331472 c
17.994911 10.165698 18.000000 9.998494 18.000000 9.830000 c
18.000000 9.661506 17.994911 9.494302 17.984888 9.328527 c
17.957226 8.870967 18.305727 8.477615 18.763288 8.449953 c
19.220850 8.422291 19.614201 8.770792 19.641863 9.228353 c
19.653900 9.427459 19.660000 9.628068 19.660000 9.830000 c
19.660000 10.031932 19.653900 10.232541 19.641863 10.431646 c
h
18.629501 5.444051 m
18.834278 5.854165 18.667820 6.352633 18.257706 6.557409 c
17.847591 6.762186 17.349125 6.595728 17.144348 6.185614 c
16.994783 5.886075 16.827311 5.596932 16.643307 5.319593 c
16.389881 4.937621 16.494089 4.422530 16.876062 4.169105 c
17.258034 3.915680 17.773125 4.019888 18.026550 4.401860 c
18.247871 4.735444 18.449421 5.083397 18.629501 5.444051 c
h
15.258140 1.633450 m
15.640112 1.886875 15.744320 2.401966 15.490895 2.783939 c
15.237471 3.165911 14.722379 3.270119 14.340406 3.016693 c
14.063068 2.832689 13.773925 2.665216 13.474386 2.515654 c
13.064273 2.310875 12.897814 1.812408 13.102591 1.402294 c
13.307367 0.992180 13.805835 0.825722 14.215949 1.030499 c
14.576604 1.210579 14.924557 1.412128 15.258140 1.633450 c
h
f*
n
Q
endstream
endobj
2 0 obj
3166
endobj
3 0 obj
<< /Type /XObject
/Length 4 0 R
/Group << /Type /Group
/S /Transparency
/I true
>>
/Subtype /Form
/Resources << /XObject << /X1 1 0 R >>
/ExtGState << /E2 << /ca 1.000000 >>
/E1 << /BM /Overlay >>
>>
>>
/BBox [ 0.000000 0.000000 20.000000 20.000000 ]
>>
stream
q
/E1 gs
/E2 gs
/X1 Do
Q
endstream
endobj
4 0 obj
25
endobj
5 0 obj
<< /XObject << /X1 3 0 R >>
/ExtGState << /E1 << /ca 1.000000 >> >>
>>
endobj
6 0 obj
<< /Length 7 0 R >>
stream
/DeviceRGB CS
/DeviceRGB cs
q
/E1 gs
/X1 Do
Q
endstream
endobj
7 0 obj
46
endobj
8 0 obj
<< /Annots []
/Type /Page
/MediaBox [ 0.000000 0.000000 20.000000 20.000000 ]
/Resources 5 0 R
/Contents 6 0 R
/Parent 9 0 R
>>
endobj
9 0 obj
<< /Kids [ 8 0 R ]
/Count 1
/Type /Pages
>>
endobj
10 0 obj
<< /Pages 9 0 R
/Type /Catalog
>>
endobj
xref
0 11
0000000000 65535 f
0000000010 00000 n
0000003447 00000 n
0000003470 00000 n
0000003952 00000 n
0000003973 00000 n
0000004069 00000 n
0000004171 00000 n
0000004192 00000 n
0000004365 00000 n
0000004439 00000 n
trailer
<< /ID [ (some) (id) ]
/Root 10 0 R
/Size 11
>>
startxref
4499
%%EOF

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -18615,8 +18615,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
updatedPresentationData: strongSelf.updatedPresentationData,
peer: EnginePeer(peer),
animateAppearance: animateAppearance,
completion: { [weak self] asset in
guard let strongSelf = self else {
completion: { [weak self] result in
guard let strongSelf = self, let asset = result as? PHAsset else {
return
}
let controller = WallpaperGalleryController(context: strongSelf.context, source: .asset(asset), mode: .peer(EnginePeer(peer), false))

View File

@ -1828,7 +1828,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
return StickerPackScreen(context: context, updatedPresentationData: updatedPresentationData, mainStickerPack: mainStickerPack, stickerPacks: stickerPacks, loadedStickerPacks: loadedStickerPacks, parentNavigationController: parentNavigationController, sendSticker: sendSticker)
}
public func makeMediaPickerScreen(context: AccountContext, completion: @escaping (PHAsset) -> Void) -> ViewController {
public func makeMediaPickerScreen(context: AccountContext, completion: @escaping (Any) -> Void) -> ViewController {
return storyMediaPickerController(context: context, completion: completion)
}

View File

@ -300,6 +300,8 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
var presentImpl: ((ViewController) -> Void)?
var returnToCameraImpl: (() -> Void)?
var dismissCameraImpl: (() -> Void)?
var hideCameraImpl: (() -> Void)?
var showDraftTooltipImpl: (() -> Void)?
let cameraController = CameraScreen(
context: context,
mode: .story,
@ -326,78 +328,92 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
}
},
completion: { result in
let subject: Signal<MediaEditorScreen.Subject?, NoError> = result
|> map { value -> MediaEditorScreen.Subject? in
switch value {
case .pendingImage:
return nil
case let .image(image):
return .image(image, PixelDimensions(image.size))
case let .video(path, dimensions):
return .video(path, dimensions)
case let .asset(asset):
return .asset(asset)
}
}
let controller = MediaEditorScreen(context: context, subject: subject, transitionIn: nil, transitionOut: { finished in
if finished, let transitionOut = transitionOut(true), let destinationView = transitionOut.destinationView {
return MediaEditorScreen.TransitionOut(
destinationView: destinationView,
destinationRect: transitionOut.destinationRect,
destinationCornerRadius: transitionOut.destinationCornerRadius
)
} else {
return nil
}
}, completion: { [weak self] mediaResult, commit, privacy in
guard let self else {
dismissCameraImpl?()
commit()
return
}
if let chatListController = self.chatListController as? ChatListControllerImpl, let storyListContext = chatListController.storyListContext {
switch mediaResult {
case let .image(image, dimensions, caption):
if let data = image.jpegData(compressionQuality: 0.8) {
storyListContext.upload(media: .image(dimensions: dimensions, data: data), text: caption?.string ?? "", entities: [], privacy: privacy)
Queue.mainQueue().after(0.2, { [weak chatListController] in
chatListController?.animateStoryUploadRipple()
})
}
case let .video(content, _, values, duration, dimensions, caption):
let adjustments: VideoMediaResourceAdjustments
if let valuesData = try? JSONEncoder().encode(values) {
let data = MemoryBuffer(data: valuesData)
let digest = MemoryBuffer(data: data.md5Digest())
adjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: true)
let resource: TelegramMediaResource
switch content {
case let .imageFile(path):
resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments)
case let .videoFile(path):
resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments)
case let .asset(localIdentifier):
resource = VideoLibraryMediaResource(localIdentifier: localIdentifier, conversion: .compress(adjustments))
}
storyListContext.upload(media: .video(dimensions: dimensions, duration: Int(duration), resource: resource), text: caption?.string ?? "", entities: [], privacy: privacy)
Queue.mainQueue().after(0.2, { [weak chatListController] in
chatListController?.animateStoryUploadRipple()
})
}
let subject: Signal<MediaEditorScreen.Subject?, NoError> = result
|> map { value -> MediaEditorScreen.Subject? in
switch value {
case .pendingImage:
return nil
case let .image(image):
return .image(image, PixelDimensions(image.size))
case let .video(path, dimensions):
return .video(path, dimensions)
case let .asset(asset):
return .asset(asset)
case let .draft(draft):
return .draft(draft)
}
}
dismissCameraImpl?()
commit()
})
controller.sourceHint = .camera
controller.cancelled = {
returnToCameraImpl?()
let controller = MediaEditorScreen(
context: context,
subject: subject,
transitionIn: nil,
transitionOut: { finished in
if finished, let transitionOut = transitionOut(true), let destinationView = transitionOut.destinationView {
return MediaEditorScreen.TransitionOut(
destinationView: destinationView,
destinationRect: transitionOut.destinationRect,
destinationCornerRadius: transitionOut.destinationCornerRadius
)
} else {
return nil
}
}, completion: { [weak self] mediaResult, commit, privacy in
guard let self else {
dismissCameraImpl?()
commit()
return
}
if let chatListController = self.chatListController as? ChatListControllerImpl, let storyListContext = chatListController.storyListContext {
switch mediaResult {
case let .image(image, dimensions, caption):
if let data = image.jpegData(compressionQuality: 0.8) {
storyListContext.upload(media: .image(dimensions: dimensions, data: data), text: caption?.string ?? "", entities: [], privacy: privacy)
Queue.mainQueue().after(0.2, { [weak chatListController] in
chatListController?.animateStoryUploadRipple()
})
}
case let .video(content, _, values, duration, dimensions, caption):
let adjustments: VideoMediaResourceAdjustments
if let valuesData = try? JSONEncoder().encode(values) {
let data = MemoryBuffer(data: valuesData)
let digest = MemoryBuffer(data: data.md5Digest())
adjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: true)
let resource: TelegramMediaResource
switch content {
case let .imageFile(path):
resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments)
case let .videoFile(path):
resource = LocalFileVideoMediaResource(randomId: Int64.random(in: .min ... .max), path: path, adjustments: adjustments)
case let .asset(localIdentifier):
resource = VideoLibraryMediaResource(localIdentifier: localIdentifier, conversion: .compress(adjustments))
}
storyListContext.upload(media: .video(dimensions: dimensions, duration: Int(duration), resource: resource), text: caption?.string ?? "", entities: [], privacy: privacy)
Queue.mainQueue().after(0.2, { [weak chatListController] in
chatListController?.animateStoryUploadRipple()
})
}
}
}
dismissCameraImpl?()
commit()
}
)
controller.sourceHint = .camera
controller.cancelled = { showDraftTooltip in
if showDraftTooltip {
showDraftTooltipImpl?()
}
returnToCameraImpl?()
}
controller.onReady = {
hideCameraImpl?()
}
presentImpl?(controller)
}
presentImpl?(controller)
})
)
controller.push(cameraController)
presentImpl = { [weak cameraController] c in
if let navigationController = cameraController?.navigationController as? NavigationController {
@ -414,6 +430,16 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
cameraController.returnFromEditor()
}
}
hideCameraImpl = { [weak cameraController] in
if let cameraController {
cameraController.commitTransitionToEditor()
}
}
showDraftTooltipImpl = { [weak cameraController] in
if let cameraController {
cameraController.presentDraftTooltip()
}
}
}
public func openSettings() {

View File

@ -95,6 +95,7 @@ private enum ApplicationSpecificOrderedItemListCollectionIdValues: Int32 {
case wallpaperSearchRecentQueries = 1
case settingsSearchRecentItems = 2
case localThemes = 3
case storyDrafts = 4
}
public struct ApplicationSpecificOrderedItemListCollectionId {
@ -102,4 +103,5 @@ public struct ApplicationSpecificOrderedItemListCollectionId {
public static let wallpaperSearchRecentQueries = applicationSpecificOrderedItemListCollectionId(ApplicationSpecificOrderedItemListCollectionIdValues.wallpaperSearchRecentQueries.rawValue)
public static let settingsSearchRecentItems = applicationSpecificOrderedItemListCollectionId(ApplicationSpecificOrderedItemListCollectionIdValues.settingsSearchRecentItems.rawValue)
public static let localThemes = applicationSpecificOrderedItemListCollectionId(ApplicationSpecificOrderedItemListCollectionIdValues.localThemes.rawValue)
public static let storyDrafts = applicationSpecificOrderedItemListCollectionId(ApplicationSpecificOrderedItemListCollectionIdValues.storyDrafts.rawValue)
}

View File

@ -580,8 +580,12 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
}
if event.type == .touches || eventIsPresses {
if case .manual = self.displayDuration {
self.requestDismiss()
return self.view
if self.containerNode.frame.contains(point) {
self.requestDismiss()
return self.view
} else {
return nil
}
}
switch self.shouldDismissOnTouch(point) {
case .ignore: