mirror of
https://github.com/Swiftgram/Telegram-iOS.git
synced 2025-06-15 21:45:19 +00:00
[WIP] Stories
This commit is contained in:
parent
f72f2e3c60
commit
862cb0b366
@ -1393,12 +1393,13 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
|
||||
let storyContainerScreen = StoryContainerScreen(
|
||||
context: self.context,
|
||||
initialFocusedId: AnyHashable(peerId),
|
||||
initialContent: initialContent,
|
||||
transitionIn: nil,
|
||||
transitionOut: { _ in
|
||||
transitionOut: { _, _ in
|
||||
return nil
|
||||
}
|
||||
)
|
||||
@ -2481,28 +2482,18 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
|
||||
initialFocusedId: initialFocusedId,
|
||||
initialContent: initialContent,
|
||||
transitionIn: transitionIn,
|
||||
transitionOut: { [weak self] peerId in
|
||||
transitionOut: { [weak self] peerId, _ in
|
||||
guard let self else {
|
||||
return nil
|
||||
}
|
||||
|
||||
if let componentView = self.headerContentView.view as? ChatListHeaderComponent.View {
|
||||
if let transitionView = componentView.storyPeerListView()?.transitionViewForItem(peerId: peerId) {
|
||||
//let localRect = transitionView.convert(transitionView.bounds, to: self.view)
|
||||
|
||||
/*Queue.mainQueue().after(0.2 * UIView.animationDurationFactor, { [weak self] in
|
||||
HapticFeedback().impact()
|
||||
self?.animateRipple(centerLocation: localRect.center)
|
||||
})*/
|
||||
|
||||
return StoryContainerScreen.TransitionOut(
|
||||
destinationView: transitionView,
|
||||
destinationRect: transitionView.bounds,
|
||||
destinationCornerRadius: transitionView.bounds.height * 0.5,
|
||||
completed: { [weak self] in
|
||||
let _ = self
|
||||
//self?.animateRipple(centerLocation: localRect.center)
|
||||
}
|
||||
completed: {}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -432,7 +432,7 @@ public struct Transition {
|
||||
self.setTransform(layer: view.layer, transform: transform, completion: completion)
|
||||
}
|
||||
|
||||
public func setTransformAsKeyframes(view: UIView, transform: (CGFloat) -> CATransform3D, completion: ((Bool) -> Void)? = nil) {
|
||||
public func setTransformAsKeyframes(view: UIView, transform: (CGFloat, Bool) -> CATransform3D, completion: ((Bool) -> Void)? = nil) {
|
||||
self.setTransformAsKeyframes(layer: view.layer, transform: transform, completion: completion)
|
||||
}
|
||||
|
||||
@ -477,8 +477,8 @@ public struct Transition {
|
||||
}
|
||||
}
|
||||
|
||||
public func setTransformAsKeyframes(layer: CALayer, transform: (CGFloat) -> CATransform3D, completion: ((Bool) -> Void)? = nil) {
|
||||
let finalTransform = transform(1.0)
|
||||
public func setTransformAsKeyframes(layer: CALayer, transform: (CGFloat, Bool) -> CATransform3D, completion: ((Bool) -> Void)? = nil) {
|
||||
let finalTransform = transform(1.0, true)
|
||||
|
||||
let t = layer.presentation()?.transform ?? layer.transform
|
||||
if CATransform3DEqualToTransform(t, finalTransform) {
|
||||
@ -495,7 +495,7 @@ public struct Transition {
|
||||
|
||||
switch self.animation {
|
||||
case .none:
|
||||
layer.transform = transform(1.0)
|
||||
layer.transform = transform(1.0, true)
|
||||
completion?(true)
|
||||
case let .curve(duration, curve):
|
||||
let framesPerSecond: CGFloat
|
||||
@ -507,7 +507,7 @@ public struct Transition {
|
||||
|
||||
let numValues = Int(framesPerSecond * duration)
|
||||
if numValues == 0 {
|
||||
layer.transform = transform(1.0)
|
||||
layer.transform = transform(1.0, true)
|
||||
completion?(true)
|
||||
return
|
||||
}
|
||||
@ -516,10 +516,10 @@ public struct Transition {
|
||||
|
||||
for i in 0 ... numValues {
|
||||
let t = curve.solve(at: CGFloat(i) / CGFloat(numValues))
|
||||
values.append(NSValue(caTransform3D: transform(t)))
|
||||
values.append(NSValue(caTransform3D: transform(t, false)))
|
||||
}
|
||||
|
||||
layer.transform = transform(1.0)
|
||||
layer.transform = transform(1.0, true)
|
||||
layer.animateKeyframes(
|
||||
values: values,
|
||||
duration: duration,
|
||||
|
@ -111,6 +111,10 @@ public final class Button: Component {
|
||||
public final class View: UIButton, ComponentTaggedView {
|
||||
private let contentView: ComponentHostView<Empty>
|
||||
|
||||
public var content: UIView? {
|
||||
return self.contentView.componentView
|
||||
}
|
||||
|
||||
private var component: Button?
|
||||
private var currentIsHighlighted: Bool = false {
|
||||
didSet {
|
||||
|
@ -76,8 +76,12 @@ private func storeImage(context: DrawingContext, mediaBox: MediaBox, resourceId:
|
||||
switch imageType {
|
||||
case .blurredThumbnail:
|
||||
representationId = "blurred32"
|
||||
case let .square(width):
|
||||
representationId = "shm\(width)"
|
||||
case let .square(width, aspectRatio):
|
||||
if aspectRatio == 1.0 {
|
||||
representationId = "shm\(width)"
|
||||
} else {
|
||||
representationId = "shm\(width)-\(aspectRatio)"
|
||||
}
|
||||
}
|
||||
let path = mediaBox.cachedRepresentationPathForId(resourceId.stringRepresentation, representationId: representationId, keepDuration: .general)
|
||||
|
||||
@ -229,7 +233,7 @@ public final class DirectMediaImageCache {
|
||||
|
||||
fileprivate enum ImageType {
|
||||
case blurredThumbnail
|
||||
case square(width: Int)
|
||||
case square(width: Int, aspectRatio: CGFloat)
|
||||
}
|
||||
|
||||
private let account: Account
|
||||
@ -243,13 +247,17 @@ public final class DirectMediaImageCache {
|
||||
switch imageType {
|
||||
case .blurredThumbnail:
|
||||
representationId = "blurred32"
|
||||
case let .square(width):
|
||||
representationId = "shm\(width)"
|
||||
case let .square(width, aspectRatio):
|
||||
if aspectRatio == 1.0 {
|
||||
representationId = "shm\(width)"
|
||||
} else {
|
||||
representationId = "shm\(width)-\(aspectRatio)"
|
||||
}
|
||||
}
|
||||
return self.account.postbox.mediaBox.cachedRepresentationPathForId(resourceId.stringRepresentation, representationId: representationId, keepDuration: .general)
|
||||
}
|
||||
|
||||
private func getLoadSignal(width: Int, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resource: MediaResourceReference, resourceSizeLimit: Int64) -> Signal<UIImage?, NoError>? {
|
||||
private func getLoadSignal(width: Int, aspectRatio: CGFloat, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resource: MediaResourceReference, resourceSizeLimit: Int64) -> Signal<UIImage?, NoError>? {
|
||||
return Signal { subscriber in
|
||||
let fetch = fetchedMediaResource(
|
||||
mediaBox: self.account.postbox.mediaBox,
|
||||
@ -282,7 +290,7 @@ public final class DirectMediaImageCache {
|
||||
|
||||
let data = dataSignal.start(next: { data in
|
||||
if let data = data, let image = UIImage(data: data) {
|
||||
let scaledSize = CGSize(width: CGFloat(width), height: CGFloat(width))
|
||||
let scaledSize = CGSize(width: CGFloat(width), height: floor(CGFloat(width) / aspectRatio))
|
||||
guard let scaledContext = DrawingContext(size: scaledSize, scale: 1.0, opaque: true) else {
|
||||
subscriber.putNext(nil)
|
||||
subscriber.putCompletion()
|
||||
@ -294,7 +302,7 @@ public final class DirectMediaImageCache {
|
||||
context.draw(image.cgImage!, in: imageRect)
|
||||
}
|
||||
|
||||
if let scaledImage = storeImage(context: scaledContext, mediaBox: self.account.postbox.mediaBox, resourceId: resource.resource.id, imageType: .square(width: width)) {
|
||||
if let scaledImage = storeImage(context: scaledContext, mediaBox: self.account.postbox.mediaBox, resourceId: resource.resource.id, imageType: .square(width: width, aspectRatio: aspectRatio)) {
|
||||
subscriber.putNext(scaledImage)
|
||||
subscriber.putCompletion()
|
||||
}
|
||||
@ -342,8 +350,16 @@ public final class DirectMediaImageCache {
|
||||
private func getResource(message: Message, file: TelegramMediaFile, width: Int) -> (resource: MediaResourceReference, size: Int64)? {
|
||||
return self.getProgressiveSize(mediaReference: MediaReference.message(message: MessageReference(message), media: file).abstract, width: width, representations: file.previewRepresentations)
|
||||
}
|
||||
|
||||
private func getResource(peer: PeerReference, story: StoryListContext.Item, image: TelegramMediaImage, width: Int) -> (resource: MediaResourceReference, size: Int64)? {
|
||||
return self.getProgressiveSize(mediaReference: MediaReference.story(peer: peer, id: story.id, media: image).abstract, width: width, representations: image.representations)
|
||||
}
|
||||
|
||||
private func getImageSynchronous(message: Message, userLocation: MediaResourceUserLocation, media: Media, width: Int, possibleWidths: [Int], includeBlurred: Bool) -> GetMediaResult? {
|
||||
private func getResource(peer: PeerReference, story: StoryListContext.Item, file: TelegramMediaFile, width: Int) -> (resource: MediaResourceReference, size: Int64)? {
|
||||
return self.getProgressiveSize(mediaReference: MediaReference.story(peer: peer, id: story.id, media: file).abstract, width: width, representations: file.previewRepresentations)
|
||||
}
|
||||
|
||||
private func getImageSynchronous(message: Message, userLocation: MediaResourceUserLocation, media: Media, width: Int, aspectRatio: CGFloat, possibleWidths: [Int], includeBlurred: Bool) -> GetMediaResult? {
|
||||
var immediateThumbnailData: Data?
|
||||
var resource: (resource: MediaResourceReference, size: Int64)?
|
||||
if let image = media as? TelegramMediaImage {
|
||||
@ -367,11 +383,11 @@ public final class DirectMediaImageCache {
|
||||
var resultImage: UIImage?
|
||||
for otherWidth in possibleWidths.reversed() {
|
||||
if otherWidth == width {
|
||||
if let data = try? Data(contentsOf: URL(fileURLWithPath: self.getCachePath(resourceId: resource.resource.resource.id, imageType: .square(width: otherWidth)))), let image = loadImage(data: data) {
|
||||
if let data = try? Data(contentsOf: URL(fileURLWithPath: self.getCachePath(resourceId: resource.resource.resource.id, imageType: .square(width: otherWidth, aspectRatio: aspectRatio)))), let image = loadImage(data: data) {
|
||||
return GetMediaResult(image: image, blurredImage: blurredImage, loadSignal: nil)
|
||||
}
|
||||
} else {
|
||||
if let data = try? Data(contentsOf: URL(fileURLWithPath: self.getCachePath(resourceId: resource.resource.resource.id, imageType: .square(width: otherWidth)))), let image = loadImage(data: data) {
|
||||
if let data = try? Data(contentsOf: URL(fileURLWithPath: self.getCachePath(resourceId: resource.resource.resource.id, imageType: .square(width: otherWidth, aspectRatio: aspectRatio)))), let image = loadImage(data: data) {
|
||||
resultImage = image
|
||||
}
|
||||
}
|
||||
@ -387,12 +403,12 @@ public final class DirectMediaImageCache {
|
||||
}
|
||||
}
|
||||
|
||||
return GetMediaResult(image: resultImage, blurredImage: blurredImage, loadSignal: self.getLoadSignal(width: width, userLocation: userLocation, userContentType: .image, resource: resource.resource, resourceSizeLimit: resource.size))
|
||||
return GetMediaResult(image: resultImage, blurredImage: blurredImage, loadSignal: self.getLoadSignal(width: width, aspectRatio: aspectRatio, userLocation: userLocation, userContentType: .image, resource: resource.resource, resourceSizeLimit: resource.size))
|
||||
}
|
||||
|
||||
public func getImage(message: Message, media: Media, width: Int, possibleWidths: [Int], includeBlurred: Bool = false, synchronous: Bool) -> GetMediaResult? {
|
||||
if synchronous {
|
||||
return self.getImageSynchronous(message: message, userLocation: .peer(message.id.peerId), media: media, width: width, possibleWidths: possibleWidths, includeBlurred: includeBlurred)
|
||||
return self.getImageSynchronous(message: message, userLocation: .peer(message.id.peerId), media: media, width: width, aspectRatio: 1.0, possibleWidths: possibleWidths, includeBlurred: includeBlurred)
|
||||
} else {
|
||||
var immediateThumbnailData: Data?
|
||||
if let image = media as? TelegramMediaImage {
|
||||
@ -405,7 +421,93 @@ public final class DirectMediaImageCache {
|
||||
blurredImage = blurredImageValue
|
||||
}
|
||||
return GetMediaResult(image: nil, blurredImage: blurredImage, loadSignal: Signal { subscriber in
|
||||
let result = self.getImageSynchronous(message: message, userLocation: .peer(message.id.peerId), media: media, width: width, possibleWidths: possibleWidths, includeBlurred: includeBlurred)
|
||||
let result = self.getImageSynchronous(message: message, userLocation: .peer(message.id.peerId), media: media, width: width, aspectRatio: 1.0, possibleWidths: possibleWidths, includeBlurred: includeBlurred)
|
||||
guard let result = result else {
|
||||
subscriber.putNext(nil)
|
||||
subscriber.putCompletion()
|
||||
|
||||
return EmptyDisposable
|
||||
}
|
||||
|
||||
if let image = result.image {
|
||||
subscriber.putNext(image)
|
||||
}
|
||||
|
||||
if let signal = result.loadSignal {
|
||||
return signal.start(next: subscriber.putNext, error: subscriber.putError, completed: subscriber.putCompletion)
|
||||
} else {
|
||||
subscriber.putCompletion()
|
||||
|
||||
return EmptyDisposable
|
||||
}
|
||||
}
|
||||
|> runOn(.concurrentDefaultQueue()))
|
||||
}
|
||||
}
|
||||
|
||||
private func getImageSynchronous(peer: PeerReference, story: StoryListContext.Item, userLocation: MediaResourceUserLocation, media: Media, width: Int, aspectRatio: CGFloat, possibleWidths: [Int], includeBlurred: Bool) -> GetMediaResult? {
|
||||
var immediateThumbnailData: Data?
|
||||
var resource: (resource: MediaResourceReference, size: Int64)?
|
||||
if let image = media as? TelegramMediaImage {
|
||||
immediateThumbnailData = image.immediateThumbnailData
|
||||
resource = self.getResource(peer: peer, story: story, image: image, width: width)
|
||||
} else if let file = media as? TelegramMediaFile {
|
||||
immediateThumbnailData = file.immediateThumbnailData
|
||||
resource = self.getResource(peer: peer, story: story, file: file, width: width)
|
||||
}
|
||||
|
||||
guard let resource = resource else {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
var blurredImage: UIImage?
|
||||
if includeBlurred, let data = immediateThumbnailData.flatMap(decodeTinyThumbnail), let image = loadImage(data: data), let blurredImageValue = generateBlurredThumbnail(image: image, adjustSaturation: true) {
|
||||
blurredImage = blurredImageValue
|
||||
}
|
||||
|
||||
var resultImage: UIImage?
|
||||
for otherWidth in possibleWidths.reversed() {
|
||||
if otherWidth == width {
|
||||
if let data = try? Data(contentsOf: URL(fileURLWithPath: self.getCachePath(resourceId: resource.resource.resource.id, imageType: .square(width: otherWidth, aspectRatio: aspectRatio)))), let image = loadImage(data: data) {
|
||||
return GetMediaResult(image: image, blurredImage: blurredImage, loadSignal: nil)
|
||||
}
|
||||
} else {
|
||||
if let data = try? Data(contentsOf: URL(fileURLWithPath: self.getCachePath(resourceId: resource.resource.resource.id, imageType: .square(width: otherWidth, aspectRatio: aspectRatio)))), let image = loadImage(data: data) {
|
||||
resultImage = image
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if resultImage == nil {
|
||||
if let data = try? Data(contentsOf: URL(fileURLWithPath: self.getCachePath(resourceId: resource.resource.resource.id, imageType: .blurredThumbnail))), let image = loadImage(data: data) {
|
||||
resultImage = image
|
||||
} else if let data = immediateThumbnailData.flatMap(decodeTinyThumbnail), let image = loadImage(data: data) {
|
||||
if let blurredImageValue = generateBlurredThumbnail(image: image) {
|
||||
resultImage = blurredImageValue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return GetMediaResult(image: resultImage, blurredImage: blurredImage, loadSignal: self.getLoadSignal(width: width, aspectRatio: aspectRatio, userLocation: userLocation, userContentType: .image, resource: resource.resource, resourceSizeLimit: resource.size))
|
||||
}
|
||||
|
||||
public func getImage(peer: PeerReference, story: StoryListContext.Item, media: Media, width: Int, aspectRatio: CGFloat, possibleWidths: [Int], includeBlurred: Bool = false, synchronous: Bool) -> GetMediaResult? {
|
||||
if synchronous {
|
||||
return self.getImageSynchronous(peer: peer, story: story, userLocation: .peer(peer.id), media: media, width: width, aspectRatio: aspectRatio, possibleWidths: possibleWidths, includeBlurred: includeBlurred)
|
||||
} else {
|
||||
var immediateThumbnailData: Data?
|
||||
if let image = media as? TelegramMediaImage {
|
||||
immediateThumbnailData = image.immediateThumbnailData
|
||||
} else if let file = media as? TelegramMediaFile {
|
||||
immediateThumbnailData = file.immediateThumbnailData
|
||||
}
|
||||
var blurredImage: UIImage?
|
||||
if includeBlurred, let data = immediateThumbnailData.flatMap(decodeTinyThumbnail), let image = loadImage(data: data), let blurredImageValue = generateBlurredThumbnail(image: image, adjustSaturation: true) {
|
||||
blurredImage = blurredImageValue
|
||||
}
|
||||
return GetMediaResult(image: nil, blurredImage: blurredImage, loadSignal: Signal { subscriber in
|
||||
let result = self.getImageSynchronous(peer: peer, story: story, userLocation: .peer(peer.id), media: media, width: width, aspectRatio: aspectRatio, possibleWidths: possibleWidths, includeBlurred: includeBlurred)
|
||||
guard let result = result else {
|
||||
subscriber.putNext(nil)
|
||||
subscriber.putCompletion()
|
||||
|
@ -249,6 +249,7 @@ public extension CALayer {
|
||||
animation.speed = speed
|
||||
animation.duration = duration
|
||||
animation.isAdditive = additive
|
||||
animation.calculationMode = .linear
|
||||
if let mediaTimingFunction = mediaTimingFunction {
|
||||
animation.timingFunction = mediaTimingFunction
|
||||
} else {
|
||||
|
@ -3,6 +3,15 @@ import AVFoundation
|
||||
import UIKit
|
||||
import MozjpegBinding
|
||||
|
||||
public func scaleImageToPixelSize(image: UIImage, size: CGSize) -> UIImage? {
|
||||
UIGraphicsBeginImageContextWithOptions(size, true, 1.0)
|
||||
image.draw(in: CGRect(origin: CGPoint(), size: size), blendMode: .copy, alpha: 1.0)
|
||||
let result = UIGraphicsGetImageFromCurrentImageContext()
|
||||
UIGraphicsEndImageContext()
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
public func extractImageExtraScans(_ data: Data) -> [Int] {
|
||||
return extractJPEGDataScans(data).map { item in
|
||||
return item.intValue
|
||||
|
@ -405,7 +405,11 @@ public final class SparseItemGrid: ASDisplayNode {
|
||||
let itemsPerRow = CGFloat(zoomLevel.rawValue)
|
||||
self.itemsPerRow = Int(itemsPerRow)
|
||||
let itemSize = floorToScreenPixels((width - (self.itemSpacing * CGFloat(self.itemsPerRow - 1))) / itemsPerRow)
|
||||
self.itemSize = CGSize(width: itemSize, height: itemSize)
|
||||
if let fixedItemAspect = containerLayout.fixedItemAspect {
|
||||
self.itemSize = CGSize(width: itemSize, height: floor(itemSize / fixedItemAspect))
|
||||
} else {
|
||||
self.itemSize = CGSize(width: itemSize, height: itemSize)
|
||||
}
|
||||
|
||||
self.lastItemSize = width - (self.itemSize.width + self.itemSpacing) * CGFloat(self.itemsPerRow - 1)
|
||||
}
|
||||
@ -1308,6 +1312,7 @@ public final class SparseItemGrid: ASDisplayNode {
|
||||
var scrollIndicatorInsets: UIEdgeInsets
|
||||
var lockScrollingAtTop: Bool
|
||||
var fixedItemHeight: CGFloat?
|
||||
var fixedItemAspect: CGFloat?
|
||||
}
|
||||
|
||||
private var tapRecognizer: UITapGestureRecognizer?
|
||||
@ -1565,9 +1570,9 @@ public final class SparseItemGrid: ASDisplayNode {
|
||||
}
|
||||
}
|
||||
|
||||
public func update(size: CGSize, insets: UIEdgeInsets, useSideInsets: Bool, scrollIndicatorInsets: UIEdgeInsets, lockScrollingAtTop: Bool, fixedItemHeight: CGFloat?, items: Items, theme: PresentationTheme, synchronous: SparseItemGrid.Synchronous) {
|
||||
public func update(size: CGSize, insets: UIEdgeInsets, useSideInsets: Bool, scrollIndicatorInsets: UIEdgeInsets, lockScrollingAtTop: Bool, fixedItemHeight: CGFloat?, fixedItemAspect: CGFloat?, items: Items, theme: PresentationTheme, synchronous: SparseItemGrid.Synchronous) {
|
||||
self.theme = theme
|
||||
let containerLayout = ContainerLayout(size: size, insets: insets, useSideInsets: useSideInsets, scrollIndicatorInsets: scrollIndicatorInsets, lockScrollingAtTop: lockScrollingAtTop, fixedItemHeight: fixedItemHeight)
|
||||
let containerLayout = ContainerLayout(size: size, insets: insets, useSideInsets: useSideInsets, scrollIndicatorInsets: scrollIndicatorInsets, lockScrollingAtTop: lockScrollingAtTop, fixedItemHeight: fixedItemHeight, fixedItemAspect: fixedItemAspect)
|
||||
self.containerLayout = containerLayout
|
||||
self.items = items
|
||||
self.scrollingArea.isHidden = lockScrollingAtTop
|
||||
|
@ -107,7 +107,7 @@ class Download: NSObject, MTRequestMessageServiceDelegate {
|
||||
let saveFilePart: (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Bool>)
|
||||
if asBigPart {
|
||||
let totalParts: Int32
|
||||
if let bigTotalParts = bigTotalParts {
|
||||
if let bigTotalParts = bigTotalParts, bigTotalParts > 0 && bigTotalParts < Int32.max {
|
||||
totalParts = Int32(bigTotalParts)
|
||||
} else {
|
||||
totalParts = -1
|
||||
|
@ -172,17 +172,19 @@ public final class StoryListContext {
|
||||
self.stateValue = State(itemSets: [], uploadProgress: nil, loadMoreToken: LoadMoreToken(value: nil))
|
||||
self.state.set(.single(self.stateValue))
|
||||
|
||||
let _ = (account.postbox.transaction { transaction -> Peer? in
|
||||
return transaction.getPeer(account.peerId)
|
||||
}
|
||||
|> deliverOnMainQueue).start(next: { [weak self] peer in
|
||||
guard let self, let peer else {
|
||||
return
|
||||
if case .all = scope {
|
||||
let _ = (account.postbox.transaction { transaction -> Peer? in
|
||||
return transaction.getPeer(account.peerId)
|
||||
}
|
||||
self.stateValue = State(itemSets: [
|
||||
PeerItemSet(peerId: peer.id, peer: EnginePeer(peer), maxReadId: 0, items: [], totalCount: 0)
|
||||
], uploadProgress: nil, loadMoreToken: LoadMoreToken(value: nil))
|
||||
})
|
||||
|> deliverOnMainQueue).start(next: { [weak self] peer in
|
||||
guard let self, let peer else {
|
||||
return
|
||||
}
|
||||
self.stateValue = State(itemSets: [
|
||||
PeerItemSet(peerId: peer.id, peer: EnginePeer(peer), maxReadId: 0, items: [], totalCount: 0)
|
||||
], uploadProgress: nil, loadMoreToken: LoadMoreToken(value: nil))
|
||||
})
|
||||
}
|
||||
|
||||
self.updatesDisposable = (account.stateManager.storyUpdates
|
||||
|> deliverOn(queue)).start(next: { [weak self] updates in
|
||||
@ -253,7 +255,12 @@ public final class StoryListContext {
|
||||
|
||||
items.sort(by: { lhsItem, rhsItem in
|
||||
if lhsItem.timestamp != rhsItem.timestamp {
|
||||
return lhsItem.timestamp < rhsItem.timestamp
|
||||
switch scope {
|
||||
case .all:
|
||||
return lhsItem.timestamp > rhsItem.timestamp
|
||||
case .peer:
|
||||
return lhsItem.timestamp < rhsItem.timestamp
|
||||
}
|
||||
}
|
||||
return lhsItem.id < rhsItem.id
|
||||
})
|
||||
@ -267,13 +274,23 @@ public final class StoryListContext {
|
||||
}
|
||||
}
|
||||
if !found, let peer = peers[peerId] {
|
||||
itemSets.insert(PeerItemSet(
|
||||
peerId: peerId,
|
||||
peer: EnginePeer(peer),
|
||||
maxReadId: 0,
|
||||
items: [item],
|
||||
totalCount: 1
|
||||
), at: 0)
|
||||
let matchesScope: Bool
|
||||
if case .all = scope {
|
||||
matchesScope = true
|
||||
} else if case .peer(peerId) = scope {
|
||||
matchesScope = true
|
||||
} else {
|
||||
matchesScope = false
|
||||
}
|
||||
if matchesScope {
|
||||
itemSets.insert(PeerItemSet(
|
||||
peerId: peerId,
|
||||
peer: EnginePeer(peer),
|
||||
maxReadId: 0,
|
||||
items: [item],
|
||||
totalCount: 1
|
||||
), at: 0)
|
||||
}
|
||||
}
|
||||
case let .read(peerId, maxId):
|
||||
for i in 0 ..< itemSets.count {
|
||||
@ -301,7 +318,12 @@ public final class StoryListContext {
|
||||
}
|
||||
|
||||
if lhsItem.timestamp != rhsItem.timestamp {
|
||||
return lhsItem.timestamp > rhsItem.timestamp
|
||||
switch scope {
|
||||
case .all:
|
||||
return lhsItem.timestamp > rhsItem.timestamp
|
||||
case .peer:
|
||||
return lhsItem.timestamp < rhsItem.timestamp
|
||||
}
|
||||
}
|
||||
return lhsItem.id > rhsItem.id
|
||||
})
|
||||
@ -342,7 +364,7 @@ public final class StoryListContext {
|
||||
guard let inputPeer = inputPeer else {
|
||||
return .single(nil)
|
||||
}
|
||||
return account.network.request(Api.functions.stories.getUserStories(flags: 0, userId: inputPeer, offsetId: 0, limit: 100))
|
||||
return account.network.request(Api.functions.stories.getUserStories(flags: 0, userId: inputPeer, offsetId: 0, limit: 30))
|
||||
|> map(Optional.init)
|
||||
|> `catch` { _ -> Signal<Api.stories.Stories?, NoError> in
|
||||
return .single(nil)
|
||||
@ -433,139 +455,220 @@ public final class StoryListContext {
|
||||
|
||||
self.isLoadingMore = true
|
||||
let account = self.account
|
||||
let scope = self.scope
|
||||
|
||||
self.pollDisposable?.dispose()
|
||||
self.pollDisposable = nil
|
||||
|
||||
self.loadMoreDisposable.set((account.network.request(Api.functions.stories.getAllStories(offset: loadMoreToken))
|
||||
|> map(Optional.init)
|
||||
|> `catch` { _ -> Signal<Api.stories.AllStories?, NoError> in
|
||||
return .single(nil)
|
||||
}
|
||||
|> mapToSignal { result -> Signal<([PeerItemSet], LoadMoreToken?), NoError> in
|
||||
guard let result else {
|
||||
return .single(([], nil))
|
||||
switch scope {
|
||||
case .all:
|
||||
self.loadMoreDisposable.set((account.network.request(Api.functions.stories.getAllStories(offset: loadMoreToken))
|
||||
|> map(Optional.init)
|
||||
|> `catch` { _ -> Signal<Api.stories.AllStories?, NoError> in
|
||||
return .single(nil)
|
||||
}
|
||||
return account.postbox.transaction { transaction -> ([PeerItemSet], LoadMoreToken?) in
|
||||
switch result {
|
||||
case let .allStories(_, userStorySets, nextOffset, users):
|
||||
var parsedItemSets: [PeerItemSet] = []
|
||||
|
||||
var peers: [Peer] = []
|
||||
var peerPresences: [PeerId: Api.User] = [:]
|
||||
|
||||
for user in users {
|
||||
let telegramUser = TelegramUser(user: user)
|
||||
peers.append(telegramUser)
|
||||
peerPresences[telegramUser.id] = user
|
||||
}
|
||||
|
||||
updatePeers(transaction: transaction, peers: peers, update: { _, updated -> Peer in
|
||||
return updated
|
||||
})
|
||||
updatePeerPresences(transaction: transaction, accountPeerId: account.peerId, peerPresences: peerPresences)
|
||||
|
||||
for userStories in userStorySets {
|
||||
let apiUserId: Int64
|
||||
let apiStories: [Api.StoryItem]
|
||||
var apiTotalCount: Int32?
|
||||
var apiMaxReadId: Int32 = 0
|
||||
switch userStories {
|
||||
case let .userStories(_, userId, maxReadId, stories, missingCount):
|
||||
apiUserId = userId
|
||||
apiStories = stories
|
||||
apiTotalCount = (missingCount ?? 0) + Int32(stories.count)
|
||||
apiMaxReadId = maxReadId ?? 0
|
||||
|> mapToSignal { result -> Signal<([PeerItemSet], LoadMoreToken?), NoError> in
|
||||
guard let result else {
|
||||
return .single(([], nil))
|
||||
}
|
||||
return account.postbox.transaction { transaction -> ([PeerItemSet], LoadMoreToken?) in
|
||||
switch result {
|
||||
case let .allStories(_, userStorySets, nextOffset, users):
|
||||
var parsedItemSets: [PeerItemSet] = []
|
||||
|
||||
var peers: [Peer] = []
|
||||
var peerPresences: [PeerId: Api.User] = [:]
|
||||
|
||||
for user in users {
|
||||
let telegramUser = TelegramUser(user: user)
|
||||
peers.append(telegramUser)
|
||||
peerPresences[telegramUser.id] = user
|
||||
}
|
||||
|
||||
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: PeerId.Id._internalFromInt64Value(apiUserId))
|
||||
for apiStory in apiStories {
|
||||
if let item = _internal_parseApiStoryItem(transaction: transaction, peerId: peerId, apiStory: apiStory) {
|
||||
if !parsedItemSets.isEmpty && parsedItemSets[parsedItemSets.count - 1].peerId == peerId {
|
||||
parsedItemSets[parsedItemSets.count - 1].items.append(item)
|
||||
} else {
|
||||
parsedItemSets.append(StoryListContext.PeerItemSet(
|
||||
peerId: peerId,
|
||||
peer: transaction.getPeer(peerId).flatMap(EnginePeer.init),
|
||||
maxReadId: apiMaxReadId,
|
||||
items: [item],
|
||||
totalCount: apiTotalCount.flatMap(Int.init)
|
||||
))
|
||||
updatePeers(transaction: transaction, peers: peers, update: { _, updated -> Peer in
|
||||
return updated
|
||||
})
|
||||
updatePeerPresences(transaction: transaction, accountPeerId: account.peerId, peerPresences: peerPresences)
|
||||
|
||||
for userStories in userStorySets {
|
||||
let apiUserId: Int64
|
||||
let apiStories: [Api.StoryItem]
|
||||
var apiTotalCount: Int32?
|
||||
var apiMaxReadId: Int32 = 0
|
||||
switch userStories {
|
||||
case let .userStories(_, userId, maxReadId, stories, missingCount):
|
||||
apiUserId = userId
|
||||
apiStories = stories
|
||||
apiTotalCount = (missingCount ?? 0) + Int32(stories.count)
|
||||
apiMaxReadId = maxReadId ?? 0
|
||||
}
|
||||
|
||||
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: PeerId.Id._internalFromInt64Value(apiUserId))
|
||||
for apiStory in apiStories {
|
||||
if let item = _internal_parseApiStoryItem(transaction: transaction, peerId: peerId, apiStory: apiStory) {
|
||||
if !parsedItemSets.isEmpty && parsedItemSets[parsedItemSets.count - 1].peerId == peerId {
|
||||
parsedItemSets[parsedItemSets.count - 1].items.append(item)
|
||||
} else {
|
||||
parsedItemSets.append(StoryListContext.PeerItemSet(
|
||||
peerId: peerId,
|
||||
peer: transaction.getPeer(peerId).flatMap(EnginePeer.init),
|
||||
maxReadId: apiMaxReadId,
|
||||
items: [item],
|
||||
totalCount: apiTotalCount.flatMap(Int.init)
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !parsedItemSets.contains(where: { $0.peerId == account.peerId }) {
|
||||
if let peer = transaction.getPeer(account.peerId) {
|
||||
parsedItemSets.insert(PeerItemSet(peerId: peer.id, peer: EnginePeer(peer), maxReadId: 0, items: [], totalCount: 0), at: 0)
|
||||
|
||||
if !parsedItemSets.contains(where: { $0.peerId == account.peerId }) {
|
||||
if let peer = transaction.getPeer(account.peerId) {
|
||||
parsedItemSets.insert(PeerItemSet(peerId: peer.id, peer: EnginePeer(peer), maxReadId: 0, items: [], totalCount: 0), at: 0)
|
||||
}
|
||||
}
|
||||
|
||||
return (parsedItemSets, nextOffset.flatMap { LoadMoreToken(value: $0) })
|
||||
}
|
||||
|
||||
return (parsedItemSets, nextOffset.flatMap { LoadMoreToken(value: $0) })
|
||||
}
|
||||
}
|
||||
}
|
||||
|> deliverOn(self.queue)).start(next: { [weak self] result in
|
||||
guard let `self` = self else {
|
||||
return
|
||||
}
|
||||
self.isLoadingMore = false
|
||||
|
||||
var itemSets = self.stateValue.itemSets
|
||||
for itemSet in result.0 {
|
||||
if let index = itemSets.firstIndex(where: { $0.peerId == itemSet.peerId }) {
|
||||
let currentItemSet = itemSets[index]
|
||||
|
||||
var items = currentItemSet.items
|
||||
for item in itemSet.items {
|
||||
if !items.contains(where: { $0.id == item.id }) {
|
||||
items.append(item)
|
||||
}
|
||||
}
|
||||
|
||||
items.sort(by: { lhsItem, rhsItem in
|
||||
if lhsItem.timestamp != rhsItem.timestamp {
|
||||
return lhsItem.timestamp < rhsItem.timestamp
|
||||
}
|
||||
return lhsItem.id < rhsItem.id
|
||||
})
|
||||
|
||||
itemSets[index] = PeerItemSet(
|
||||
peerId: itemSet.peerId,
|
||||
peer: itemSet.peer,
|
||||
maxReadId: itemSet.maxReadId,
|
||||
items: items,
|
||||
totalCount: items.count
|
||||
)
|
||||
} else {
|
||||
itemSet.items.sort(by: { lhsItem, rhsItem in
|
||||
if lhsItem.timestamp != rhsItem.timestamp {
|
||||
return lhsItem.timestamp < rhsItem.timestamp
|
||||
}
|
||||
return lhsItem.id < rhsItem.id
|
||||
})
|
||||
itemSets.append(itemSet)
|
||||
|> deliverOn(self.queue)).start(next: { [weak self] result in
|
||||
guard let `self` = self else {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
itemSets.sort(by: { lhs, rhs in
|
||||
guard let lhsItem = lhs.items.first, let rhsItem = rhs.items.first else {
|
||||
if lhs.items.first != nil {
|
||||
return false
|
||||
self.isLoadingMore = false
|
||||
|
||||
var itemSets = self.stateValue.itemSets
|
||||
for itemSet in result.0 {
|
||||
if let index = itemSets.firstIndex(where: { $0.peerId == itemSet.peerId }) {
|
||||
let currentItemSet = itemSets[index]
|
||||
|
||||
var items = currentItemSet.items
|
||||
for item in itemSet.items {
|
||||
if !items.contains(where: { $0.id == item.id }) {
|
||||
items.append(item)
|
||||
}
|
||||
}
|
||||
|
||||
items.sort(by: { lhsItem, rhsItem in
|
||||
if lhsItem.timestamp != rhsItem.timestamp {
|
||||
switch scope {
|
||||
case .all:
|
||||
return lhsItem.timestamp > rhsItem.timestamp
|
||||
case .peer:
|
||||
return lhsItem.timestamp < rhsItem.timestamp
|
||||
}
|
||||
}
|
||||
return lhsItem.id < rhsItem.id
|
||||
})
|
||||
|
||||
itemSets[index] = PeerItemSet(
|
||||
peerId: itemSet.peerId,
|
||||
peer: itemSet.peer,
|
||||
maxReadId: itemSet.maxReadId,
|
||||
items: items,
|
||||
totalCount: items.count
|
||||
)
|
||||
} else {
|
||||
return true
|
||||
itemSet.items.sort(by: { lhsItem, rhsItem in
|
||||
if lhsItem.timestamp != rhsItem.timestamp {
|
||||
switch scope {
|
||||
case .all:
|
||||
return lhsItem.timestamp > rhsItem.timestamp
|
||||
case .peer:
|
||||
return lhsItem.timestamp < rhsItem.timestamp
|
||||
}
|
||||
}
|
||||
return lhsItem.id < rhsItem.id
|
||||
})
|
||||
itemSets.append(itemSet)
|
||||
}
|
||||
}
|
||||
|
||||
if lhsItem.timestamp != rhsItem.timestamp {
|
||||
return lhsItem.timestamp > rhsItem.timestamp
|
||||
}
|
||||
return lhsItem.id > rhsItem.id
|
||||
})
|
||||
itemSets.sort(by: { lhs, rhs in
|
||||
guard let lhsItem = lhs.items.first, let rhsItem = rhs.items.first else {
|
||||
if lhs.items.first != nil {
|
||||
return false
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
if lhsItem.timestamp != rhsItem.timestamp {
|
||||
switch scope {
|
||||
case .all:
|
||||
return lhsItem.timestamp > rhsItem.timestamp
|
||||
case .peer:
|
||||
return lhsItem.timestamp < rhsItem.timestamp
|
||||
}
|
||||
}
|
||||
return lhsItem.id > rhsItem.id
|
||||
})
|
||||
|
||||
self.stateValue = State(itemSets: itemSets, uploadProgress: self.stateValue.uploadProgress, loadMoreToken: result.1)
|
||||
}))
|
||||
case let .peer(peerId):
|
||||
let account = self.account
|
||||
let queue = self.queue
|
||||
|
||||
self.stateValue = State(itemSets: itemSets, uploadProgress: self.stateValue.uploadProgress, loadMoreToken: result.1)
|
||||
}))
|
||||
self.loadMoreDisposable.set((self.account.postbox.transaction { transaction -> Api.InputUser? in
|
||||
return transaction.getPeer(peerId).flatMap(apiInputUser)
|
||||
}
|
||||
|> mapToSignal { inputPeer -> Signal<PeerItemSet?, NoError> in
|
||||
guard let inputPeer = inputPeer else {
|
||||
return .single(nil)
|
||||
}
|
||||
return account.network.request(Api.functions.stories.getUserStories(flags: 0, userId: inputPeer, offsetId: 0, limit: 30))
|
||||
|> map(Optional.init)
|
||||
|> `catch` { _ -> Signal<Api.stories.Stories?, NoError> in
|
||||
return .single(nil)
|
||||
}
|
||||
|> mapToSignal { stories -> Signal<PeerItemSet?, NoError> in
|
||||
guard let stories = stories else {
|
||||
return .single(nil)
|
||||
}
|
||||
return account.postbox.transaction { transaction -> PeerItemSet? in
|
||||
switch stories {
|
||||
case let .stories(_, apiStories, users):
|
||||
var parsedItemSets: [PeerItemSet] = []
|
||||
|
||||
var peers: [Peer] = []
|
||||
var peerPresences: [PeerId: Api.User] = [:]
|
||||
|
||||
for user in users {
|
||||
let telegramUser = TelegramUser(user: user)
|
||||
peers.append(telegramUser)
|
||||
peerPresences[telegramUser.id] = user
|
||||
}
|
||||
|
||||
updatePeers(transaction: transaction, peers: peers, update: { _, updated -> Peer in
|
||||
return updated
|
||||
})
|
||||
updatePeerPresences(transaction: transaction, accountPeerId: account.peerId, peerPresences: peerPresences)
|
||||
|
||||
for apiStory in apiStories {
|
||||
if let item = _internal_parseApiStoryItem(transaction: transaction, peerId: peerId, apiStory: apiStory) {
|
||||
if !parsedItemSets.isEmpty && parsedItemSets[parsedItemSets.count - 1].peerId == peerId {
|
||||
parsedItemSets[parsedItemSets.count - 1].items.append(item)
|
||||
parsedItemSets[parsedItemSets.count - 1].totalCount = parsedItemSets[parsedItemSets.count - 1].items.count
|
||||
} else {
|
||||
parsedItemSets.append(StoryListContext.PeerItemSet(peerId: peerId, peer: transaction.getPeer(peerId).flatMap(EnginePeer.init), maxReadId: 0, items: [item], totalCount: 1))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return parsedItemSets.first
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|> deliverOn(queue)).start(next: { [weak self] itemSet in
|
||||
guard let `self` = self, let itemSet = itemSet else {
|
||||
return
|
||||
}
|
||||
self.isLoadingMore = false
|
||||
self.stateValue.itemSets = [itemSet]
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
func delete(id: Int32) {
|
||||
|
@ -370,6 +370,7 @@ swift_library(
|
||||
"//submodules/TelegramUI/Components/LegacyInstantVideoController",
|
||||
"//submodules/TelegramUI/Components/FullScreenEffectView",
|
||||
"//submodules/TelegramUI/Components/ShareWithPeersScreen",
|
||||
"//submodules/TelegramUI/Components/PeerInfo/PeerInfoVisualMediaPaneNode",
|
||||
] + select({
|
||||
"@build_bazel_rules_apple//apple:ios_armv7": [],
|
||||
"@build_bazel_rules_apple//apple:ios_arm64": appcenter_targets,
|
||||
|
@ -7,30 +7,41 @@ import UniversalMediaPlayer
|
||||
import SwiftSignalKit
|
||||
|
||||
public final class AudioWaveformComponent: Component {
|
||||
public enum Style {
|
||||
case bottom
|
||||
case middle
|
||||
}
|
||||
|
||||
public let backgroundColor: UIColor
|
||||
public let foregroundColor: UIColor
|
||||
public let shimmerColor: UIColor?
|
||||
public let style: Style
|
||||
public let samples: Data
|
||||
public let peak: Int32
|
||||
public let status: Signal<MediaPlayerStatus, NoError>
|
||||
public let seek: (Double) -> Void
|
||||
public let seek: ((Double) -> Void)?
|
||||
public let updateIsSeeking: ((Bool) -> Void)?
|
||||
|
||||
public init(
|
||||
backgroundColor: UIColor,
|
||||
foregroundColor: UIColor,
|
||||
shimmerColor: UIColor?,
|
||||
style: Style,
|
||||
samples: Data,
|
||||
peak: Int32,
|
||||
status: Signal<MediaPlayerStatus, NoError>,
|
||||
seek: @escaping (Double) -> Void
|
||||
seek: ((Double) -> Void)?,
|
||||
updateIsSeeking: ((Bool) -> Void)?
|
||||
) {
|
||||
self.backgroundColor = backgroundColor
|
||||
self.foregroundColor = foregroundColor
|
||||
self.shimmerColor = shimmerColor
|
||||
self.style = style
|
||||
self.samples = samples
|
||||
self.peak = peak
|
||||
self.status = status
|
||||
self.seek = seek
|
||||
self.updateIsSeeking = updateIsSeeking
|
||||
}
|
||||
|
||||
public static func ==(lhs: AudioWaveformComponent, rhs: AudioWaveformComponent) -> Bool {
|
||||
@ -43,6 +54,9 @@ public final class AudioWaveformComponent: Component {
|
||||
if lhs.shimmerColor != rhs.shimmerColor {
|
||||
return false
|
||||
}
|
||||
if lhs.style != rhs.style {
|
||||
return false
|
||||
}
|
||||
if lhs.samples != rhs.samples {
|
||||
return false
|
||||
}
|
||||
@ -199,7 +213,6 @@ public final class AudioWaveformComponent: Component {
|
||||
let panRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:)))
|
||||
panRecognizer.delegate = self
|
||||
self.addGestureRecognizer(panRecognizer)
|
||||
panRecognizer.isEnabled = false
|
||||
self.panRecognizer = panRecognizer
|
||||
}
|
||||
|
||||
@ -261,6 +274,7 @@ public final class AudioWaveformComponent: Component {
|
||||
if let statusValue = self.playbackStatus, statusValue.duration > 0.0 {
|
||||
self.scrubbingBeginTimestamp = statusValue.timestamp
|
||||
self.scrubbingTimestampValue = statusValue.timestamp
|
||||
self.component?.updateIsSeeking?(true)
|
||||
self.setNeedsDisplay()
|
||||
}
|
||||
}
|
||||
@ -280,7 +294,8 @@ public final class AudioWaveformComponent: Component {
|
||||
})
|
||||
|
||||
if let scrubbingTimestampValue = scrubbingTimestampValue, apply {
|
||||
self.component?.seek(scrubbingTimestampValue)
|
||||
self.component?.seek?(scrubbingTimestampValue)
|
||||
self.component?.updateIsSeeking?(false)
|
||||
}
|
||||
}
|
||||
|
||||
@ -523,14 +538,12 @@ public final class AudioWaveformComponent: Component {
|
||||
diff = sampleWidth * 1.5
|
||||
|
||||
let gravityMultiplierY: CGFloat
|
||||
gravityMultiplierY = 1.0
|
||||
|
||||
/*switch parameters.gravity ?? .bottom {
|
||||
switch component.style {
|
||||
case .bottom:
|
||||
return 1
|
||||
case .center:
|
||||
return 0.5
|
||||
}*/
|
||||
gravityMultiplierY = 1.0
|
||||
case .middle:
|
||||
gravityMultiplierY = 0.5
|
||||
}
|
||||
|
||||
context.setFillColor(component.backgroundColor.mixedWith(component.foregroundColor, alpha: colorMixFraction).cgColor)
|
||||
context.setBlendMode(.copy)
|
||||
|
@ -244,6 +244,8 @@ public final class LottieComponent: Component {
|
||||
var advanceFrameCount = 1
|
||||
if animationInstance.frameRate == 360 {
|
||||
advanceFrameCount = 6
|
||||
} else if animationInstance.frameRate == 240 {
|
||||
advanceFrameCount = 4
|
||||
}
|
||||
self.currentFrame += advanceFrameCount
|
||||
if self.currentFrame >= Int(animationInstance.frameCount) - 1 {
|
||||
|
@ -599,10 +599,16 @@ final class MediaEditorScreenComponent: Component {
|
||||
//self.performSendMessageAction()
|
||||
},
|
||||
setMediaRecordingActive: nil,
|
||||
lockMediaRecording: nil,
|
||||
stopAndPreviewMediaRecording: nil,
|
||||
discardMediaRecordingPreview: nil,
|
||||
attachmentAction: nil,
|
||||
reactionAction: nil,
|
||||
audioRecorder: nil,
|
||||
videoRecordingStatus: nil,
|
||||
isRecordingLocked: false,
|
||||
recordedAudioPreview: nil,
|
||||
wasRecordingDismissed: false,
|
||||
displayGradient: false,//component.inputHeight != 0.0,
|
||||
bottomInset: 0.0 //component.inputHeight != 0.0 ? 0.0 : bottomContentInset
|
||||
)),
|
||||
|
@ -19,8 +19,11 @@ swift_library(
|
||||
"//submodules/TelegramUI/Components/LottieComponent",
|
||||
"//submodules/AccountContext",
|
||||
"//submodules/TelegramPresentationData",
|
||||
"//submodules/ManagedAnimationNode",
|
||||
"//submodules/SSignalKit/SwiftSignalKit",
|
||||
"//submodules/Components/HierarchyTrackingLayer",
|
||||
"//submodules/TelegramUI/Components/AudioWaveformComponent",
|
||||
"//submodules/MediaPlayer:UniversalMediaPlayer",
|
||||
],
|
||||
visibility = [
|
||||
"//visibility:public",
|
||||
|
@ -0,0 +1,357 @@
|
||||
import Foundation
|
||||
import UIKit
|
||||
import Display
|
||||
import ComponentFlow
|
||||
import AppBundle
|
||||
import TextFieldComponent
|
||||
import BundleIconComponent
|
||||
import AccountContext
|
||||
import TelegramPresentationData
|
||||
import ChatPresentationInterfaceState
|
||||
import SwiftSignalKit
|
||||
import LottieComponent
|
||||
import HierarchyTrackingLayer
|
||||
import ManagedAnimationNode
|
||||
import AudioWaveformComponent
|
||||
import UniversalMediaPlayer
|
||||
|
||||
private final class PlayPauseIconNode: ManagedAnimationNode {
|
||||
enum State: Equatable {
|
||||
case play
|
||||
case pause
|
||||
}
|
||||
|
||||
private let duration: Double = 0.35
|
||||
private var iconState: State = .pause
|
||||
|
||||
init() {
|
||||
super.init(size: CGSize(width: 28.0, height: 28.0))
|
||||
|
||||
self.enqueueState(.play, animated: false)
|
||||
}
|
||||
|
||||
func enqueueState(_ state: State, animated: Bool) {
|
||||
guard self.iconState != state else {
|
||||
return
|
||||
}
|
||||
|
||||
let previousState = self.iconState
|
||||
self.iconState = state
|
||||
|
||||
switch previousState {
|
||||
case .pause:
|
||||
switch state {
|
||||
case .play:
|
||||
if animated {
|
||||
self.trackTo(item: ManagedAnimationItem(source: .local("anim_playpause"), frames: .range(startFrame: 41, endFrame: 83), duration: self.duration))
|
||||
} else {
|
||||
self.trackTo(item: ManagedAnimationItem(source: .local("anim_playpause"), frames: .range(startFrame: 0, endFrame: 0), duration: 0.01))
|
||||
}
|
||||
case .pause:
|
||||
break
|
||||
}
|
||||
case .play:
|
||||
switch state {
|
||||
case .pause:
|
||||
if animated {
|
||||
self.trackTo(item: ManagedAnimationItem(source: .local("anim_playpause"), frames: .range(startFrame: 0, endFrame: 41), duration: self.duration))
|
||||
} else {
|
||||
self.trackTo(item: ManagedAnimationItem(source: .local("anim_playpause"), frames: .range(startFrame: 41, endFrame: 41), duration: 0.01))
|
||||
}
|
||||
case .play:
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func textForDuration(seconds: Int32) -> String {
|
||||
if seconds >= 60 * 60 {
|
||||
return String(format: "%d:%02d:%02d", seconds / 3600, seconds / 60 % 60)
|
||||
} else {
|
||||
return String(format: "%d:%02d", seconds / 60, seconds % 60)
|
||||
}
|
||||
}
|
||||
|
||||
public final class MediaPreviewPanelComponent: Component {
|
||||
public let context: AccountContext
|
||||
public let theme: PresentationTheme
|
||||
public let strings: PresentationStrings
|
||||
public let mediaPreview: ChatRecordedMediaPreview
|
||||
public let insets: UIEdgeInsets
|
||||
|
||||
public init(
|
||||
context: AccountContext,
|
||||
theme: PresentationTheme,
|
||||
strings: PresentationStrings,
|
||||
mediaPreview: ChatRecordedMediaPreview,
|
||||
insets: UIEdgeInsets
|
||||
) {
|
||||
self.context = context
|
||||
self.theme = theme
|
||||
self.strings = strings
|
||||
self.mediaPreview = mediaPreview
|
||||
self.insets = insets
|
||||
}
|
||||
|
||||
public static func ==(lhs: MediaPreviewPanelComponent, rhs: MediaPreviewPanelComponent) -> Bool {
|
||||
if lhs.context !== rhs.context {
|
||||
return false
|
||||
}
|
||||
if lhs.theme !== rhs.theme {
|
||||
return false
|
||||
}
|
||||
if lhs.strings !== rhs.strings {
|
||||
return false
|
||||
}
|
||||
if lhs.mediaPreview !== rhs.mediaPreview {
|
||||
return false
|
||||
}
|
||||
if lhs.insets != rhs.insets {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
public final class View: UIView {
|
||||
private var component: MediaPreviewPanelComponent?
|
||||
private weak var state: EmptyComponentState?
|
||||
|
||||
public let vibrancyContainer: UIView
|
||||
|
||||
private let trackingLayer: HierarchyTrackingLayer
|
||||
|
||||
private let indicator = ComponentView<Empty>()
|
||||
|
||||
private let timerFont: UIFont
|
||||
private let timerText = ComponentView<Empty>()
|
||||
|
||||
private var timerTextValue: String = "0:00"
|
||||
|
||||
private let playPauseIconButton: HighlightableButton
|
||||
private let playPauseIconNode: PlayPauseIconNode
|
||||
|
||||
private let waveform = ComponentView<Empty>()
|
||||
private let vibrancyWaveform = ComponentView<Empty>()
|
||||
|
||||
private var mediaPlayer: MediaPlayer?
|
||||
private let mediaPlayerStatus = Promise<MediaPlayerStatus?>(nil)
|
||||
private var mediaPlayerStatusDisposable: Disposable?
|
||||
|
||||
override init(frame: CGRect) {
|
||||
self.trackingLayer = HierarchyTrackingLayer()
|
||||
|
||||
self.timerFont = Font.with(size: 15.0, design: .camera, traits: .monospacedNumbers)
|
||||
|
||||
self.vibrancyContainer = UIView()
|
||||
|
||||
self.playPauseIconButton = HighlightableButton()
|
||||
self.playPauseIconNode = PlayPauseIconNode()
|
||||
self.playPauseIconNode.isUserInteractionEnabled = false
|
||||
|
||||
super.init(frame: frame)
|
||||
|
||||
self.layer.addSublayer(self.trackingLayer)
|
||||
self.playPauseIconButton.addSubview(self.playPauseIconNode.view)
|
||||
self.addSubview(self.playPauseIconButton)
|
||||
|
||||
self.playPauseIconButton.addTarget(self, action: #selector(self.playPauseButtonPressed), for: .touchUpInside)
|
||||
|
||||
self.mediaPlayerStatusDisposable = (self.mediaPlayerStatus.get()
|
||||
|> deliverOnMainQueue).start(next: { [weak self] status in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
|
||||
if let status {
|
||||
switch status.status {
|
||||
case .playing, .buffering(_, true, _, _):
|
||||
self.playPauseIconNode.enqueueState(.play, animated: true)
|
||||
default:
|
||||
self.playPauseIconNode.enqueueState(.pause, animated: true)
|
||||
}
|
||||
|
||||
//self.timerTextValue = textForDuration(seconds: component.mediaPreview.duration)
|
||||
} else {
|
||||
self.playPauseIconNode.enqueueState(.play, animated: true)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
deinit {
|
||||
self.mediaPlayerStatusDisposable?.dispose()
|
||||
}
|
||||
|
||||
public func animateIn() {
|
||||
self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
|
||||
}
|
||||
|
||||
public func animateOut(transition: Transition, completion: @escaping () -> Void) {
|
||||
let vibrancyContainer = self.vibrancyContainer
|
||||
transition.setAlpha(view: vibrancyContainer, alpha: 0.0, completion: { [weak vibrancyContainer] _ in
|
||||
vibrancyContainer?.removeFromSuperview()
|
||||
})
|
||||
transition.setAlpha(view: self, alpha: 0.0, completion: { _ in
|
||||
completion()
|
||||
})
|
||||
}
|
||||
|
||||
@objc private func playPauseButtonPressed() {
|
||||
guard let component = self.component else {
|
||||
return
|
||||
}
|
||||
|
||||
if let mediaPlayer = self.mediaPlayer {
|
||||
mediaPlayer.togglePlayPause()
|
||||
} else {
|
||||
let mediaManager = component.context.sharedContext.mediaManager
|
||||
let mediaPlayer = MediaPlayer(
|
||||
audioSessionManager: mediaManager.audioSession,
|
||||
postbox: component.context.account.postbox,
|
||||
userLocation: .other,
|
||||
userContentType: .audio,
|
||||
resourceReference: .standalone(resource: component.mediaPreview.resource),
|
||||
streamable: .none,
|
||||
video: false,
|
||||
preferSoftwareDecoding: false,
|
||||
enableSound: true,
|
||||
fetchAutomatically: true
|
||||
)
|
||||
mediaPlayer.actionAtEnd = .action { [weak mediaPlayer] in
|
||||
mediaPlayer?.seek(timestamp: 0.0)
|
||||
}
|
||||
self.mediaPlayer = mediaPlayer
|
||||
|
||||
self.mediaPlayerStatus.set(mediaPlayer.status |> map(Optional.init))
|
||||
|
||||
mediaPlayer.play()
|
||||
}
|
||||
}
|
||||
|
||||
func update(component: MediaPreviewPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
|
||||
if self.component == nil {
|
||||
self.timerTextValue = textForDuration(seconds: component.mediaPreview.duration)
|
||||
}
|
||||
|
||||
self.component = component
|
||||
self.state = state
|
||||
|
||||
let timerTextSize = self.timerText.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(Text(text: self.timerTextValue, font: self.timerFont, color: .white)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: 100.0, height: 100.0)
|
||||
)
|
||||
if let timerTextView = self.timerText.view {
|
||||
if timerTextView.superview == nil {
|
||||
self.addSubview(timerTextView)
|
||||
timerTextView.layer.anchorPoint = CGPoint(x: 1.0, y: 0.5)
|
||||
}
|
||||
let timerTextFrame = CGRect(origin: CGPoint(x: availableSize.width - component.insets.right - 8.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - timerTextSize.height) * 0.5)), size: timerTextSize)
|
||||
transition.setPosition(view: timerTextView, position: CGPoint(x: timerTextFrame.minX, y: timerTextFrame.midY))
|
||||
timerTextView.bounds = CGRect(origin: CGPoint(), size: timerTextFrame.size)
|
||||
}
|
||||
|
||||
let playPauseSize = CGSize(width: 28.0, height: 28.0)
|
||||
var playPauseFrame = CGRect(origin: CGPoint(x: component.insets.left + 8.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - playPauseSize.height) * 0.5)), size: playPauseSize)
|
||||
let playPauseButtonFrame = playPauseFrame.insetBy(dx: -8.0, dy: -8.0)
|
||||
playPauseFrame = playPauseFrame.offsetBy(dx: -playPauseButtonFrame.minX, dy: -playPauseButtonFrame.minY)
|
||||
transition.setFrame(view: self.playPauseIconButton, frame: playPauseButtonFrame)
|
||||
transition.setFrame(view: self.playPauseIconNode.view, frame: playPauseFrame)
|
||||
|
||||
let waveformFrame = CGRect(origin: CGPoint(x: component.insets.left + 47.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - 24.0) * 0.5)), size: CGSize(width: availableSize.width - component.insets.right - 47.0 - (component.insets.left + 47.0), height: 24.0))
|
||||
|
||||
let _ = self.waveform.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(AudioWaveformComponent(
|
||||
backgroundColor: UIColor.white.withAlphaComponent(0.1),
|
||||
foregroundColor: UIColor.white.withAlphaComponent(1.0),
|
||||
shimmerColor: nil,
|
||||
style: .middle,
|
||||
samples: component.mediaPreview.waveform.samples,
|
||||
peak: component.mediaPreview.waveform.peak,
|
||||
status: self.mediaPlayerStatus.get() |> map { value -> MediaPlayerStatus in
|
||||
if let value {
|
||||
return value
|
||||
} else {
|
||||
return MediaPlayerStatus(
|
||||
generationTimestamp: 0.0,
|
||||
duration: 0.0,
|
||||
dimensions: CGSize(),
|
||||
timestamp: 0.0,
|
||||
baseRate: 1.0,
|
||||
seekId: 0,
|
||||
status: .paused,
|
||||
soundEnabled: true
|
||||
)
|
||||
}
|
||||
},
|
||||
seek: { [weak self] timestamp in
|
||||
guard let self, let mediaPlayer = self.mediaPlayer else {
|
||||
return
|
||||
}
|
||||
mediaPlayer.seek(timestamp: timestamp)
|
||||
},
|
||||
updateIsSeeking: { [weak self] isSeeking in
|
||||
guard let self, let mediaPlayer = self.mediaPlayer else {
|
||||
return
|
||||
}
|
||||
if isSeeking {
|
||||
mediaPlayer.pause()
|
||||
} else {
|
||||
mediaPlayer.play()
|
||||
}
|
||||
}
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: waveformFrame.size
|
||||
)
|
||||
let _ = self.vibrancyWaveform.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(AudioWaveformComponent(
|
||||
backgroundColor: .white,
|
||||
foregroundColor: .white,
|
||||
shimmerColor: nil,
|
||||
style: .middle,
|
||||
samples: component.mediaPreview.waveform.samples,
|
||||
peak: component.mediaPreview.waveform.peak,
|
||||
status: .complete(),
|
||||
seek: nil,
|
||||
updateIsSeeking: nil
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: waveformFrame.size
|
||||
)
|
||||
|
||||
if let waveformView = self.waveform.view as? AudioWaveformComponent.View {
|
||||
if waveformView.superview == nil {
|
||||
waveformView.enableScrubbing = true
|
||||
self.addSubview(waveformView)
|
||||
}
|
||||
transition.setFrame(view: waveformView, frame: waveformFrame)
|
||||
}
|
||||
if let vibrancyWaveformView = self.vibrancyWaveform.view {
|
||||
if vibrancyWaveformView.superview == nil {
|
||||
self.vibrancyContainer.addSubview(vibrancyWaveformView)
|
||||
}
|
||||
transition.setFrame(view: vibrancyWaveformView, frame: waveformFrame)
|
||||
}
|
||||
|
||||
transition.setFrame(view: self.vibrancyContainer, frame: CGRect(origin: CGPoint(), size: availableSize))
|
||||
|
||||
return availableSize
|
||||
}
|
||||
}
|
||||
|
||||
public func makeView() -> View {
|
||||
return View(frame: CGRect())
|
||||
}
|
||||
|
||||
public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
|
||||
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
|
||||
}
|
||||
}
|
@ -13,33 +13,60 @@ import LottieComponent
|
||||
import HierarchyTrackingLayer
|
||||
|
||||
public final class MediaRecordingPanelComponent: Component {
|
||||
public let theme: PresentationTheme
|
||||
public let strings: PresentationStrings
|
||||
public let audioRecorder: ManagedAudioRecorder?
|
||||
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
|
||||
public let isRecordingLocked: Bool
|
||||
public let cancelFraction: CGFloat
|
||||
public let inputInsets: UIEdgeInsets
|
||||
public let insets: UIEdgeInsets
|
||||
public let cancelAction: () -> Void
|
||||
|
||||
public init(
|
||||
theme: PresentationTheme,
|
||||
strings: PresentationStrings,
|
||||
audioRecorder: ManagedAudioRecorder?,
|
||||
videoRecordingStatus: InstantVideoControllerRecordingStatus?,
|
||||
isRecordingLocked: Bool,
|
||||
cancelFraction: CGFloat,
|
||||
insets: UIEdgeInsets
|
||||
inputInsets: UIEdgeInsets,
|
||||
insets: UIEdgeInsets,
|
||||
cancelAction: @escaping () -> Void
|
||||
) {
|
||||
self.theme = theme
|
||||
self.strings = strings
|
||||
self.audioRecorder = audioRecorder
|
||||
self.videoRecordingStatus = videoRecordingStatus
|
||||
self.isRecordingLocked = isRecordingLocked
|
||||
self.cancelFraction = cancelFraction
|
||||
self.inputInsets = inputInsets
|
||||
self.insets = insets
|
||||
self.cancelAction = cancelAction
|
||||
}
|
||||
|
||||
public static func ==(lhs: MediaRecordingPanelComponent, rhs: MediaRecordingPanelComponent) -> Bool {
|
||||
if lhs.theme !== rhs.theme {
|
||||
return false
|
||||
}
|
||||
if lhs.strings !== rhs.strings {
|
||||
return false
|
||||
}
|
||||
if lhs.audioRecorder !== rhs.audioRecorder {
|
||||
return false
|
||||
}
|
||||
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
|
||||
return false
|
||||
}
|
||||
if lhs.isRecordingLocked != rhs.isRecordingLocked {
|
||||
return false
|
||||
}
|
||||
if lhs.cancelFraction != rhs.cancelFraction {
|
||||
return false
|
||||
}
|
||||
if lhs.inputInsets != rhs.inputInsets {
|
||||
return false
|
||||
}
|
||||
if lhs.insets != rhs.insets {
|
||||
return false
|
||||
}
|
||||
@ -50,13 +77,21 @@ public final class MediaRecordingPanelComponent: Component {
|
||||
private var component: MediaRecordingPanelComponent?
|
||||
private weak var state: EmptyComponentState?
|
||||
|
||||
public let vibrancyContainer: UIView
|
||||
|
||||
private let trackingLayer: HierarchyTrackingLayer
|
||||
|
||||
private let indicator = ComponentView<Empty>()
|
||||
|
||||
private let cancelContainerView: UIView
|
||||
private let vibrancyCancelContainerView: UIView
|
||||
private let cancelIconView: UIImageView
|
||||
private let vibrancyCancelIconView: UIImageView
|
||||
private let vibrancyCancelText = ComponentView<Empty>()
|
||||
private let cancelText = ComponentView<Empty>()
|
||||
private let vibrancyCancelButtonText = ComponentView<Empty>()
|
||||
private let cancelButtonText = ComponentView<Empty>()
|
||||
private var cancelButton: HighlightableButton?
|
||||
|
||||
private let timerFont: UIFont
|
||||
private let timerText = ComponentView<Empty>()
|
||||
@ -68,16 +103,23 @@ public final class MediaRecordingPanelComponent: Component {
|
||||
override init(frame: CGRect) {
|
||||
self.trackingLayer = HierarchyTrackingLayer()
|
||||
self.cancelIconView = UIImageView()
|
||||
self.vibrancyCancelIconView = UIImageView()
|
||||
|
||||
self.timerFont = Font.with(size: 15.0, design: .camera, traits: .monospacedNumbers)
|
||||
|
||||
self.vibrancyContainer = UIView()
|
||||
|
||||
self.cancelContainerView = UIView()
|
||||
self.vibrancyCancelContainerView = UIView()
|
||||
|
||||
super.init(frame: frame)
|
||||
|
||||
self.layer.addSublayer(self.trackingLayer)
|
||||
|
||||
self.cancelContainerView.addSubview(self.cancelIconView)
|
||||
self.vibrancyCancelContainerView.addSubview(self.vibrancyCancelIconView)
|
||||
|
||||
self.vibrancyContainer.addSubview(self.vibrancyCancelContainerView)
|
||||
self.addSubview(self.cancelContainerView)
|
||||
|
||||
self.trackingLayer.didEnterHierarchy = { [weak self] in
|
||||
@ -97,6 +139,10 @@ public final class MediaRecordingPanelComponent: Component {
|
||||
}
|
||||
|
||||
private func updateAnimations() {
|
||||
guard let component = self.component else {
|
||||
return
|
||||
}
|
||||
|
||||
if let indicatorView = self.indicator.view {
|
||||
if indicatorView.layer.animation(forKey: "recording") == nil {
|
||||
let animation = CAKeyframeAnimation(keyPath: "opacity")
|
||||
@ -109,7 +155,7 @@ public final class MediaRecordingPanelComponent: Component {
|
||||
indicatorView.layer.add(animation, forKey: "recording")
|
||||
}
|
||||
}
|
||||
if self.cancelContainerView.layer.animation(forKey: "recording") == nil {
|
||||
if !component.isRecordingLocked, self.cancelContainerView.layer.animation(forKey: "recording") == nil {
|
||||
let animation = CAKeyframeAnimation(keyPath: "position.x")
|
||||
animation.values = [-5.0 as NSNumber, 5.0 as NSNumber, 0.0 as NSNumber]
|
||||
animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber]
|
||||
@ -119,26 +165,40 @@ public final class MediaRecordingPanelComponent: Component {
|
||||
animation.repeatCount = Float.infinity
|
||||
|
||||
self.cancelContainerView.layer.add(animation, forKey: "recording")
|
||||
self.vibrancyCancelContainerView.layer.add(animation, forKey: "recording")
|
||||
}
|
||||
}
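// Added sketch, not part of the commit: both looping effects above (the blinking recording dot and
// the "slide to cancel" shake) follow the same recipe — a CAKeyframeAnimation with matching
// values/keyTimes counts and repeatCount set to infinity, added under the fixed "recording" key so
// updateAnimations() only re-adds it when it is missing. The concrete opacity values are cut off by
// this hunk, so the numbers below are assumptions:
private func makeRecordingBlinkAnimation() -> CAKeyframeAnimation {
    let animation = CAKeyframeAnimation(keyPath: "opacity")
    animation.values = [1.0 as NSNumber, 0.2 as NSNumber, 1.0 as NSNumber]
    animation.keyTimes = [0.0 as NSNumber, 0.5 as NSNumber, 1.0 as NSNumber]
    animation.duration = 1.2
    animation.repeatCount = .infinity
    return animation
}
// Hypothetical usage: indicatorView.layer.add(makeRecordingBlinkAnimation(), forKey: "recording")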
|
||||
|
||||
public func animateIn() {
|
||||
guard let component = self.component else {
|
||||
return
|
||||
}
|
||||
if let indicatorView = self.indicator.view {
|
||||
indicatorView.layer.animatePosition(from: CGPoint(x: -20.0, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
|
||||
indicatorView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
|
||||
indicatorView.layer.animatePosition(from: CGPoint(x: component.inputInsets.left - component.insets.left, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
|
||||
}
|
||||
if let timerTextView = self.timerText.view {
|
||||
timerTextView.layer.animatePosition(from: CGPoint(x: -20.0, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
|
||||
timerTextView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
|
||||
timerTextView.layer.animatePosition(from: CGPoint(x: component.inputInsets.left - component.insets.left, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
|
||||
}
|
||||
self.cancelContainerView.layer.animatePosition(from: CGPoint(x: self.bounds.width, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
|
||||
self.vibrancyCancelContainerView.layer.animatePosition(from: CGPoint(x: self.bounds.width, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
|
||||
}
|
||||
|
||||
public func animateOut(dismissRecording: Bool, completion: @escaping () -> Void) {
|
||||
if let indicatorView = self.indicator.view as? LottieComponent.View {
|
||||
if let _ = indicatorView.layer.animation(forKey: "recording") {
|
||||
let fromAlpha = indicatorView.layer.presentation()?.opacity ?? indicatorView.layer.opacity
|
||||
indicatorView.layer.removeAnimation(forKey: "recording")
|
||||
indicatorView.layer.animateAlpha(from: CGFloat(fromAlpha), to: 1.0, duration: 0.2)
|
||||
|
||||
public func animateOut(transition: Transition, dismissRecording: Bool, completion: @escaping () -> Void) {
|
||||
guard let component = self.component else {
|
||||
completion()
|
||||
return
|
||||
}
|
||||
|
||||
if let indicatorView = self.indicator.view as? LottieComponent.View, let _ = indicatorView.layer.animation(forKey: "recording") {
|
||||
let fromAlpha = indicatorView.layer.presentation()?.opacity ?? indicatorView.layer.opacity
|
||||
indicatorView.layer.removeAnimation(forKey: "recording")
|
||||
indicatorView.layer.animateAlpha(from: CGFloat(fromAlpha), to: 1.0, duration: 0.2)
|
||||
}
|
||||
|
||||
if dismissRecording {
|
||||
if let indicatorView = self.indicator.view as? LottieComponent.View {
|
||||
indicatorView.playOnce(completion: { [weak indicatorView] in
|
||||
if let indicatorView {
|
||||
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
|
||||
@ -147,19 +207,35 @@ public final class MediaRecordingPanelComponent: Component {
|
||||
|
||||
completion()
|
||||
})
|
||||
} else {
|
||||
completion()
|
||||
}
|
||||
} else {
|
||||
completion()
|
||||
if let indicatorView = self.indicator.view as? LottieComponent.View {
|
||||
transition.setPosition(view: indicatorView, position: indicatorView.center.offsetBy(dx: component.inputInsets.left - component.insets.left, dy: 0.0))
|
||||
transition.setAlpha(view: indicatorView, alpha: 0.0)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
|
||||
if let timerTextView = self.timerText.view {
|
||||
transition.setAlpha(view: timerTextView, alpha: 0.0)
|
||||
transition.setAlpha(view: timerTextView, alpha: 0.0, completion: { _ in
|
||||
if !dismissRecording {
|
||||
completion()
|
||||
}
|
||||
})
|
||||
transition.setScale(view: timerTextView, scale: 0.001)
|
||||
transition.setPosition(view: timerTextView, position: timerTextView.center.offsetBy(dx: component.inputInsets.left - component.insets.left, dy: 0.0))
|
||||
}
|
||||
|
||||
transition.setAlpha(view: self.cancelContainerView, alpha: 0.0)
|
||||
transition.setAlpha(view: self.vibrancyCancelContainerView, alpha: 0.0)
|
||||
}
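// Added sketch, not part of the commit: animateOut(transition:dismissRecording:completion:) above
// fades and scales the timer and cancel UI with a spring-curve Transition; when the recording is
// kept, `completion` is fired from the timer text's alpha callback, and when it is dismissed the
// Lottie bin animation's playOnce completion drives it instead. A hypothetical reduced form of the
// fade-out-then-complete pattern:
private func fadeOutAndComplete(primary: UIView, others: [UIView], completion: @escaping () -> Void) {
    let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
    transition.setAlpha(view: primary, alpha: 0.0, completion: { _ in
        completion()
    })
    transition.setScale(view: primary, scale: 0.001)
    for view in others {
        transition.setAlpha(view: view, alpha: 0.0)
        transition.setScale(view: view, scale: 0.001)
    }
}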
|
||||
|
||||
@objc private func cancelButtonPressed() {
|
||||
guard let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.cancelAction()
|
||||
}
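// Added sketch, not part of the commit: update() below maps the raw slide-to-cancel drag
// (component.cancelFraction) through a rubber-banding curve (bandingStart / bandedOffset /
// mappedCancelFraction) before shifting the "Slide to cancel" label, so it trails the finger with
// increasing resistance. The exact curve is not visible in this hunk; the helper below is an
// assumed diminishing-returns mapping, not the repository's formula.
private func rubberBandedOffset(for cancelFraction: CGFloat, bandingStart: CGFloat = 0.0, range: CGFloat = 200.0, coefficient: CGFloat = 0.4) -> CGFloat {
    let bandedOffset = max(0.0, abs(cancelFraction) - bandingStart)
    return bandingStart + (1.0 - (1.0 / (bandedOffset * coefficient / range + 1.0))) * range
}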
|
||||
|
||||
func update(component: MediaRecordingPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
|
||||
@ -240,7 +316,7 @@ public final class MediaRecordingPanelComponent: Component {
|
||||
if indicatorView.superview == nil {
|
||||
self.addSubview(indicatorView)
|
||||
}
|
||||
transition.setFrame(view: indicatorView, frame: CGRect(origin: CGPoint(x: 3.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - indicatorSize.height) * 0.5)), size: indicatorSize))
|
||||
transition.setFrame(view: indicatorView, frame: CGRect(origin: CGPoint(x: 5.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - indicatorSize.height) * 0.5)), size: indicatorSize))
|
||||
}
|
||||
|
||||
let timerTextSize = self.timerText.update(
|
||||
@ -254,25 +330,48 @@ public final class MediaRecordingPanelComponent: Component {
|
||||
self.addSubview(timerTextView)
|
||||
timerTextView.layer.anchorPoint = CGPoint(x: 0.0, y: 0.5)
|
||||
}
|
||||
let timerTextFrame = CGRect(origin: CGPoint(x: 38.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - timerTextSize.height) * 0.5)), size: timerTextSize)
|
||||
let timerTextFrame = CGRect(origin: CGPoint(x: 40.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - timerTextSize.height) * 0.5)), size: timerTextSize)
|
||||
transition.setPosition(view: timerTextView, position: CGPoint(x: timerTextFrame.minX, y: timerTextFrame.midY))
|
||||
timerTextView.bounds = CGRect(origin: CGPoint(), size: timerTextFrame.size)
|
||||
}
|
||||
|
||||
if self.cancelIconView.image == nil {
|
||||
self.cancelIconView.image = UIImage(bundleImageName: "Chat/Input/Text/AudioRecordingCancelArrow")?.withRenderingMode(.alwaysTemplate)
|
||||
let image = UIImage(bundleImageName: "Chat/Input/Text/AudioRecordingCancelArrow")?.withRenderingMode(.alwaysTemplate)
|
||||
self.cancelIconView.image = image
|
||||
self.vibrancyCancelIconView.image = image
|
||||
}
|
||||
|
||||
self.cancelIconView.tintColor = UIColor(white: 1.0, alpha: 0.4)
|
||||
self.cancelIconView.tintColor = UIColor(white: 1.0, alpha: 0.3)
|
||||
self.vibrancyCancelIconView.tintColor = .white
|
||||
|
||||
let cancelTextSize = self.cancelText.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(Text(text: "Slide to cancel", font: Font.regular(15.0), color: UIColor(white: 1.0, alpha: 0.4))),
|
||||
component: AnyComponent(Text(text: "Slide to cancel", font: Font.regular(15.0), color: UIColor(rgb: 0xffffff, alpha: 0.3))),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
|
||||
)
|
||||
let _ = self.vibrancyCancelText.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(Text(text: "Slide to cancel", font: Font.regular(15.0), color: .white)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
|
||||
)
|
||||
|
||||
let cancelButtonTextSize = self.cancelButtonText.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(Text(text: "Cancel", font: Font.regular(17.0), color: .white)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
|
||||
)
|
||||
let _ = self.vibrancyCancelButtonText.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(Text(text: "Cancel", font: Font.regular(17.0), color: .clear)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
|
||||
)
|
||||
|
||||
var textFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - cancelTextSize.width) * 0.5), y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - cancelTextSize.height) * 0.5)), size: cancelTextSize)
|
||||
let cancelButtonTextFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - cancelButtonTextSize.width) * 0.5), y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - cancelButtonTextSize.height) * 0.5)), size: cancelButtonTextSize)
|
||||
|
||||
let bandingStart: CGFloat = 0.0
|
||||
let bandedOffset = abs(component.cancelFraction) - bandingStart
|
||||
@ -282,18 +381,105 @@ public final class MediaRecordingPanelComponent: Component {
|
||||
|
||||
textFrame.origin.x -= mappedCancelFraction * 0.5
|
||||
|
||||
if component.isRecordingLocked {
|
||||
if self.cancelContainerView.layer.animation(forKey: "recording") != nil {
|
||||
if let presentation = self.cancelContainerView.layer.presentation() {
|
||||
transition.animatePosition(view: self.cancelContainerView, from: presentation.position, to: CGPoint())
|
||||
transition.animatePosition(view: self.vibrancyCancelContainerView, from: presentation.position, to: CGPoint())
|
||||
}
|
||||
self.cancelContainerView.layer.removeAnimation(forKey: "recording")
|
||||
self.vibrancyCancelContainerView.layer.removeAnimation(forKey: "recording")
|
||||
}
|
||||
}
|
||||
|
||||
if let cancelTextView = self.cancelText.view {
|
||||
if cancelTextView.superview == nil {
|
||||
self.cancelContainerView.addSubview(cancelTextView)
|
||||
}
|
||||
transition.setFrame(view: cancelTextView, frame: textFrame)
|
||||
transition.setPosition(view: cancelTextView, position: textFrame.center)
|
||||
transition.setBounds(view: cancelTextView, bounds: CGRect(origin: CGPoint(), size: textFrame.size))
|
||||
transition.setAlpha(view: cancelTextView, alpha: !component.isRecordingLocked ? 1.0 : 0.0)
|
||||
transition.setScale(view: cancelTextView, scale: !component.isRecordingLocked ? 1.0 : 0.001)
|
||||
}
|
||||
if let vibrancyCancelTextView = self.vibrancyCancelText.view {
|
||||
if vibrancyCancelTextView.superview == nil {
|
||||
self.vibrancyCancelContainerView.addSubview(vibrancyCancelTextView)
|
||||
}
|
||||
transition.setPosition(view: vibrancyCancelTextView, position: textFrame.center)
|
||||
transition.setBounds(view: vibrancyCancelTextView, bounds: CGRect(origin: CGPoint(), size: textFrame.size))
|
||||
transition.setAlpha(view: vibrancyCancelTextView, alpha: !component.isRecordingLocked ? 1.0 : 0.0)
|
||||
transition.setScale(view: vibrancyCancelTextView, scale: !component.isRecordingLocked ? 1.0 : 0.001)
|
||||
}
|
||||
|
||||
if let cancelButtonTextView = self.cancelButtonText.view {
|
||||
if cancelButtonTextView.superview == nil {
|
||||
self.cancelContainerView.addSubview(cancelButtonTextView)
|
||||
}
|
||||
transition.setPosition(view: cancelButtonTextView, position: cancelButtonTextFrame.center)
|
||||
transition.setBounds(view: cancelButtonTextView, bounds: CGRect(origin: CGPoint(), size: cancelButtonTextFrame.size))
|
||||
transition.setAlpha(view: cancelButtonTextView, alpha: component.isRecordingLocked ? 1.0 : 0.0)
|
||||
transition.setScale(view: cancelButtonTextView, scale: component.isRecordingLocked ? 1.0 : 0.001)
|
||||
}
|
||||
if let vibrancyCancelButtonTextView = self.vibrancyCancelButtonText.view {
|
||||
if vibrancyCancelButtonTextView.superview == nil {
|
||||
self.vibrancyCancelContainerView.addSubview(vibrancyCancelButtonTextView)
|
||||
}
|
||||
transition.setPosition(view: vibrancyCancelButtonTextView, position: cancelButtonTextFrame.center)
|
||||
transition.setBounds(view: vibrancyCancelButtonTextView, bounds: CGRect(origin: CGPoint(), size: cancelButtonTextFrame.size))
|
||||
transition.setAlpha(view: vibrancyCancelButtonTextView, alpha: component.isRecordingLocked ? 1.0 : 0.0)
|
||||
transition.setScale(view: vibrancyCancelButtonTextView, scale: component.isRecordingLocked ? 1.0 : 0.001)
|
||||
}
|
||||
|
||||
if component.isRecordingLocked {
|
||||
let cancelButton: HighlightableButton
|
||||
if let current = self.cancelButton {
|
||||
cancelButton = current
|
||||
} else {
|
||||
cancelButton = HighlightableButton()
|
||||
self.cancelButton = cancelButton
|
||||
self.addSubview(cancelButton)
|
||||
|
||||
cancelButton.highligthedChanged = { [weak self] highlighted in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
if highlighted {
|
||||
self.cancelContainerView.alpha = 0.6
|
||||
self.vibrancyCancelContainerView.alpha = 0.6
|
||||
} else {
|
||||
self.cancelContainerView.alpha = 1.0
|
||||
self.vibrancyCancelContainerView.alpha = 1.0
|
||||
self.cancelContainerView.layer.animateAlpha(from: 0.6, to: 1.0, duration: 0.2)
|
||||
self.vibrancyCancelContainerView.layer.animateAlpha(from: 0.6, to: 1.0, duration: 0.2)
|
||||
}
|
||||
}
|
||||
|
||||
cancelButton.addTarget(self, action: #selector(self.cancelButtonPressed), for: .touchUpInside)
|
||||
}
|
||||
|
||||
cancelButton.frame = CGRect(origin: CGPoint(x: cancelButtonTextFrame.minX - 8.0, y: 0.0), size: CGSize(width: cancelButtonTextFrame.width + 8.0 * 2.0, height: availableSize.height))
|
||||
} else if let cancelButton = self.cancelButton {
|
||||
cancelButton.removeFromSuperview()
|
||||
}
|
||||
|
||||
if let image = self.cancelIconView.image {
|
||||
transition.setFrame(view: self.cancelIconView, frame: CGRect(origin: CGPoint(x: textFrame.minX - 4.0 - image.size.width, y: textFrame.minY + floor((textFrame.height - image.size.height) * 0.5)), size: image.size))
|
||||
let iconFrame = CGRect(origin: CGPoint(x: textFrame.minX - 4.0 - image.size.width, y: textFrame.minY + floor((textFrame.height - image.size.height) * 0.5)), size: image.size)
|
||||
|
||||
transition.setPosition(view: self.cancelIconView, position: iconFrame.center)
|
||||
transition.setBounds(view: self.cancelIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
|
||||
transition.setAlpha(view: self.cancelIconView, alpha: !component.isRecordingLocked ? 1.0 : 0.0)
|
||||
transition.setScale(view: self.cancelIconView, scale: !component.isRecordingLocked ? 1.0 : 0.001)
|
||||
|
||||
transition.setPosition(view: self.vibrancyCancelIconView, position: iconFrame.center)
|
||||
transition.setBounds(view: self.vibrancyCancelIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
|
||||
transition.setAlpha(view: self.vibrancyCancelIconView, alpha: !component.isRecordingLocked ? 1.0 : 0.0)
|
||||
transition.setScale(view: self.vibrancyCancelIconView, scale: !component.isRecordingLocked ? 1.0 : 0.001)
|
||||
}
|
||||
|
||||
self.updateAnimations()
|
||||
|
||||
transition.setFrame(view: self.vibrancyContainer, frame: CGRect(origin: CGPoint(), size: availableSize))
|
||||
|
||||
return availableSize
|
||||
}
|
||||
}
|
||||
|
@ -8,6 +8,19 @@ import AccountContext
|
||||
import TelegramPresentationData
|
||||
import ChatPresentationInterfaceState
|
||||
|
||||
private extension MessageInputActionButtonComponent.Mode {
var iconName: String? {
switch self {
case .delete:
return "Chat/Context Menu/Delete"
case .attach:
return "Chat/Input/Text/IconAttachment"
default:
return nil
}
}
}
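// Added sketch, not part of the commit: `iconName` lets the update path below swap the hand-drawn
// send arrow for a tinted bundle icon whenever the mode has a dedicated asset (delete/attach).
// A hypothetical reduced form of that branch:
private func resolveActionIcon(for mode: MessageInputActionButtonComponent.Mode) -> UIImage? {
    if let iconName = mode.iconName {
        return generateTintedImage(image: UIImage(bundleImageName: iconName), color: .white)
    }
    // No dedicated asset: the caller falls back to the custom-drawn send arrow.
    return nil
}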
|
||||
|
||||
public final class MessageInputActionButtonComponent: Component {
|
||||
public enum Mode {
|
||||
case none
|
||||
@ -15,6 +28,8 @@ public final class MessageInputActionButtonComponent: Component {
|
||||
case apply
|
||||
case voiceInput
|
||||
case videoInput
|
||||
case delete
|
||||
case attach
|
||||
}
|
||||
|
||||
public enum Action {
|
||||
@ -26,6 +41,8 @@ public final class MessageInputActionButtonComponent: Component {
|
||||
public let action: (Mode, Action, Bool) -> Void
|
||||
public let switchMediaInputMode: () -> Void
|
||||
public let updateMediaCancelFraction: (CGFloat) -> Void
|
||||
public let lockMediaRecording: () -> Void
|
||||
public let stopAndPreviewMediaRecording: () -> Void
|
||||
public let context: AccountContext
|
||||
public let theme: PresentationTheme
|
||||
public let strings: PresentationStrings
|
||||
@ -38,6 +55,8 @@ public final class MessageInputActionButtonComponent: Component {
|
||||
action: @escaping (Mode, Action, Bool) -> Void,
|
||||
switchMediaInputMode: @escaping () -> Void,
|
||||
updateMediaCancelFraction: @escaping (CGFloat) -> Void,
|
||||
lockMediaRecording: @escaping () -> Void,
|
||||
stopAndPreviewMediaRecording: @escaping () -> Void,
|
||||
context: AccountContext,
|
||||
theme: PresentationTheme,
|
||||
strings: PresentationStrings,
|
||||
@ -49,6 +68,8 @@ public final class MessageInputActionButtonComponent: Component {
|
||||
self.action = action
|
||||
self.switchMediaInputMode = switchMediaInputMode
|
||||
self.updateMediaCancelFraction = updateMediaCancelFraction
|
||||
self.lockMediaRecording = lockMediaRecording
|
||||
self.stopAndPreviewMediaRecording = stopAndPreviewMediaRecording
|
||||
self.context = context
|
||||
self.theme = theme
|
||||
self.strings = strings
|
||||
@ -162,6 +183,12 @@ public final class MessageInputActionButtonComponent: Component {
|
||||
break
|
||||
}
|
||||
}
|
||||
micButton.stopRecording = { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.stopAndPreviewMediaRecording()
|
||||
}
|
||||
micButton.endRecording = { [weak self] sendMedia in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
@ -173,6 +200,12 @@ public final class MessageInputActionButtonComponent: Component {
|
||||
break
|
||||
}
|
||||
}
|
||||
micButton.updateLocked = { [weak self] _ in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.lockMediaRecording()
|
||||
}
|
||||
micButton.switchMode = { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
@ -187,29 +220,33 @@ public final class MessageInputActionButtonComponent: Component {
|
||||
}
|
||||
}
|
||||
|
||||
if self.sendIconView.image == nil {
|
||||
self.sendIconView.image = generateImage(CGSize(width: 33.0, height: 33.0), rotatedContext: { size, context in
|
||||
context.clear(CGRect(origin: CGPoint(), size: size))
|
||||
context.setFillColor(UIColor.white.cgColor)
|
||||
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
|
||||
context.setBlendMode(.copy)
|
||||
context.setStrokeColor(UIColor.clear.cgColor)
|
||||
context.setLineWidth(2.0)
|
||||
context.setLineCap(.round)
|
||||
context.setLineJoin(.round)
|
||||
|
||||
context.translateBy(x: 5.45, y: 4.0)
|
||||
|
||||
context.saveGState()
|
||||
context.translateBy(x: 4.0, y: 4.0)
|
||||
let _ = try? drawSvgPath(context, path: "M1,7 L7,1 L13,7 S ")
|
||||
context.restoreGState()
|
||||
|
||||
context.saveGState()
|
||||
context.translateBy(x: 10.0, y: 4.0)
|
||||
let _ = try? drawSvgPath(context, path: "M1,16 V1 S ")
|
||||
context.restoreGState()
|
||||
})
|
||||
if self.sendIconView.image == nil || previousComponent?.mode.iconName != component.mode.iconName {
|
||||
if let iconName = component.mode.iconName {
|
||||
self.sendIconView.image = generateTintedImage(image: UIImage(bundleImageName: iconName), color: .white)
|
||||
} else {
|
||||
self.sendIconView.image = generateImage(CGSize(width: 33.0, height: 33.0), rotatedContext: { size, context in
|
||||
context.clear(CGRect(origin: CGPoint(), size: size))
|
||||
context.setFillColor(UIColor.white.cgColor)
|
||||
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
|
||||
context.setBlendMode(.copy)
|
||||
context.setStrokeColor(UIColor.clear.cgColor)
|
||||
context.setLineWidth(2.0)
|
||||
context.setLineCap(.round)
|
||||
context.setLineJoin(.round)
|
||||
|
||||
context.translateBy(x: 5.45, y: 4.0)
|
||||
|
||||
context.saveGState()
|
||||
context.translateBy(x: 4.0, y: 4.0)
|
||||
let _ = try? drawSvgPath(context, path: "M1,7 L7,1 L13,7 S ")
|
||||
context.restoreGState()
|
||||
|
||||
context.saveGState()
|
||||
context.translateBy(x: 10.0, y: 4.0)
|
||||
let _ = try? drawSvgPath(context, path: "M1,16 V1 S ")
|
||||
context.restoreGState()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
var sendAlpha: CGFloat = 0.0
|
||||
@ -218,7 +255,7 @@ public final class MessageInputActionButtonComponent: Component {
|
||||
switch component.mode {
|
||||
case .none:
|
||||
break
|
||||
case .send, .apply:
|
||||
case .send, .apply, .attach, .delete:
|
||||
sendAlpha = 1.0
|
||||
case .videoInput, .voiceInput:
|
||||
microphoneAlpha = 1.0
|
||||
@ -248,7 +285,7 @@ public final class MessageInputActionButtonComponent: Component {
|
||||
|
||||
if previousComponent?.mode != component.mode {
|
||||
switch component.mode {
|
||||
case .none, .send, .apply, .voiceInput:
|
||||
case .none, .send, .apply, .voiceInput, .attach, .delete:
|
||||
micButton.updateMode(mode: .audio, animated: !transition.animation.isImmediate)
|
||||
case .videoInput:
|
||||
micButton.updateMode(mode: .video, animated: !transition.animation.isImmediate)
|
||||
|
@ -8,6 +8,7 @@ import BundleIconComponent
|
||||
import AccountContext
|
||||
import TelegramPresentationData
|
||||
import ChatPresentationInterfaceState
|
||||
import LottieComponent
|
||||
|
||||
public final class MessageInputPanelComponent: Component {
|
||||
public enum Style {
|
||||
@ -31,10 +32,16 @@ public final class MessageInputPanelComponent: Component {
|
||||
public let presentController: (ViewController) -> Void
|
||||
public let sendMessageAction: () -> Void
|
||||
public let setMediaRecordingActive: ((Bool, Bool, Bool) -> Void)?
|
||||
public let lockMediaRecording: (() -> Void)?
|
||||
public let stopAndPreviewMediaRecording: (() -> Void)?
|
||||
public let discardMediaRecordingPreview: (() -> Void)?
|
||||
public let attachmentAction: (() -> Void)?
|
||||
public let reactionAction: ((UIView) -> Void)?
|
||||
public let audioRecorder: ManagedAudioRecorder?
|
||||
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
|
||||
public let isRecordingLocked: Bool
|
||||
public let recordedAudioPreview: ChatRecordedMediaPreview?
|
||||
public let wasRecordingDismissed: Bool
|
||||
public let displayGradient: Bool
|
||||
public let bottomInset: CGFloat
|
||||
|
||||
@ -48,10 +55,16 @@ public final class MessageInputPanelComponent: Component {
|
||||
presentController: @escaping (ViewController) -> Void,
|
||||
sendMessageAction: @escaping () -> Void,
|
||||
setMediaRecordingActive: ((Bool, Bool, Bool) -> Void)?,
|
||||
lockMediaRecording: (() -> Void)?,
|
||||
stopAndPreviewMediaRecording: (() -> Void)?,
|
||||
discardMediaRecordingPreview: (() -> Void)?,
|
||||
attachmentAction: (() -> Void)?,
|
||||
reactionAction: ((UIView) -> Void)?,
|
||||
audioRecorder: ManagedAudioRecorder?,
|
||||
videoRecordingStatus: InstantVideoControllerRecordingStatus?,
|
||||
isRecordingLocked: Bool,
|
||||
recordedAudioPreview: ChatRecordedMediaPreview?,
|
||||
wasRecordingDismissed: Bool,
|
||||
displayGradient: Bool,
|
||||
bottomInset: CGFloat
|
||||
) {
|
||||
@ -64,10 +77,16 @@ public final class MessageInputPanelComponent: Component {
|
||||
self.presentController = presentController
|
||||
self.sendMessageAction = sendMessageAction
|
||||
self.setMediaRecordingActive = setMediaRecordingActive
|
||||
self.lockMediaRecording = lockMediaRecording
|
||||
self.stopAndPreviewMediaRecording = stopAndPreviewMediaRecording
|
||||
self.discardMediaRecordingPreview = discardMediaRecordingPreview
|
||||
self.attachmentAction = attachmentAction
|
||||
self.reactionAction = reactionAction
|
||||
self.audioRecorder = audioRecorder
|
||||
self.videoRecordingStatus = videoRecordingStatus
|
||||
self.isRecordingLocked = isRecordingLocked
|
||||
self.wasRecordingDismissed = wasRecordingDismissed
|
||||
self.recordedAudioPreview = recordedAudioPreview
|
||||
self.displayGradient = displayGradient
|
||||
self.bottomInset = bottomInset
|
||||
}
|
||||
@ -97,6 +116,15 @@ public final class MessageInputPanelComponent: Component {
|
||||
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
|
||||
return false
|
||||
}
|
||||
if lhs.isRecordingLocked != rhs.isRecordingLocked {
|
||||
return false
|
||||
}
|
||||
if lhs.wasRecordingDismissed != rhs.wasRecordingDismissed {
|
||||
return false
|
||||
}
|
||||
if lhs.recordedAudioPreview !== rhs.recordedAudioPreview {
|
||||
return false
|
||||
}
|
||||
if lhs.displayGradient != rhs.displayGradient {
|
||||
return false
|
||||
}
|
||||
@ -123,13 +151,17 @@ public final class MessageInputPanelComponent: Component {
|
||||
private let textFieldExternalState = TextFieldComponent.ExternalState()
|
||||
|
||||
private let attachmentButton = ComponentView<Empty>()
|
||||
private var deleteMediaPreviewButton: ComponentView<Empty>?
|
||||
private let inputActionButton = ComponentView<Empty>()
|
||||
private let stickerButton = ComponentView<Empty>()
|
||||
private let reactionButton = ComponentView<Empty>()
|
||||
|
||||
private var mediaRecordingVibrancyContainer: UIView
|
||||
private var mediaRecordingPanel: ComponentView<Empty>?
|
||||
private weak var dismissingMediaRecordingPanel: UIView?
|
||||
|
||||
private var mediaPreviewPanel: ComponentView<Empty>?
|
||||
|
||||
private var currentMediaInputIsVoice: Bool = true
|
||||
private var mediaCancelFraction: CGFloat = 0.0
|
||||
|
||||
@ -145,6 +177,9 @@ public final class MessageInputPanelComponent: Component {
|
||||
let vibrancyEffectView = UIVisualEffectView(effect: vibrancyEffect)
|
||||
self.vibrancyEffectView = vibrancyEffectView
|
||||
|
||||
self.mediaRecordingVibrancyContainer = UIView()
|
||||
self.vibrancyEffectView.contentView.addSubview(self.mediaRecordingVibrancyContainer)
|
||||
|
||||
self.gradientView = UIImageView()
|
||||
self.bottomGradientView = UIView()
|
||||
|
||||
@ -181,20 +216,31 @@ public final class MessageInputPanelComponent: Component {
|
||||
}
|
||||
}
|
||||
|
||||
override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
|
||||
let result = super.hitTest(point, with: event)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func update(component: MessageInputPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
|
||||
var insets = UIEdgeInsets(top: 14.0, left: 7.0, bottom: 6.0, right: 7.0)
|
||||
|
||||
if let _ = component.attachmentAction {
|
||||
insets.left = 41.0
|
||||
}
|
||||
if let _ = component.setMediaRecordingActive {
|
||||
insets.right = 41.0
|
||||
}
|
||||
|
||||
let mediaInsets = UIEdgeInsets(top: insets.top, left: 7.0, bottom: insets.bottom, right: insets.right)
|
||||
|
||||
let baseFieldHeight: CGFloat = 40.0
|
||||
|
||||
self.component = component
|
||||
self.state = state
|
||||
|
||||
let hasMediaRecording = component.audioRecorder != nil || component.videoRecordingStatus != nil
|
||||
let hasMediaEditing = component.recordedAudioPreview != nil
|
||||
|
||||
let topGradientHeight: CGFloat = 32.0
|
||||
if self.gradientView.image == nil {
|
||||
@ -264,13 +310,20 @@ public final class MessageInputPanelComponent: Component {
|
||||
}
|
||||
|
||||
let fieldFrame = CGRect(origin: CGPoint(x: insets.left, y: insets.top), size: CGSize(width: availableSize.width - insets.left - insets.right, height: textFieldSize.height))
|
||||
transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldFrame.size))
|
||||
transition.setAlpha(view: self.vibrancyEffectView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
|
||||
|
||||
transition.setFrame(view: self.fieldBackgroundView, frame: fieldFrame)
|
||||
self.fieldBackgroundView.update(size: fieldFrame.size, cornerRadius: baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition)
|
||||
let fieldBackgroundFrame: CGRect
|
||||
if hasMediaRecording {
|
||||
fieldBackgroundFrame = CGRect(origin: CGPoint(x: mediaInsets.left, y: insets.top), size: CGSize(width: availableSize.width - mediaInsets.left - mediaInsets.right, height: textFieldSize.height))
|
||||
} else {
|
||||
fieldBackgroundFrame = fieldFrame
|
||||
}
|
||||
|
||||
let gradientFrame = CGRect(origin: CGPoint(x: 0.0, y: -topGradientHeight), size: CGSize(width: availableSize.width, height: topGradientHeight + fieldFrame.maxY + insets.bottom))
|
||||
transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size))
|
||||
|
||||
transition.setFrame(view: self.fieldBackgroundView, frame: fieldBackgroundFrame)
|
||||
self.fieldBackgroundView.update(size: fieldBackgroundFrame.size, cornerRadius: baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition)
|
||||
|
||||
let gradientFrame = CGRect(origin: CGPoint(x: fieldBackgroundFrame.minX - fieldFrame.minX, y: -topGradientHeight), size: CGSize(width: availableSize.width - (fieldBackgroundFrame.minX - fieldFrame.minX), height: topGradientHeight + fieldBackgroundFrame.maxY + insets.bottom))
|
||||
transition.setFrame(view: self.gradientView, frame: gradientFrame)
|
||||
transition.setFrame(view: self.bottomGradientView, frame: CGRect(origin: CGPoint(x: 0.0, y: gradientFrame.maxY), size: CGSize(width: availableSize.width, height: component.bottomInset)))
|
||||
transition.setAlpha(view: self.gradientView, alpha: component.displayGradient ? 1.0 : 0.0)
|
||||
@ -282,7 +335,7 @@ public final class MessageInputPanelComponent: Component {
|
||||
} else {
|
||||
placeholderOriginX = floorToScreenPixels((availableSize.width - placeholderSize.width) / 2.0)
|
||||
}
|
||||
let placeholderFrame = CGRect(origin: CGPoint(x: placeholderOriginX, y: floor((fieldFrame.height - placeholderSize.height) * 0.5)), size: placeholderSize)
|
||||
let placeholderFrame = CGRect(origin: CGPoint(x: placeholderOriginX, y: floor((fieldBackgroundFrame.height - placeholderSize.height) * 0.5)), size: placeholderSize)
|
||||
if let placeholderView = self.placeholder.view, let vibrancyPlaceholderView = self.vibrancyPlaceholder.view {
|
||||
if vibrancyPlaceholderView.superview == nil {
|
||||
vibrancyPlaceholderView.layer.anchorPoint = CGPoint()
|
||||
@ -298,6 +351,9 @@ public final class MessageInputPanelComponent: Component {
|
||||
}
|
||||
transition.setPosition(view: placeholderView, position: placeholderFrame.origin)
|
||||
placeholderView.bounds = CGRect(origin: CGPoint(), size: placeholderFrame.size)
|
||||
|
||||
transition.setAlpha(view: placeholderView, alpha: (hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
|
||||
transition.setAlpha(view: vibrancyPlaceholderView, alpha: (hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
|
||||
}
|
||||
|
||||
let size = CGSize(width: availableSize.width, height: textFieldSize.height + insets.top + insets.bottom)
|
||||
@ -306,46 +362,151 @@ public final class MessageInputPanelComponent: Component {
|
||||
if textFieldView.superview == nil {
|
||||
self.addSubview(textFieldView)
|
||||
}
|
||||
transition.setFrame(view: textFieldView, frame: CGRect(origin: CGPoint(x: fieldFrame.minX, y: fieldFrame.maxY - textFieldSize.height), size: textFieldSize))
|
||||
transition.setAlpha(view: textFieldView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
|
||||
transition.setFrame(view: textFieldView, frame: CGRect(origin: CGPoint(x: fieldBackgroundFrame.minX, y: fieldBackgroundFrame.maxY - textFieldSize.height), size: textFieldSize))
|
||||
transition.setAlpha(view: textFieldView, alpha: (hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
|
||||
}
|
||||
|
||||
if let attachmentAction = component.attachmentAction {
|
||||
if component.attachmentAction != nil {
|
||||
let attachmentButtonMode: MessageInputActionButtonComponent.Mode
|
||||
attachmentButtonMode = .attach
|
||||
|
||||
let attachmentButtonSize = self.attachmentButton.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(Button(
|
||||
content: AnyComponent(BundleIconComponent(
|
||||
name: "Chat/Input/Text/IconAttachment",
|
||||
tintColor: .white
|
||||
)),
|
||||
action: {
|
||||
attachmentAction()
|
||||
}
|
||||
).minSize(CGSize(width: 41.0, height: baseFieldHeight))),
|
||||
component: AnyComponent(MessageInputActionButtonComponent(
|
||||
mode: attachmentButtonMode,
|
||||
action: { [weak self] mode, action, sendAction in
|
||||
guard let self, let component = self.component, case .up = action else {
|
||||
return
|
||||
}
|
||||
|
||||
switch mode {
|
||||
case .delete:
|
||||
break
|
||||
case .attach:
|
||||
component.attachmentAction?()
|
||||
default:
|
||||
break
|
||||
}
|
||||
},
|
||||
switchMediaInputMode: {
|
||||
},
|
||||
updateMediaCancelFraction: { _ in
|
||||
},
|
||||
lockMediaRecording: {
|
||||
},
|
||||
stopAndPreviewMediaRecording: {
|
||||
},
|
||||
context: component.context,
|
||||
theme: component.theme,
|
||||
strings: component.strings,
|
||||
presentController: component.presentController,
|
||||
audioRecorder: nil,
|
||||
videoRecordingStatus: nil
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: 41.0, height: baseFieldHeight)
|
||||
containerSize: CGSize(width: 33.0, height: baseFieldHeight)
|
||||
)
|
||||
if let attachmentButtonView = self.attachmentButton.view {
|
||||
if attachmentButtonView.superview == nil {
|
||||
self.addSubview(attachmentButtonView)
|
||||
}
|
||||
transition.setFrame(view: attachmentButtonView, frame: CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5), y: size.height - insets.bottom - baseFieldHeight + floor((baseFieldHeight - attachmentButtonSize.height) * 0.5)), size: attachmentButtonSize))
|
||||
let attachmentButtonFrame = CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5) + (fieldBackgroundFrame.minX - fieldFrame.minX), y: size.height - insets.bottom - baseFieldHeight + floor((baseFieldHeight - attachmentButtonSize.height) * 0.5)), size: attachmentButtonSize)
|
||||
transition.setPosition(view: attachmentButtonView, position: attachmentButtonFrame.center)
|
||||
transition.setBounds(view: attachmentButtonView, bounds: CGRect(origin: CGPoint(), size: attachmentButtonFrame.size))
|
||||
transition.setAlpha(view: attachmentButtonView, alpha: (hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
|
||||
transition.setScale(view: attachmentButtonView, scale: hasMediaEditing ? 0.001 : 1.0)
|
||||
}
|
||||
}
|
||||
|
||||
if hasMediaEditing {
|
||||
let deleteMediaPreviewButton: ComponentView<Empty>
|
||||
var deleteMediaPreviewButtonTransition = transition
|
||||
if let current = self.deleteMediaPreviewButton {
|
||||
deleteMediaPreviewButton = current
|
||||
} else {
|
||||
if !transition.animation.isImmediate {
|
||||
deleteMediaPreviewButtonTransition = .immediate
|
||||
}
|
||||
deleteMediaPreviewButton = ComponentView()
|
||||
self.deleteMediaPreviewButton = deleteMediaPreviewButton
|
||||
}
|
||||
let buttonSize = CGSize(width: 40.0, height: 40.0)
|
||||
let deleteMediaPreviewButtonFrame = CGRect(origin: CGPoint(x: 1.0 + (fieldBackgroundFrame.minX - fieldFrame.minX), y: 3.0 + floor((size.height - buttonSize.height) * 0.5)), size: CGSize(width: buttonSize.width, height: buttonSize.height))
|
||||
let _ = deleteMediaPreviewButton.update(
|
||||
transition: deleteMediaPreviewButtonTransition,
|
||||
component: AnyComponent(Button(
|
||||
content: AnyComponent(LottieComponent(
|
||||
content: LottieComponent.AppBundleContent(name: "BinBlue"),
|
||||
color: .white,
|
||||
startingPosition: .begin
|
||||
)),
|
||||
action: { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.discardMediaRecordingPreview?()
|
||||
}
|
||||
).minSize(buttonSize)),
|
||||
environment: {},
|
||||
containerSize: buttonSize
|
||||
)
|
||||
if let deleteMediaPreviewButtonView = deleteMediaPreviewButton.view {
|
||||
if deleteMediaPreviewButtonView.superview == nil {
|
||||
self.addSubview(deleteMediaPreviewButtonView)
|
||||
transition.animateAlpha(view: deleteMediaPreviewButtonView, from: 0.0, to: 1.0)
|
||||
transition.animatePosition(view: deleteMediaPreviewButtonView, from: CGPoint(x: mediaInsets.left - insets.left, y: 0.0), to: CGPoint(), additive: true)
|
||||
}
|
||||
deleteMediaPreviewButtonTransition.setFrame(view: deleteMediaPreviewButtonView, frame: deleteMediaPreviewButtonFrame)
|
||||
}
|
||||
} else if let deleteMediaPreviewButton = self.deleteMediaPreviewButton {
|
||||
self.deleteMediaPreviewButton = nil
|
||||
if let deleteMediaPreviewButtonView = deleteMediaPreviewButton.view {
|
||||
if component.wasRecordingDismissed, let deleteMediaPreviewButtonView = deleteMediaPreviewButtonView as? Button.View, let animationView = deleteMediaPreviewButtonView.content as? LottieComponent.View {
|
||||
if let attachmentButtonView = self.attachmentButton.view {
|
||||
attachmentButtonView.isHidden = true
|
||||
}
|
||||
animationView.playOnce(completion: { [weak self, weak deleteMediaPreviewButtonView] in
|
||||
guard let self, let deleteMediaPreviewButtonView else {
|
||||
return
|
||||
}
|
||||
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
|
||||
transition.setAlpha(view: deleteMediaPreviewButtonView, alpha: 0.0, completion: { [weak deleteMediaPreviewButtonView] _ in
|
||||
deleteMediaPreviewButtonView?.removeFromSuperview()
|
||||
})
|
||||
transition.setScale(view: deleteMediaPreviewButtonView, scale: 0.001)
|
||||
|
||||
if let attachmentButtonView = self.attachmentButton.view {
|
||||
attachmentButtonView.isHidden = false
|
||||
|
||||
transition.animateAlpha(view: attachmentButtonView, from: 0.0, to: attachmentButtonView.alpha)
|
||||
transition.animateScale(view: attachmentButtonView, from: 0.001, to: 1.0)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
transition.setAlpha(view: deleteMediaPreviewButtonView, alpha: 0.0, completion: { [weak deleteMediaPreviewButtonView] _ in
|
||||
deleteMediaPreviewButtonView?.removeFromSuperview()
|
||||
})
|
||||
transition.setScale(view: deleteMediaPreviewButtonView, scale: 0.001)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let inputActionButtonMode: MessageInputActionButtonComponent.Mode
|
||||
if case .editor = component.style {
|
||||
inputActionButtonMode = self.textFieldExternalState.isEditing ? .apply : .none
|
||||
} else {
|
||||
inputActionButtonMode = self.textFieldExternalState.hasText ? .send : (self.currentMediaInputIsVoice ? .voiceInput : .videoInput)
|
||||
if hasMediaEditing {
|
||||
inputActionButtonMode = .send
|
||||
} else {
|
||||
inputActionButtonMode = self.textFieldExternalState.hasText ? .send : (self.currentMediaInputIsVoice ? .voiceInput : .videoInput)
|
||||
}
|
||||
}
|
||||
let inputActionButtonSize = self.inputActionButton.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(MessageInputActionButtonComponent(
|
||||
mode: inputActionButtonMode,
|
||||
action: { [weak self] mode, action, sendAction in
|
||||
guard let self else {
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
|
||||
@ -354,9 +515,11 @@ public final class MessageInputPanelComponent: Component {
|
||||
break
|
||||
case .send:
|
||||
if case .up = action {
|
||||
if case .text("") = self.getSendMessageInput() {
|
||||
if component.recordedAudioPreview != nil {
|
||||
component.sendMessageAction()
|
||||
} else if case .text("") = self.getSendMessageInput() {
|
||||
} else {
|
||||
self.component?.sendMessageAction()
|
||||
component.sendMessageAction()
|
||||
}
|
||||
}
|
||||
case .apply:
|
||||
@ -364,7 +527,9 @@ public final class MessageInputPanelComponent: Component {
|
||||
self.component?.sendMessageAction()
|
||||
}
|
||||
case .voiceInput, .videoInput:
|
||||
self.component?.setMediaRecordingActive?(action == .down, mode == .videoInput, sendAction)
|
||||
component.setMediaRecordingActive?(action == .down, mode == .videoInput, sendAction)
|
||||
default:
|
||||
break
|
||||
}
|
||||
},
|
||||
switchMediaInputMode: { [weak self] in
|
||||
@ -383,6 +548,18 @@ public final class MessageInputPanelComponent: Component {
|
||||
self.state?.updated(transition: .immediate)
|
||||
}
|
||||
},
|
||||
lockMediaRecording: { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.lockMediaRecording?()
|
||||
},
|
||||
stopAndPreviewMediaRecording: { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.stopAndPreviewMediaRecording?()
|
||||
},
|
||||
context: component.context,
|
||||
theme: component.theme,
|
||||
strings: component.strings,
|
||||
@ -406,7 +583,7 @@ public final class MessageInputPanelComponent: Component {
|
||||
transition.setFrame(view: inputActionButtonView, frame: CGRect(origin: CGPoint(x: inputActionButtonOriginX, y: size.height - insets.bottom - baseFieldHeight + floorToScreenPixels((baseFieldHeight - inputActionButtonSize.height) * 0.5)), size: inputActionButtonSize))
|
||||
}
|
||||
|
||||
var fieldIconNextX = fieldFrame.maxX - 2.0
|
||||
var fieldIconNextX = fieldBackgroundFrame.maxX - 2.0
|
||||
if case .story = component.style {
|
||||
let stickerButtonSize = self.stickerButton.update(
|
||||
transition: transition,
|
||||
@ -429,12 +606,12 @@ public final class MessageInputPanelComponent: Component {
|
||||
if stickerButtonView.superview == nil {
|
||||
self.addSubview(stickerButtonView)
|
||||
}
|
||||
let stickerIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - stickerButtonSize.width, y: fieldFrame.minY + floor((fieldFrame.height - stickerButtonSize.height) * 0.5)), size: stickerButtonSize)
|
||||
let stickerIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - stickerButtonSize.width, y: fieldBackgroundFrame.minY + floor((fieldBackgroundFrame.height - stickerButtonSize.height) * 0.5)), size: stickerButtonSize)
|
||||
transition.setPosition(view: stickerButtonView, position: stickerIconFrame.center)
|
||||
transition.setBounds(view: stickerButtonView, bounds: CGRect(origin: CGPoint(), size: stickerIconFrame.size))
|
||||
|
||||
transition.setAlpha(view: stickerButtonView, alpha: (self.textFieldExternalState.hasText || hasMediaRecording) ? 0.0 : 1.0)
|
||||
transition.setScale(view: stickerButtonView, scale: self.textFieldExternalState.hasText ? 0.1 : 1.0)
|
||||
transition.setAlpha(view: stickerButtonView, alpha: (self.textFieldExternalState.hasText || hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
|
||||
transition.setScale(view: stickerButtonView, scale: (self.textFieldExternalState.hasText || hasMediaRecording || hasMediaEditing) ? 0.1 : 1.0)
|
||||
|
||||
fieldIconNextX -= stickerButtonSize.width + 2.0
|
||||
}
|
||||
@ -462,19 +639,18 @@ public final class MessageInputPanelComponent: Component {
|
||||
if reactionButtonView.superview == nil {
|
||||
self.addSubview(reactionButtonView)
|
||||
}
|
||||
let reactionIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - reactionButtonSize.width, y: fieldFrame.minY + 1.0 + floor((fieldFrame.height - reactionButtonSize.height) * 0.5)), size: reactionButtonSize)
|
||||
let reactionIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - reactionButtonSize.width, y: fieldBackgroundFrame.minY + 1.0 + floor((fieldBackgroundFrame.height - reactionButtonSize.height) * 0.5)), size: reactionButtonSize)
|
||||
transition.setPosition(view: reactionButtonView, position: reactionIconFrame.center)
|
||||
transition.setBounds(view: reactionButtonView, bounds: CGRect(origin: CGPoint(), size: reactionIconFrame.size))
|
||||
|
||||
transition.setAlpha(view: reactionButtonView, alpha: (self.textFieldExternalState.hasText || hasMediaRecording) ? 0.0 : 1.0)
|
||||
transition.setScale(view: reactionButtonView, scale: self.textFieldExternalState.hasText ? 0.1 : 1.0)
|
||||
transition.setAlpha(view: reactionButtonView, alpha: (self.textFieldExternalState.hasText || hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
|
||||
transition.setScale(view: reactionButtonView, scale: (self.textFieldExternalState.hasText || hasMediaRecording || hasMediaEditing) ? 0.1 : 1.0)
|
||||
|
||||
fieldIconNextX -= reactionButtonSize.width + 2.0
|
||||
}
|
||||
}
|
||||
|
||||
self.fieldBackgroundView.updateColor(color: self.textFieldExternalState.isEditing || component.style == .editor ? UIColor(white: 0.0, alpha: 0.5) : UIColor(white: 1.0, alpha: 0.09), transition: transition.containedViewLayoutTransition)
|
||||
transition.setAlpha(view: self.fieldBackgroundView, alpha: hasMediaRecording ? 0.0 : 1.0)
|
||||
if let placeholder = self.placeholder.view, let vibrancyPlaceholderView = self.vibrancyPlaceholder.view {
|
||||
placeholder.isHidden = self.textFieldExternalState.hasText
|
||||
vibrancyPlaceholderView.isHidden = placeholder.isHidden
|
||||
@ -483,7 +659,7 @@ public final class MessageInputPanelComponent: Component {
|
||||
component.externalState.isEditing = self.textFieldExternalState.isEditing
|
||||
component.externalState.hasText = self.textFieldExternalState.hasText
|
||||
|
||||
if component.audioRecorder != nil || component.videoRecordingStatus != nil {
|
||||
if hasMediaRecording {
|
||||
if let dismissingMediaRecordingPanel = self.dismissingMediaRecordingPanel {
|
||||
self.dismissingMediaRecordingPanel = nil
|
||||
transition.setAlpha(view: dismissingMediaRecordingPanel, alpha: 0.0, completion: { [weak dismissingMediaRecordingPanel] _ in
|
||||
@ -504,10 +680,20 @@ public final class MessageInputPanelComponent: Component {
|
||||
let _ = mediaRecordingPanel.update(
|
||||
transition: mediaRecordingPanelTransition,
|
||||
component: AnyComponent(MediaRecordingPanelComponent(
|
||||
theme: component.theme,
|
||||
strings: component.strings,
|
||||
audioRecorder: component.audioRecorder,
|
||||
videoRecordingStatus: component.videoRecordingStatus,
|
||||
isRecordingLocked: component.isRecordingLocked,
|
||||
cancelFraction: self.mediaCancelFraction,
|
||||
insets: insets
|
||||
inputInsets: insets,
|
||||
insets: mediaInsets,
|
||||
cancelAction: { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.setMediaRecordingActive?(false, false, false)
|
||||
}
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: size
|
||||
@ -516,17 +702,18 @@ public final class MessageInputPanelComponent: Component {
|
||||
var animateIn = false
|
||||
if mediaRecordingPanelView.superview == nil {
|
||||
animateIn = true
|
||||
self.insertSubview(mediaRecordingPanelView, at: 0)
|
||||
self.insertSubview(mediaRecordingPanelView, aboveSubview: self.fieldBackgroundView)
|
||||
|
||||
self.mediaRecordingVibrancyContainer.addSubview(mediaRecordingPanelView.vibrancyContainer)
|
||||
}
|
||||
mediaRecordingPanelTransition.setFrame(view: mediaRecordingPanelView, frame: CGRect(origin: CGPoint(), size: size))
|
||||
|
||||
transition.setFrame(view: self.mediaRecordingVibrancyContainer, frame: CGRect(origin: CGPoint(x: -fieldBackgroundFrame.minX, y: -fieldBackgroundFrame.minY), size: size))
|
||||
|
||||
if animateIn && !transition.animation.isImmediate {
|
||||
mediaRecordingPanelView.animateIn()
|
||||
}
|
||||
}
|
||||
|
||||
if let attachmentButtonView = self.attachmentButton.view {
|
||||
transition.setAlpha(view: attachmentButtonView, alpha: 0.0)
|
||||
}
|
||||
} else {
|
||||
if let mediaRecordingPanel = self.mediaRecordingPanel {
|
||||
self.mediaRecordingPanel = nil
|
||||
@ -541,7 +728,11 @@ public final class MessageInputPanelComponent: Component {
|
||||
self.dismissingMediaRecordingPanel = mediaRecordingPanel.view
|
||||
|
||||
if let mediaRecordingPanelView = mediaRecordingPanel.view as? MediaRecordingPanelComponent.View {
|
||||
mediaRecordingPanelView.animateOut(dismissRecording: true, completion: { [weak self, weak mediaRecordingPanelView] in
|
||||
let wasRecordingDismissed = component.wasRecordingDismissed
|
||||
if wasRecordingDismissed, let attachmentButtonView = self.attachmentButton.view {
|
||||
attachmentButtonView.isHidden = true
|
||||
}
|
||||
mediaRecordingPanelView.animateOut(transition: transition, dismissRecording: wasRecordingDismissed, completion: { [weak self, weak mediaRecordingPanelView] in
|
||||
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
|
||||
|
||||
if let mediaRecordingPanelView = mediaRecordingPanelView {
|
||||
@ -553,8 +744,10 @@ public final class MessageInputPanelComponent: Component {
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
if self.mediaRecordingPanel == nil, let attachmentButtonView = self.attachmentButton.view {
|
||||
transition.setAlpha(view: attachmentButtonView, alpha: 1.0)
|
||||
if wasRecordingDismissed, self.mediaRecordingPanel == nil, let attachmentButtonView = self.attachmentButton.view {
|
||||
attachmentButtonView.isHidden = false
|
||||
|
||||
transition.animateAlpha(view: attachmentButtonView, from: 0.0, to: attachmentButtonView.alpha)
|
||||
transition.animateScale(view: attachmentButtonView, from: 0.001, to: 1.0)
|
||||
}
|
||||
})
|
||||
@ -562,6 +755,57 @@ public final class MessageInputPanelComponent: Component {
|
||||
}
|
||||
}
|
||||
|
||||
if let recordedAudioPreview = component.recordedAudioPreview {
|
||||
let mediaPreviewPanel: ComponentView<Empty>
|
||||
var mediaPreviewPanelTransition = transition
|
||||
if let current = self.mediaPreviewPanel {
|
||||
mediaPreviewPanel = current
|
||||
} else {
|
||||
mediaPreviewPanelTransition = .immediate
|
||||
mediaPreviewPanel = ComponentView()
|
||||
self.mediaPreviewPanel = mediaPreviewPanel
|
||||
}
|
||||
|
||||
let _ = mediaPreviewPanel.update(
|
||||
transition: mediaPreviewPanelTransition,
|
||||
component: AnyComponent(MediaPreviewPanelComponent(
|
||||
context: component.context,
|
||||
theme: component.theme,
|
||||
strings: component.strings,
|
||||
mediaPreview: recordedAudioPreview,
|
||||
insets: insets
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: size
|
||||
)
|
||||
if let mediaPreviewPanelView = mediaPreviewPanel.view as? MediaPreviewPanelComponent.View {
|
||||
var animateIn = false
|
||||
if mediaPreviewPanelView.superview == nil {
|
||||
animateIn = true
|
||||
self.insertSubview(mediaPreviewPanelView, aboveSubview: self.fieldBackgroundView)
|
||||
|
||||
self.mediaRecordingVibrancyContainer.addSubview(mediaPreviewPanelView.vibrancyContainer)
|
||||
}
|
||||
mediaPreviewPanelTransition.setFrame(view: mediaPreviewPanelView, frame: CGRect(origin: CGPoint(), size: size))
|
||||
|
||||
transition.setFrame(view: self.mediaRecordingVibrancyContainer, frame: CGRect(origin: CGPoint(x: -fieldBackgroundFrame.minX, y: -fieldBackgroundFrame.minY), size: size))
|
||||
|
||||
if animateIn && !transition.animation.isImmediate {
|
||||
mediaPreviewPanelView.animateIn()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if let mediaPreviewPanel = self.mediaPreviewPanel {
|
||||
self.mediaPreviewPanel = nil
|
||||
|
||||
if let mediaPreviewPanelView = mediaPreviewPanel.view as? MediaPreviewPanelComponent.View {
|
||||
mediaPreviewPanelView.animateOut(transition: transition, completion: { [weak mediaPreviewPanelView] in
|
||||
mediaPreviewPanelView?.removeFromSuperview()
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return size
|
||||
}
|
||||
}
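// Added sketch, not part of the commit: the update pass above effectively switches the input panel
// between three states derived from the component — plain text input, live recording
// (audioRecorder/videoRecordingStatus set) and recorded-audio preview (recordedAudioPreview set).
// A hypothetical condensation of that decision logic; the enum and function are not repository code:
enum InputPanelContentState {
    case textInput
    case recording
    case mediaPreview
}

func inputPanelContentState(audioRecorder: ManagedAudioRecorder?, videoRecordingStatus: InstantVideoControllerRecordingStatus?, recordedAudioPreview: ChatRecordedMediaPreview?) -> InputPanelContentState {
    if audioRecorder != nil || videoRecordingStatus != nil {
        return .recording
    } else if recordedAudioPreview != nil {
        return .mediaPreview
    } else {
        return .textInput
    }
}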
|
||||
|
@ -0,0 +1,44 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")

swift_library(
name = "PeerInfoVisualMediaPaneNode",
module_name = "PeerInfoVisualMediaPaneNode",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/AsyncDisplayKit",
"//submodules/TelegramCore",
"//submodules/Postbox",
"//submodules/TelegramPresentationData",
"//submodules/AccountContext",
"//submodules/ContextUI",
"//submodules/PhotoResources",
"//submodules/RadialStatusNode",
"//submodules/TelegramStringFormatting",
"//submodules/GridMessageSelectionNode",
"//submodules/MediaPlayer:UniversalMediaPlayer",
"//submodules/ListMessageItem",
"//submodules/ChatMessageInteractiveMediaBadge",
"//submodules/SparseItemGrid",
"//submodules/ShimmerEffect",
"//submodules/DirectMediaImageCache",
"//submodules/ComponentFlow",
"//submodules/TelegramNotices",
"//submodules/TelegramUIPreferences",
"//submodules/CheckNode",
"//submodules/AppBundle",
"//submodules/TelegramUI/Components/ChatControllerInteraction",
"//submodules/InvisibleInkDustNode",
"//submodules/MediaPickerUI",
"//submodules/TelegramUI/Components/Stories/StoryContainerScreen",
"//submodules/TelegramUI/Components/Stories/StoryContentComponent",
],
visibility = [
"//visibility:public",
],
)
File diff suppressed because it is too large
@ -27,6 +27,55 @@ import ChatControllerInteraction
import InvisibleInkDustNode
import MediaPickerUI

public enum PeerInfoPaneKey: Int32 {
case members
case stories
case media
case files
case music
case voice
case links
case gifs
case groupsInCommon
}

public struct PeerInfoStatusData: Equatable {
public var text: String
public var isActivity: Bool
public var key: PeerInfoPaneKey?

public init(
text: String,
isActivity: Bool,
key: PeerInfoPaneKey?
) {
self.text = text
self.isActivity = isActivity
self.key = key
}
}

public protocol PeerInfoPaneNode: ASDisplayNode {
var isReady: Signal<Bool, NoError> { get }

var parentController: ViewController? { get set }

var status: Signal<PeerInfoStatusData?, NoError> { get }
var tabBarOffsetUpdated: ((ContainedViewLayoutTransition) -> Void)? { get set }
var tabBarOffset: CGFloat { get }

func update(size: CGSize, topInset: CGFloat, sideInset: CGFloat, bottomInset: CGFloat, visibleHeight: CGFloat, isScrollingLockedAtTop: Bool, expandProgress: CGFloat, presentationData: PresentationData, synchronous: Bool, transition: ContainedViewLayoutTransition)
func scrollToTop() -> Bool
func transferVelocity(_ velocity: CGFloat)
func cancelPreviewGestures()
func findLoadedMessage(id: MessageId) -> Message?
func transitionNodeForGallery(messageId: MessageId, media: Media) -> (ASDisplayNode, CGRect, () -> (UIView?, UIView?))?
func addToTransitionSurface(view: UIView)
func updateHiddenMedia()
func updateSelectedMessages(animated: Bool)
func ensureMessageIsVisible(id: MessageId)
}

private final class FrameSequenceThumbnailNode: ASDisplayNode {
private let context: AccountContext
private let file: FileMediaReference
@ -147,493 +196,6 @@ private final class VisualMediaItemInteraction {
}
}

/*private final class VisualMediaItemNode: ASDisplayNode {
|
||||
private let context: AccountContext
|
||||
private let interaction: VisualMediaItemInteraction
|
||||
|
||||
private var videoLayerFrameManager: SoftwareVideoLayerFrameManager?
|
||||
private var sampleBufferLayer: SampleBufferLayer?
|
||||
private var displayLink: ConstantDisplayLinkAnimator?
|
||||
private var displayLinkTimestamp: Double = 0.0
|
||||
|
||||
private var frameSequenceThumbnailNode: FrameSequenceThumbnailNode?
|
||||
|
||||
private let containerNode: ContextControllerSourceNode
|
||||
|
||||
private var placeholderNode: ShimmerEffectNode?
|
||||
private var absoluteLocation: (CGRect, CGSize)?
|
||||
|
||||
private let imageNode: TransformImageNode
|
||||
private var statusNode: RadialStatusNode
|
||||
private let mediaBadgeNode: ChatMessageInteractiveMediaBadge
|
||||
private var selectionNode: GridMessageSelectionNode?
|
||||
|
||||
private let fetchStatusDisposable = MetaDisposable()
|
||||
private let fetchDisposable = MetaDisposable()
|
||||
private var resourceStatus: MediaResourceStatus?
|
||||
|
||||
private var item: (VisualMediaItem, Media?, CGSize, CGSize?)?
|
||||
private var theme: PresentationTheme?
|
||||
|
||||
private var hasVisibility: Bool = false
|
||||
|
||||
init(context: AccountContext, interaction: VisualMediaItemInteraction) {
|
||||
self.context = context
|
||||
self.interaction = interaction
|
||||
|
||||
self.containerNode = ContextControllerSourceNode()
|
||||
self.imageNode = TransformImageNode()
|
||||
self.statusNode = RadialStatusNode(backgroundNodeColor: UIColor(white: 0.0, alpha: 0.6))
|
||||
let progressDiameter: CGFloat = 40.0
|
||||
self.statusNode.frame = CGRect(x: 0.0, y: 0.0, width: progressDiameter, height: progressDiameter)
|
||||
self.statusNode.isUserInteractionEnabled = false
|
||||
|
||||
self.mediaBadgeNode = ChatMessageInteractiveMediaBadge()
|
||||
self.mediaBadgeNode.frame = CGRect(origin: CGPoint(x: 6.0, y: 6.0), size: CGSize(width: 50.0, height: 50.0))
|
||||
|
||||
let shimmerNode = ShimmerEffectNode()
|
||||
self.placeholderNode = shimmerNode
|
||||
|
||||
super.init()
|
||||
|
||||
self.addSubnode(self.containerNode)
|
||||
self.containerNode.addSubnode(self.imageNode)
|
||||
self.containerNode.addSubnode(self.mediaBadgeNode)
|
||||
|
||||
self.containerNode.activated = { [weak self] gesture, _ in
|
||||
guard let strongSelf = self, let item = strongSelf.item, let message = item.0.message else {
|
||||
return
|
||||
}
|
||||
strongSelf.interaction.openMessageContextActions(message, strongSelf.containerNode, strongSelf.containerNode.bounds, gesture)
|
||||
}
|
||||
}
|
||||
|
||||
deinit {
|
||||
self.fetchStatusDisposable.dispose()
|
||||
self.fetchDisposable.dispose()
|
||||
}
|
||||
|
||||
override func didLoad() {
|
||||
super.didLoad()
|
||||
|
||||
let recognizer = TapLongTapOrDoubleTapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:)))
|
||||
recognizer.tapActionAtPoint = { _ in
|
||||
return .waitForSingleTap
|
||||
}
|
||||
self.imageNode.view.addGestureRecognizer(recognizer)
|
||||
|
||||
self.mediaBadgeNode.pressed = { [weak self] in
|
||||
self?.progressPressed()
|
||||
}
|
||||
}
|
||||
|
||||
func updateAbsoluteRect(_ rect: CGRect, within containerSize: CGSize) {
|
||||
self.absoluteLocation = (rect, containerSize)
|
||||
if let shimmerNode = self.placeholderNode {
|
||||
shimmerNode.updateAbsoluteRect(rect, within: containerSize)
|
||||
}
|
||||
}
|
||||
|
||||
@objc func tapGesture(_ recognizer: TapLongTapOrDoubleTapGestureRecognizer) {
|
||||
if case .ended = recognizer.state {
|
||||
if let (gesture, _) = recognizer.lastRecognizedGestureAndLocation {
|
||||
if case .tap = gesture {
|
||||
if let (item, _, _, _) = self.item, let message = item.message {
|
||||
var media: Media?
|
||||
for value in message.media {
|
||||
if let image = value as? TelegramMediaImage {
|
||||
media = image
|
||||
break
|
||||
} else if let file = value as? TelegramMediaFile {
|
||||
media = file
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if let media = media {
|
||||
if let file = media as? TelegramMediaFile {
|
||||
if isMediaStreamable(message: message, media: file) {
|
||||
self.interaction.openMessage(message)
|
||||
} else {
|
||||
self.progressPressed()
|
||||
}
|
||||
} else {
|
||||
self.interaction.openMessage(message)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func progressPressed() {
|
||||
guard let message = self.item?.0.message else {
|
||||
return
|
||||
}
|
||||
|
||||
var media: Media?
|
||||
for value in message.media {
|
||||
if let image = value as? TelegramMediaImage {
|
||||
media = image
|
||||
break
|
||||
} else if let file = value as? TelegramMediaFile {
|
||||
media = file
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if let resourceStatus = self.resourceStatus, let file = media as? TelegramMediaFile {
|
||||
switch resourceStatus {
|
||||
case .Fetching:
|
||||
messageMediaFileCancelInteractiveFetch(context: self.context, messageId: message.id, file: file)
|
||||
case .Local:
|
||||
self.interaction.openMessage(message)
|
||||
case .Remote:
|
||||
self.fetchDisposable.set(messageMediaFileInteractiveFetched(context: self.context, message: message, file: file, userInitiated: true).start())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func cancelPreviewGesture() {
|
||||
self.containerNode.cancelGesture()
|
||||
}
|
||||
|
||||
func update(size: CGSize, item: VisualMediaItem?, theme: PresentationTheme, synchronousLoad: Bool) {
|
||||
if item === self.item?.0 && size == self.item?.2 {
|
||||
return
|
||||
}
|
||||
self.theme = theme
|
||||
var media: Media?
|
||||
if let item = item, let message = item.message {
|
||||
for value in message.media {
|
||||
if let image = value as? TelegramMediaImage {
|
||||
media = image
|
||||
break
|
||||
} else if let file = value as? TelegramMediaFile {
|
||||
media = file
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let shimmerNode = self.placeholderNode {
|
||||
shimmerNode.frame = CGRect(origin: CGPoint(), size: size)
|
||||
if let (rect, size) = self.absoluteLocation {
|
||||
shimmerNode.updateAbsoluteRect(rect, within: size)
|
||||
}
|
||||
|
||||
var shapes: [ShimmerEffectNode.Shape] = []
|
||||
shapes.append(.rect(rect: CGRect(origin: CGPoint(), size: size)))
|
||||
|
||||
shimmerNode.update(backgroundColor: theme.list.itemBlocksBackgroundColor, foregroundColor: theme.list.mediaPlaceholderColor, shimmeringColor: theme.list.itemBlocksBackgroundColor.withAlphaComponent(0.4), shapes: shapes, size: size)
|
||||
}
|
||||
|
||||
if let item = item, let message = item.message, let file = media as? TelegramMediaFile, file.isAnimated {
|
||||
if self.videoLayerFrameManager == nil {
|
||||
let sampleBufferLayer: SampleBufferLayer
|
||||
if let current = self.sampleBufferLayer {
|
||||
sampleBufferLayer = current
|
||||
} else {
|
||||
sampleBufferLayer = takeSampleBufferLayer()
|
||||
self.sampleBufferLayer = sampleBufferLayer
|
||||
self.imageNode.layer.addSublayer(sampleBufferLayer.layer)
|
||||
}
|
||||
|
||||
self.videoLayerFrameManager = SoftwareVideoLayerFrameManager(account: self.context.account, fileReference: FileMediaReference.message(message: MessageReference(message), media: file), layerHolder: sampleBufferLayer)
|
||||
self.videoLayerFrameManager?.start()
|
||||
}
|
||||
} else {
|
||||
if let sampleBufferLayer = self.sampleBufferLayer {
|
||||
sampleBufferLayer.layer.removeFromSuperlayer()
|
||||
self.sampleBufferLayer = nil
|
||||
}
|
||||
self.videoLayerFrameManager = nil
|
||||
}
|
||||
|
||||
if let item = item, let message = item.message, let media = media, (self.item?.1 == nil || !media.isEqual(to: self.item!.1!)) {
|
||||
var mediaDimensions: CGSize?
|
||||
if let image = media as? TelegramMediaImage, let largestSize = largestImageRepresentation(image.representations)?.dimensions {
|
||||
mediaDimensions = largestSize.cgSize
|
||||
|
||||
if let placeholderNode = self.placeholderNode, placeholderNode.supernode == nil {
|
||||
self.containerNode.insertSubnode(placeholderNode, at: 0)
|
||||
}
|
||||
self.imageNode.imageUpdated = { [weak self] image in
|
||||
guard let strongSelf = self else {
|
||||
return
|
||||
}
|
||||
if image != nil {
|
||||
strongSelf.placeholderNode?.removeFromSupernode()
|
||||
}
|
||||
}
|
||||
|
||||
self.imageNode.setSignal(mediaGridMessagePhoto(account: context.account, photoReference: .message(message: MessageReference(message), media: image), fullRepresentationSize: CGSize(width: 300.0, height: 300.0), synchronousLoad: synchronousLoad), attemptSynchronously: synchronousLoad, dispatchOnDisplayLink: true)
|
||||
|
||||
self.fetchStatusDisposable.set(nil)
|
||||
self.statusNode.transitionToState(.none, completion: { [weak self] in
|
||||
self?.statusNode.isHidden = true
|
||||
})
|
||||
self.mediaBadgeNode.isHidden = true
|
||||
self.resourceStatus = nil
|
||||
} else if let file = media as? TelegramMediaFile, file.isVideo {
|
||||
if let placeholderNode = self.placeholderNode, placeholderNode.supernode == nil {
|
||||
self.containerNode.insertSubnode(placeholderNode, at: 0)
|
||||
}
|
||||
self.imageNode.imageUpdated = { [weak self] image in
|
||||
guard let strongSelf = self else {
|
||||
return
|
||||
}
|
||||
if image != nil {
|
||||
strongSelf.placeholderNode?.removeFromSupernode()
|
||||
}
|
||||
}
|
||||
|
||||
mediaDimensions = file.dimensions?.cgSize
|
||||
self.imageNode.setSignal(mediaGridMessageVideo(postbox: context.account.postbox, videoReference: .message(message: MessageReference(message), media: file), synchronousLoad: synchronousLoad, autoFetchFullSizeThumbnail: true), attemptSynchronously: synchronousLoad)
|
||||
|
||||
self.mediaBadgeNode.isHidden = file.isAnimated
|
||||
|
||||
self.resourceStatus = nil
|
||||
|
||||
self.item = (item, media, size, mediaDimensions)
|
||||
|
||||
self.fetchStatusDisposable.set((messageMediaFileStatus(context: context, messageId: message.id, file: file)
|
||||
|> deliverOnMainQueue).start(next: { [weak self] status in
|
||||
if let strongSelf = self, let (item, _, _, _) = strongSelf.item, let message = item.message {
|
||||
strongSelf.resourceStatus = status
|
||||
|
||||
let isStreamable = isMediaStreamable(message: message, media: file)
|
||||
|
||||
var statusState: RadialStatusNodeState = .none
|
||||
if isStreamable || file.isAnimated {
|
||||
statusState = .none
|
||||
} else {
|
||||
switch status {
|
||||
case let .Fetching(_, progress):
|
||||
let adjustedProgress = max(progress, 0.027)
|
||||
statusState = .progress(color: .white, lineWidth: nil, value: CGFloat(adjustedProgress), cancelEnabled: true, animateRotation: true)
|
||||
case .Local:
|
||||
statusState = .none
|
||||
case .Remote:
|
||||
statusState = .download(.white)
|
||||
}
|
||||
}
|
||||
|
||||
switch statusState {
|
||||
case .none:
|
||||
break
|
||||
default:
|
||||
strongSelf.statusNode.isHidden = false
|
||||
}
|
||||
|
||||
strongSelf.statusNode.transitionToState(statusState, animated: true, completion: {
|
||||
if let strongSelf = self {
|
||||
if case .none = statusState {
|
||||
strongSelf.statusNode.isHidden = true
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if let duration = file.duration {
|
||||
let durationString = stringForDuration(duration)
|
||||
|
||||
var badgeContent: ChatMessageInteractiveMediaBadgeContent?
|
||||
var mediaDownloadState: ChatMessageInteractiveMediaDownloadState?
|
||||
|
||||
if isStreamable {
|
||||
switch status {
|
||||
case let .Fetching(_, progress):
|
||||
let progressString = String(format: "%d%%", Int(progress * 100.0))
|
||||
badgeContent = .text(inset: 12.0, backgroundColor: mediaBadgeBackgroundColor, foregroundColor: mediaBadgeTextColor, text: NSAttributedString(string: progressString))
|
||||
mediaDownloadState = .compactFetching(progress: 0.0)
|
||||
case .Local:
|
||||
badgeContent = .text(inset: 0.0, backgroundColor: mediaBadgeBackgroundColor, foregroundColor: mediaBadgeTextColor, text: NSAttributedString(string: durationString))
|
||||
case .Remote:
|
||||
badgeContent = .text(inset: 12.0, backgroundColor: mediaBadgeBackgroundColor, foregroundColor: mediaBadgeTextColor, text: NSAttributedString(string: durationString))
|
||||
mediaDownloadState = .compactRemote
|
||||
}
|
||||
} else {
|
||||
badgeContent = .text(inset: 0.0, backgroundColor: mediaBadgeBackgroundColor, foregroundColor: mediaBadgeTextColor, text: NSAttributedString(string: durationString))
|
||||
}
|
||||
|
||||
strongSelf.mediaBadgeNode.update(theme: nil, content: badgeContent, mediaDownloadState: mediaDownloadState, alignment: .right, animated: false, badgeAnimated: false)
|
||||
}
|
||||
}
|
||||
}))
|
||||
if self.statusNode.supernode == nil {
|
||||
self.imageNode.addSubnode(self.statusNode)
|
||||
}
|
||||
} else {
|
||||
self.mediaBadgeNode.isHidden = true
|
||||
}
|
||||
self.item = (item, media, size, mediaDimensions)
|
||||
|
||||
self.updateHiddenMedia()
|
||||
} else {
|
||||
if let placeholderNode = self.placeholderNode, placeholderNode.supernode == nil {
|
||||
self.containerNode.insertSubnode(placeholderNode, at: 0)
|
||||
}
|
||||
}
|
||||
|
||||
let progressDiameter: CGFloat = 40.0
|
||||
self.statusNode.frame = CGRect(origin: CGPoint(x: floor((size.width - progressDiameter) / 2.0), y: floor((size.height - progressDiameter) / 2.0)), size: CGSize(width: progressDiameter, height: progressDiameter))
|
||||
|
||||
self.mediaBadgeNode.frame = CGRect(origin: CGPoint(x: size.width - 3.0, y: size.height - 18.0 - 3.0), size: CGSize(width: 50.0, height: 50.0))
|
||||
|
||||
self.selectionNode?.frame = CGRect(origin: CGPoint(), size: size)
|
||||
|
||||
if let (item, media, _, mediaDimensions) = self.item {
|
||||
self.item = (item, media, size, mediaDimensions)
|
||||
|
||||
let imageFrame = CGRect(origin: CGPoint(), size: size)
|
||||
|
||||
self.containerNode.frame = imageFrame
|
||||
self.imageNode.frame = imageFrame
|
||||
if let sampleBufferLayer = self.sampleBufferLayer {
|
||||
sampleBufferLayer.layer.frame = imageFrame
|
||||
}
|
||||
|
||||
if let mediaDimensions = mediaDimensions {
|
||||
let imageSize = mediaDimensions.aspectFilled(imageFrame.size)
|
||||
self.imageNode.asyncLayout()(TransformImageArguments(corners: ImageCorners(), imageSize: imageSize, boundingSize: imageFrame.size, intrinsicInsets: UIEdgeInsets(), emptyColor: theme.list.mediaPlaceholderColor))()
|
||||
}
|
||||
|
||||
self.updateSelectionState(animated: false)
|
||||
}
|
||||
}
|
||||
|
||||
func updateIsVisible(_ isVisible: Bool) {
|
||||
self.hasVisibility = isVisible
|
||||
if let _ = self.videoLayerFrameManager {
|
||||
let displayLink: ConstantDisplayLinkAnimator
|
||||
if let current = self.displayLink {
|
||||
displayLink = current
|
||||
} else {
|
||||
displayLink = ConstantDisplayLinkAnimator { [weak self] in
|
||||
guard let strongSelf = self else {
|
||||
return
|
||||
}
|
||||
strongSelf.videoLayerFrameManager?.tick(timestamp: strongSelf.displayLinkTimestamp)
|
||||
strongSelf.displayLinkTimestamp += 1.0 / 30.0
|
||||
}
|
||||
displayLink.frameInterval = 2
|
||||
self.displayLink = displayLink
|
||||
}
|
||||
}
|
||||
self.displayLink?.isPaused = !self.hasVisibility || self.isHidden
|
||||
|
||||
/*if isVisible {
|
||||
if let item = self.item?.0, let file = self.item?.1 as? TelegramMediaFile, !file.isAnimated {
|
||||
if self.frameSequenceThumbnailNode == nil {
|
||||
let frameSequenceThumbnailNode = FrameSequenceThumbnailNode(context: context, file: .message(message: MessageReference(item.message), media: file))
|
||||
self.frameSequenceThumbnailNode = frameSequenceThumbnailNode
|
||||
self.imageNode.addSubnode(frameSequenceThumbnailNode)
|
||||
}
|
||||
if let frameSequenceThumbnailNode = self.frameSequenceThumbnailNode {
|
||||
let size = self.bounds.size
|
||||
frameSequenceThumbnailNode.frame = CGRect(origin: CGPoint(), size: size)
|
||||
frameSequenceThumbnailNode.updateLayout(size: size)
|
||||
}
|
||||
} else {
|
||||
if let frameSequenceThumbnailNode = self.frameSequenceThumbnailNode {
|
||||
self.frameSequenceThumbnailNode = nil
|
||||
frameSequenceThumbnailNode.removeFromSupernode()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if let frameSequenceThumbnailNode = self.frameSequenceThumbnailNode {
|
||||
self.frameSequenceThumbnailNode = nil
|
||||
frameSequenceThumbnailNode.removeFromSupernode()
|
||||
}
|
||||
}*/
|
||||
|
||||
self.frameSequenceThumbnailNode?.updateIsPlaying(isVisible)
|
||||
}
|
||||
|
||||
func tick() {
|
||||
self.frameSequenceThumbnailNode?.tick()
|
||||
}
|
||||
|
||||
func updateSelectionState(animated: Bool) {
|
||||
if let (item, _, _, _) = self.item, let message = item.message, let theme = self.theme {
|
||||
self.containerNode.isGestureEnabled = self.interaction.selectedMessageIds == nil
|
||||
|
||||
if let selectedIds = self.interaction.selectedMessageIds {
|
||||
let selected = selectedIds.contains(message.id)
|
||||
|
||||
if let selectionNode = self.selectionNode {
|
||||
selectionNode.updateSelected(selected, animated: animated)
|
||||
selectionNode.frame = CGRect(origin: CGPoint(), size: self.bounds.size)
|
||||
} else {
|
||||
let selectionNode = GridMessageSelectionNode(theme: theme, toggle: { [weak self] value in
|
||||
if let strongSelf = self, let messageId = strongSelf.item?.0.message?.id {
|
||||
var toggledValue = true
|
||||
if let selectedMessageIds = strongSelf.interaction.selectedMessageIds, selectedMessageIds.contains(messageId) {
|
||||
toggledValue = false
|
||||
}
|
||||
strongSelf.interaction.toggleSelection(messageId, toggledValue)
|
||||
}
|
||||
})
|
||||
|
||||
selectionNode.frame = CGRect(origin: CGPoint(), size: self.bounds.size)
|
||||
self.containerNode.addSubnode(selectionNode)
|
||||
self.selectionNode = selectionNode
|
||||
selectionNode.updateSelected(selected, animated: false)
|
||||
if animated {
|
||||
selectionNode.animateIn()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if let selectionNode = self.selectionNode {
|
||||
self.selectionNode = nil
|
||||
if animated {
|
||||
selectionNode.animateOut { [weak selectionNode] in
|
||||
selectionNode?.removeFromSupernode()
|
||||
}
|
||||
} else {
|
||||
selectionNode.removeFromSupernode()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func transitionNode() -> (ASDisplayNode, CGRect, () -> (UIView?, UIView?))? {
|
||||
let imageNode = self.imageNode
|
||||
return (self.imageNode, self.imageNode.bounds, { [weak self, weak imageNode] in
|
||||
var statusNodeHidden = false
|
||||
var accessoryHidden = false
|
||||
if let strongSelf = self {
|
||||
statusNodeHidden = strongSelf.statusNode.isHidden
|
||||
accessoryHidden = strongSelf.mediaBadgeNode.isHidden
|
||||
strongSelf.statusNode.isHidden = true
|
||||
strongSelf.mediaBadgeNode.isHidden = true
|
||||
}
|
||||
let view = imageNode?.view.snapshotView(afterScreenUpdates: false)
|
||||
if let strongSelf = self {
|
||||
strongSelf.statusNode.isHidden = statusNodeHidden
|
||||
strongSelf.mediaBadgeNode.isHidden = accessoryHidden
|
||||
}
|
||||
return (view, nil)
|
||||
})
|
||||
}
|
||||
|
||||
func updateHiddenMedia() {
|
||||
if let (item, _, _, _) = self.item {
|
||||
if let _ = self.interaction.hiddenMedia[item.id] {
|
||||
self.isHidden = true
|
||||
} else {
|
||||
self.isHidden = false
|
||||
}
|
||||
} else {
|
||||
self.isHidden = false
|
||||
}
|
||||
self.displayLink?.isPaused = !self.hasVisibility || self.isHidden
|
||||
}
|
||||
}
|
||||
*/

private final class VisualMediaHoleAnchor: SparseItemGrid.HoleAnchor {
let messageId: MessageId
override var id: AnyHashable {
@ -1632,8 +1194,12 @@ private func tagMaskForType(_ type: PeerInfoVisualMediaPaneNode.ContentType) ->
|
||||
}
|
||||
}
|
||||
|
||||
final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScrollViewDelegate, UIGestureRecognizerDelegate {
|
||||
enum ContentType {
|
||||
public protocol PeerInfoScreenNodeProtocol: AnyObject {
|
||||
func displaySharedMediaFastScrollingTooltip()
|
||||
}
|
||||
|
||||
public final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScrollViewDelegate, UIGestureRecognizerDelegate {
|
||||
public enum ContentType {
|
||||
case photoOrVideo
|
||||
case photo
|
||||
case video
|
||||
@ -1643,7 +1209,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
case music
|
||||
}
|
||||
|
||||
struct ZoomLevel {
|
||||
public struct ZoomLevel {
|
||||
fileprivate var value: SparseItemGrid.ZoomLevel
|
||||
|
||||
init(_ value: SparseItemGrid.ZoomLevel) {
|
||||
@ -1664,10 +1230,10 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
private let chatLocation: ChatLocation
|
||||
private let chatLocationContextHolder: Atomic<ChatLocationContextHolder?>
|
||||
private let chatControllerInteraction: ChatControllerInteraction
|
||||
private(set) var contentType: ContentType
|
||||
public private(set) var contentType: ContentType
|
||||
private var contentTypePromise: ValuePromise<ContentType>
|
||||
|
||||
weak var parentController: ViewController?
|
||||
public weak var parentController: ViewController?
|
||||
|
||||
private let contextGestureContainerNode: ContextControllerSourceNode
|
||||
private let itemGrid: SparseItemGrid
|
||||
@ -1687,17 +1253,17 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
|
||||
private let ready = Promise<Bool>()
|
||||
private var didSetReady: Bool = false
|
||||
var isReady: Signal<Bool, NoError> {
|
||||
public var isReady: Signal<Bool, NoError> {
|
||||
return self.ready.get()
|
||||
}
|
||||
|
||||
private let statusPromise = Promise<PeerInfoStatusData?>(nil)
|
||||
var status: Signal<PeerInfoStatusData?, NoError> {
|
||||
public var status: Signal<PeerInfoStatusData?, NoError> {
|
||||
self.statusPromise.get()
|
||||
}
|
||||
|
||||
var tabBarOffsetUpdated: ((ContainedViewLayoutTransition) -> Void)?
|
||||
var tabBarOffset: CGFloat {
|
||||
public var tabBarOffsetUpdated: ((ContainedViewLayoutTransition) -> Void)?
|
||||
public var tabBarOffset: CGFloat {
|
||||
return self.itemGrid.coveringInsetOffset
|
||||
}
|
||||
|
||||
@ -1712,11 +1278,11 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
|
||||
private var animationTimer: SwiftSignalKit.Timer?
|
||||
|
||||
private(set) var calendarSource: SparseMessageCalendar?
|
||||
public private(set) var calendarSource: SparseMessageCalendar?
|
||||
private var listSource: SparseMessageList
|
||||
|
||||
var openCurrentDate: (() -> Void)?
|
||||
var paneDidScroll: (() -> Void)?
|
||||
public var openCurrentDate: (() -> Void)?
|
||||
public var paneDidScroll: (() -> Void)?
|
||||
|
||||
private let stateTag: MessageTags
|
||||
private var storedStateDisposable: Disposable?
|
||||
@ -1726,7 +1292,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
private var presentationData: PresentationData
|
||||
private var presentationDataDisposable: Disposable?
|
||||
|
||||
init(context: AccountContext, chatControllerInteraction: ChatControllerInteraction, peerId: PeerId, chatLocation: ChatLocation, chatLocationContextHolder: Atomic<ChatLocationContextHolder?>, contentType: ContentType, captureProtected: Bool) {
|
||||
public init(context: AccountContext, chatControllerInteraction: ChatControllerInteraction, peerId: PeerId, chatLocation: ChatLocation, chatLocationContextHolder: Atomic<ChatLocationContextHolder?>, contentType: ContentType, captureProtected: Bool) {
|
||||
self.context = context
|
||||
self.peerId = peerId
|
||||
self.chatLocation = chatLocation
|
||||
@ -1889,9 +1455,9 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
let _ = ApplicationSpecificNotice.incrementSharedMediaFastScrollingTooltip(accountManager: strongSelf.context.sharedContext.accountManager).start()
|
||||
|
||||
var currentNode: ASDisplayNode = strongSelf
|
||||
var result: PeerInfoScreenNode?
|
||||
var result: PeerInfoScreenNodeProtocol?
|
||||
while true {
|
||||
if let currentNode = currentNode as? PeerInfoScreenNode {
|
||||
if let currentNode = currentNode as? PeerInfoScreenNodeProtocol {
|
||||
result = currentNode
|
||||
break
|
||||
} else if let supernode = currentNode.supernode {
|
||||
@ -2221,7 +1787,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
self.presentationDataDisposable?.dispose()
|
||||
}
|
||||
|
||||
func loadHole(anchor: SparseItemGrid.HoleAnchor, at location: SparseItemGrid.HoleLocation) -> Signal<Never, NoError> {
|
||||
public func loadHole(anchor: SparseItemGrid.HoleAnchor, at location: SparseItemGrid.HoleLocation) -> Signal<Never, NoError> {
|
||||
guard let anchor = anchor as? VisualMediaHoleAnchor else {
|
||||
return .never()
|
||||
}
|
||||
@ -2244,7 +1810,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
}
|
||||
}
|
||||
|
||||
func updateContentType(contentType: ContentType) {
|
||||
public func updateContentType(contentType: ContentType) {
|
||||
if self.contentType == contentType {
|
||||
return
|
||||
}
|
||||
@ -2263,13 +1829,13 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
self.requestHistoryAroundVisiblePosition(synchronous: true, reloadAtTop: true)
|
||||
}
|
||||
|
||||
func updateZoomLevel(level: ZoomLevel) {
|
||||
public func updateZoomLevel(level: ZoomLevel) {
|
||||
self.itemGrid.setZoomLevel(level: level.value)
|
||||
|
||||
let _ = updateVisualMediaStoredState(engine: self.context.engine, peerId: self.peerId, messageTag: self.stateTag, state: VisualMediaStoredState(zoomLevel: level.rawValue)).start()
|
||||
}
|
||||
|
||||
func ensureMessageIsVisible(id: MessageId) {
|
||||
public func ensureMessageIsVisible(id: MessageId) {
|
||||
}
|
||||
|
||||
private func requestHistoryAroundVisiblePosition(synchronous: Bool, reloadAtTop: Bool) {
|
||||
@ -2342,19 +1908,19 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
}
|
||||
}
|
||||
|
||||
func scrollToTop() -> Bool {
|
||||
public func scrollToTop() -> Bool {
|
||||
return self.itemGrid.scrollToTop()
|
||||
}
|
||||
|
||||
func hitTestResultForScrolling() -> UIView? {
|
||||
public func hitTestResultForScrolling() -> UIView? {
|
||||
return self.itemGrid.hitTestResultForScrolling()
|
||||
}
|
||||
|
||||
func brieflyDisableTouchActions() {
|
||||
public func brieflyDisableTouchActions() {
|
||||
self.itemGrid.brieflyDisableTouchActions()
|
||||
}
|
||||
|
||||
func findLoadedMessage(id: MessageId) -> Message? {
|
||||
public func findLoadedMessage(id: MessageId) -> Message? {
|
||||
guard let items = self.items else {
|
||||
return nil
|
||||
}
|
||||
@ -2369,7 +1935,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
return nil
|
||||
}
|
||||
|
||||
func updateHiddenMedia() {
|
||||
public func updateHiddenMedia() {
|
||||
self.itemGrid.forEachVisibleItem { item in
|
||||
guard let itemLayer = item.layer as? ItemLayer else {
|
||||
return
|
||||
@ -2388,11 +1954,11 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
}
|
||||
}
|
||||
|
||||
func transferVelocity(_ velocity: CGFloat) {
|
||||
public func transferVelocity(_ velocity: CGFloat) {
|
||||
self.itemGrid.transferVelocity(velocity)
|
||||
}
|
||||
|
||||
func cancelPreviewGestures() {
|
||||
public func cancelPreviewGestures() {
|
||||
self.itemGrid.forEachVisibleItem { item in
|
||||
guard let itemView = item.view as? ItemView else {
|
||||
return
|
||||
@ -2403,7 +1969,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
}
|
||||
}
|
||||
|
||||
func transitionNodeForGallery(messageId: MessageId, media: Media) -> (ASDisplayNode, CGRect, () -> (UIView?, UIView?))? {
|
||||
public func transitionNodeForGallery(messageId: MessageId, media: Media) -> (ASDisplayNode, CGRect, () -> (UIView?, UIView?))? {
|
||||
var foundItemLayer: SparseItemGridLayer?
|
||||
self.itemGrid.forEachVisibleItem { item in
|
||||
guard let itemLayer = item.layer as? ItemLayer else {
|
||||
@ -2442,14 +2008,14 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
return nil
|
||||
}
|
||||
|
||||
func addToTransitionSurface(view: UIView) {
|
||||
public func addToTransitionSurface(view: UIView) {
|
||||
self.itemGrid.addToTransitionSurface(view: view)
|
||||
}
|
||||
|
||||
private var gridSelectionGesture: MediaPickerGridSelectionGesture<EngineMessage.Id>?
|
||||
private var listSelectionGesture: MediaListSelectionRecognizer?
|
||||
|
||||
override func didLoad() {
|
||||
override public func didLoad() {
|
||||
super.didLoad()
|
||||
|
||||
let selectionRecognizer = MediaListSelectionRecognizer(target: self, action: #selector(self.selectionPanGesture(_:)))
|
||||
@ -2594,7 +2160,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
self.selectionScrollDisplayLink?.isPaused = false
|
||||
}
|
||||
|
||||
override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
|
||||
override public func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
|
||||
let location = gestureRecognizer.location(in: gestureRecognizer.view)
|
||||
if location.x < 44.0 {
|
||||
return false
|
||||
@ -2602,7 +2168,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
return true
|
||||
}
|
||||
|
||||
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
|
||||
public func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
|
||||
if gestureRecognizer.state != .failed, let otherGestureRecognizer = otherGestureRecognizer as? UIPanGestureRecognizer {
|
||||
otherGestureRecognizer.isEnabled = false
|
||||
otherGestureRecognizer.isEnabled = true
|
||||
@ -2612,7 +2178,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
}
|
||||
}
|
||||
|
||||
func updateSelectedMessages(animated: Bool) {
|
||||
public func updateSelectedMessages(animated: Bool) {
|
||||
switch self.contentType {
|
||||
case .files, .music, .voiceAndVideoMessages:
|
||||
self.itemGrid.forEachVisibleItem { item in
|
||||
@ -2673,7 +2239,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
}
|
||||
}
|
||||
|
||||
func update(size: CGSize, topInset: CGFloat, sideInset: CGFloat, bottomInset: CGFloat, visibleHeight: CGFloat, isScrollingLockedAtTop: Bool, expandProgress: CGFloat, presentationData: PresentationData, synchronous: Bool, transition: ContainedViewLayoutTransition) {
|
||||
public func update(size: CGSize, topInset: CGFloat, sideInset: CGFloat, bottomInset: CGFloat, visibleHeight: CGFloat, isScrollingLockedAtTop: Bool, expandProgress: CGFloat, presentationData: PresentationData, synchronous: Bool, transition: ContainedViewLayoutTransition) {
|
||||
self.currentParams = (size, topInset, sideInset, bottomInset, visibleHeight, isScrollingLockedAtTop, expandProgress, presentationData)
|
||||
|
||||
transition.updateFrame(node: self.contextGestureContainerNode, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: size.width, height: size.height)))
|
||||
@ -2746,11 +2312,11 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
fixedItemHeight = nil
|
||||
}
|
||||
|
||||
self.itemGrid.update(size: size, insets: UIEdgeInsets(top: topInset, left: sideInset, bottom: bottomInset, right: sideInset), useSideInsets: !isList, scrollIndicatorInsets: UIEdgeInsets(top: 0.0, left: sideInset, bottom: bottomInset, right: sideInset), lockScrollingAtTop: isScrollingLockedAtTop, fixedItemHeight: fixedItemHeight, items: items, theme: self.itemGridBinding.chatPresentationData.theme.theme, synchronous: wasFirstTime ? .full : .none)
|
||||
self.itemGrid.update(size: size, insets: UIEdgeInsets(top: topInset, left: sideInset, bottom: bottomInset, right: sideInset), useSideInsets: !isList, scrollIndicatorInsets: UIEdgeInsets(top: 0.0, left: sideInset, bottom: bottomInset, right: sideInset), lockScrollingAtTop: isScrollingLockedAtTop, fixedItemHeight: fixedItemHeight, fixedItemAspect: nil, items: items, theme: self.itemGridBinding.chatPresentationData.theme.theme, synchronous: wasFirstTime ? .full : .none)
|
||||
}
|
||||
}
|
||||
|
||||
func currentTopTimestamp() -> Int32? {
|
||||
public func currentTopTimestamp() -> Int32? {
|
||||
var timestamp: Int32?
|
||||
self.itemGrid.forEachVisibleItem { item in
|
||||
guard let itemLayer = item.layer as? ItemLayer else {
|
||||
@ -2767,7 +2333,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
return timestamp
|
||||
}
|
||||
|
||||
func scrollToTimestamp(timestamp: Int32) {
|
||||
public func scrollToTimestamp(timestamp: Int32) {
|
||||
if let items = self.items, !items.items.isEmpty {
|
||||
var previousIndex: Int?
|
||||
for item in items.items {
|
||||
@ -2806,14 +2372,14 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
}
|
||||
}
|
||||
|
||||
func scrollToItem(index: Int) {
|
||||
public func scrollToItem(index: Int) {
|
||||
guard let _ = self.items else {
|
||||
return
|
||||
}
|
||||
self.itemGrid.scrollToItem(at: index)
|
||||
}
|
||||
|
||||
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
|
||||
override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
|
||||
guard let result = super.hitTest(point, with: event) else {
|
||||
return nil
|
||||
}
|
||||
@ -2826,21 +2392,21 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
|
||||
return result
|
||||
}
|
||||
|
||||
func availableZoomLevels() -> (decrement: ZoomLevel?, increment: ZoomLevel?) {
|
||||
public func availableZoomLevels() -> (decrement: ZoomLevel?, increment: ZoomLevel?) {
|
||||
let levels = self.itemGrid.availableZoomLevels()
|
||||
return (levels.decrement.flatMap(ZoomLevel.init), levels.increment.flatMap(ZoomLevel.init))
|
||||
}
|
||||
}
|
||||
|
||||
final class VisualMediaStoredState: Codable {
|
||||
let zoomLevel: Int32
|
||||
public final class VisualMediaStoredState: Codable {
|
||||
public let zoomLevel: Int32
|
||||
|
||||
public init(zoomLevel: Int32) {
|
||||
self.zoomLevel = zoomLevel
|
||||
}
|
||||
}
|
||||
|
||||
func visualMediaStoredState(engine: TelegramEngine, peerId: PeerId, messageTag: MessageTags) -> Signal<VisualMediaStoredState?, NoError> {
|
||||
public func visualMediaStoredState(engine: TelegramEngine, peerId: PeerId, messageTag: MessageTags) -> Signal<VisualMediaStoredState?, NoError> {
|
||||
let key = ValueBoxKey(length: 8 + 4)
|
||||
key.setInt64(0, value: peerId.toInt64())
|
||||
key.setUInt32(8, value: messageTag.rawValue)
|
||||
@ -2851,7 +2417,7 @@ func visualMediaStoredState(engine: TelegramEngine, peerId: PeerId, messageTag:
|
||||
}
|
||||
}
|
||||
|
||||
func updateVisualMediaStoredState(engine: TelegramEngine, peerId: PeerId, messageTag: MessageTags, state: VisualMediaStoredState?) -> Signal<Never, NoError> {
|
||||
public func updateVisualMediaStoredState(engine: TelegramEngine, peerId: PeerId, messageTag: MessageTags, state: VisualMediaStoredState?) -> Signal<Never, NoError> {
|
||||
let key = ValueBoxKey(length: 8 + 4)
|
||||
key.setInt64(0, value: peerId.toInt64())
|
||||
key.setUInt32(8, value: messageTag.rawValue)
|
@ -50,6 +50,7 @@ swift_library(
"//submodules/ReactionSelectionNode",
"//submodules/ContextUI",
"//submodules/AvatarNode",
"//submodules/ChatPresentationInterfaceState",
],
visibility = [
"//visibility:public",
@ -36,14 +36,14 @@ private final class StoryContainerScreenComponent: Component {
let initialFocusedId: AnyHashable?
let initialContent: [StoryContentItemSlice]
let transitionIn: StoryContainerScreen.TransitionIn?
let transitionOut: (EnginePeer.Id) -> StoryContainerScreen.TransitionOut?
let transitionOut: (EnginePeer.Id, AnyHashable) -> StoryContainerScreen.TransitionOut?

init(
context: AccountContext,
initialFocusedId: AnyHashable?,
initialContent: [StoryContentItemSlice],
transitionIn: StoryContainerScreen.TransitionIn?,
transitionOut: @escaping (EnginePeer.Id) -> StoryContainerScreen.TransitionOut?
transitionOut: @escaping (EnginePeer.Id, AnyHashable) -> StoryContainerScreen.TransitionOut?
) {
self.context = context
self.initialFocusedId = initialFocusedId
@ -130,6 +130,7 @@ private final class StoryContainerScreenComponent: Component {
override init(frame: CGRect) {
self.backgroundLayer = SimpleLayer()
self.backgroundLayer.backgroundColor = UIColor.black.cgColor
self.backgroundLayer.zPosition = -1000.0

super.init(frame: frame)

@ -347,7 +348,7 @@ private final class StoryContainerScreenComponent: Component {
self.isAnimatingOut = true
self.state?.updated(transition: .immediate)

if let component = self.component, let focusedItemSet = self.focusedItemSet, let peerId = focusedItemSet.base as? EnginePeer.Id, let itemSetView = self.visibleItemSetViews[focusedItemSet], let itemSetComponentView = itemSetView.view.view as? StoryItemSetContainerComponent.View, let transitionOut = component.transitionOut(peerId) {
if let component = self.component, let focusedItemSet = self.focusedItemSet, let peerId = focusedItemSet.base as? EnginePeer.Id, let itemSetView = self.visibleItemSetViews[focusedItemSet], let itemSetComponentView = itemSetView.view.view as? StoryItemSetContainerComponent.View, let focusedItemId = itemSetComponentView.focusedItemId, let transitionOut = component.transitionOut(peerId, focusedItemId) {
let transition = Transition(animation: .curve(duration: 0.25, curve: .easeInOut))
transition.setAlpha(layer: self.backgroundLayer, alpha: 0.0)

@ -654,10 +655,12 @@ private final class StoryContainerScreenComponent: Component {
if let previousRotationFraction = itemSetView.rotationFraction {
let fromT = previousRotationFraction
let toT = panFraction
itemSetTransition.setTransformAsKeyframes(view: itemSetView, transform: { sourceT in
itemSetTransition.setTransformAsKeyframes(view: itemSetView, transform: { sourceT, isFinal in
let t = fromT * (1.0 - sourceT) + toT * sourceT
if abs((t + cubeAdditionalRotationFraction) - 0.0) < 0.0001 {
return CATransform3DIdentity
if isFinal {
return CATransform3DIdentity
}
}

return calculateCubeTransform(rotationFraction: t + cubeAdditionalRotationFraction, sideAngle: sideAngle, cubeSize: itemFrame.size)
@ -776,7 +779,7 @@ public class StoryContainerScreen: ViewControllerComponentContainer {
initialFocusedId: AnyHashable?,
initialContent: [StoryContentItemSlice],
transitionIn: TransitionIn?,
transitionOut: @escaping (EnginePeer.Id) -> TransitionOut?
transitionOut: @escaping (EnginePeer.Id, AnyHashable) -> TransitionOut?
) {
self.context = context

@ -250,27 +250,12 @@ public final class StoryItemSetContainerComponent: Component {
|
||||
self.sendMessageContext.audioRecorderValue = audioRecorder
|
||||
self.component?.controller()?.lockOrientation = audioRecorder != nil
|
||||
|
||||
/*strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
|
||||
$0.updatedInputTextPanelState { panelState in
|
||||
let isLocked = strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId
|
||||
if let audioRecorder = audioRecorder {
|
||||
if panelState.mediaRecordingState == nil {
|
||||
return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorder, isLocked: isLocked))
|
||||
}
|
||||
} else {
|
||||
if case .waitingForPreview = panelState.mediaRecordingState {
|
||||
return panelState
|
||||
}
|
||||
return panelState.withUpdatedMediaRecordingState(nil)
|
||||
}
|
||||
return panelState
|
||||
}
|
||||
})*/
|
||||
|
||||
self.audioRecorderStatusDisposable?.dispose()
|
||||
self.audioRecorderStatusDisposable = nil
|
||||
|
||||
if let audioRecorder = audioRecorder {
|
||||
self.sendMessageContext.wasRecordingDismissed = false
|
||||
|
||||
if !audioRecorder.beginWithTone {
|
||||
HapticFeedback().impact(.light)
|
||||
}
|
||||
@ -281,7 +266,7 @@ public final class StoryItemSetContainerComponent: Component {
|
||||
return
|
||||
}
|
||||
if case .stopped = value {
|
||||
self.sendMessageContext.stopMediaRecorder()
|
||||
self.sendMessageContext.stopMediaRecording(view: self)
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -300,15 +285,14 @@ public final class StoryItemSetContainerComponent: Component {
|
||||
self.sendMessageContext.videoRecorderValue = videoRecorder
|
||||
|
||||
if let videoRecorder = videoRecorder {
|
||||
self.sendMessageContext.wasRecordingDismissed = false
|
||||
HapticFeedback().impact(.light)
|
||||
|
||||
videoRecorder.onDismiss = { [weak self] isCancelled in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
//self?.chatDisplayNode.updateRecordedMediaDeleted(isCancelled)
|
||||
//self?.beginMediaRecordingRequestId += 1
|
||||
//self?.lockMediaRecordingRequestId = nil
|
||||
self.sendMessageContext.wasRecordingDismissed = true
|
||||
self.sendMessageContext.videoRecorder.set(.single(nil))
|
||||
}
|
||||
videoRecorder.onStop = { [weak self] in
|
||||
@ -327,9 +311,9 @@ public final class StoryItemSetContainerComponent: Component {
|
||||
}
|
||||
self.component?.controller()?.present(videoRecorder, in: .window(.root))
|
||||
|
||||
/*if strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId {
|
||||
if self.sendMessageContext.isMediaRecordingLocked {
|
||||
videoRecorder.lockVideo()
|
||||
}*/
|
||||
}
|
||||
}
|
||||
|
||||
if let previousVideoRecorderValue {
|
||||
@ -353,7 +337,17 @@ public final class StoryItemSetContainerComponent: Component {
|
||||
}
|
||||
|
||||
func isPointInsideContentArea(point: CGPoint) -> Bool {
|
||||
return self.contentContainerView.frame.contains(point)
|
||||
if let inputPanelView = self.inputPanel.view {
|
||||
if inputPanelView.frame.contains(point) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if self.contentContainerView.frame.contains(point) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
@objc public func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRequireFailureOf otherGestureRecognizer: UIGestureRecognizer) -> Bool {
|
||||
@ -539,7 +533,7 @@ public final class StoryItemSetContainerComponent: Component {
|
||||
for (_, visibleItem) in self.visibleItems {
|
||||
if let view = visibleItem.view.view {
|
||||
if let view = view as? StoryContentItem.View {
|
||||
view.setIsProgressPaused(self.inputPanelExternalState.isEditing || component.isProgressPaused || self.reactionItems != nil || self.actionSheet != nil || self.contextController != nil || self.sendMessageContext.audioRecorderValue != nil || self.sendMessageContext.videoRecorderValue != nil)
|
||||
view.setIsProgressPaused(self.inputPanelExternalState.isEditing || component.isProgressPaused || self.displayReactions || self.actionSheet != nil || self.contextController != nil || self.sendMessageContext.audioRecorderValue != nil || self.sendMessageContext.videoRecorderValue != nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -813,6 +807,25 @@ public final class StoryItemSetContainerComponent: Component {
|
||||
}
|
||||
self.sendMessageContext.setMediaRecordingActive(view: self, isActive: isActive, isVideo: isVideo, sendAction: sendAction)
|
||||
},
|
||||
lockMediaRecording: { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.sendMessageContext.lockMediaRecording()
|
||||
self.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
|
||||
},
|
||||
stopAndPreviewMediaRecording: { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.sendMessageContext.stopMediaRecording(view: self)
|
||||
},
|
||||
discardMediaRecordingPreview: { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.sendMessageContext.discardMediaRecordingPreview(view: self)
|
||||
},
|
||||
attachmentAction: { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
@ -843,6 +856,9 @@ public final class StoryItemSetContainerComponent: Component {
|
||||
},
|
||||
audioRecorder: self.sendMessageContext.audioRecorderValue,
|
||||
videoRecordingStatus: self.sendMessageContext.videoRecorderValue?.audioStatus,
|
||||
isRecordingLocked: self.sendMessageContext.isMediaRecordingLocked,
|
||||
recordedAudioPreview: self.sendMessageContext.recordedAudioPreview,
|
||||
wasRecordingDismissed: self.sendMessageContext.wasRecordingDismissed,
|
||||
displayGradient: component.inputHeight != 0.0,
|
||||
bottomInset: component.inputHeight != 0.0 ? 0.0 : bottomContentInset
|
||||
)),
|
||||
@ -1159,7 +1175,18 @@ public final class StoryItemSetContainerComponent: Component {
|
||||
|
||||
let reactionsAnchorRect = CGRect(origin: CGPoint(x: inputPanelFrame.maxX - 40.0, y: inputPanelFrame.minY + 9.0), size: CGSize(width: 32.0, height: 32.0)).insetBy(dx: -4.0, dy: -4.0)
|
||||
|
||||
if let reactionItems = self.reactionItems, (self.displayReactions || self.inputPanelExternalState.isEditing) {
|
||||
var effectiveDisplayReactions = self.displayReactions
|
||||
if self.inputPanelExternalState.isEditing && !self.inputPanelExternalState.hasText {
|
||||
effectiveDisplayReactions = true
|
||||
}
|
||||
if self.sendMessageContext.audioRecorderValue != nil || self.sendMessageContext.videoRecorderValue != nil {
|
||||
effectiveDisplayReactions = false
|
||||
}
|
||||
if self.sendMessageContext.recordedAudioPreview != nil {
|
||||
effectiveDisplayReactions = false
|
||||
}
|
||||
|
||||
if let reactionItems = self.reactionItems, effectiveDisplayReactions {
|
||||
let reactionContextNode: ReactionContextNode
|
||||
var reactionContextNodeTransition = transition
|
||||
if let current = self.reactionContextNode {
|
||||
|
@ -3,6 +3,7 @@ import SwiftSignalKit
|
||||
import TelegramCore
|
||||
import AccountContext
|
||||
import Display
|
||||
import ComponentFlow
|
||||
import MessageInputPanelComponent
|
||||
import UndoUI
|
||||
import AttachmentUI
|
||||
@ -29,12 +30,14 @@ import TelegramPresentationData
|
||||
import LegacyInstantVideoController
|
||||
import TelegramPresentationData
|
||||
import ShareController
|
||||
import ChatPresentationInterfaceState
|
||||
|
||||
final class StoryItemSetContainerSendMessage {
|
||||
weak var attachmentController: AttachmentController?
|
||||
|
||||
var audioRecorderValue: ManagedAudioRecorder?
|
||||
var audioRecorder = Promise<ManagedAudioRecorder?>()
|
||||
var recordedAudioPreview: ChatRecordedMediaPreview?
|
||||
|
||||
var videoRecorderValue: InstantVideoController?
|
||||
var tempVideoRecorderValue: InstantVideoController?
|
||||
@ -42,6 +45,9 @@ final class StoryItemSetContainerSendMessage {
|
||||
let controllerNavigationDisposable = MetaDisposable()
|
||||
let enqueueMediaMessageDisposable = MetaDisposable()
|
||||
|
||||
private(set) var isMediaRecordingLocked: Bool = false
|
||||
var wasRecordingDismissed: Bool = false
|
||||
|
||||
deinit {
|
||||
self.controllerNavigationDisposable.dispose()
|
||||
self.enqueueMediaMessageDisposable.dispose()
|
||||
@ -63,26 +69,38 @@ final class StoryItemSetContainerSendMessage {
|
||||
return
|
||||
}
|
||||
|
||||
switch inputPanelView.getSendMessageInput() {
|
||||
case let .text(text):
|
||||
if !text.isEmpty {
|
||||
component.context.engine.messages.enqueueOutgoingMessage(
|
||||
to: peerId,
|
||||
replyTo: nil,
|
||||
content: .text(text)
|
||||
)
|
||||
inputPanelView.clearSendMessageInput()
|
||||
view.endEditing(true)
|
||||
|
||||
if let controller = component.controller() {
|
||||
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
|
||||
controller.present(UndoOverlayController(
|
||||
presentationData: presentationData,
|
||||
content: .succeed(text: "Message Sent"),
|
||||
elevatedLayout: false,
|
||||
animateInAsReplacement: false,
|
||||
action: { _ in return false }
|
||||
), in: .current)
|
||||
if let recordedAudioPreview = self.recordedAudioPreview {
|
||||
self.recordedAudioPreview = nil
|
||||
|
||||
let waveformBuffer = recordedAudioPreview.waveform.makeBitstream()
|
||||
|
||||
let messages: [EnqueueMessage] = [.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: recordedAudioPreview.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(recordedAudioPreview.fileSize), attributes: [.Audio(isVoice: true, duration: Int(recordedAudioPreview.duration), title: nil, performer: nil, waveform: waveformBuffer)])), replyToMessageId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
|
||||
|
||||
let _ = enqueueMessages(account: component.context.account, peerId: peerId, messages: messages).start()
|
||||
|
||||
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
|
||||
} else {
|
||||
switch inputPanelView.getSendMessageInput() {
|
||||
case let .text(text):
|
||||
if !text.isEmpty {
|
||||
component.context.engine.messages.enqueueOutgoingMessage(
|
||||
to: peerId,
|
||||
replyTo: nil,
|
||||
content: .text(text)
|
||||
)
|
||||
inputPanelView.clearSendMessageInput()
|
||||
view.endEditing(true)
|
||||
|
||||
if let controller = component.controller() {
|
||||
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
|
||||
controller.present(UndoOverlayController(
|
||||
presentationData: presentationData,
|
||||
content: .succeed(text: "Message Sent"),
|
||||
elevatedLayout: false,
|
||||
animateInAsReplacement: false,
|
||||
action: { _ in return false }
|
||||
), in: .current)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -94,6 +112,8 @@ final class StoryItemSetContainerSendMessage {
|
||||
isVideo: Bool,
|
||||
sendAction: Bool
|
||||
) {
|
||||
self.isMediaRecordingLocked = false
|
||||
|
||||
guard let component = view.component else {
|
||||
return
|
||||
}
|
||||
@ -167,6 +187,7 @@ final class StoryItemSetContainerSendMessage {
|
||||
return
|
||||
}
|
||||
|
||||
self.wasRecordingDismissed = !sendAction
|
||||
self.audioRecorder.set(.single(nil))
|
||||
|
||||
guard let data else {
|
||||
@ -205,7 +226,50 @@ final class StoryItemSetContainerSendMessage {
|
||||
})
|
||||
}
|
||||
|
||||
func stopMediaRecorder() {
|
||||
func lockMediaRecording() {
|
||||
self.isMediaRecordingLocked = true
|
||||
}
|
||||
|
||||
func stopMediaRecording(view: StoryItemSetContainerComponent.View) {
|
||||
if let audioRecorderValue = self.audioRecorderValue {
|
||||
let _ = (audioRecorderValue.takenRecordedData() |> deliverOnMainQueue).start(next: { [weak self, weak view] data in
|
||||
guard let self, let view, let component = view.component else {
|
||||
return
|
||||
}
|
||||
self.audioRecorder.set(.single(nil))
|
||||
|
||||
guard let data else {
|
||||
return
|
||||
}
|
||||
if data.duration < 0.5 {
|
||||
HapticFeedback().error()
|
||||
} else if let waveform = data.waveform {
|
||||
let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max), size: Int64(data.compressedData.count))
|
||||
|
||||
component.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data.compressedData)
|
||||
self.recordedAudioPreview = ChatRecordedMediaPreview(resource: resource, duration: Int32(data.duration), fileSize: Int32(data.compressedData.count), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5))
|
||||
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
|
||||
}
|
||||
})
|
||||
} else if let videoRecorderValue = self.videoRecorderValue {
|
||||
if videoRecorderValue.stopVideo() {
|
||||
/*self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
|
||||
$0.updatedInputTextPanelState { panelState in
|
||||
return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
|
||||
}
|
||||
})*/
|
||||
} else {
|
||||
self.videoRecorder.set(.single(nil))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func discardMediaRecordingPreview(view: StoryItemSetContainerComponent.View) {
|
||||
if self.recordedAudioPreview != nil {
|
||||
self.recordedAudioPreview = nil
|
||||
self.wasRecordingDismissed = true
|
||||
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
|
||||
}
|
||||
}
|
||||
|
||||
func performInlineAction(view: StoryItemSetContainerComponent.View, item: StoryActionsComponent.Item) {
|
||||
|
@ -8,7 +8,7 @@ import TelegramCore
import StoryContainerScreen

public enum StoryChatContent {
public static func stories(context: AccountContext, storyList: StoryListContext, focusItem: Int64?) -> Signal<[StoryContentItemSlice], NoError> {
public static func stories(context: AccountContext, storyList: StoryListContext, focusItem: Int32?) -> Signal<[StoryContentItemSlice], NoError> {
return storyList.state
|> map { state -> [StoryContentItemSlice] in
var itemSlices: [StoryContentItemSlice] = []
@ -59,7 +59,7 @@ public enum StoryChatContent {
}

var sliceFocusedItemId: AnyHashable?
if let focusItem, items.contains(where: { ($0.id.base as? Int64) == focusItem }) {
if let focusItem, items.contains(where: { ($0.id.base as? Int32) == focusItem }) {
sliceFocusedItemId = AnyHashable(focusItem)
} else {
if let id = itemSet.items.first(where: { $0.id > itemSet.maxReadId })?.id {
@ -73,8 +73,8 @@ public enum StoryChatContent {
items: items,
totalCount: items.count,
update: { requestedItemSet, itemId in
var focusItem: Int64?
if let id = itemId.base as? Int64 {
var focusItem: Int32?
if let id = itemId.base as? Int32 {
focusItem = id
}
return StoryChatContent.stories(context: context, storyList: storyList, focusItem: focusItem)
@ -228,7 +228,7 @@ final class StoryItemContentComponent: Component {
}
}

#if DEBUG && false
#if DEBUG// && false
let currentProgressTimerLimit: Double = 5 * 60.0
#else
let currentProgressTimerLimit: Double = 5.0
@ -218,8 +218,7 @@ public final class StoryPeerListComponent: Component {
collapsedContentWidth += titleSpacing
}

let collapseEndIndex = collapseStartIndex + Int(collapsedItemCount)
let _ = collapseEndIndex
let collapseEndIndex = collapseStartIndex + max(0, Int(collapsedItemCount) - 1)

let titleOffset = collapsedContentWidth
collapsedContentWidth += titleSize.width
@ -307,13 +306,13 @@ public final class StoryPeerListComponent: Component {

var itemAlpha: CGFloat = 1.0

if i >= collapseStartIndex && i <= (collapseStartIndex + 2) {
if i >= collapseStartIndex && i <= collapseEndIndex {
if i != collapseStartIndex {
let regularLeftItemFrame = itemLayout.frame(at: i - 1)
let collapsedLeftItemFrame = CGRect(origin: CGPoint(x: collapsedContentOrigin + CGFloat(i - collapseStartIndex - 1) * collapsedItemDistance, y: regularLeftItemFrame.minY), size: CGSize(width: collapsedItemWidth, height: regularLeftItemFrame.height))
leftItemFrame = regularLeftItemFrame.interpolate(to: collapsedLeftItemFrame, amount: component.collapseFraction)
}
if i != collapseStartIndex + 2 {
if i != collapseEndIndex {
let regularRightItemFrame = itemLayout.frame(at: i - 1)
let collapsedRightItemFrame = CGRect(origin: CGPoint(x: collapsedContentOrigin + CGFloat(i - collapseStartIndex - 1) * collapsedItemDistance, y: regularRightItemFrame.minY), size: CGSize(width: collapsedItemWidth, height: regularRightItemFrame.height))
rightItemFrame = regularRightItemFrame.interpolate(to: collapsedRightItemFrame, amount: component.collapseFraction)
@ -414,7 +413,7 @@ public final class StoryPeerListComponent: Component {
if let myIndex = state.itemSets.firstIndex(where: { $0.peerId == component.context.account.peerId }) {
self.sortedItemSets.append(state.itemSets[myIndex])
}
for i in 0 ..< 4 {
for i in 0 ..< 1 {
for itemSet in state.itemSets {
if itemSet.peerId == component.context.account.peerId {
continue
@ -1,22 +1,12 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "ModernConversationAudioSlideToCancel@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "ModernConversationAudioSlideToCancel@3x.png",
"scale" : "3x"
"filename" : "arrowleft.svg",
"idiom" : "universal"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
"author" : "xcode",
"version" : 1
}
}
}
Binary file not shown.
Before Width: | Height: | Size: 496 B
Binary file not shown.
Before Width: | Height: | Size: 645 B
@ -0,0 +1,4 @@
<svg width="9" height="18" viewBox="0 0 9 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.43787 0.499507C8.71427 0.741355 8.74227 1.16148 8.50042 1.43788L1.88359 8.99997L8.50042 16.5621C8.74227 16.8385 8.71427 17.2586 8.43787 17.5004C8.16147 17.7423 7.74135 17.7143 7.4995 17.4379L0.499497 9.43788C0.280115 9.18715 0.280115 8.81279 0.499497 8.56206L7.4995 0.562064C7.74135 0.285666 8.16147 0.257658 8.43787 0.499507Z" fill="white" style="mix-blend-mode:overlay"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.43787 0.499507C8.71427 0.741355 8.74227 1.16148 8.50042 1.43788L1.88359 8.99997L8.50042 16.5621C8.74227 16.8385 8.71427 17.2586 8.43787 17.5004C8.16147 17.7423 7.74135 17.7143 7.4995 17.4379L0.499497 9.43788C0.280115 9.18715 0.280115 8.81279 0.499497 8.56206L7.4995 0.562064C7.74135 0.285666 8.16147 0.257658 8.43787 0.499507Z" fill="white" style="mix-blend-mode:overlay"/>
</svg>
After Width: | Height: | Size: 953 B
@ -1192,6 +1192,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
backgroundColor: isTranscriptionInProgress ? messageTheme.mediaInactiveControlColor : waveformColor,
foregroundColor: messageTheme.mediaActiveControlColor,
shimmerColor: isTranscriptionInProgress ? messageTheme.mediaActiveControlColor : nil,
style: .bottom,
samples: audioWaveform?.samples ?? Data(),
peak: audioWaveform?.peak ?? 0,
status: strongSelf.playbackStatus.get(),
@ -1199,7 +1200,8 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
if let strongSelf = self, let context = strongSelf.context, let message = strongSelf.message, let type = peerMessageMediaPlayerType(EngineMessage(message)) {
context.sharedContext.mediaManager.playlistControl(.seek(timestamp), type: type)
}
}
},
updateIsSeeking: nil
)),
environment: {},
containerSize: scrubbingFrame.size
@ -12,6 +12,7 @@ import ItemListPeerItem
import MergeLists
import ItemListUI
import ChatControllerInteraction
import PeerInfoVisualMediaPaneNode

private struct GroupsInCommonListTransaction {
let deletions: [ListViewDeleteItem]
@ -16,6 +16,7 @@ import ListMessageItem
import UndoUI
import ChatPresentationInterfaceState
import ChatControllerInteraction
import PeerInfoVisualMediaPaneNode

final class PeerInfoListPaneNode: ASDisplayNode, PeerInfoPaneNode {
private let context: AccountContext
@ -12,6 +12,7 @@ import ItemListPeerItem
import ItemListPeerActionItem
import MergeLists
import ItemListUI
import PeerInfoVisualMediaPaneNode

private struct PeerMembersListTransaction {
let deletions: [ListViewDeleteItem]
@ -12,6 +12,7 @@ import TelegramUIPreferences
import TelegramNotices
import AccountUtils
import DeviceAccess
import PeerInfoVisualMediaPaneNode

enum PeerInfoUpdatingAvatar {
case none
@ -329,12 +330,6 @@ private func peerInfoAvailableMediaPanes(context: AccountContext, peerId: PeerId
|> distinctUntilChanged
}

struct PeerInfoStatusData: Equatable {
var text: String
var isActivity: Bool
var key: PeerInfoPaneKey?
}

enum PeerInfoMembersData: Equatable {
case shortList(membersContext: PeerInfoMembersContext, members: [PeerInfoMember])
case longList(PeerInfoMembersContext)
@ -681,6 +676,11 @@ func peerInfoScreenData(context: AccountContext, peerId: PeerId, strings: Presen
)
|> map { peerView, availablePanes, globalNotificationSettings, encryptionKeyFingerprint, status -> PeerInfoScreenData in
var availablePanes = availablePanes

if peerView.peers[peerView.peerId] is TelegramUser {
availablePanes?.insert(.stories, at: 0)
}

if availablePanes != nil, groupsInCommon != nil, let cachedData = peerView.cachedData as? CachedUserData {
if cachedData.commonGroupCount != 0 {
availablePanes?.append(.groupsInCommon)
@ -30,6 +30,7 @@ import ComponentDisplayAdapters
import ChatTitleView
import AppBundle
import AvatarVideoNode
import PeerInfoVisualMediaPaneNode

enum PeerInfoHeaderButtonKey: Hashable {
case message
@ -9,27 +9,7 @@ import TelegramCore
import AccountContext
import ContextUI
import ChatControllerInteraction

protocol PeerInfoPaneNode: ASDisplayNode {
var isReady: Signal<Bool, NoError> { get }

var parentController: ViewController? { get set }

var status: Signal<PeerInfoStatusData?, NoError> { get }
var tabBarOffsetUpdated: ((ContainedViewLayoutTransition) -> Void)? { get set }
var tabBarOffset: CGFloat { get }

func update(size: CGSize, topInset: CGFloat, sideInset: CGFloat, bottomInset: CGFloat, visibleHeight: CGFloat, isScrollingLockedAtTop: Bool, expandProgress: CGFloat, presentationData: PresentationData, synchronous: Bool, transition: ContainedViewLayoutTransition)
func scrollToTop() -> Bool
func transferVelocity(_ velocity: CGFloat)
func cancelPreviewGestures()
func findLoadedMessage(id: MessageId) -> Message?
func transitionNodeForGallery(messageId: MessageId, media: Media) -> (ASDisplayNode, CGRect, () -> (UIView?, UIView?))?
func addToTransitionSurface(view: UIView)
func updateHiddenMedia()
func updateSelectedMessages(animated: Bool)
func ensureMessageIsVisible(id: MessageId)
}
import PeerInfoVisualMediaPaneNode

final class PeerInfoPaneWrapper {
let key: PeerInfoPaneKey
@ -53,17 +33,6 @@ final class PeerInfoPaneWrapper {
}
}

enum PeerInfoPaneKey: Int32 {
case members
case media
case files
case music
case voice
case links
case gifs
case groupsInCommon
}

final class PeerInfoPaneTabsContainerPaneNode: ASDisplayNode {
private let pressed: () -> Void
@ -398,6 +367,15 @@ private final class PeerInfoPendingPane {
let captureProtected = data.peer?.isCopyProtectionEnabled ?? false
let paneNode: PeerInfoPaneNode
switch key {
case .stories:
let visualPaneNode = PeerInfoStoryPaneNode(context: context, chatControllerInteraction: chatControllerInteraction, peerId: peerId, chatLocation: chatLocation, chatLocationContextHolder: chatLocationContextHolder, contentType: .photoOrVideo, captureProtected: captureProtected)
paneNode = visualPaneNode
visualPaneNode.openCurrentDate = {
openMediaCalendar()
}
visualPaneNode.paneDidScroll = {
paneDidScroll()
}
case .media:
let visualPaneNode = PeerInfoVisualMediaPaneNode(context: context, chatControllerInteraction: chatControllerInteraction, peerId: peerId, chatLocation: chatLocation, chatLocationContextHolder: chatLocationContextHolder, contentType: .photoOrVideo, captureProtected: captureProtected)
paneNode = visualPaneNode
@ -977,6 +955,9 @@ final class PeerInfoPaneContainerNode: ASDisplayNode, UIGestureRecognizerDelegat
self.tabsContainerNode.update(size: CGSize(width: size.width, height: tabsHeight), presentationData: presentationData, paneList: availablePanes.map { key in
let title: String
switch key {
case .stories:
//TODO:localize
title = "Stories"
case .media:
title = presentationData.strings.PeerInfo_PaneMedia
case .files:
@ -86,6 +86,7 @@ import ChatControllerInteraction
import StorageUsageScreen
import AvatarEditorScreen
import SendInviteLinkScreen
import PeerInfoVisualMediaPaneNode

enum PeerInfoAvatarEditingMode {
case generic
@ -2016,7 +2017,7 @@ private func editingItems(data: PeerInfoScreenData?, state: PeerInfoState, chatL
return result
}

final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate {
final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodeProtocol, UIScrollViewDelegate {
private weak var controller: PeerInfoScreenImpl?

private let context: AccountContext
@ -15,6 +15,7 @@ import ListMessageItem
import ChatMessageInteractiveMediaBadge
import SoftwareVideo
import ChatControllerInteraction
import PeerInfoVisualMediaPaneNode

private final class FrameSequenceThumbnailNode: ASDisplayNode {
private let context: AccountContext
@ -25,6 +25,7 @@ import LegacyCamera
import AvatarNode
import LocalMediaResources
import ShareWithPeersScreen
import ImageCompression

private class DetailsChatPlaceholderNode: ASDisplayNode, NavigationDetailsPlaceholderNode {
private var presentationData: PresentationData
@ -359,11 +360,16 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
if let chatListController = self.chatListController as? ChatListControllerImpl, let storyListContext = chatListController.storyListContext {
switch mediaResult {
case let .image(image, dimensions, caption):
if let data = image.jpegData(compressionQuality: 0.8) {
storyListContext.upload(media: .image(dimensions: dimensions, data: data), text: caption?.string ?? "", entities: [], privacy: privacy)
Queue.mainQueue().after(0.2, { [weak chatListController] in
chatListController?.animateStoryUploadRipple()
})
var randomId: Int64 = 0
arc4random_buf(&randomId, 8)
let scaledSize = image.size.aspectFittedOrSmaller(CGSize(width: 1280.0, height: 1280.0))
if let scaledImage = scaleImageToPixelSize(image: image, size: scaledSize) {
if let scaledImageData = compressImageToJPEG(scaledImage, quality: 0.6) {
storyListContext.upload(media: .image(dimensions: dimensions, data: scaledImageData), text: caption?.string ?? "", entities: [], privacy: privacy)
Queue.mainQueue().after(0.2, { [weak chatListController] in
chatListController?.animateStoryUploadRipple()
})
}
}
case let .video(content, _, values, duration, dimensions, caption):
let adjustments: VideoMediaResourceAdjustments