Merge commit '148947a0e9e6182156a3c90fc1bc885ec0827f4d'

This commit is contained in:
Ali 2023-07-11 23:41:38 +04:00
commit 860124e3be
24 changed files with 1240 additions and 650 deletions

View File

@ -1112,7 +1112,6 @@ public struct StoriesConfiguration {
default:
posting = .disabled
}
posting = .enabled
return StoriesConfiguration(posting: posting)
} else {
return .defaultValue

View File

@ -108,7 +108,7 @@ private final class CameraContext {
private let session: CameraSession
private var mainDeviceContext: CameraDeviceContext
private var mainDeviceContext: CameraDeviceContext?
private var additionalDeviceContext: CameraDeviceContext?
private let cameraImageContext = CIContext()
@ -132,11 +132,11 @@ private final class CameraContext {
private var lastSnapshotTimestamp: Double = CACurrentMediaTime()
private var lastAdditionalSnapshotTimestamp: Double = CACurrentMediaTime()
private func savePreviewSnapshot(pixelBuffer: CVPixelBuffer, mirror: Bool) {
private func savePreviewSnapshot(pixelBuffer: CVPixelBuffer, front: Bool) {
Queue.concurrentDefaultQueue().async {
var ciImage = CIImage(cvImageBuffer: pixelBuffer)
let size = ciImage.extent.size
if mirror {
if front {
var transform = CGAffineTransformMakeScale(1.0, -1.0)
transform = CGAffineTransformTranslate(transform, 0.0, -size.height)
ciImage = ciImage.transformed(by: transform)
@ -144,7 +144,7 @@ private final class CameraContext {
ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 40.0).cropped(to: CGRect(origin: .zero, size: size))
if let cgImage = self.cameraImageContext.createCGImage(ciImage, from: ciImage.extent) {
let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
if mirror {
if front {
CameraSimplePreviewView.saveLastFrontImage(uiImage)
} else {
CameraSimplePreviewView.saveLastBackImage(uiImage)
@ -161,42 +161,10 @@ private final class CameraContext {
self.secondaryPreviewView = secondaryPreviewView
self.positionValue = configuration.position
self._positionPromise = ValuePromise<Camera.Position>(configuration.position)
self.mainDeviceContext = CameraDeviceContext(session: session, exclusive: true, additional: false)
self.configure {
self.mainDeviceContext.configure(position: configuration.position, previewView: self.simplePreviewView, audio: configuration.audio, photo: configuration.photo, metadata: configuration.metadata)
}
self.mainDeviceContext.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
guard let self else {
return
}
self.previewNode?.enqueue(sampleBuffer)
let timestamp = CACurrentMediaTime()
if timestamp > self.lastSnapshotTimestamp + 2.5 {
var mirror = false
if #available(iOS 13.0, *) {
mirror = connection.inputPorts.first?.sourceDevicePosition == .front
}
self.savePreviewSnapshot(pixelBuffer: pixelBuffer, mirror: mirror)
self.lastSnapshotTimestamp = timestamp
}
}
self.mainDeviceContext.output.processFaceLandmarks = { [weak self] observations in
guard let self else {
return
}
if let previewView = self.previewView {
previewView.drawFaceObservations(observations)
}
}
self.mainDeviceContext.output.processCodes = { [weak self] codes in
self?.detectedCodesPipe.putNext(codes)
}
self.setDualCameraEnabled(configuration.isDualEnabled, change: false)
NotificationCenter.default.addObserver(
self,
selector: #selector(self.sessionRuntimeError),
@ -216,10 +184,10 @@ private final class CameraContext {
func stopCapture(invalidate: Bool = false) {
if invalidate {
self.mainDeviceContext.device.resetZoom()
self.mainDeviceContext?.device.resetZoom()
self.configure {
self.mainDeviceContext.invalidate()
self.mainDeviceContext?.invalidate()
}
}
@ -236,11 +204,11 @@ private final class CameraContext {
focusMode = .autoFocus
exposureMode = .autoExpose
}
self.mainDeviceContext.device.setFocusPoint(point, focusMode: focusMode, exposureMode: exposureMode, monitorSubjectAreaChange: true)
self.mainDeviceContext?.device.setFocusPoint(point, focusMode: focusMode, exposureMode: exposureMode, monitorSubjectAreaChange: true)
}
func setFps(_ fps: Float64) {
self.mainDeviceContext.device.fps = fps
self.mainDeviceContext?.device.fps = fps
}
private var modeChange: Camera.ModeChange = .none {
@ -251,14 +219,17 @@ private final class CameraContext {
}
}
private var _positionPromise = ValuePromise<Camera.Position>(.unspecified)
private var _positionPromise: ValuePromise<Camera.Position>
var position: Signal<Camera.Position, NoError> {
return self._positionPromise.get()
}
private var positionValue: Camera.Position = .back
func togglePosition() {
if self.isDualCamEnabled {
guard let mainDeviceContext = self.mainDeviceContext else {
return
}
if self.isDualCameraEnabled == true {
let targetPosition: Camera.Position
if case .back = self.positionValue {
targetPosition = .front
@ -268,13 +239,13 @@ private final class CameraContext {
self.positionValue = targetPosition
self._positionPromise.set(targetPosition)
self.mainDeviceContext.output.markPositionChange(position: targetPosition)
mainDeviceContext.output.markPositionChange(position: targetPosition)
} else {
self.configure {
self.mainDeviceContext.invalidate()
self.mainDeviceContext?.invalidate()
let targetPosition: Camera.Position
if case .back = self.mainDeviceContext.device.position {
if case .back = mainDeviceContext.device.position {
targetPosition = .front
} else {
targetPosition = .back
@ -283,7 +254,7 @@ private final class CameraContext {
self._positionPromise.set(targetPosition)
self.modeChange = .position
self.mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.queue.after(0.5) {
self.modeChange = .none
@ -294,13 +265,13 @@ private final class CameraContext {
public func setPosition(_ position: Camera.Position) {
self.configure {
self.mainDeviceContext.invalidate()
self.mainDeviceContext?.invalidate()
self._positionPromise.set(position)
self.positionValue = position
self.modeChange = .position
self.mainDeviceContext.configure(position: position, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.queue.after(0.5) {
self.modeChange = .none
@ -308,103 +279,111 @@ private final class CameraContext {
}
}
private var isDualCamEnabled = false
public func setDualCamEnabled(_ enabled: Bool) {
guard enabled != self.isDualCamEnabled else {
private var isDualCameraEnabled: Bool?
public func setDualCameraEnabled(_ enabled: Bool, change: Bool = true) {
guard enabled != self.isDualCameraEnabled else {
return
}
self.isDualCamEnabled = enabled
self.isDualCameraEnabled = enabled
if change {
self.modeChange = .dualCamera
}
self.modeChange = .dualCamera
if enabled {
self.configure {
self.mainDeviceContext.invalidate()
self.mainDeviceContext?.invalidate()
self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false)
self.mainDeviceContext.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true)
self.additionalDeviceContext?.configure(position: .front, previewView: self.secondaryPreviewView, audio: false, photo: true, metadata: false)
}
self.mainDeviceContext.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
guard let self else {
self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
guard let self, let mainDeviceContext = self.mainDeviceContext else {
return
}
self.previewNode?.enqueue(sampleBuffer)
let timestamp = CACurrentMediaTime()
if timestamp > self.lastSnapshotTimestamp + 2.5 {
var mirror = false
if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
var front = false
if #available(iOS 13.0, *) {
mirror = connection.inputPorts.first?.sourceDevicePosition == .front
front = connection.inputPorts.first?.sourceDevicePosition == .front
}
self.savePreviewSnapshot(pixelBuffer: pixelBuffer, mirror: mirror)
self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
self.lastSnapshotTimestamp = timestamp
}
}
self.additionalDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
guard let self else {
guard let self, let additionalDeviceContext = self.additionalDeviceContext else {
return
}
let timestamp = CACurrentMediaTime()
if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5 {
var mirror = false
if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording {
var front = false
if #available(iOS 13.0, *) {
mirror = connection.inputPorts.first?.sourceDevicePosition == .front
front = connection.inputPorts.first?.sourceDevicePosition == .front
}
self.savePreviewSnapshot(pixelBuffer: pixelBuffer, mirror: mirror)
self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
self.lastAdditionalSnapshotTimestamp = timestamp
}
}
} else {
self.configure {
self.mainDeviceContext.invalidate()
self.mainDeviceContext?.invalidate()
self.additionalDeviceContext?.invalidate()
self.additionalDeviceContext = nil
self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false)
self.mainDeviceContext.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
}
self.mainDeviceContext.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
guard let self else {
self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
guard let self, let mainDeviceContext = self.mainDeviceContext else {
return
}
self.previewNode?.enqueue(sampleBuffer)
let timestamp = CACurrentMediaTime()
if timestamp > self.lastSnapshotTimestamp + 2.5, !self.mainDeviceContext.output.isRecording {
var mirror = false
if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
var front = false
if #available(iOS 13.0, *) {
mirror = connection.inputPorts.first?.sourceDevicePosition == .front
front = connection.inputPorts.first?.sourceDevicePosition == .front
}
self.savePreviewSnapshot(pixelBuffer: pixelBuffer, mirror: mirror)
self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
self.lastSnapshotTimestamp = timestamp
}
}
self.mainDeviceContext?.output.processCodes = { [weak self] codes in
self?.detectedCodesPipe.putNext(codes)
}
}
if #available(iOS 13.0, *), let previewView = self.simplePreviewView {
if enabled, let secondaryPreviewView = self.secondaryPreviewView {
let _ = (combineLatest(previewView.isPreviewing, secondaryPreviewView.isPreviewing)
|> map { first, second in
return first && second
if change {
if #available(iOS 13.0, *), let previewView = self.simplePreviewView {
if enabled, let secondaryPreviewView = self.secondaryPreviewView {
let _ = (combineLatest(previewView.isPreviewing, secondaryPreviewView.isPreviewing)
|> map { first, second in
return first && second
}
|> filter { $0 }
|> take(1)
|> delay(0.1, queue: self.queue)
|> deliverOn(self.queue)).start(next: { [weak self] _ in
self?.modeChange = .none
})
} else {
let _ = (previewView.isPreviewing
|> filter { $0 }
|> take(1)
|> deliverOn(self.queue)).start(next: { [weak self] _ in
self?.modeChange = .none
})
}
|> filter { $0 }
|> take(1)
|> delay(0.1, queue: self.queue)
|> deliverOn(self.queue)).start(next: { [weak self] _ in
self?.modeChange = .none
})
} else {
let _ = (previewView.isPreviewing
|> filter { $0 }
|> take(1)
|> deliverOn(self.queue)).start(next: { [weak self] _ in
self?.modeChange = .none
})
}
} else {
self.queue.after(0.4) {
self.modeChange = .none
self.queue.after(0.4) {
self.modeChange = .none
}
}
}
}
@ -416,15 +395,15 @@ private final class CameraContext {
}
var hasTorch: Signal<Bool, NoError> {
return self.mainDeviceContext.device.isTorchAvailable
return self.mainDeviceContext?.device.isTorchAvailable ?? .never()
}
func setTorchActive(_ active: Bool) {
self.mainDeviceContext.device.setTorchActive(active)
self.mainDeviceContext?.device.setTorchActive(active)
}
var isFlashActive: Signal<Bool, NoError> {
return self.mainDeviceContext.output.isFlashActive
return self.mainDeviceContext?.output.isFlashActive ?? .never()
}
private var _flashMode: Camera.FlashMode = .off {
@ -442,19 +421,22 @@ private final class CameraContext {
}
func setZoomLevel(_ zoomLevel: CGFloat) {
self.mainDeviceContext.device.setZoomLevel(zoomLevel)
self.mainDeviceContext?.device.setZoomLevel(zoomLevel)
}
func setZoomDelta(_ zoomDelta: CGFloat) {
self.mainDeviceContext.device.setZoomDelta(zoomDelta)
self.mainDeviceContext?.device.setZoomDelta(zoomDelta)
}
func takePhoto() -> Signal<PhotoCaptureResult, NoError> {
guard let mainDeviceContext = self.mainDeviceContext else {
return .complete()
}
let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
if let additionalDeviceContext = self.additionalDeviceContext {
let dualPosition = self.positionValue
return combineLatest(
self.mainDeviceContext.output.takePhoto(orientation: orientation, flashMode: self._flashMode),
mainDeviceContext.output.takePhoto(orientation: orientation, flashMode: self._flashMode),
additionalDeviceContext.output.takePhoto(orientation: orientation, flashMode: self._flashMode)
) |> map { main, additional in
if case let .finished(mainImage, _, _) = main, case let .finished(additionalImage, _, _) = additional {
@ -468,29 +450,35 @@ private final class CameraContext {
}
} |> distinctUntilChanged
} else {
return self.mainDeviceContext.output.takePhoto(orientation: orientation, flashMode: self._flashMode)
return mainDeviceContext.output.takePhoto(orientation: orientation, flashMode: self._flashMode)
}
}
public func startRecording() -> Signal<Double, NoError> {
self.mainDeviceContext.device.setTorchMode(self._flashMode)
guard let mainDeviceContext = self.mainDeviceContext else {
return .complete()
}
mainDeviceContext.device.setTorchMode(self._flashMode)
if let additionalDeviceContext = self.additionalDeviceContext {
return combineLatest(
self.mainDeviceContext.output.startRecording(isDualCamera: true, position: self.positionValue),
mainDeviceContext.output.startRecording(isDualCamera: true, position: self.positionValue),
additionalDeviceContext.output.startRecording(isDualCamera: true)
) |> map { value, _ in
return value
}
} else {
return self.mainDeviceContext.output.startRecording(isDualCamera: false)
return mainDeviceContext.output.startRecording(isDualCamera: false)
}
}
public func stopRecording() -> Signal<VideoCaptureResult, NoError> {
guard let mainDeviceContext = self.mainDeviceContext else {
return .complete()
}
if let additionalDeviceContext = self.additionalDeviceContext {
return combineLatest(
self.mainDeviceContext.output.stopRecording(),
mainDeviceContext.output.stopRecording(),
additionalDeviceContext.output.stopRecording()
) |> mapToSignal { main, additional in
if case let .finished(mainResult, _, duration, positionChangeTimestamps, _) = main, case let .finished(additionalResult, _, _, _, _) = additional {
@ -505,7 +493,7 @@ private final class CameraContext {
}
} else {
let mirror = self.positionValue == .front
return self.mainDeviceContext.output.stopRecording()
return mainDeviceContext.output.stopRecording()
|> map { result -> VideoCaptureResult in
if case let .finished(mainResult, _, duration, positionChangeTimestamps, time) = result {
var transitionImage = mainResult.1
@ -556,14 +544,16 @@ public final class Camera {
public struct Configuration {
let preset: Preset
let position: Position
let isDualEnabled: Bool
let audio: Bool
let photo: Bool
let metadata: Bool
let preferredFps: Double
public init(preset: Preset, position: Position, audio: Bool, photo: Bool, metadata: Bool, preferredFps: Double) {
public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferredFps: Double) {
self.preset = preset
self.position = position
self.isDualEnabled = isDualEnabled
self.audio = audio
self.photo = photo
self.metadata = metadata
@ -659,10 +649,10 @@ public final class Camera {
}
}
public func setDualCamEnabled(_ enabled: Bool) {
public func setDualCameraEnabled(_ enabled: Bool) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.setDualCamEnabled(enabled)
context.setDualCameraEnabled(enabled)
}
}
}
@ -888,8 +878,8 @@ public final class Camera {
}
}
public static var isDualCamSupported: Bool {
if #available(iOS 13.0, *), AVCaptureMultiCamSession.isMultiCamSupported {
public static var isDualCameraSupported: Bool {
if #available(iOS 13.0, *), AVCaptureMultiCamSession.isMultiCamSupported && !DeviceModel.current.isIpad {
return true
} else {
return false

View File

@ -198,6 +198,10 @@ enum DeviceModel: CaseIterable, Equatable {
}
}
// Whether this device is an iPad, judged by the "iPad" prefix of the first
// model identifier (e.g. "iPad8,1"). Returns false when no identifier is known.
// NOTE(review): assumes `modelId` is a collection of identifier strings — confirm
// against the DeviceModel declaration (not visible in this chunk).
var isIpad: Bool {
return self.modelId.first?.hasPrefix("iPad") ?? false
}
static let current = DeviceModel()
private init() {

View File

@ -76,24 +76,9 @@ public class CameraSimplePreviewView: UIView {
super.init(frame: frame)
self.videoPreviewLayer.videoGravity = main ? .resizeAspectFill : .resizeAspect
self.placeholderView.contentMode = main ? .scaleAspectFill : .scaleAspectFit
self.addSubview(self.placeholderView)
if main {
if #available(iOS 13.0, *) {
self.previewingDisposable = (self.isPreviewing
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
self?.removePlaceholder(delay: 0.15)
})
} else {
Queue.mainQueue().after(0.35) {
self.removePlaceholder(delay: 0.15)
}
}
}
self.addSubview(self.placeholderView)
}
required init?(coder: NSCoder) {

View File

@ -2542,16 +2542,41 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
switch self.storyPostingAvailability {
case .premium:
guard self.isPremium else {
let context = self.context
var replaceImpl: ((ViewController) -> Void)?
let controller = context.sharedContext.makePremiumDemoController(context: self.context, subject: .stories, action: {
let controller = context.sharedContext.makePremiumIntroController(context: context, source: .stories)
replaceImpl?(controller)
})
replaceImpl = { [weak controller] c in
controller?.replace(with: c)
if let componentView = self.chatListHeaderView() {
var sourceFrame: CGRect?
if fromList {
if let (transitionView, _) = componentView.storyPeerListView()?.transitionViewForItem(peerId: self.context.account.peerId) {
sourceFrame = transitionView.convert(transitionView.bounds, to: nil).offsetBy(dx: 18.0 - UIScreenPixel, dy: 1.0)
}
} else {
if let rightButtonView = componentView.rightButtonViews["story"] {
sourceFrame = rightButtonView.convert(rightButtonView.bounds, to: nil).offsetBy(dx: 5.0, dy: -8.0)
}
}
if let sourceFrame {
let context = self.context
let location = CGRect(origin: CGPoint(x: sourceFrame.midX, y: sourceFrame.maxY), size: CGSize())
let tooltipController = TooltipScreen(
context: context,
account: context.account,
sharedContext: context.sharedContext,
text: .markdown(text: "Posting stories is currently available only\nto subscribers of [Telegram Premium]()."),
style: .customBlur(UIColor(rgb: 0x2a2a2a), 2.0),
icon: .none,
location: .point(location, .top),
shouldDismissOnTouch: { [weak self] point, containerFrame in
if containerFrame.contains(point) {
let controller = context.sharedContext.makePremiumIntroController(context: context, source: .stories)
self?.push(controller)
return .dismiss(consume: true)
} else {
return .dismiss(consume: false)
}
}
)
self.present(tooltipController, in: .window(.root))
}
}
self.push(controller)
return
}
case .disabled:

View File

@ -219,6 +219,8 @@ private final class StickerSelectionComponent: Component {
let topPanelHeight: CGFloat = 42.0
let defaultToEmoji = component.getController()?.defaultToEmoji ?? false
let context = component.context
let stickerPeekBehavior = EmojiContentPeekBehaviorImpl(
context: context,
@ -247,7 +249,7 @@ private final class StickerSelectionComponent: Component {
gifContent: nil,
hasRecentGifs: false,
availableGifSearchEmojies: [],
defaultToEmojiTab: false,
defaultToEmojiTab: defaultToEmoji,
externalTopPanelContainer: self.panelHostView,
externalBottomPanelContainer: nil,
displayTopPanelBackground: .blur,
@ -1629,6 +1631,7 @@ public class StickerPickerScreen: ViewController {
private let context: AccountContext
private let theme: PresentationTheme
private let inputData: Signal<StickerPickerInputData, NoError>
fileprivate let defaultToEmoji: Bool
private var currentLayout: ContainerViewLayout?
@ -1639,10 +1642,11 @@ public class StickerPickerScreen: ViewController {
public var presentGallery: () -> Void = { }
public init(context: AccountContext, inputData: Signal<StickerPickerInputData, NoError>) {
public init(context: AccountContext, inputData: Signal<StickerPickerInputData, NoError>, defaultToEmoji: Bool = false) {
self.context = context
self.theme = defaultDarkColorPresentationTheme
self.inputData = inputData
self.defaultToEmoji = defaultToEmoji
super.init(navigationBarPresentationData: nil)

View File

@ -12,6 +12,186 @@ private let imageManager: PHCachingImageManager = {
private let assetsQueue = Queue()
// Manages downloading of PHAsset media data (e.g. from iCloud), one asset at a
// time, and lets observers subscribe to per-asset progress via Signal.
// Starting a new download cancels the previous one.
final class AssetDownloadManager {
// Book-keeping for the single in-flight download: the asset's local identifier,
// its latest status, the running request disposable, and a callback that
// notifies registered progress observers.
private final class DownloadingAssetContext {
let identifier: String
let updated: () -> Void
// Latest known status; starts at zero progress.
var status: AssetDownloadStatus = .progress(0.0)
var disposable: Disposable?
init(identifier: String, updated: @escaping () -> Void) {
self.identifier = identifier
self.updated = updated
}
deinit {
// Ensure the underlying request is cancelled when the context is released.
self.disposable?.dispose()
}
}
// Serial queue on which status updates and observer book-keeping are performed.
private let queue = Queue()
// The single in-flight download, if any.
private var currentAssetContext: DownloadingAssetContext?
init() {
}
deinit {
}
// Begins downloading the media data for `asset`, cancelling any previous
// in-flight download first.
// NOTE(review): `currentAssetContext` is mutated here but read inside
// `queue.async` blocks — confirm all callers invoke `download(asset:)` on
// `queue` (or an equivalent serialization), otherwise this is a data race.
func download(asset: PHAsset) {
if let currentAssetContext = self.currentAssetContext {
currentAssetContext.disposable?.dispose()
}
let queue = self.queue
let identifier = asset.localIdentifier
// `updated` fans the latest status out to every observer registered for this
// identifier, on `queue`.
let assetContext = DownloadingAssetContext(identifier: identifier, updated: { [weak self] in
queue.async {
guard let self else {
return
}
// Only notify if this context is still the active one for the identifier.
if let currentAssetContext = self.currentAssetContext, currentAssetContext.identifier == identifier, let bag = self.progressObserverContexts[identifier] {
for f in bag.copyItems() {
f(currentAssetContext.status)
}
}
}
})
// Drive the context's status from the underlying Photos request.
assetContext.disposable = (downloadAssetMediaData(asset)
|> deliverOn(queue)).start(next: { [weak self] status in
guard let self else {
return
}
// Ignore late events from a superseded download.
if let currentAssetContext = self.currentAssetContext, currentAssetContext.identifier == identifier {
currentAssetContext.status = status
currentAssetContext.updated()
}
})
self.currentAssetContext = assetContext
}
// Cancels the current download if (and only if) it matches `identifier`.
func cancel(identifier: String) {
if let currentAssetContext = self.currentAssetContext, currentAssetContext.identifier == identifier {
currentAssetContext.disposable?.dispose()
self.currentAssetContext = nil
}
}
// Observers keyed by asset identifier; a Bag allows multiple subscribers per asset.
private var progressObserverContexts: [String: Bag<(AssetDownloadStatus) -> Void>] = [:]
// Registers `next` as a progress observer for `identifier`, immediately
// replaying the current status (or zero progress if the asset is not the one
// being downloaded). The returned disposable unregisters the observer on `queue`.
private func downloadProgress(identifier: String, next: @escaping (AssetDownloadStatus) -> Void) -> Disposable {
let bag: Bag<(AssetDownloadStatus) -> Void>
if let current = self.progressObserverContexts[identifier] {
bag = current
} else {
bag = Bag()
self.progressObserverContexts[identifier] = bag
}
let index = bag.add(next)
// Replay the current state so new observers don't wait for the next event.
if let currentAssetContext = self.currentAssetContext, currentAssetContext.identifier == identifier {
next(currentAssetContext.status)
} else {
next(.progress(0.0))
}
let queue = self.queue
return ActionDisposable { [weak self, weak bag] in
queue.async {
guard let `self` = self else {
return
}
// Remove only if the stored bag is still the same instance we added to;
// drop the whole entry once the last observer is gone.
if let bag = bag, let listBag = self.progressObserverContexts[identifier], listBag === bag {
bag.remove(index)
if bag.isEmpty {
self.progressObserverContexts.removeValue(forKey: identifier)
}
}
}
}
}
// Signal wrapper over the observer mechanism above: emits statuses for
// `identifier` and completes once `.completed` is seen.
func downloadProgress(identifier: String) -> Signal<AssetDownloadStatus, NoError> {
return Signal { [weak self] subscriber in
if let self {
return self.downloadProgress(identifier: identifier, next: { status in
subscriber.putNext(status)
if case .completed = status {
subscriber.putCompletion()
}
})
} else {
// Manager already deallocated: nothing to observe.
return EmptyDisposable
}
}
}
}
/// Determines whether the media data for `asset` is already available on-device,
/// without triggering a network (iCloud) download.
///
/// Emits exactly one `Bool` and then completes: `true` if the asset's data could
/// be loaded with network access disallowed, `false` otherwise.
///
/// Fix: the original wrapped `subscriber.putNext(data != nil)` in
/// `if data != nil`, so the signal could only ever emit `true`; for a non-local
/// asset it completed without emitting any value, and Bool consumers (such as
/// the `checkIfAssetIsLocal(...).start(next:)` call site) were never informed of
/// the negative result. The value is now emitted unconditionally.
func checkIfAssetIsLocal(_ asset: PHAsset) -> Signal<Bool, NoError> {
    return Signal { subscriber in
        let options = PHImageRequestOptions()
        // Forbid iCloud fetches so the request only succeeds for locally cached data.
        options.isNetworkAccessAllowed = false
        let requestId: PHImageRequestID
        if #available(iOS 13, *) {
            requestId = imageManager.requestImageDataAndOrientation(for: asset, options: options) { data, _, _, _ in
                subscriber.putNext(data != nil)
                subscriber.putCompletion()
            }
        } else {
            // Pre-iOS 13 fallback path.
            requestId = imageManager.requestImageData(for: asset, options: options) { data, _, _, _ in
                subscriber.putNext(data != nil)
                subscriber.putCompletion()
            }
        }
        return ActionDisposable {
            // Cancel the in-flight Photos request if the signal is disposed early.
            imageManager.cancelImageRequest(requestId)
        }
    }
}
// Download state reported by AssetDownloadManager / downloadAssetMediaData.
enum AssetDownloadStatus {
// Fractional download progress, as supplied by PHImageRequestOptions.progressHandler.
case progress(Float)
// The asset's media data is fully available.
case completed
}
/// Fetches the full media data for `asset`, allowing a network (iCloud)
/// download if needed, and reports progress along the way.
///
/// Emits `.progress(x)` values while downloading, then `.completed` once the
/// data is available, and finally completes. If the request yields no data,
/// the signal completes without emitting `.completed`.
private func downloadAssetMediaData(_ asset: PHAsset) -> Signal<AssetDownloadStatus, NoError> {
return Signal { subscriber in
let options = PHImageRequestOptions()
// Allow fetching from iCloud; this is what makes progress reporting meaningful.
options.isNetworkAccessAllowed = true
options.progressHandler = { progress, _, _, _ in
// NOTE(review): Photos may invoke this on an arbitrary queue — consumers
// (e.g. AssetDownloadManager) are expected to redeliver on their own queue.
subscriber.putNext(.progress(Float(progress)))
}
let requestId: PHImageRequestID
if #available(iOS 13, *) {
requestId = imageManager.requestImageDataAndOrientation(for: asset, options: options) { data, _, _, _ in
if data != nil {
subscriber.putNext(.completed)
}
subscriber.putCompletion()
}
} else {
// Pre-iOS 13 fallback path.
requestId = imageManager.requestImageData(for: asset, options: options) { data, _, _, _ in
if data != nil {
subscriber.putNext(.completed)
}
subscriber.putCompletion()
}
}
return ActionDisposable {
// Cancel the underlying Photos request when the signal is disposed.
imageManager.cancelImageRequest(requestId)
}
}
}
func assetImage(fetchResult: PHFetchResult<PHAsset>, index: Int, targetSize: CGSize, exact: Bool, deliveryMode: PHImageRequestOptionsDeliveryMode = .opportunistic, synchronous: Bool = false) -> Signal<UIImage?, NoError> {
let asset = fetchResult[index]
return assetImage(asset: asset, targetSize: targetSize, exact: exact, deliveryMode: deliveryMode, synchronous: synchronous)
@ -68,3 +248,10 @@ func assetVideo(fetchResult: PHFetchResult<PHAsset>, index: Int) -> Signal<AVAss
}
}
}
extension PHAsset {
// Best-effort check for whether the asset's data is present on-device.
// Returns nil when the information cannot be determined.
// NOTE(review): this reads the undocumented "locallyAvailable" key of
// PHAssetResource via KVC — a private API that may change or be rejected by
// App Review; confirm before relying on it (see also checkIfAssetIsLocal for
// a supported, asynchronous alternative).
var isLocallyAvailable: Bool? {
let resourceArray = PHAssetResource.assetResources(for: self)
return resourceArray.first?.value(forKey: "locallyAvailable") as? Bool
}
}

View File

@ -642,7 +642,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
}
#endif
if case .notDetermined = cameraAccess, !self.requestedCameraAccess {
if !stories, case .notDetermined = cameraAccess, !self.requestedCameraAccess {
self.requestedCameraAccess = true
self.mediaAssetsContext.requestCameraAccess()
}
@ -777,6 +777,10 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
private weak var currentGalleryController: TGModernGalleryController?
private func requestAssetDownload(_ asset: PHAsset) {
}
private var openingMedia = false
fileprivate func openMedia(fetchResult: PHFetchResult<PHAsset>, index: Int, immediateThumbnail: UIImage?) {
guard let controller = self.controller, let interaction = controller.interaction, let (layout, _) = self.validLayout, !self.openingMedia else {
@ -788,7 +792,29 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
if let customSelection = controller.customSelection {
self.openingMedia = true
customSelection(controller, fetchResult[index])
let asset = fetchResult[index]
customSelection(controller, asset)
// let isLocallyAvailable = asset.isLocallyAvailable
//
// if let isLocallyAvailable {
// if isLocallyAvailable {
// customSelection(controller, asset)
// } else {
// self.requestAssetDownload(asset)
// }
// } else {
// let _ = (checkIfAssetIsLocal(asset)
// |> deliverOnMainQueue).start(next: { [weak self] isLocallyAvailable in
// if isLocallyAvailable {
// customSelection(controller, asset)
// } else {
// self?.requestAssetDownload(asset)
// }
// })
// }
Queue.mainQueue().after(0.3) {
self.openingMedia = false
}
@ -1275,8 +1301,15 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
if let bannedSendPhotos = self.controller?.bannedSendPhotos, let bannedSendVideos = self.controller?.bannedSendVideos {
bannedSendMedia = (max(bannedSendPhotos.0, bannedSendVideos.0), bannedSendPhotos.1 || bannedSendVideos.1)
}
if case let .noAccess(cameraAccess) = self.state {
var hasCamera = cameraAccess == .authorized
if let subject = self.controller?.subject, case .assets(_, .story) = subject {
hasCamera = false
self.controller?.navigationItem.rightBarButtonItem = nil
}
var placeholderTransition = transition
let placeholderNode: MediaPickerPlaceholderNode
if let current = self.placeholderNode {
@ -1300,7 +1333,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
self.updateNavigation(transition: .immediate)
}
placeholderNode.update(layout: layout, theme: self.presentationData.theme, strings: self.presentationData.strings, hasCamera: cameraAccess == .authorized, transition: placeholderTransition)
placeholderNode.update(layout: layout, theme: self.presentationData.theme, strings: self.presentationData.strings, hasCamera: hasCamera, transition: placeholderTransition)
placeholderTransition.updateFrame(node: placeholderNode, frame: innerBounds)
} else if let placeholderNode = self.placeholderNode, bannedSendMedia == nil {
self.placeholderNode = nil

View File

@ -1729,7 +1729,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
if let strongSelf = self, (count < 2 && currentTimestamp > timestamp + 24 * 60 * 60) {
strongSelf.displayedPreviewTooltip = true
let controller = TooltipScreen(account: strongSelf.context.account, sharedContext: strongSelf.context.sharedContext, text: .plain(text: isDark ? strongSelf.presentationData.strings.WallpaperPreview_PreviewInDayMode : strongSelf.presentationData.strings.WallpaperPreview_PreviewInNightMode), style: .customBlur(UIColor(rgb: 0x333333, alpha: 0.35)), icon: nil, location: .point(frame.offsetBy(dx: 1.0, dy: 6.0), .bottom), displayDuration: .custom(3.0), inset: 3.0, shouldDismissOnTouch: { _, _ in
let controller = TooltipScreen(account: strongSelf.context.account, sharedContext: strongSelf.context.sharedContext, text: .plain(text: isDark ? strongSelf.presentationData.strings.WallpaperPreview_PreviewInDayMode : strongSelf.presentationData.strings.WallpaperPreview_PreviewInNightMode), style: .customBlur(UIColor(rgb: 0x333333, alpha: 0.35), 0.0), icon: nil, location: .point(frame.offsetBy(dx: 1.0, dy: 6.0), .bottom), displayDuration: .custom(3.0), inset: 3.0, shouldDismissOnTouch: { _, _ in
return .dismiss(consume: false)
})
strongSelf.galleryController()?.present(controller, in: .current)

View File

@ -224,13 +224,22 @@ public extension TelegramEngine {
to peerId: EnginePeer.Id,
replyTo replyToMessageId: EngineMessage.Id?,
storyId: StoryId? = nil,
content: EngineOutgoingMessageContent
content: EngineOutgoingMessageContent,
silentPosting: Bool = false,
scheduleTime: Int32? = nil
) -> Signal<[MessageId?], NoError> {
let message: EnqueueMessage?
var message: EnqueueMessage?
if case let .contextResult(results, result) = content {
message = self.outgoingMessageWithChatContextResult(to: peerId, threadId: nil, botId: results.botId, result: result, replyToMessageId: replyToMessageId, replyToStoryId: storyId, hideVia: true, silentPosting: false, scheduleTime: nil, correlationId: nil)
message = self.outgoingMessageWithChatContextResult(to: peerId, threadId: nil, botId: results.botId, result: result, replyToMessageId: replyToMessageId, replyToStoryId: storyId, hideVia: true, silentPosting: silentPosting, scheduleTime: scheduleTime, correlationId: nil)
} else {
var attributes: [MessageAttribute] = []
if silentPosting {
attributes.append(NotificationInfoMessageAttribute(flags: .muted))
}
if let scheduleTime = scheduleTime {
attributes.append(OutgoingScheduleInfoMessageAttribute(scheduleTime: scheduleTime))
}
var text: String = ""
var mediaReference: AnyMediaReference?
switch content {
@ -257,6 +266,8 @@ public extension TelegramEngine {
)
}
guard let message = message else {
return .complete()
}

View File

@ -1117,7 +1117,7 @@ final class CaptureControlsComponent: Component {
),
minSize: hintIconSize,
action: {
component.flipTapped()
component.lockRecording()
}
)
),

View File

@ -37,6 +37,8 @@ public final class ChatScheduleTimeController: ViewController {
private var presentationData: PresentationData
private var presentationDataDisposable: Disposable?
public var dismissed: () -> Void = {}
public init(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil, peerId: PeerId, mode: ChatScheduleTimeControllerMode, style: ChatScheduleTimeControllerStyle, currentTime: Int32? = nil, minimalTime: Int32? = nil, dismissByTapOutside: Bool = true, completion: @escaping (Int32) -> Void) {
self.context = context
self.peerId = peerId
@ -105,6 +107,7 @@ public final class ChatScheduleTimeController: ViewController {
}
override public func dismiss(completion: (() -> Void)? = nil) {
self.dismissed()
self.controllerNode.animateOut(completion: completion)
}

View File

@ -1136,17 +1136,19 @@ public func recommendedVideoExportConfiguration(values: MediaEditorValues, durat
let compressionProperties: [String: Any]
let codecType: AVVideoCodecType
if hasHEVCHardwareEncoder {
var bitrate: Int = 3700
if image {
var bitrate: Int = 3700
if image {
bitrate = 5000
} else {
if duration < 10 {
bitrate = 5800
} else if duration < 20 {
bitrate = 5500
} else if duration < 30 {
bitrate = 5000
} else {
if duration < 10 {
bitrate = 5500
} else if duration < 25 {
bitrate = 4500
}
}
}
if hasHEVCHardwareEncoder {
codecType = AVVideoCodecType.hevc
compressionProperties = [
AVVideoAverageBitRateKey: bitrate * 1000,
@ -1155,7 +1157,7 @@ public func recommendedVideoExportConfiguration(values: MediaEditorValues, durat
} else {
codecType = AVVideoCodecType.h264
compressionProperties = [
AVVideoAverageBitRateKey: 3800000,
AVVideoAverageBitRateKey: bitrate * 1000,
AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
]

View File

@ -341,41 +341,54 @@ private func verticesData(
bottomRight = simd_float2(1.0, 1.0)
}
let relativeSize = CGSize(
width: size.width / containerSize.width,
height: size.height / containerSize.height
)
let relativeOffset = CGPoint(
x: position.x / containerSize.width,
y: position.y / containerSize.height
)
let rect = CGRect(
origin: CGPoint(
x: relativeOffset.x - relativeSize.width / 2.0,
y: relativeOffset.y - relativeSize.height / 2.0
),
size: relativeSize
)
let angle = Float(.pi - rotation)
let cosAngle = cos(angle)
let sinAngle = sin(angle)
let centerX = Float(position.x)
let centerY = Float(position.y)
let halfWidth = Float(size.width / 2.0)
let halfHeight = Float(size.height / 2.0)
return [
VertexData(
pos: simd_float4(x: Float(rect.minX) * 2.0, y: Float(rect.minY) * 2.0, z: z, w: 1),
pos: simd_float4(
x: (centerX + (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0,
y: (centerY + (halfWidth * sinAngle) + (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0,
z: z,
w: 1
),
texCoord: topLeft,
localPos: simd_float2(0.0, 0.0)
),
VertexData(
pos: simd_float4(x: Float(rect.maxX) * 2.0, y: Float(rect.minY) * 2.0, z: z, w: 1),
pos: simd_float4(
x: (centerX - (halfWidth * cosAngle) - (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0,
y: (centerY - (halfWidth * sinAngle) + (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0,
z: z,
w: 1
),
texCoord: topRight,
localPos: simd_float2(1.0, 0.0)
),
VertexData(
pos: simd_float4(x: Float(rect.minX) * 2.0, y: Float(rect.maxY) * 2.0, z: z, w: 1),
pos: simd_float4(
x: (centerX + (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0,
y: (centerY + (halfWidth * sinAngle) - (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0,
z: z,
w: 1
),
texCoord: bottomLeft,
localPos: simd_float2(0.0, 1.0)
),
VertexData(
pos: simd_float4(x: Float(rect.maxX) * 2.0, y: Float(rect.maxY) * 2.0, z: z, w: 1),
pos: simd_float4(
x: (centerX - (halfWidth * cosAngle) + (halfHeight * sinAngle)) / Float(containerSize.width) * 2.0,
y: (centerY - (halfWidth * sinAngle) - (halfHeight * cosAngle)) / Float(containerSize.height) * 2.0,
z: z,
w: 1
),
texCoord: bottomRight,
localPos: simd_float2(1.0, 1.0)
)
@ -650,13 +663,13 @@ final class VideoInputScalePass: RenderPass {
}
func process(input: MTLTexture, secondInput: MTLTexture?, timestamp: CMTime, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? {
//#if targetEnvironment(simulator)
//
//#else
#if targetEnvironment(simulator)
#else
guard max(input.width, input.height) > 1920 || secondInput != nil else {
return input
}
//#endif
#endif
let scaledSize = CGSize(width: input.width, height: input.height).fitted(CGSize(width: 1920.0, height: 1920.0))
let width: Int
@ -705,9 +718,9 @@ final class VideoInputScalePass: RenderPass {
renderCommandEncoder.setRenderPipelineState(self.mainPipelineState!)
//#if targetEnvironment(simulator)
// let secondInput = input
//#endif
#if targetEnvironment(simulator)
let secondInput = input
#endif
let (mainVideoState, additionalVideoState, transitionVideoState) = self.transitionState(for: timestamp, mainInput: input, additionalInput: secondInput)

View File

@ -935,7 +935,9 @@ final class MediaEditorScreenComponent: Component {
let scrubberFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - scrubberSize.width) / 2.0), y: availableSize.height - environment.safeInsets.bottom - scrubberSize.height - 8.0 + controlsBottomInset), size: scrubberSize)
if let scrubberView = self.scrubber.view {
var animateIn = false
if scrubberView.superview == nil {
animateIn = true
if let inputPanelBackgroundView = self.inputPanelBackground.view, inputPanelBackgroundView.superview != nil {
self.insertSubview(scrubberView, belowSubview: inputPanelBackgroundView)
} else {
@ -945,6 +947,10 @@ final class MediaEditorScreenComponent: Component {
transition.setFrame(view: scrubberView, frame: scrubberFrame)
if !self.animatingButtons {
transition.setAlpha(view: scrubberView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0)
} else if animateIn {
scrubberView.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
scrubberView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
scrubberView.layer.animateScale(from: 0.6, to: 1.0, duration: 0.2)
}
}
@ -1111,7 +1117,7 @@ final class MediaEditorScreenComponent: Component {
}
self.deactivateInput()
},
sendMessageOptionsAction: { },
sendMessageOptionsAction: nil,
sendStickerAction: { _ in },
setMediaRecordingActive: nil,
lockMediaRecording: nil,
@ -1225,13 +1231,13 @@ final class MediaEditorScreenComponent: Component {
transition: transition,
component: AnyComponent(BlurredGradientComponent(position: .bottom, tag: nil)),
environment: {},
containerSize: CGSize(width: availableSize.width, height: keyboardHeight + 100.0)
containerSize: CGSize(width: availableSize.width, height: keyboardHeight + 60.0)
)
if let inputPanelBackgroundView = self.inputPanelBackground.view {
if inputPanelBackgroundView.superview == nil {
self.addSubview(inputPanelBackgroundView)
}
let isVisible = inputHeight > 44.0
let isVisible = isEditingCaption && inputHeight > 44.0
transition.setFrame(view: inputPanelBackgroundView, frame: CGRect(origin: CGPoint(x: 0.0, y: isVisible ? availableSize.height - inputPanelBackgroundSize.height : availableSize.height), size: inputPanelBackgroundSize))
if !self.animatingButtons {
transition.setAlpha(view: inputPanelBackgroundView, alpha: isVisible ? 1.0 : 0.0, delay: isVisible ? 0.0 : 0.4)
@ -1922,7 +1928,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
})
let imageEntity = DrawingStickerEntity(content: .image(image ?? additionalImage, false))
imageEntity.referenceDrawingSize = storyDimensions
imageEntity.scale = 1.49
imageEntity.scale = 1.625
imageEntity.position = position.getPosition(storyDimensions)
self.entitiesView.add(imageEntity, announce: false)
} else if case let .video(_, _, mirror, additionalVideoPath, _, _, _, changes, position) = subject {
@ -1930,7 +1936,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if let additionalVideoPath {
let videoEntity = DrawingStickerEntity(content: .dualVideoReference)
videoEntity.referenceDrawingSize = storyDimensions
videoEntity.scale = 1.49
videoEntity.scale = 1.625
videoEntity.position = position.getPosition(storyDimensions)
self.entitiesView.add(videoEntity, announce: false)
@ -1949,8 +1955,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if case let .asset(asset) = subject, asset.mediaType == .video {
let videoEntity = DrawingStickerEntity(content: .dualVideoReference)
videoEntity.referenceDrawingSize = storyDimensions
videoEntity.scale = 1.49
videoEntity.position = PIPPosition.bottomRight.getPosition(storyDimensions)
videoEntity.scale = 1.625
videoEntity.position = PIPPosition.topRight.getPosition(storyDimensions)
self.entitiesView.add(videoEntity, announce: false)
mediaEditor.setAdditionalVideo("", positionChanges: [VideoPositionChange(additional: false, timestamp: 0.0), VideoPositionChange(additional: true, timestamp: 3.0)])
@ -2369,7 +2375,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
backgroundImage.draw(in: CGRect(origin: .zero, size: size))
let ellipsePosition = pipPosition.getPosition(storyDimensions)
let ellipseSize = CGSize(width: 401.0, height: 401.0)
let ellipseSize = CGSize(width: 439.0, height: 439.0)
let ellipseRect = CGRect(origin: CGPoint(x: ellipsePosition.x - ellipseSize.width / 2.0, y: ellipsePosition.y - ellipseSize.height / 2.0), size: ellipseSize)
let foregroundSize = foregroundImage.size.aspectFilled(ellipseSize)
let foregroundRect = CGRect(origin: CGPoint(x: ellipseRect.center.x - foregroundSize.width / 2.0, y: ellipseRect.center.y - foregroundSize.height / 2.0), size: foregroundSize)
@ -2421,10 +2427,6 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
view.animateIn(from: .camera, completion: completion)
}
}
// Queue.mainQueue().after(0.5) {
// self.presentPrivacyTooltip()
// }
}
func animateOut(finished: Bool, saveDraft: Bool, completion: @escaping () -> Void) {
@ -2603,22 +2605,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
self.requestUpdate(transition: transition)
}
func presentPrivacyTooltip() {
guard let sourceView = self.componentHost.findTaggedView(tag: privacyButtonTag) else {
return
}
let parentFrame = self.view.convert(self.bounds, to: nil)
let absoluteFrame = sourceView.convert(sourceView.bounds, to: nil).offsetBy(dx: -parentFrame.minX, dy: 0.0)
let location = CGRect(origin: CGPoint(x: absoluteFrame.midX, y: absoluteFrame.maxY + 3.0), size: CGSize())
let tooltipController = TooltipScreen(account: self.context.account, sharedContext: self.context.sharedContext, text: .plain(text: "You can set who can view this story."), location: .point(location, .top), displayDuration: .manual, inset: 16.0, shouldDismissOnTouch: { _, _ in
return .ignore
})
self.controller?.present(tooltipController, in: .current)
}
private weak var muteTooltip: ViewController?
func presentMutedTooltip() {
guard let sourceView = self.componentHost.findTaggedView(tag: muteButtonTag) else {
@ -2791,6 +2778,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
private var drawingScreen: DrawingScreen?
private var stickerScreen: StickerPickerScreen?
private var defaultToEmoji = false
private var previousDrawingData: Data?
private var previousDrawingEntities: [DrawingEntity]?
@ -2889,7 +2877,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
switch mode {
case .sticker:
let controller = StickerPickerScreen(context: self.context, inputData: self.stickerPickerInputData.get())
self.mediaEditor?.stop()
let controller = StickerPickerScreen(context: self.context, inputData: self.stickerPickerInputData.get(), defaultToEmoji: self.defaultToEmoji)
controller.completion = { [weak self] content in
if let self {
if let content {
@ -2899,8 +2888,17 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.hasAnyChanges = true
self.controller?.isSavingAvailable = true
self.controller?.requestLayout(transition: .immediate)
if case let .file(file) = content {
if file.isCustomEmoji {
self.defaultToEmoji = true
} else {
self.defaultToEmoji = false
}
}
}
self.stickerScreen = nil
self.mediaEditor?.play()
}
}
controller.customModalStyleOverlayTransitionFactorUpdated = { [weak self, weak controller] transition in
@ -3134,15 +3132,17 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
case bottomRight
func getPosition(_ size: CGSize) -> CGPoint {
let topOffset = CGPoint(x: 267.0, y: 438.0)
let bottomOffset = CGPoint(x: 267.0, y: 438.0)
switch self {
case .topLeft:
return CGPoint(x: 224.0, y: 477.0)
return CGPoint(x: topOffset.x, y: topOffset.y)
case .topRight:
return CGPoint(x: size.width - 224.0, y: 477.0)
return CGPoint(x: size.width - topOffset.x, y: topOffset.y)
case .bottomLeft:
return CGPoint(x: 224.0, y: size.height - 477.0)
return CGPoint(x: bottomOffset.x, y: size.height - bottomOffset.y)
case .bottomRight:
return CGPoint(x: size.width - 224.0, y: size.height - 477.0)
return CGPoint(x: size.width - bottomOffset.x, y: size.height - bottomOffset.y)
}
}
}
@ -3374,28 +3374,29 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
guard let self else {
return
}
self.push(
ShareWithPeersScreen(
context: self.context,
initialPrivacy: privacy,
allowScreenshots: !isForwardingDisabled,
pin: pin,
stateContext: stateContext,
completion: { [weak self] result, isForwardingDisabled, pin in
guard let self else {
return
}
if case .closeFriends = privacy.base {
let _ = self.context.engine.privacy.updateCloseFriends(peerIds: result.additionallyIncludePeers).start()
completion(EngineStoryPrivacy(base: .closeFriends, additionallyIncludePeers: []))
} else {
completion(result)
}
},
editCategory: { _, _, _ in }
)
let controller = ShareWithPeersScreen(
context: self.context,
initialPrivacy: privacy,
allowScreenshots: !isForwardingDisabled,
pin: pin,
stateContext: stateContext,
completion: { [weak self] result, isForwardingDisabled, pin in
guard let self else {
return
}
if case .closeFriends = privacy.base {
let _ = self.context.engine.privacy.updateCloseFriends(peerIds: result.additionallyIncludePeers).start()
completion(EngineStoryPrivacy(base: .closeFriends, additionallyIncludePeers: []))
} else {
completion(result)
}
},
editCategory: { _, _, _ in }
)
controller.dismissed = {
self.node.mediaEditor?.play()
}
self.push(controller)
})
}
@ -4464,6 +4465,7 @@ public final class BlurredGradientComponent: Component {
}
private var gradientMask = UIImageView()
private var gradientBackground = SimpleLayer()
private var gradientForeground = SimpleGradientLayer()
public func update(component: BlurredGradientComponent, availableSize: CGSize, transition: Transition) -> CGSize {
@ -4473,17 +4475,21 @@ public final class BlurredGradientComponent: Component {
self.updateColor(color: UIColor(rgb: 0x000000, alpha: component.position == .top ? 0.15 : 0.25), transition: transition.containedViewLayoutTransition)
let gradientHeight: CGFloat = 100.0
if self.mask == nil {
self.mask = self.gradientMask
self.gradientMask.image = generateGradientImage(
size: CGSize(width: 1.0, height: availableSize.height),
size: CGSize(width: 1.0, height: gradientHeight),
colors: [UIColor(rgb: 0xffffff, alpha: 1.0), UIColor(rgb: 0xffffff, alpha: 1.0), UIColor(rgb: 0xffffff, alpha: 0.0)],
locations: component.position == .top ? [0.0, 0.8, 1.0] : [1.0, 0.20, 0.0],
direction: .vertical
)
self.gradientMask.layer.addSublayer(self.gradientBackground)
self.gradientBackground.backgroundColor = UIColor(rgb: 0xffffff).cgColor
if component.dark {
self.gradientForeground.colors = [UIColor(rgb: 0x000000, alpha: 0.6).cgColor, UIColor(rgb: 0x000000, alpha: 0.6).cgColor, UIColor(rgb: 0x000000, alpha: 0.0).cgColor]
self.gradientForeground.colors = [UIColor(rgb: 0x000000, alpha: 0.4).cgColor, UIColor(rgb: 0x000000, alpha: 0.4).cgColor, UIColor(rgb: 0x000000, alpha: 0.0).cgColor]
self.gradientForeground.locations = [0.0, 0.8, 1.0]
} else {
self.gradientForeground.colors = [UIColor(rgb: 0x000000, alpha: 0.35).cgColor, UIColor(rgb: 0x000000, alpha: 0.0).cgColor]
@ -4494,7 +4500,8 @@ public final class BlurredGradientComponent: Component {
self.layer.addSublayer(self.gradientForeground)
}
transition.setFrame(view: self.gradientMask, frame: CGRect(origin: .zero, size: availableSize))
transition.setFrame(view: self.gradientMask, frame: CGRect(origin: .zero, size: CGSize(width: availableSize.width, height: gradientHeight)))
transition.setFrame(layer: self.gradientBackground, frame: CGRect(origin: CGPoint(x: 0.0, y: gradientHeight), size: availableSize))
transition.setFrame(layer: self.gradientForeground, frame: CGRect(origin: .zero, size: availableSize))
self.update(size: availableSize, transition: transition.containedViewLayoutTransition)

View File

@ -258,7 +258,7 @@ final class StoryPreviewComponent: Component {
presentController: { _ in },
presentInGlobalOverlay: { _ in },
sendMessageAction: { },
sendMessageOptionsAction: { },
sendMessageOptionsAction: nil,
sendStickerAction: { _ in },
setMediaRecordingActive: { _, _, _ in },
lockMediaRecording: nil,

View File

@ -46,7 +46,7 @@ public final class MessageInputActionButtonComponent: Component {
public let mode: Mode
public let action: (Mode, Action, Bool) -> Void
public let longPressAction: () -> Void
public let longPressAction: ((UIView, ContextGesture?) -> Void)?
public let switchMediaInputMode: () -> Void
public let updateMediaCancelFraction: (CGFloat) -> Void
public let lockMediaRecording: () -> Void
@ -62,7 +62,7 @@ public final class MessageInputActionButtonComponent: Component {
public init(
mode: Mode,
action: @escaping (Mode, Action, Bool) -> Void,
longPressAction: @escaping () -> Void,
longPressAction: ((UIView, ContextGesture?) -> Void)?,
switchMediaInputMode: @escaping () -> Void,
updateMediaCancelFraction: @escaping (CGFloat) -> Void,
lockMediaRecording: @escaping () -> Void,
@ -113,9 +113,14 @@ public final class MessageInputActionButtonComponent: Component {
return true
}
public final class View: HighlightTrackingButton {
public final class View: UIView {
private var micButton: ChatTextInputMediaRecordingButton?
public let button: HighlightTrackingButtonNode
public let referenceNode: ContextReferenceContentNode
public let containerNode: ContextControllerSourceNode
private let sendIconView: UIImageView
private var moreButton: MoreHeaderButton?
private var component: MessageInputActionButtonComponent?
@ -124,13 +129,31 @@ public final class MessageInputActionButtonComponent: Component {
override init(frame: CGRect) {
self.sendIconView = UIImageView()
self.button = HighlightTrackingButtonNode()
self.referenceNode = ContextReferenceContentNode()
self.containerNode = ContextControllerSourceNode()
super.init(frame: frame)
self.addSubview(self.button.view)
self.containerNode.addSubnode(self.referenceNode)
self.referenceNode.view.addSubview(self.sendIconView)
self.button.addSubnode(self.containerNode)
self.containerNode.shouldBegin = { [weak self] location in
guard let self, let component = self.component, let _ = component.longPressAction else {
return false
}
return true
}
self.containerNode.activated = { [weak self] gesture, _ in
guard let self, let component = self.component, let longPressAction = component.longPressAction else {
return
}
longPressAction(self, gesture)
}
self.isMultipleTouchEnabled = false
self.addSubview(self.sendIconView)
self.highligthedChanged = { [weak self] highlighted in
self.button.highligthedChanged = { [weak self] highlighted in
guard let self else {
return
}
@ -141,8 +164,10 @@ public final class MessageInputActionButtonComponent: Component {
transition.setSublayerTransform(view: self, transform: CATransform3DMakeScale(scale, scale, 1.0))
}
self.addTarget(self, action: #selector(self.touchDown), for: .touchDown)
self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
self.button.addTarget(self, action: #selector(self.touchDown), forControlEvents: .touchDown)
self.button.addTarget(self, action: #selector(self.pressed), forControlEvents: .touchUpInside)
// but.addTarget(self, action: #selector(self.touchDown), for: .touchDown)
// self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
}
required init?(coder: NSCoder) {
@ -162,10 +187,11 @@ public final class MessageInputActionButtonComponent: Component {
}
component.action(component.mode, .up, false)
}
override public func continueTracking(_ touch: UITouch, with event: UIEvent?) -> Bool {
return super.continueTracking(touch, with: event)
}
// public override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
// let result = super.hitTest(point, with: event)
// return result
// }
func update(component: MessageInputActionButtonComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
let previousComponent = self.component
@ -174,6 +200,8 @@ public final class MessageInputActionButtonComponent: Component {
let themeUpdated = previousComponent?.theme !== component.theme
self.containerNode.isUserInteractionEnabled = component.longPressAction != nil
if self.micButton == nil {
let micButton = ChatTextInputMediaRecordingButton(
context: component.context,
@ -240,7 +268,7 @@ public final class MessageInputActionButtonComponent: Component {
}
}
if self.moreButton == nil {
if case .more = component.mode, self.moreButton == nil {
let moreButton = MoreHeaderButton(color: .white)
self.moreButton = moreButton
self.addSubnode(moreButton)
@ -340,6 +368,10 @@ public final class MessageInputActionButtonComponent: Component {
}
}
transition.setFrame(view: self.button.view, frame: CGRect(origin: .zero, size: availableSize))
transition.setFrame(view: self.containerNode.view, frame: CGRect(origin: .zero, size: availableSize))
transition.setFrame(view: self.referenceNode.view, frame: CGRect(origin: .zero, size: availableSize))
transition.setAlpha(view: self.sendIconView, alpha: sendAlpha)
transition.setScale(view: self.sendIconView, scale: sendAlpha == 0.0 ? 0.01 : 1.0)

View File

@ -72,7 +72,7 @@ public final class MessageInputPanelComponent: Component {
public let presentController: (ViewController) -> Void
public let presentInGlobalOverlay: (ViewController) -> Void
public let sendMessageAction: () -> Void
public let sendMessageOptionsAction: () -> Void
public let sendMessageOptionsAction: ((UIView, ContextGesture?) -> Void)?
public let sendStickerAction: (TelegramMediaFile) -> Void
public let setMediaRecordingActive: ((Bool, Bool, Bool) -> Void)?
public let lockMediaRecording: (() -> Void)?
@ -114,7 +114,7 @@ public final class MessageInputPanelComponent: Component {
presentController: @escaping (ViewController) -> Void,
presentInGlobalOverlay: @escaping (ViewController) -> Void,
sendMessageAction: @escaping () -> Void,
sendMessageOptionsAction: @escaping () -> Void,
sendMessageOptionsAction: ((UIView, ContextGesture?) -> Void)?,
sendStickerAction: @escaping (TelegramMediaFile) -> Void,
setMediaRecordingActive: ((Bool, Bool, Bool) -> Void)?,
lockMediaRecording: (() -> Void)?,
@ -747,7 +747,7 @@ public final class MessageInputPanelComponent: Component {
break
}
},
longPressAction: {},
longPressAction: nil,
switchMediaInputMode: {
},
updateMediaCancelFraction: { _ in
@ -926,12 +926,7 @@ public final class MessageInputPanelComponent: Component {
break
}
},
longPressAction: { [weak self] in
guard let self, let component = self.component else {
return
}
component.sendMessageOptionsAction()
},
longPressAction: component.sendMessageOptionsAction,
switchMediaInputMode: { [weak self] in
guard let self else {
return

View File

@ -53,6 +53,7 @@ public final class StoryContentContextImpl: StoryContentContext {
PostboxViewKey.cachedPeerData(peerId: peerId),
PostboxViewKey.storiesState(key: .peer(peerId)),
PostboxViewKey.storyItems(peerId: peerId),
PostboxViewKey.peerPresences(peerIds: Set([peerId]))
]
if peerId == context.account.peerId {
inputKeys.append(PostboxViewKey.storiesState(key: .local))
@ -113,6 +114,11 @@ public final class StoryContentContextImpl: StoryContentContext {
return
}
let additionalPeerData: StoryContentContextState.AdditionalPeerData
var peerPresence: PeerPresence?
if let presencesView = views.views[PostboxViewKey.peerPresences(peerIds: Set([peerId]))] as? PeerPresencesView {
peerPresence = presencesView.presences[peerId]
}
if let cachedPeerDataView = views.views[PostboxViewKey.cachedPeerData(peerId: peerId)] as? CachedPeerDataView, let cachedUserData = cachedPeerDataView.cachedPeerData as? CachedUserData {
var isMuted = false
if let notificationSettings = peerView.notificationSettings as? TelegramPeerNotificationSettings {
@ -120,9 +126,17 @@ public final class StoryContentContextImpl: StoryContentContext {
} else {
isMuted = resolvedAreStoriesMuted(globalSettings: globalNotificationSettings._asGlobalNotificationSettings(), peer: peer._asPeer(), peerSettings: nil)
}
additionalPeerData = StoryContentContextState.AdditionalPeerData(isMuted: isMuted, areVoiceMessagesAvailable: cachedUserData.voiceMessagesAvailable)
additionalPeerData = StoryContentContextState.AdditionalPeerData(
isMuted: isMuted,
areVoiceMessagesAvailable: cachedUserData.voiceMessagesAvailable,
presence: peerPresence.flatMap { EnginePeer.Presence($0) }
)
} else {
additionalPeerData = StoryContentContextState.AdditionalPeerData(isMuted: true, areVoiceMessagesAvailable: true)
additionalPeerData = StoryContentContextState.AdditionalPeerData(
isMuted: true,
areVoiceMessagesAvailable: true,
presence: peerPresence.flatMap { EnginePeer.Presence($0) }
)
}
let state = stateView.value?.get(Stories.PeerState.self)
@ -928,6 +942,7 @@ public final class SingleStoryContentContextImpl: StoryContentContext {
self.storyDisposable = (combineLatest(queue: .mainQueue(),
context.engine.data.subscribe(
TelegramEngine.EngineData.Item.Peer.Peer(id: storyId.peerId),
TelegramEngine.EngineData.Item.Peer.Presence(id: storyId.peerId),
TelegramEngine.EngineData.Item.Peer.AreVoiceMessagesAvailable(id: storyId.peerId),
TelegramEngine.EngineData.Item.Peer.NotificationSettings(id: storyId.peerId),
TelegramEngine.EngineData.Item.NotificationSettings.Global()
@ -965,18 +980,19 @@ public final class SingleStoryContentContextImpl: StoryContentContext {
return
}
let (peer, areVoiceMessagesAvailable, notificationSettings, globalNotificationSettings) = data
let (peer, presence, areVoiceMessagesAvailable, notificationSettings, globalNotificationSettings) = data
let (item, peers, allEntityFiles) = itemAndPeers
guard let peer else {
return
}
let isMuted = resolvedAreStoriesMuted(globalSettings: globalNotificationSettings._asGlobalNotificationSettings(), peer: peer._asPeer(), peerSettings: notificationSettings._asNotificationSettings())
let additionalPeerData = StoryContentContextState.AdditionalPeerData(
isMuted: isMuted,
areVoiceMessagesAvailable: areVoiceMessagesAvailable
areVoiceMessagesAvailable: areVoiceMessagesAvailable,
presence: presence
)
if item == nil {
@ -1104,6 +1120,7 @@ public final class PeerStoryListContentContextImpl: StoryContentContext {
self.storyDisposable = (combineLatest(queue: .mainQueue(),
context.engine.data.subscribe(
TelegramEngine.EngineData.Item.Peer.Peer(id: peerId),
TelegramEngine.EngineData.Item.Peer.Presence(id: peerId),
TelegramEngine.EngineData.Item.Peer.AreVoiceMessagesAvailable(id: peerId),
TelegramEngine.EngineData.Item.Peer.NotificationSettings(id: peerId),
TelegramEngine.EngineData.Item.NotificationSettings.Global()
@ -1117,7 +1134,7 @@ public final class PeerStoryListContentContextImpl: StoryContentContext {
return
}
let (peer, areVoiceMessagesAvailable, notificationSettings, globalNotificationSettings) = data
let (peer, presence, areVoiceMessagesAvailable, notificationSettings, globalNotificationSettings) = data
guard let peer else {
return
@ -1127,7 +1144,8 @@ public final class PeerStoryListContentContextImpl: StoryContentContext {
let additionalPeerData = StoryContentContextState.AdditionalPeerData(
isMuted: isMuted,
areVoiceMessagesAvailable: areVoiceMessagesAvailable
areVoiceMessagesAvailable: areVoiceMessagesAvailable,
presence: presence
)
self.listState = state

View File

@ -135,19 +135,31 @@ public final class StoryContentItem: Equatable {
public final class StoryContentContextState {
public final class AdditionalPeerData: Equatable {
public static func == (lhs: StoryContentContextState.AdditionalPeerData, rhs: StoryContentContextState.AdditionalPeerData) -> Bool {
return lhs.isMuted == rhs.isMuted && lhs.areVoiceMessagesAvailable == rhs.areVoiceMessagesAvailable
}
public let isMuted: Bool
public let areVoiceMessagesAvailable: Bool
public let presence: EnginePeer.Presence?
public init(
isMuted: Bool,
areVoiceMessagesAvailable: Bool
areVoiceMessagesAvailable: Bool,
presence: EnginePeer.Presence?
) {
self.isMuted = isMuted
self.areVoiceMessagesAvailable = areVoiceMessagesAvailable
self.presence = presence
}
public static func == (lhs: StoryContentContextState.AdditionalPeerData, rhs: StoryContentContextState.AdditionalPeerData) -> Bool {
if lhs.isMuted != rhs.isMuted {
return false
}
if lhs.areVoiceMessagesAvailable != rhs.areVoiceMessagesAvailable {
return false
}
if lhs.presence != rhs.presence {
return false
}
return true
}
}

View File

@ -359,6 +359,7 @@ public final class StoryItemSetContainerComponent: Component {
var videoRecordingBackgroundView: UIVisualEffectView?
let inputPanel = ComponentView<Empty>()
let inputPanelExternalState = MessageInputPanelComponent.ExternalState()
private let inputPanelContainer = UIView()
private let inputPanelBackground = ComponentView<Empty>()
var preparingToDisplayViewList: Bool = false
@ -436,6 +437,9 @@ public final class StoryItemSetContainerComponent: Component {
self.transitionCloneContainerView = UIView()
self.inputPanelContainer.isUserInteractionEnabled = false
self.inputPanelContainer.layer.cornerRadius = 11.0
super.init(frame: frame)
self.itemsContainerView.addSubview(self.scroller)
@ -1252,7 +1256,7 @@ public final class StoryItemSetContainerComponent: Component {
centerInfoView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
}
if let moreButtonView = self.moreButton.view {
moreButtonView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
moreButtonView.layer.animateAlpha(from: 0.0, to: moreButtonView.alpha, duration: 0.25)
}
if let soundButtonView = self.soundButton.view {
soundButtonView.layer.animateAlpha(from: 0.0, to: soundButtonView.alpha, duration: 0.25)
@ -1378,7 +1382,7 @@ public final class StoryItemSetContainerComponent: Component {
centerInfoView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false)
}
if let moreButtonView = self.moreButton.view {
moreButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false)
moreButtonView.layer.animateAlpha(from: moreButtonView.alpha, to: 0.0, duration: 0.25, removeOnCompletion: false)
}
if let soundButtonView = self.soundButton.view {
soundButtonView.layer.animateAlpha(from: soundButtonView.alpha, to: 0.0, duration: 0.25, removeOnCompletion: false)
@ -1609,6 +1613,16 @@ public final class StoryItemSetContainerComponent: Component {
if self.component?.slice.item.storyItem.id != component.slice.item.storyItem.id {
self.initializedOffset = false
if let inputPanelView = self.inputPanel.view as? MessageInputPanelComponent.View {
Queue.mainQueue().justDispatch {
inputPanelView.clearSendMessageInput()
}
}
if let tooltipScreen = self.sendMessageContext.tooltipScreen {
tooltipScreen.dismiss()
}
}
var itemsTransition = transition
var resetScrollingOffsetWithItemTransition = false
@ -1735,11 +1749,11 @@ public final class StoryItemSetContainerComponent: Component {
}
self.sendMessageContext.performSendMessageAction(view: self)
},
sendMessageOptionsAction: { [weak self] in
sendMessageOptionsAction: { [weak self] sourceView, gesture in
guard let self else {
return
}
self.sendMessageContext.presentSendMessageOptions(view: self)
self.sendMessageContext.presentSendMessageOptions(view: self, sourceView: sourceView, gesture: gesture)
},
sendStickerAction: { [weak self] sticker in
guard let self else {
@ -1869,11 +1883,11 @@ public final class StoryItemSetContainerComponent: Component {
}
}
let inputMediaNodeHeight = self.sendMessageContext.updateInputMediaNode(inputPanel: self.inputPanel, availableSize: availableSize, bottomInset: component.safeInsets.bottom, inputHeight: component.inputHeight, effectiveInputHeight: inputHeight, metrics: component.metrics, deviceMetrics: component.deviceMetrics, transition: transition)
let inputMediaNodeHeight = self.sendMessageContext.updateInputMediaNode(inputPanel: self.inputPanel, availableSize: availableSize, bottomInset: component.safeInsets.bottom, bottomContainerInset: component.containerInsets.bottom, inputHeight: component.inputHeight, effectiveInputHeight: inputHeight, metrics: component.metrics, deviceMetrics: component.deviceMetrics, transition: transition)
if inputMediaNodeHeight > 0.0 {
inputHeight = inputMediaNodeHeight
}
keyboardHeight = max(keyboardHeight, inputMediaNodeHeight)
keyboardHeight = inputHeight
let hasRecordingBlurBackground = self.sendMessageContext.videoRecorderValue != nil || self.sendMessageContext.hasRecordedVideoPreview
if hasRecordingBlurBackground {
@ -1904,11 +1918,12 @@ public final class StoryItemSetContainerComponent: Component {
transition: transition,
component: AnyComponent(BlurredGradientComponent(position: .bottom, dark: true, tag: nil)),
environment: {},
containerSize: CGSize(width: availableSize.width, height: keyboardHeight + 100.0)
containerSize: CGSize(width: availableSize.width, height: max(0.0, keyboardHeight + 100.0 - component.containerInsets.bottom))
)
if let inputPanelBackgroundView = self.inputPanelBackground.view {
if inputPanelBackgroundView.superview == nil {
self.addSubview(inputPanelBackgroundView)
self.addSubview(self.inputPanelContainer)
self.inputPanelContainer.addSubview(inputPanelBackgroundView)
}
let isVisible = inputHeight > 44.0 && !hasRecordingBlurBackground
transition.setFrame(view: inputPanelBackgroundView, frame: CGRect(origin: CGPoint(x: 0.0, y: isVisible ? availableSize.height - inputPanelBackgroundSize.height : availableSize.height), size: inputPanelBackgroundSize))
@ -2188,6 +2203,8 @@ public final class StoryItemSetContainerComponent: Component {
let contentFrame = CGRect(origin: CGPoint(x: 0.0, y: component.containerInsets.top - (contentSize.height - contentVisualHeight) * 0.5), size: contentSize)
transition.setFrame(view: self.inputPanelContainer, frame: contentFrame)
let itemLayout = ItemLayout(
containerSize: availableSize,
contentFrame: contentFrame,
@ -2238,7 +2255,7 @@ public final class StoryItemSetContainerComponent: Component {
mode: .more,
action: { _, _, _ in
},
longPressAction: {},
longPressAction: nil,
switchMediaInputMode: {
},
updateMediaCancelFraction: { _ in
@ -2855,18 +2872,25 @@ public final class StoryItemSetContainerComponent: Component {
let _ = (enqueueMessages(account: context.account, peerId: peer.id, messages: [message])
|> deliverOnMainQueue).start(next: { [weak self] messageIds in
if let animation {
presentController(UndoOverlayController(
if let animation, let self {
let controller = UndoOverlayController(
presentationData: presentationData,
content: .sticker(context: context, file: animation, loop: false, title: nil, text: "Reaction Sent.", undoText: "View in Chat", customAction: { [weak self] in
if let messageId = messageIds.first, let self {
self.navigateToPeer(peer: peer, chat: true, messageId: messageId)
self.navigateToPeer(peer: peer, chat: true, subject: messageId.flatMap { .message(id: .id($0), highlight: false, timecode: nil) })
}
}),
elevatedLayout: false,
animateInAsReplacement: false,
action: { _ in return false }
), nil)
action: { [weak self] _ in
self?.sendMessageContext.tooltipScreen = nil
self?.updateIsProgressPaused()
return false
}
)
self.sendMessageContext.tooltipScreen = controller
self.updateIsProgressPaused()
presentController(controller, nil)
}
})
})
@ -3218,7 +3242,7 @@ public final class StoryItemSetContainerComponent: Component {
}
}
func navigateToPeer(peer: EnginePeer, chat: Bool, messageId: EngineMessage.Id? = nil) {
func navigateToPeer(peer: EnginePeer, chat: Bool, subject: ChatControllerSubject? = nil) {
guard let component = self.component else {
return
}
@ -3228,11 +3252,7 @@ public final class StoryItemSetContainerComponent: Component {
guard let navigationController = controller.navigationController as? NavigationController else {
return
}
if messageId != nil || chat {
var subject: ChatControllerSubject?
if let messageId {
subject = .message(id: .id(messageId), highlight: false, timecode: nil)
}
if subject != nil || chat {
component.context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: component.context, chatLocation: .peer(peer), subject: subject, keepStack: .always, animated: true, pushController: { [weak controller, weak navigationController] chatController, animated, completion in
guard let controller, let navigationController else {
return
@ -3330,7 +3350,6 @@ public final class StoryItemSetContainerComponent: Component {
return .single(nil)
|> then(
.single(.video(symlinkPath, nil, false, nil, nil, PixelDimensions(width: 720, height: 1280), duration ?? 0.0, [], .bottomRight))
|> delay(0.1, queue: Queue.mainQueue())
)
}
}
@ -3517,12 +3536,12 @@ public final class StoryItemSetContainerComponent: Component {
}
}
private func performMyMoreAction(sourceView: UIView, gesture: ContextGesture?) {
func dismissAllTooltips() {
guard let component = self.component, let controller = component.controller() else {
return
}
component.controller()?.forEachController { c in
controller.forEachController { c in
if let c = c as? UndoOverlayController {
c.dismiss()
} else if let c = c as? TooltipScreen {
@ -3530,6 +3549,14 @@ public final class StoryItemSetContainerComponent: Component {
}
return true
}
}
private func performMyMoreAction(sourceView: UIView, gesture: ContextGesture?) {
guard let component = self.component, let controller = component.controller() else {
return
}
self.dismissAllTooltips()
let presentationData = component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: component.theme)
var items: [ContextMenuItem] = []
@ -3733,7 +3760,7 @@ public final class StoryItemSetContainerComponent: Component {
let contextItems = ContextController.Items(content: .list(items), tip: tip, tipSignal: tipSignal)
let contextController = ContextController(account: component.context.account, presentationData: presentationData, source: .reference(HeaderContextReferenceContentSource(controller: controller, sourceView: sourceView)), items: .single(contextItems), gesture: gesture)
let contextController = ContextController(account: component.context.account, presentationData: presentationData, source: .reference(HeaderContextReferenceContentSource(controller: controller, sourceView: sourceView, position: .bottom)), items: .single(contextItems), gesture: gesture)
contextController.dismissed = { [weak self] in
guard let self else {
return
@ -3759,12 +3786,7 @@ public final class StoryItemSetContainerComponent: Component {
return
}
component.controller()?.forEachController { c in
if let c = c as? UndoOverlayController {
c.dismiss()
}
return true
}
self.dismissAllTooltips()
let presentationData = component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: component.theme)
var items: [ContextMenuItem] = []
@ -3877,7 +3899,7 @@ public final class StoryItemSetContainerComponent: Component {
account: component.context.account,
sharedContext: component.context.sharedContext,
text: .markdown(text: text),
style: .customBlur(UIColor(rgb: 0x1c1c1c)),
style: .customBlur(UIColor(rgb: 0x1c1c1c), 0.0),
icon: .peer(peer: component.slice.peer, isStory: true),
action: TooltipScreen.Action(
title: "Undo",
@ -4015,7 +4037,7 @@ public final class StoryItemSetContainerComponent: Component {
let contextItems = ContextController.Items(content: .list(items), tip: tip, tipSignal: tipSignal)
let contextController = ContextController(account: component.context.account, presentationData: presentationData, source: .reference(HeaderContextReferenceContentSource(controller: controller, sourceView: sourceView)), items: .single(contextItems), gesture: gesture)
let contextController = ContextController(account: component.context.account, presentationData: presentationData, source: .reference(HeaderContextReferenceContentSource(controller: controller, sourceView: sourceView, position: .bottom)), items: .single(contextItems), gesture: gesture)
contextController.dismissed = { [weak self] in
guard let self else {
return
@ -4039,20 +4061,22 @@ public final class StoryItemSetContainerComponent: Component {
}
}
private final class HeaderContextReferenceContentSource: ContextReferenceContentSource {
final class HeaderContextReferenceContentSource: ContextReferenceContentSource {
private let controller: ViewController
private let sourceView: UIView
private let position: ContextControllerReferenceViewInfo.ActionsPosition
var keepInPlace: Bool {
return true
}
init(controller: ViewController, sourceView: UIView) {
init(controller: ViewController, sourceView: UIView, position: ContextControllerReferenceViewInfo.ActionsPosition) {
self.controller = controller
self.sourceView = sourceView
self.position = position
}
func transitionInfo() -> ContextControllerReferenceViewInfo? {
return ContextControllerReferenceViewInfo(referenceView: self.sourceView, contentAreaInScreenSpace: UIScreen.main.bounds, actionsPosition: .bottom)
return ContextControllerReferenceViewInfo(referenceView: self.sourceView, contentAreaInScreenSpace: UIScreen.main.bounds, actionsPosition: self.position)
}
}

View File

@ -40,6 +40,8 @@ import OpenInExternalAppUI
import SafariServices
import MediaPasteboardUI
import WebPBinding
import ContextUI
import ChatScheduleTimeController
final class StoryItemSetContainerSendMessage {
enum InputMode {
@ -197,7 +199,7 @@ final class StoryItemSetContainerSendMessage {
}
}
func updateInputMediaNode(inputPanel: ComponentView<Empty>, availableSize: CGSize, bottomInset: CGFloat, inputHeight: CGFloat, effectiveInputHeight: CGFloat, metrics: LayoutMetrics, deviceMetrics: DeviceMetrics, transition: Transition) -> CGFloat {
func updateInputMediaNode(inputPanel: ComponentView<Empty>, availableSize: CGSize, bottomInset: CGFloat, bottomContainerInset: CGFloat, inputHeight: CGFloat, effectiveInputHeight: CGFloat, metrics: LayoutMetrics, deviceMetrics: DeviceMetrics, transition: Transition) -> CGFloat {
guard let context = self.context, let inputPanelView = inputPanel.view as? MessageInputPanelComponent.View else {
return 0.0
}
@ -252,7 +254,7 @@ final class StoryItemSetContainerSendMessage {
isGeneralThreadClosed: nil
)
let heightAndOverflow = inputMediaNode.updateLayout(width: availableSize.width, leftInset: 0.0, rightInset: 0.0, bottomInset: bottomInset, standardInputHeight: deviceMetrics.standardInputHeight(inLandscape: false), inputHeight: inputHeight, maximumHeight: availableSize.height, inputPanelHeight: 0.0, transition: .immediate, interfaceState: presentationInterfaceState, layoutMetrics: metrics, deviceMetrics: deviceMetrics, isVisible: true, isExpanded: false)
let heightAndOverflow = inputMediaNode.updateLayout(width: availableSize.width, leftInset: 0.0, rightInset: 0.0, bottomInset: bottomInset, standardInputHeight: deviceMetrics.standardInputHeight(inLandscape: false), inputHeight: inputHeight < 100.0 ? inputHeight - bottomContainerInset : inputHeight, maximumHeight: availableSize.height, inputPanelHeight: 0.0, transition: .immediate, interfaceState: presentationInterfaceState, layoutMetrics: metrics, deviceMetrics: deviceMetrics, isVisible: true, isExpanded: false)
let inputNodeHeight = heightAndOverflow.0
let inputNodeFrame = CGRect(origin: CGPoint(x: 0.0, y: availableSize.height - inputNodeHeight), size: CGSize(width: availableSize.width, height: inputNodeHeight))
@ -331,7 +333,7 @@ final class StoryItemSetContainerSendMessage {
}
}
private func presentMessageSentTooltip(view: StoryItemSetContainerComponent.View, peer: EnginePeer, messageId: EngineMessage.Id?) {
private func presentMessageSentTooltip(view: StoryItemSetContainerComponent.View, peer: EnginePeer, messageId: EngineMessage.Id?, isScheduled: Bool = false) {
guard let component = view.component, let controller = component.controller() as? StoryContainerScreen else {
return
}
@ -341,14 +343,17 @@ final class StoryItemSetContainerSendMessage {
}
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
let text = isScheduled ? "Message Scheduled" : "Message Sent"
let tooltipScreen = UndoOverlayController(
presentationData: presentationData,
content: .actionSucceeded(title: "", text: "Message Sent", cancel: messageId != nil ? "View in Chat" : "", destructive: false),
content: .actionSucceeded(title: "", text: text, cancel: messageId != nil ? "View in Chat" : "", destructive: false),
elevatedLayout: false,
animateInAsReplacement: false,
action: { [weak view, weak self] action in
if case .undo = action, let messageId {
view?.navigateToPeer(peer: peer, chat: true, messageId: messageId)
view?.navigateToPeer(peer: peer, chat: true, subject: isScheduled ? .scheduledMessages : .message(id: .id(messageId), highlight: false, timecode: nil))
}
self?.tooltipScreen = nil
view?.updateIsProgressPaused()
@ -360,12 +365,110 @@ final class StoryItemSetContainerSendMessage {
view.updateIsProgressPaused()
}
func presentSendMessageOptions(view: StoryItemSetContainerComponent.View) {
func presentSendMessageOptions(view: StoryItemSetContainerComponent.View, sourceView: UIView, gesture: ContextGesture?) {
guard let component = view.component, let controller = component.controller() as? StoryContainerScreen else {
return
}
view.dismissAllTooltips()
var sendWhenOnlineAvailable = false
if let presence = component.slice.additionalPeerData.presence, case .present = presence.status {
sendWhenOnlineAvailable = true
}
let presentationData = component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: component.theme)
var items: [ContextMenuItem] = []
items.append(.action(ContextMenuActionItem(text: presentationData.strings.Conversation_SendMessage_SendSilently, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Menu/SilentIcon"), color: theme.contextMenu.primaryColor)
}, action: { [weak self, weak view] _, a in
a(.default)
guard let self, let view else {
return
}
self.performSendMessageAction(view: view, silentPosting: true)
})))
if sendWhenOnlineAvailable {
items.append(.action(ContextMenuActionItem(text: presentationData.strings.Conversation_SendMessage_SendWhenOnline, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Menu/WhenOnlineIcon"), color: theme.contextMenu.primaryColor)
}, action: { [weak self, weak view] _, a in
a(.default)
guard let self, let view else {
return
}
self.performSendMessageAction(view: view, scheduleTime: scheduleWhenOnlineTimestamp)
})))
}
items.append(.action(ContextMenuActionItem(text: presentationData.strings.Conversation_SendMessage_ScheduleMessage, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Menu/ScheduleIcon"), color: theme.contextMenu.primaryColor)
}, action: { [weak self, weak view] _, a in
a(.default)
guard let self, let view else {
return
}
self.presentScheduleTimePicker(view: view)
})))
let contextItems = ContextController.Items(content: .list(items))
let contextController = ContextController(account: component.context.account, presentationData: presentationData, source: .reference(HeaderContextReferenceContentSource(controller: controller, sourceView: sourceView, position: .top)), items: .single(contextItems), gesture: gesture)
contextController.dismissed = { [weak view] in
guard let view else {
return
}
view.contextController = nil
view.updateIsProgressPaused()
}
view.contextController = contextController
view.updateIsProgressPaused()
controller.present(contextController, in: .window(.root))
}
func presentScheduleTimePicker(
view: StoryItemSetContainerComponent.View
) {
guard let component = view.component else {
return
}
let focusedItem = component.slice.item
guard let peerId = focusedItem.peerId else {
return
}
let controller = component.controller() as? StoryContainerScreen
var sendWhenOnlineAvailable = false
if let presence = component.slice.additionalPeerData.presence, case .present = presence.status {
sendWhenOnlineAvailable = true
}
let timeController = ChatScheduleTimeController(context: component.context, updatedPresentationData: nil, peerId: peerId, mode: .scheduledMessages(sendWhenOnlineAvailable: sendWhenOnlineAvailable), style: .media, currentTime: nil, minimalTime: nil, dismissByTapOutside: true, completion: { [weak self, weak view] time in
guard let self, let view else {
return
}
self.performSendMessageAction(view: view, scheduleTime: time)
})
timeController.dismissed = { [weak self, weak view] in
guard let self, let view else {
return
}
self.actionSheet = nil
view.updateIsProgressPaused()
}
view.endEditing(true)
controller?.present(timeController, in: .window(.root))
self.actionSheet = timeController
view.updateIsProgressPaused()
}
func performSendMessageAction(
view: StoryItemSetContainerComponent.View
view: StoryItemSetContainerComponent.View,
silentPosting: Bool = false,
scheduleTime: Int32? = nil
) {
guard let component = view.component else {
return
@ -406,11 +509,13 @@ final class StoryItemSetContainerSendMessage {
to: peerId,
replyTo: nil,
storyId: focusedStoryId,
content: .text(text.string, entities)
content: .text(text.string, entities),
silentPosting: silentPosting,
scheduleTime: scheduleTime
) |> deliverOnMainQueue).start(next: { [weak self, weak view] messageIds in
Queue.mainQueue().after(0.3) {
if let self, let view {
self.presentMessageSentTooltip(view: view, peer: peer, messageId: messageIds.first.flatMap { $0 })
self.presentMessageSentTooltip(view: view, peer: peer, messageId: messageIds.first.flatMap { $0 }, isScheduled: scheduleTime != nil)
}
}
})
@ -2181,7 +2286,7 @@ final class StoryItemSetContainerSendMessage {
|> deliverOnMainQueue).start(next: { [weak self, weak view] messageIds in
Queue.mainQueue().after(0.3) {
if let view {
self?.presentMessageSentTooltip(view: view, peer: peer, messageId: messageIds.first.flatMap { $0 })
self?.presentMessageSentTooltip(view: view, peer: peer, messageId: messageIds.first.flatMap { $0 }, isScheduled: scheduleTime != nil)
}
}
})

View File

@ -240,7 +240,7 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
if !hasArrow {
let backgroundColor: UIColor
var enableSaturation = true
if case let .customBlur(color) = style {
if case let .customBlur(color, _) = style {
backgroundColor = color
enableSaturation = false
} else {
@ -297,7 +297,7 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
} else {
var enableSaturation = true
let backgroundColor: UIColor
if case let .customBlur(color) = style {
if case let .customBlur(color, _) = style {
backgroundColor = color
enableSaturation = false
} else if case .light = style {
@ -356,10 +356,11 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
case let .entities(text, entities):
attributedText = stringWithAppliedEntities(text, entities: entities, baseColor: textColor, linkColor: textColor, baseFont: baseFont, linkFont: baseFont, boldFont: boldFont, italicFont: italicFont, boldItalicFont: boldItalicFont, fixedFont: fixedFont, blockQuoteFont: baseFont, underlineLinks: true, external: false, message: nil)
case let .markdown(text):
let linkColor = UIColor(rgb: 0x64d2ff)
let markdownAttributes = MarkdownAttributes(
body: MarkdownAttributeSet(font: baseFont, textColor: textColor),
bold: MarkdownAttributeSet(font: boldFont, textColor: textColor),
link: MarkdownAttributeSet(font: baseFont, textColor: textColor),
link: MarkdownAttributeSet(font: boldFont, textColor: linkColor),
linkAttribute: { _ in
return nil
}
@ -541,7 +542,7 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
animationSpacing = 8.0
}
let containerWidth = max(100.0, min(layout.size.width, 614.0) - (sideInset + layout.safeInsets.left) * 2.0)
let containerWidth = max(100.0, min(layout.size.width, 614.0) - sideInset * 2.0)
var actionSize: CGSize = .zero
@ -560,14 +561,16 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
var backgroundHeight: CGFloat
switch self.tooltipStyle {
case .default, .gradient, .customBlur, .wide:
case .default, .gradient:
backgroundHeight = max(animationSize.height, textSize.height) + contentVerticalInset * 2.0
case .wide:
backgroundHeight = max(animationSize.height, textSize.height) + contentVerticalInset * 2.0 + 4.0
case let .customBlur(_, inset):
backgroundHeight = max(animationSize.height, textSize.height) + contentVerticalInset * 2.0 + inset * 2.0
case .light:
backgroundHeight = max(28.0, max(animationSize.height, textSize.height) + 4.0 * 2.0)
}
if case .wide = self.tooltipStyle {
backgroundHeight += 4.0
} else if self.actionButtonNode != nil {
if self.actionButtonNode != nil {
backgroundHeight += 4.0
}
@ -649,11 +652,12 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
self.arrowNode.frame = arrowBounds
self.arrowGradientNode?.frame = CGRect(origin: CGPoint(x: -arrowFrame.minX + backgroundFrame.minX, y: 0.0), size: backgroundFrame.size)
case .right:
arrowFrame = CGRect(origin: CGPoint(x: backgroundFrame.width + arrowSize.height, y: rect.midY), size: CGSize(width: arrowSize.height, height: arrowSize.width))
let arrowCenterY = floorToScreenPixels(rect.midY - arrowSize.height / 2.0)
arrowFrame = CGRect(origin: CGPoint(x: backgroundFrame.width + arrowSize.height, y: self.view.convert(CGPoint(x: 0.0, y: arrowCenterY), to: self.arrowContainer.supernode?.view).y), size: CGSize(width: arrowSize.height, height: arrowSize.width))
ContainedViewLayoutTransition.immediate.updateTransformRotation(node: self.arrowContainer, angle: -CGFloat.pi / 2.0)
transition.updateFrame(node: self.arrowContainer, frame: arrowFrame.offsetBy(dx: 8.0 - UIScreenPixel, dy: 16.0 + -backgroundFrame.minY - floorToScreenPixels((backgroundFrame.height + 20.0 - arrowSize.width) / 2.0)))
transition.updateFrame(node: self.arrowContainer, frame: arrowFrame.offsetBy(dx: 8.0 - UIScreenPixel, dy: 0.0))
let arrowBounds = CGRect(origin: .zero, size: arrowSize)
self.arrowNode.frame = arrowBounds
@ -739,6 +743,7 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
}
}
private var didRequestDismiss = false
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
if let event = event {
if let _ = self.openActiveTextItem, let result = self.textNode.hitTest(self.view.convert(point, to: self.textNode.view), with: event) {
@ -753,14 +758,19 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
if let actionButtonNode = self.actionButtonNode, let result = actionButtonNode.hitTest(self.convert(point, to: actionButtonNode), with: event) {
return result
}
switch self.shouldDismissOnTouch(point, self.containerNode.frame) {
case .ignore:
break
case let .dismiss(consume):
self.requestDismiss()
if consume {
return self.view
if !self.didRequestDismiss {
switch self.shouldDismissOnTouch(point, self.containerNode.frame) {
case .ignore:
break
case let .dismiss(consume):
self.requestDismiss()
if consume {
self.didRequestDismiss = true
return self.view
}
}
} else {
return self.view
}
return nil
}
@ -915,7 +925,7 @@ public final class TooltipScreen: ViewController {
public enum Style {
case `default`
case light
case customBlur(UIColor)
case customBlur(UIColor, CGFloat)
case gradient(UIColor, UIColor)
case wide
}