Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-12-03 21:16:35 +00:00)

Commit cf5223ab46: Merge commit '3985c538a8b07af04d0f6b5f1389769a85c5828d'
@@ -14338,3 +14338,33 @@ Sorry for the inconvenience.";
 
 "Gift.Buy.ErrorTooEarly.Title" = "Try Later";
 "Gift.Buy.ErrorTooEarly.Text" = "You will be able to buy this gift on %@.";
+
+"Chat.PauseVoiceMessageTooltip" = "Pause to trim or replay.";
+"Chat.PauseVideoMessageTooltip" = "Pause to trim or replay.";
+
+"Chat.TrimVoiceMessageToResume.Title" = "Trim to selected range?";
+"Chat.TrimVoiceMessageToResume.Text" = "Audio outside that range will be discarded, and recording will start immediately.";
+"Chat.TrimVoiceMessageToResume.Proceed" = "Proceed";
+
+"Contacts.LimitedAccess.Text" = "You have limited Telegram from accessing all of your contacts.";
+"Contacts.LimitedAccess.Manage" = "MANAGE";
+
+"Media.PhotoHdOn" = "The photo will be sent in high quality.";
+"Media.PhotoHdOff" = "The photo will be sent in standard quality.";
+
+"Attachment.SendInHd" = "Send in High Quality";
+
+"Share.PostToStory" = "Post\nto Story";
+
+"PeerInfo.QRCode.Scan" = "Scan QR Code";
+
+"PeerInfo.Topics.Title" = "Topics";
+"PeerInfo.Topics.EnableTopics" = "Enable Topics";
+"PeerInfo.Topics.EnableTopicsInfo" = "The group chat will be divided into topics created by admins or users.";
+"PeerInfo.Topics.DisplayAs" = "DISPLAY AS";
+"PeerInfo.Topics.DisplayAsInfo" = "Choose how topics appear for all members.";
+"PeerInfo.Topics.Tabs" = "Tabs";
+"PeerInfo.Topics.List" = "List";
+
+"PeerInfo.OptionTopics.Enabled" = "Enabled";
+"PeerInfo.OptionTopics.Disabled" = "Disabled";
@@ -1162,7 +1162,7 @@ public protocol SharedAccountContext: AnyObject {
 
 func makeMediaPickerScreen(context: AccountContext, hasSearch: Bool, completion: @escaping (Any) -> Void) -> ViewController
 
-func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, completion: @escaping ([MediaEditorScreenResult], @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController
+func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, remainingCount: Int32, completion: @escaping ([MediaEditorScreenResult], MediaEditorTransitionOutExternalState, @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController
 
 func makeBotPreviewEditorScreen(context: AccountContext, source: Any?, target: Stories.PendingTarget, transitionArguments: (UIView, CGRect, UIImage?)?, transitionOut: @escaping () -> BotPreviewEditorTransitionOut?, externalState: MediaEditorTransitionOutExternalState, completion: @escaping (MediaEditorScreenResult, @escaping (@escaping () -> Void) -> Void) -> Void, cancelled: @escaping () -> Void) -> ViewController
 
@@ -1198,7 +1198,7 @@ public protocol SharedAccountContext: AnyObject {
 func makeStarsAmountScreen(context: AccountContext, initialValue: Int64?, completion: @escaping (Int64) -> Void) -> ViewController
 func makeStarsWithdrawalScreen(context: AccountContext, stats: StarsRevenueStats, completion: @escaping (Int64) -> Void) -> ViewController
 func makeStarsWithdrawalScreen(context: AccountContext, subject: StarsWithdrawalScreenSubject, completion: @escaping (Int64) -> Void) -> ViewController
-func makeStarGiftResellScreen(context: AccountContext, update: Bool, completion: @escaping (Int64) -> Void) -> ViewController
+func makeStarGiftResellScreen(context: AccountContext, gift: StarGift.UniqueGift, update: Bool, completion: @escaping (Int64) -> Void) -> ViewController
 func makeStarsGiftScreen(context: AccountContext, message: EngineMessage) -> ViewController
 func makeStarsGiveawayBoostScreen(context: AccountContext, peerId: EnginePeer.Id, boost: ChannelBoostersContext.State.Boost) -> ViewController
 func makeStarsIntroScreen(context: AccountContext) -> ViewController
@@ -1265,6 +1265,7 @@ final class AttachmentPanel: ASDisplayNode, ASScrollViewDelegate {
 }, openMessagePayment: {
 }, openBoostToUnrestrict: {
 }, updateRecordingTrimRange: { _, _, _, _ in
+}, dismissAllTooltips: {
 }, updateHistoryFilter: { _ in
 }, updateChatLocationThread: { _, _ in
 }, toggleChatSidebarMode: {
@@ -12,8 +12,8 @@ final class CameraSession {
 
 let hasMultiCam: Bool
 
-init() {
-if #available(iOS 13.0, *), AVCaptureMultiCamSession.isMultiCamSupported {
+init(forRoundVideo: Bool) {
+if #available(iOS 13.0, *), Camera.isDualCameraSupported(forRoundVideo: forRoundVideo) {
 self.multiSession = AVCaptureMultiCamSession()
 self.singleSession = nil
 self.hasMultiCam = true
@@ -765,7 +765,7 @@ public final class Camera {
 
 self.metrics = Camera.Metrics(model: DeviceModel.current)
 
-let session = CameraSession()
+let session = CameraSession(forRoundVideo: configuration.isRoundVideo)
 session.session.automaticallyConfiguresApplicationAudioSession = false
 session.session.automaticallyConfiguresCaptureDeviceForWideColor = false
 session.session.usesApplicationAudioSession = true
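The two Camera hunks above route the round-video flag into session setup: CameraSession(forRoundVideo:) now asks Camera.isDualCameraSupported(forRoundVideo:) instead of checking AVCaptureMultiCamSession.isMultiCamSupported directly. A minimal sketch of that kind of gating follows; the real implementation of isDualCameraSupported is not part of this diff, so the shape of the extra round-video condition is an assumption.

    import AVFoundation

    // Hypothetical, simplified gate. The real Camera.isDualCameraSupported(forRoundVideo:)
    // may exclude additional devices; this only illustrates the flow introduced above.
    func isDualCameraSupportedSketch(forRoundVideo: Bool) -> Bool {
        guard #available(iOS 13.0, *) else {
            return false
        }
        guard AVCaptureMultiCamSession.isMultiCamSupported else {
            return false
        }
        // Round video rendering is more demanding, so a real implementation could
        // apply stricter per-device rules when forRoundVideo is true.
        return true
    }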
@@ -109,7 +109,7 @@ final class CameraOutput: NSObject {
 private var videoConnection: AVCaptureConnection?
 private var previewConnection: AVCaptureConnection?
 
-private var roundVideoFilter: CameraRoundVideoFilter?
+private var roundVideoFilter: CameraRoundLegacyVideoFilter?
 private let semaphore = DispatchSemaphore(value: 1)
 
 private let videoQueue = DispatchQueue(label: "", qos: .userInitiated)
@@ -577,11 +577,11 @@ final class CameraOutput: NSObject {
 return nil
 }
 
-let filter: CameraRoundVideoFilter
+let filter: CameraRoundLegacyVideoFilter
 if let current = self.roundVideoFilter {
 filter = current
 } else {
-filter = CameraRoundVideoFilter(ciContext: self.ciContext, colorSpace: self.colorSpace, simple: self.exclusive)
+filter = CameraRoundLegacyVideoFilter(ciContext: self.ciContext, colorSpace: self.colorSpace, simple: self.exclusive)
 self.roundVideoFilter = filter
 }
 if !filter.isPrepared {
submodules/Camera/Sources/CameraRoundLegacyVideoFilter.swift (new file, 168 lines)
@@ -0,0 +1,168 @@
+import Foundation
+import UIKit
+import AVFoundation
+import CoreImage
+import CoreMedia
+import CoreVideo
+import Metal
+import Display
+import TelegramCore
+
+final class CameraRoundLegacyVideoFilter {
+    private let ciContext: CIContext
+    private let colorSpace: CGColorSpace
+    private let simple: Bool
+
+    private var resizeFilter: CIFilter?
+    private var overlayFilter: CIFilter?
+    private var compositeFilter: CIFilter?
+    private var borderFilter: CIFilter?
+
+    private var outputColorSpace: CGColorSpace?
+    private var outputPixelBufferPool: CVPixelBufferPool?
+    private(set) var outputFormatDescription: CMFormatDescription?
+    private(set) var inputFormatDescription: CMFormatDescription?
+
+    private(set) var isPrepared = false
+
+    init(ciContext: CIContext, colorSpace: CGColorSpace, simple: Bool) {
+        self.ciContext = ciContext
+        self.colorSpace = colorSpace
+        self.simple = simple
+    }
+
+    func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) {
+        self.reset()
+
+        (self.outputPixelBufferPool, self.outputColorSpace, self.outputFormatDescription) = allocateOutputBufferPool(with: formatDescription, outputRetainedBufferCountHint: outputRetainedBufferCountHint)
+        if self.outputPixelBufferPool == nil {
+            return
+        }
+        self.inputFormatDescription = formatDescription
+
+        let circleImage = generateImage(videoMessageDimensions.cgSize, opaque: false, scale: 1.0, rotatedContext: { size, context in
+            let bounds = CGRect(origin: .zero, size: size)
+            context.clear(bounds)
+            context.setFillColor(UIColor.white.cgColor)
+            context.fill(bounds)
+            context.setBlendMode(.clear)
+            context.fillEllipse(in: bounds.insetBy(dx: -2.0, dy: -2.0))
+        })!
+
+        self.resizeFilter = CIFilter(name: "CILanczosScaleTransform")
+        self.overlayFilter = CIFilter(name: "CIColorMatrix")
+        self.compositeFilter = CIFilter(name: "CISourceOverCompositing")
+
+        self.borderFilter = CIFilter(name: "CISourceOverCompositing")
+        self.borderFilter?.setValue(CIImage(image: circleImage), forKey: kCIInputImageKey)
+
+        self.isPrepared = true
+    }
+
+    func reset() {
+        self.resizeFilter = nil
+        self.overlayFilter = nil
+        self.compositeFilter = nil
+        self.borderFilter = nil
+        self.outputColorSpace = nil
+        self.outputPixelBufferPool = nil
+        self.outputFormatDescription = nil
+        self.inputFormatDescription = nil
+        self.isPrepared = false
+        self.lastMainSourceImage = nil
+        self.lastAdditionalSourceImage = nil
+    }
+
+    private var lastMainSourceImage: CIImage?
+    private var lastAdditionalSourceImage: CIImage?
+
+    func render(pixelBuffer: CVPixelBuffer, additional: Bool, captureOrientation: AVCaptureVideoOrientation, transitionFactor: CGFloat) -> CVPixelBuffer? {
+        guard let resizeFilter = self.resizeFilter, let overlayFilter = self.overlayFilter, let compositeFilter = self.compositeFilter, let borderFilter = self.borderFilter, self.isPrepared else {
+            return nil
+        }
+
+        var sourceImage = CIImage(cvImageBuffer: pixelBuffer, options: [.colorSpace: self.colorSpace])
+        var sourceOrientation: CGImagePropertyOrientation
+        var sourceIsLandscape = false
+        switch captureOrientation {
+        case .portrait:
+            sourceOrientation = additional ? .leftMirrored : .right
+        case .landscapeLeft:
+            sourceOrientation = additional ? .upMirrored : .down
+            sourceIsLandscape = true
+        case .landscapeRight:
+            sourceOrientation = additional ? .downMirrored : .up
+            sourceIsLandscape = true
+        case .portraitUpsideDown:
+            sourceOrientation = additional ? .rightMirrored : .left
+        @unknown default:
+            sourceOrientation = additional ? .leftMirrored : .right
+        }
+        sourceImage = sourceImage.oriented(sourceOrientation)
+        let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
+
+        if !self.simple {
+            resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
+            resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
+
+            if let resizedImage = resizeFilter.outputImage {
+                sourceImage = resizedImage
+            } else {
+                sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
+            }
+        } else {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
+        }
+
+        if sourceIsLandscape {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(-(sourceImage.extent.width - sourceImage.extent.height) / 2.0, 0.0))
+            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.height, height: sourceImage.extent.height))
+        } else {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
+            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
+        }
+
+        if additional {
+            self.lastAdditionalSourceImage = sourceImage
+        } else {
+            self.lastMainSourceImage = sourceImage
+        }
+
+        var effectiveSourceImage: CIImage
+        if transitionFactor == 0.0 {
+            effectiveSourceImage = !additional ? sourceImage : (self.lastMainSourceImage ?? sourceImage)
+        } else if transitionFactor == 1.0 {
+            effectiveSourceImage = additional ? sourceImage : (self.lastAdditionalSourceImage ?? sourceImage)
+        } else {
+            if let mainSourceImage = self.lastMainSourceImage, let additionalSourceImage = self.lastAdditionalSourceImage {
+                let overlayRgba: [CGFloat] = [0, 0, 0, transitionFactor]
+                let alphaVector: CIVector = CIVector(values: overlayRgba, count: 4)
+                overlayFilter.setValue(additionalSourceImage, forKey: kCIInputImageKey)
+                overlayFilter.setValue(alphaVector, forKey: "inputAVector")
+
+                compositeFilter.setValue(mainSourceImage, forKey: kCIInputBackgroundImageKey)
+                compositeFilter.setValue(overlayFilter.outputImage, forKey: kCIInputImageKey)
+                effectiveSourceImage = compositeFilter.outputImage ?? sourceImage
+            } else {
+                effectiveSourceImage = sourceImage
+            }
+        }
+
+        borderFilter.setValue(effectiveSourceImage, forKey: kCIInputBackgroundImageKey)
+
+        let finalImage = borderFilter.outputImage
+        guard let finalImage else {
+            return nil
+        }
+
+        var pbuf: CVPixelBuffer?
+        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
+        guard let outputPixelBuffer = pbuf else {
+            return nil
+        }
+
+        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: videoMessageDimensions.cgSize), colorSpace: outputColorSpace)
+
+        return outputPixelBuffer
+    }
+}
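The new file above is a CoreImage-based filter that squares each frame, masks it into the round video-message circle, and cross-fades between the main and additional camera feeds via transitionFactor. A minimal, hypothetical call-site sketch follows; the function and parameter names here are assumptions for illustration (the real caller is CameraOutput, as shown in the earlier hunks).

    import AVFoundation
    import CoreImage

    // Sketch only: feeds one capture sample buffer through the filter.
    func processRoundVideoFrame(filter: CameraRoundLegacyVideoFilter,
                                sampleBuffer: CMSampleBuffer,
                                isAdditionalCamera: Bool) -> CVPixelBuffer? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
              let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else {
            return nil
        }
        if !filter.isPrepared {
            // The retained-buffer hint of 3 is an arbitrary value for this sketch.
            filter.prepare(with: formatDescription, outputRetainedBufferCountHint: 3)
        }
        // transitionFactor 0.0 shows only the main feed, 1.0 only the additional feed;
        // intermediate values blend the two.
        return filter.render(pixelBuffer: pixelBuffer,
                             additional: isAdditionalCamera,
                             captureOrientation: .portrait,
                             transitionFactor: isAdditionalCamera ? 1.0 : 0.0)
    }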
@@ -78,7 +78,7 @@ func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription,
 return (pixelBufferPool, cgColorSpace, outputFormatDescription)
 }
 
-private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) {
+func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) {
 var pixelBuffers = [CVPixelBuffer]()
 var error: CVReturn = kCVReturnSuccess
 let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: allocationThreshold] as NSDictionary
@@ -176,6 +176,7 @@ public final class ChatPanelInterfaceInteraction {
 public let updateDisplayHistoryFilterAsList: (Bool) -> Void
 public let openBoostToUnrestrict: () -> Void
 public let updateRecordingTrimRange: (Double, Double, Bool, Bool) -> Void
+public let dismissAllTooltips: () -> Void
 public let requestLayout: (ContainedViewLayoutTransition) -> Void
 public let chatController: () -> ViewController?
 public let statuses: ChatPanelInterfaceInteractionStatuses?
@@ -291,6 +292,7 @@ public final class ChatPanelInterfaceInteraction {
 openMessagePayment: @escaping () -> Void,
 openBoostToUnrestrict: @escaping () -> Void,
 updateRecordingTrimRange: @escaping (Double, Double, Bool, Bool) -> Void,
+dismissAllTooltips: @escaping () -> Void,
 updateHistoryFilter: @escaping ((ChatPresentationInterfaceState.HistoryFilter?) -> ChatPresentationInterfaceState.HistoryFilter?) -> Void,
 updateChatLocationThread: @escaping (Int64?, ChatControllerAnimateInnerChatSwitchDirection?) -> Void,
 toggleChatSidebarMode: @escaping () -> Void,
@@ -409,6 +411,7 @@ public final class ChatPanelInterfaceInteraction {
 self.openMessagePayment = openMessagePayment
 self.openBoostToUnrestrict = openBoostToUnrestrict
 self.updateRecordingTrimRange = updateRecordingTrimRange
+self.dismissAllTooltips = dismissAllTooltips
 self.updateHistoryFilter = updateHistoryFilter
 self.updateChatLocationThread = updateChatLocationThread
 self.toggleChatSidebarMode = toggleChatSidebarMode
@@ -536,6 +539,7 @@ public final class ChatPanelInterfaceInteraction {
 }, openMessagePayment: {
 }, openBoostToUnrestrict: {
 }, updateRecordingTrimRange: { _, _, _, _ in
+}, dismissAllTooltips: {
 }, updateHistoryFilter: { _ in
 }, updateChatLocationThread: { _, _ in
 }, toggleChatSidebarMode: {
@@ -149,8 +149,7 @@ private enum ContactListNodeEntry: Comparable, Identifiable {
 interaction.authorize()
 })
 case .permissionLimited:
-//TODO:localize
-return LimitedPermissionItem(presentationData: ItemListPresentationData(presentationData), text: "You have limited Telegram from accessing all of your contacts.", action: {
+return LimitedPermissionItem(presentationData: ItemListPresentationData(presentationData), text: presentationData.strings.Contacts_LimitedAccess_Text, action: {
 interaction.openContactAccessPicker()
 })
 case let .option(_, option, header, _, _):
@@ -160,8 +160,7 @@ public class LimitedPermissionItemNode: ListViewItemNode {
 
 let attributedText = NSAttributedString(string: item.text, font: textFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
 
-//TODO:localize
-let (buttonTextLayout, buttonTextApply) = makeButtonTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: "MANAGE", font: Font.semibold(15.0), textColor: item.presentationData.theme.list.itemCheckColors.foregroundColor), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: params.width - leftInset - rightInset, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
+let (buttonTextLayout, buttonTextApply) = makeButtonTitleLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Contacts_LimitedAccess_Manage, font: Font.semibold(15.0), textColor: item.presentationData.theme.list.itemCheckColors.foregroundColor), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: params.width - leftInset - rightInset, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
 
 let (textLayout, textApply) = makeTextLayout(TextNodeLayoutArguments(attributedString: attributedText, backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: params.width - leftInset - rightInset - buttonTextLayout.size.width - 20.0, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
 
@@ -2852,6 +2852,8 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
 }
 }
 
+bool isHighQualityPhoto = editingContext.isHighQualityPhoto;
+
 if (storeAssets && !isScan) {
 NSMutableArray *fullSizeSignals = [[NSMutableArray alloc] init];
 for (id<TGMediaEditableItem> item in selectedItems)
@@ -2968,7 +2970,9 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
 id<TGMediaEditAdjustments> adjustments = [editingContext adjustmentsForItem:asset];
 NSNumber *timer = [editingContext timerForItem:asset];
 
-SSignal *inlineSignal = [[asset screenImageSignal:0.0] map:^id(UIImage *originalImage)
+SSignal *originalSignal = isHighQualityPhoto ? [asset originalImageSignal:0.0] : [asset screenImageSignal:0.0];
+SSignal *inlineSignal = [originalSignal map:^id(UIImage *originalImage)
 {
 NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
 dict[@"type"] = @"editedPhoto";
@@ -2979,6 +2983,9 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
 else if (groupedId != nil && !hasAnyTimers)
 dict[@"groupedId"] = groupedId;
 
+if (isHighQualityPhoto)
+dict[@"hd"] = @true;
+
 if (isScan) {
 if (caption != nil)
 dict[@"caption"] = caption;
@@ -3058,6 +3065,9 @@ static CGPoint TGCameraControllerClampPointToScreenSize(__unused id self, __unus
 else if (groupedId != nil && !hasAnyTimers)
 dict[@"groupedId"] = groupedId;
 
+if (isHighQualityPhoto)
+dict[@"hd"] = @true;
+
 if (isScan) {
 if (caption != nil)
 dict[@"caption"] = caption;
@@ -231,6 +231,10 @@ UIImage *TGScaleAndBlurImage(NSData *data, __unused CGSize size, __autoreleasing
 
 UIImage *TGScaleImageToPixelSize(UIImage *image, CGSize size)
 {
+if (image.size.width <= size.width && image.size.height <= size.height) {
+return image;
+}
+
 UIGraphicsBeginImageContextWithOptions(size, true, 1.0f);
 [image drawInRect:CGRectMake(0, 0, size.width, size.height) blendMode:kCGBlendModeCopy alpha:1.0f];
 UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
@@ -1108,7 +1108,7 @@
 TGPhotoEditorButton *qualityButton = [_portraitToolbarView buttonForTab:TGPhotoEditorQualityTab];
 if (qualityButton != nil)
 {
-bool isPhoto = [_currentItemView isKindOfClass:[TGMediaPickerGalleryPhotoItemView class]];
+bool isPhoto = [_currentItemView isKindOfClass:[TGMediaPickerGalleryPhotoItemView class]] || [_currentItem isKindOfClass:[TGCameraCapturedPhoto class]];
 if (isPhoto) {
 bool isHd = _editingContext.isHighQualityPhoto;
 UIImage *icon = [TGPhotoEditorInterfaceAssets qualityIconForHighQuality:isHd filled: false];
@@ -1239,8 +1239,7 @@
 [self tooltipTimerTick];
 }
 
-//TODO:localize
-NSString *text = hd ? @"The photo will be sent in high quality." : @"The photo will be sent in standard quality.";
+NSString *text = hd ? TGLocalized(@"Media.PhotoHdOn") : TGLocalized(@"Media.PhotoHdOff");
 [_context presentTooltip:text icon:[TGPhotoEditorInterfaceAssets qualityIconForHighQuality:hd filled: true] sourceRect:rect];
 }
 
@@ -197,7 +197,8 @@
 
 __strong TGModernGalleryController *controller = strongSelf.controller;
 if ([controller.currentItem conformsToProtocol:@protocol(TGModernGalleryEditableItem)]) {
-if (tab == TGPhotoEditorQualityTab && [controller.currentItem isKindOfClass:[TGMediaPickerGalleryFetchResultItem class]] && [((TGMediaPickerGalleryFetchResultItem *)controller.currentItem).backingItem isKindOfClass:[TGMediaPickerGalleryPhotoItem class]]) {
+bool isPhoto = [controller.currentItem isKindOfClass:[TGMediaPickerGalleryPhotoItem class]] || ([controller.currentItem isKindOfClass:[TGMediaPickerGalleryFetchResultItem class]] && [((TGMediaPickerGalleryFetchResultItem *)controller.currentItem).backingItem isKindOfClass:[TGMediaPickerGalleryPhotoItem class]]);
+if (tab == TGPhotoEditorQualityTab && isPhoto) {
 [strongSelf->_editingContext setHighQualityPhoto:!strongSelf->_editingContext.isHighQualityPhoto];
 [strongSelf->_interfaceView showPhotoQualityTooltip:strongSelf->_editingContext.isHighQualityPhoto];
 } else {
@@ -6,7 +6,7 @@
 #import <AVFoundation/AVFoundation.h>
 #import <Accelerate/Accelerate.h>
 
-const CGSize TGPhotoEditorResultImageMaxSize = { 1280, 1280 };
+const CGSize TGPhotoEditorResultImageMaxSize = { 2560, 2560 };
 const CGSize TGPhotoEditorResultImageWallpaperMaxSize = { 2048, 2048 };
 const CGSize TGPhotoEditorResultImageAvatarMaxSize = { 2048, 2048 };
 const CGSize TGPhotoEditorScreenImageHardLimitSize = { 1280, 1280 };
@@ -175,6 +175,7 @@ public func legacyAssetPickerItemGenerator() -> ((Any?, NSAttributedString?, Str
 let image = dict["image"] as! UIImage
 let thumbnail = dict["previewImage"] as? UIImage
 let cover = dict["coverImage"] as? UIImage
+let forceHd = (dict["hd"] as? NSNumber)?.boolValue ?? false
 
 var result: [AnyHashable : Any] = [:]
 if let isAnimation = dict["isAnimation"] as? NSNumber, isAnimation.boolValue {
@@ -184,7 +185,7 @@ public func legacyAssetPickerItemGenerator() -> ((Any?, NSAttributedString?, Str
 result["item" as NSString] = LegacyAssetItemWrapper(item: .video(data: .tempFile(path: url, dimensions: dimensions, duration: 4.0), thumbnail: thumbnail, cover: cover, adjustments: dict["adjustments"] as? TGVideoEditAdjustments, caption: caption, asFile: false, asAnimation: true, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, price: price, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
 }
 } else {
-result["item" as NSString] = LegacyAssetItemWrapper(item: .image(data: .image(image), thumbnail: thumbnail, caption: caption, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, price: price, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
+result["item" as NSString] = LegacyAssetItemWrapper(item: .image(data: .image(image), thumbnail: thumbnail, caption: caption, stickers: stickers), timer: (dict["timer"] as? NSNumber)?.intValue, spoiler: (dict["spoiler"] as? NSNumber)?.boolValue, price: price, forceHd: forceHd, groupedId: (dict["groupedId"] as? NSNumber)?.int64Value, uniqueId: uniqueId)
 }
 return result
 } else if (dict["type"] as! NSString) == "cloudPhoto" {
@@ -400,7 +401,9 @@ public func legacyAssetPickerEnqueueMessages(context: AccountContext, account: A
 var randomId: Int64 = 0
 arc4random_buf(&randomId, 8)
 let tempFilePath = NSTemporaryDirectory() + "\(randomId).jpeg"
-let scaledSize = image.size.aspectFittedOrSmaller(CGSize(width: 1280.0, height: 1280.0))
+let maxSize = item.forceHd ? CGSize(width: 2560.0, height: 2560.0) : CGSize(width: 1280.0, height: 1280.0)
+let scaledSize = image.size.aspectFittedOrSmaller(maxSize)
 
 if let scaledImage = TGScaleImageToPixelSize(image, scaledSize) {
 let tempFile = TempBox.shared.tempFile(fileName: "file")
 defer {
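The hunks above thread an "hd" flag from the legacy camera dictionary into forceHd and pick a 2560px cap instead of 1280px when it is set. A small self-contained sketch of the sizing rule follows; aspectFittedOrSmaller is the existing helper in the codebase and is reimplemented here only for illustration.

    import CoreGraphics

    // Sketch of the HD sizing rule: fit within the cap without ever upscaling.
    func targetPhotoSize(original: CGSize, sendInHighQuality: Bool) -> CGSize {
        let maxSide: CGFloat = sendInHighQuality ? 2560.0 : 1280.0
        let scale = min(1.0, min(maxSide / max(1.0, original.width), maxSide / max(1.0, original.height)))
        return CGSize(width: floor(original.width * scale), height: floor(original.height * scale))
    }

    // Example: a 4032x3024 camera photo becomes 2560x1920 in high quality and 1280x960 otherwise.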
@@ -2719,7 +2719,7 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att
 }
 if price == nil {
 //TODO:localize
-items.append(.action(ContextMenuActionItem(text: "Send in High Quality", icon: { theme in
+items.append(.action(ContextMenuActionItem(text: strings.Attachment_SendInHd, icon: { theme in
 return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/QualityHd"), color: theme.contextMenu.primaryColor)
 }, action: { [weak self] _, f in
 f(.default)
@@ -190,8 +190,7 @@ public final class SelectablePeerNode: ASDisplayNode {
 
 switch storyMode {
 case .createStory:
-//TODO:localize
-title = "Post\nto Story"
+title = strings.Share_PostToStory
 overrideImage = .storyIcon
 case .repostStory:
 title = strings.Share_RepostStory
@@ -121,39 +121,61 @@ private func preparedShareItem(postbox: Postbox, network: Network, to peerId: Pe
 }
 }
 
+func getThumbnail(_ avAsset: AVURLAsset) -> Signal<UIImage?, NoError> {
+return Signal { subscriber in
+let imageGenerator = AVAssetImageGenerator(asset: asset)
+imageGenerator.appliesPreferredTrackTransform = true
+imageGenerator.maximumSize = CGSize(width: 640, height: 640)
+imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: 0, preferredTimescale: CMTimeScale(30.0)))]) { _, image, _, _, _ in
+subscriber.putNext(image.flatMap { UIImage(cgImage: $0) })
+subscriber.putCompletion()
+}
+return ActionDisposable {
+imageGenerator.cancelAllCGImageGeneration()
+}
+}
+}
+
 return .single(.preparing(true))
 |> then(
 loadValues(asset)
 |> mapToSignal { asset -> Signal<PreparedShareItem, PreparedShareItemError> in
-let preset = adjustments?.preset ?? TGMediaVideoConversionPresetCompressedMedium
-let finalDimensions = TGMediaVideoConverter.dimensions(for: asset.originalSize, adjustments: adjustments, preset: preset)
-var resourceAdjustments: VideoMediaResourceAdjustments?
-if let adjustments = adjustments {
-if adjustments.trimApplied() {
-finalDuration = adjustments.trimEndValue - adjustments.trimStartValue
+return getThumbnail(asset)
+|> castError(PreparedShareItemError.self)
+|> mapToSignal { thumbnail -> Signal<PreparedShareItem, PreparedShareItemError> in
+let preset = adjustments?.preset ?? TGMediaVideoConversionPresetCompressedMedium
+let finalDimensions = TGMediaVideoConverter.dimensions(for: asset.originalSize, adjustments: adjustments, preset: preset)
+
+var resourceAdjustments: VideoMediaResourceAdjustments?
+if let adjustments = adjustments {
+if adjustments.trimApplied() {
+finalDuration = adjustments.trimEndValue - adjustments.trimStartValue
+}
+
+if let dict = adjustments.dictionary(), let data = try? NSKeyedArchiver.archivedData(withRootObject: dict, requiringSecureCoding: false) {
+let adjustmentsData = MemoryBuffer(data: data)
+let digest = MemoryBuffer(data: adjustmentsData.md5Digest())
+resourceAdjustments = VideoMediaResourceAdjustments(data: adjustmentsData, digest: digest, isStory: false)
+}
 }
 
-if let dict = adjustments.dictionary(), let data = try? NSKeyedArchiver.archivedData(withRootObject: dict, requiringSecureCoding: false) {
-let adjustmentsData = MemoryBuffer(data: data)
-let digest = MemoryBuffer(data: adjustmentsData.md5Digest())
-resourceAdjustments = VideoMediaResourceAdjustments(data: adjustmentsData, digest: digest, isStory: false)
+let estimatedSize = TGMediaVideoConverter.estimatedSize(for: preset, duration: finalDuration, hasAudio: true)
+
+let thumbnailData = thumbnail?.jpegData(compressionQuality: 0.6)
+
+let resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), path: asset.url.path, adjustments: resourceAdjustments)
+return standaloneUploadedFile(postbox: postbox, network: network, peerId: peerId, text: "", source: .resource(.standalone(resource: resource)), thumbnailData: thumbnailData, mimeType: "video/mp4", attributes: [.Video(duration: finalDuration, size: PixelDimensions(width: Int32(finalDimensions.width), height: Int32(finalDimensions.height)), flags: flags, preloadSize: nil, coverTime: 0.0, videoCodec: nil)], hintFileIsLarge: estimatedSize > 10 * 1024 * 1024)
+|> mapError { _ -> PreparedShareItemError in
+return .generic
 }
-
-let estimatedSize = TGMediaVideoConverter.estimatedSize(for: preset, duration: finalDuration, hasAudio: true)
-
-let resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), path: asset.url.path, adjustments: resourceAdjustments)
-return standaloneUploadedFile(postbox: postbox, network: network, peerId: peerId, text: "", source: .resource(.standalone(resource: resource)), mimeType: "video/mp4", attributes: [.Video(duration: finalDuration, size: PixelDimensions(width: Int32(finalDimensions.width), height: Int32(finalDimensions.height)), flags: flags, preloadSize: nil, coverTime: nil, videoCodec: nil)], hintFileIsLarge: estimatedSize > 10 * 1024 * 1024)
-|> mapError { _ -> PreparedShareItemError in
-return .generic
-}
-|> mapToSignal { event -> Signal<PreparedShareItem, PreparedShareItemError> in
-switch event {
-case let .progress(value):
-return .single(.progress(value))
-case let .result(media):
-return .single(.done(.media(media)))
+|> mapToSignal { event -> Signal<PreparedShareItem, PreparedShareItemError> in
+switch event {
+case let .progress(value):
+return .single(.progress(value))
+case let .result(media):
+return .single(.done(.media(media)))
+}
 }
 }
 }
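The hunk above adds a getThumbnail step so shared videos are uploaded with a JPEG thumbnail (thumbnailData) rather than none. A minimal standalone sketch of that thumbnail generation follows, written with a completion handler instead of the codebase's Signal type; the function name is an assumption for illustration.

    import AVFoundation
    import UIKit

    // Sketch: grab one frame near t = 0, capped at 640x640, honoring track rotation.
    func generateShareThumbnail(for asset: AVURLAsset, completion: @escaping (UIImage?) -> Void) {
        let generator = AVAssetImageGenerator(asset: asset)
        generator.appliesPreferredTrackTransform = true
        generator.maximumSize = CGSize(width: 640, height: 640)
        let time = CMTime(seconds: 0, preferredTimescale: 30)
        generator.generateCGImagesAsynchronously(forTimes: [NSValue(time: time)]) { _, cgImage, _, _, _ in
            completion(cgImage.flatMap { UIImage(cgImage: $0) })
        }
    }

    // The prepared item then passes thumbnail?.jpegData(compressionQuality: 0.6)
    // as thumbnailData to standaloneUploadedFile, as shown in the hunk above.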
@@ -204,6 +204,8 @@ private enum ApplicationSpecificGlobalNotice: Int32 {
 case starGiftWearTips = 77
 case channelSuggestTooltip = 78
 case multipleStoriesTooltip = 79
+case voiceMessagesPauseSuggestion = 80
+case videoMessagesPauseSuggestion = 81
 
 var key: ValueBoxKey {
 let v = ValueBoxKey(length: 4)
@@ -569,6 +571,14 @@ private struct ApplicationSpecificNoticeKeys {
 static func multipleStoriesTooltip() -> NoticeEntryKey {
 return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.multipleStoriesTooltip.key)
 }
 
+static func voiceMessagesPauseSuggestion() -> NoticeEntryKey {
+return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.voiceMessagesPauseSuggestion.key)
+}
+
+static func videoMessagesPauseSuggestion() -> NoticeEntryKey {
+return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.videoMessagesPauseSuggestion.key)
+}
 }
 
 public struct ApplicationSpecificNotice {
@@ -2458,4 +2468,58 @@ public struct ApplicationSpecificNotice {
 return Int(previousValue)
 }
 }
 
+public static func getVoiceMessagesPauseSuggestion(accountManager: AccountManager<TelegramAccountManagerTypes>) -> Signal<Int32, NoError> {
+return accountManager.transaction { transaction -> Int32 in
+if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.voiceMessagesPauseSuggestion())?.get(ApplicationSpecificCounterNotice.self) {
+return value.value
+} else {
+return 0
+}
+}
+}
+
+public static func incrementVoiceMessagesPauseSuggestion(accountManager: AccountManager<TelegramAccountManagerTypes>, count: Int = 1) -> Signal<Int, NoError> {
+return accountManager.transaction { transaction -> Int in
+var currentValue: Int32 = 0
+if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.voiceMessagesPauseSuggestion())?.get(ApplicationSpecificCounterNotice.self) {
+currentValue = value.value
+}
+let previousValue = currentValue
+currentValue += Int32(count)
+
+if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) {
+transaction.setNotice(ApplicationSpecificNoticeKeys.voiceMessagesPauseSuggestion(), entry)
+}
+
+return Int(previousValue)
+}
+}
+
+public static func getVideoMessagesPauseSuggestion(accountManager: AccountManager<TelegramAccountManagerTypes>) -> Signal<Int32, NoError> {
+return accountManager.transaction { transaction -> Int32 in
+if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.videoMessagesPauseSuggestion())?.get(ApplicationSpecificCounterNotice.self) {
+return value.value
+} else {
+return 0
+}
+}
+}
+
+public static func incrementVideoMessagesPauseSuggestion(accountManager: AccountManager<TelegramAccountManagerTypes>, count: Int = 1) -> Signal<Int, NoError> {
+return accountManager.transaction { transaction -> Int in
+var currentValue: Int32 = 0
+if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.videoMessagesPauseSuggestion())?.get(ApplicationSpecificCounterNotice.self) {
+currentValue = value.value
+}
+let previousValue = currentValue
+currentValue += Int32(count)
+
+if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) {
+transaction.setNotice(ApplicationSpecificNoticeKeys.videoMessagesPauseSuggestion(), entry)
+}
+
+return Int(previousValue)
+}
+}
 }
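The counters added above track how often the "pause to trim" suggestion has been shown for voice and video messages. A hypothetical consumer would typically read the counter, show the tooltip only below some threshold, and increment it afterwards. The threshold of 3 and the exact module imports below are assumptions for this sketch, not taken from the diff.

    import SwiftSignalKit
    import TelegramCore
    import TelegramUIPreferences

    // Sketch: show the voice-message pause tooltip at most three times.
    func maybeShowVoicePauseTooltip(accountManager: AccountManager<TelegramAccountManagerTypes>, show: @escaping () -> Void) {
        let _ = (ApplicationSpecificNotice.getVoiceMessagesPauseSuggestion(accountManager: accountManager)
        |> take(1)
        |> deliverOnMainQueue).start(next: { count in
            guard count < 3 else {
                return
            }
            show()
            let _ = ApplicationSpecificNotice.incrementVoiceMessagesPauseSuggestion(accountManager: accountManager).start()
        })
    }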
@@ -6,13 +6,15 @@ import AudioWaveform
 
 private final class AudioWaveformNodeParameters: NSObject {
 let waveform: AudioWaveform?
+let drawFakeSamplesIfNeeded: Bool
 let color: UIColor?
 let gravity: AudioWaveformNode.Gravity?
 let progress: CGFloat?
 let trimRange: Range<CGFloat>?
 
-init(waveform: AudioWaveform?, color: UIColor?, gravity: AudioWaveformNode.Gravity?, progress: CGFloat?, trimRange: Range<CGFloat>?) {
+init(waveform: AudioWaveform?, drawFakeSamplesIfNeeded: Bool, color: UIColor?, gravity: AudioWaveformNode.Gravity?, progress: CGFloat?, trimRange: Range<CGFloat>?) {
 self.waveform = waveform
+self.drawFakeSamplesIfNeeded = drawFakeSamplesIfNeeded
 self.color = color
 self.gravity = gravity
 self.progress = progress
@@ -31,6 +33,7 @@ public final class AudioWaveformNode: ASDisplayNode {
 private var waveform: AudioWaveform?
 private var color: UIColor?
 private var gravity: Gravity?
+public var drawFakeSamplesIfNeeded = false
 
 public var progress: CGFloat? {
 didSet {
@@ -77,7 +80,7 @@ public final class AudioWaveformNode: ASDisplayNode {
 }
 
 override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
-return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, gravity: self.gravity, progress: self.progress, trimRange: self.trimRange)
+return AudioWaveformNodeParameters(waveform: self.waveform, drawFakeSamplesIfNeeded: self.drawFakeSamplesIfNeeded, color: self.color, gravity: self.gravity, progress: self.progress, trimRange: self.trimRange)
 }
 
 @objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@@ -117,18 +120,12 @@ public final class AudioWaveformNode: ASDisplayNode {
 
 let numSamples = Int(floor(size.width / (sampleWidth + distance)))
 
-let adjustedSamplesMemory = malloc(numSamples * 2)!
-let adjustedSamples = adjustedSamplesMemory.assumingMemoryBound(to: UInt16.self)
-defer {
-free(adjustedSamplesMemory)
-}
-memset(adjustedSamplesMemory, 0, numSamples * 2)
+var adjustedSamples = Array<UInt16>(repeating: 0, count: numSamples)
 
 var generateFakeSamples = false
 
 var bins: [UInt16: Int] = [:]
 for i in 0 ..< maxReadSamples {
-let index = i * numSamples / maxReadSamples
+let index = min(i * numSamples / max(1, maxReadSamples), numSamples - 1)
 let sample = samples[i]
 if adjustedSamples[index] < sample {
 adjustedSamples[index] = sample
@@ -158,7 +155,7 @@ public final class AudioWaveformNode: ASDisplayNode {
 topCountPercent = Float(topCount) / Float(totalCount)
 }
 
-if topCountPercent > 0.75 {
+if parameters.drawFakeSamplesIfNeeded && topCountPercent > 0.75 {
 generateFakeSamples = true
 }
 
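The waveform hunks above replace the malloc'd sample buffer with a Swift array, clamp the bin index so it can no longer run past the end when the sample counts do not divide evenly, and gate the synthesized "fake samples" fallback behind the new drawFakeSamplesIfNeeded opt-in. A small self-contained sketch of the clamped down-sampling:

    // Sketch of the peak-per-bin down-sampling the change switches to.
    func downsample(_ samples: [UInt16], to numSamples: Int) -> [UInt16] {
        var adjusted = Array<UInt16>(repeating: 0, count: numSamples)
        let maxReadSamples = samples.count
        for i in 0 ..< maxReadSamples {
            // Clamp the target bin so uneven divisions never index out of bounds.
            let index = min(i * numSamples / max(1, maxReadSamples), numSamples - 1)
            adjusted[index] = max(adjusted[index], samples[i])
        }
        return adjusted
    }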
@@ -898,9 +898,8 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, ASScrollViewDeleg
 self.doneButton.title = self.presentationData.strings.Share_ShareMessage
 }
 
-//TODO:localize
 self.scanButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(backgroundColor: .clear, foregroundColor: self.presentationData.theme.actionSheet.controlAccentColor), font: .regular, height: 42.0, cornerRadius: 0.0, gloss: false)
-self.scanButton.title = "Scan QR Code"
+self.scanButton.title = presentationData.strings.PeerInfo_QRCode_Scan
 self.scanButton.icon = UIImage(bundleImageName: "Settings/ScanQr")
 
 self.listNode = ListView()
@@ -169,6 +169,7 @@ public final class ChatRecentActionsController: TelegramBaseController {
 }, openMessagePayment: {
 }, openBoostToUnrestrict: {
 }, updateRecordingTrimRange: { _, _, _, _ in
+}, dismissAllTooltips: {
 }, updateHistoryFilter: { _ in
 }, updateChatLocationThread: { _, _ in
 }, toggleChatSidebarMode: {
@@ -580,7 +580,8 @@ public final class ChatTextInputMediaRecordingButton: TGModernConversationInputM
 self.previousSize = size
 if let view = self.animationView.view {
 let iconSize = view.bounds.size
-view.frame = CGRect(origin: CGPoint(x: floor((size.width - iconSize.width) / 2.0), y: floor((size.height - iconSize.height) / 2.0)), size: iconSize)
+view.bounds = CGRect(origin: .zero, size: iconSize)
+view.center = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
 }
 }
 }
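The hunk above positions the record-button icon via bounds and center rather than frame. The motivation is inferred, not stated in the diff: setting frame on a view that carries a non-identity transform (as animated buttons often do) is undefined, while bounds plus center stays valid. A tiny sketch under that assumption:

    import UIKit

    // Sketch: center a possibly transform-animated icon inside a container.
    func centerIcon(_ view: UIView, in containerSize: CGSize) {
        let iconSize = view.bounds.size
        view.bounds = CGRect(origin: .zero, size: iconSize)
        view.center = CGPoint(x: containerSize.width / 2.0, y: containerSize.height / 2.0)
    }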
@@ -75,7 +75,7 @@ final class ForumModeComponent: Component {
 ItemComponent(
 theme: component.theme,
 animation: "ForumTabs",
-title: "Tabs",
+title: component.strings.PeerInfo_Topics_Tabs,
 isSelected: component.mode == .tabs
 )
 ),
@@ -104,7 +104,7 @@ final class ForumModeComponent: Component {
 ItemComponent(
 theme: component.theme,
 animation: "ForumList",
-title: "List",
+title: component.strings.PeerInfo_Topics_List,
 isSelected: component.mode == .list
 )
 ),
@@ -153,7 +153,7 @@ final class ForumSettingsScreenComponent: Component {
 if case .legacyGroup = peer {
 let context = component.context
 let signal: Signal<EnginePeer.Id?, NoError> = context.engine.peers.convertGroupToSupergroup(peerId: peer.id, additionalProcessing: { upgradedPeerId -> Signal<Never, NoError> in
-return context.engine.peers.setChannelForumMode(id: upgradedPeerId, isForum: true, displayForumAsTabs: false)
+return context.engine.peers.setChannelForumMode(id: upgradedPeerId, isForum: true, displayForumAsTabs: displayForumAsTabs)
 })
 |> map(Optional.init)
 |> `catch` { [weak self] error -> Signal<PeerId?, NoError> in
@@ -244,11 +244,10 @@ final class ForumSettingsScreenComponent: Component {
 
 let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
 
-//TODO:localize
 let navigationTitleSize = self.navigationTitle.update(
 transition: transition,
 component: AnyComponent(MultilineTextComponent(
-text: .plain(NSAttributedString(string: "Topics", font: Font.semibold(17.0), textColor: environment.theme.rootController.navigationBar.primaryTextColor)),
+text: .plain(NSAttributedString(string: environment.strings.PeerInfo_Topics_Title, font: Font.semibold(17.0), textColor: environment.theme.rootController.navigationBar.primaryTextColor)),
 horizontalAlignment: .center
 )),
 environment: {},
@@ -293,7 +292,7 @@ final class ForumSettingsScreenComponent: Component {
 
 contentHeight += 124.0
 
-let subtitleString = NSMutableAttributedString(attributedString: parseMarkdownIntoAttributedString("The group chat will be divided into topics created by admins or users.", attributes: MarkdownAttributes(
+let subtitleString = NSMutableAttributedString(attributedString: parseMarkdownIntoAttributedString(environment.strings.PeerInfo_Topics_EnableTopicsInfo, attributes: MarkdownAttributes(
 body: MarkdownAttributeSet(font: Font.regular(15.0), textColor: environment.theme.list.freeTextColor),
 bold: MarkdownAttributeSet(font: Font.semibold(15.0), textColor: environment.theme.list.freeTextColor),
 link: MarkdownAttributeSet(font: Font.regular(15.0), textColor: environment.theme.list.itemAccentColor),
@@ -344,7 +343,7 @@ final class ForumSettingsScreenComponent: Component {
 title: AnyComponent(VStack([
 AnyComponentWithIdentity(id: AnyHashable(0), component: AnyComponent(MultilineTextComponent(
 text: .plain(NSAttributedString(
-string: "Enable Topics",
+string: environment.strings.PeerInfo_Topics_EnableTopics,
 font: Font.regular(presentationData.listsFontSize.baseDisplaySize),
 textColor: environment.theme.list.itemPrimaryTextColor
 )),
@@ -389,7 +388,7 @@ final class ForumSettingsScreenComponent: Component {
 theme: environment.theme,
 header: AnyComponent(MultilineTextComponent(
 text: .plain(NSAttributedString(
-string: "DISPLAY AS",
+string: environment.strings.PeerInfo_Topics_DisplayAs,
 font: Font.regular(presentationData.listsFontSize.itemListBaseHeaderFontSize),
 textColor: environment.theme.list.freeTextColor
 )),
@@ -397,7 +396,7 @@ final class ForumSettingsScreenComponent: Component {
 )),
 footer: AnyComponent(MultilineTextComponent(
 text: .markdown(
-text: "Choose how topics appear for all members.",
+text: environment.strings.PeerInfo_Topics_DisplayAsInfo,
 attributes: MarkdownAttributes(
 body: MarkdownAttributeSet(font: Font.regular(presentationData.listsFontSize.itemListBaseHeaderFontSize), textColor: environment.theme.list.freeTextColor),
 bold: MarkdownAttributeSet(font: Font.semibold(presentationData.listsFontSize.itemListBaseHeaderFontSize), textColor: environment.theme.list.freeTextColor),
@@ -862,7 +862,7 @@ private final class GiftViewSheetContent: CombinedComponent {
 )
 controller.present(alertController, in: .window(.root))
 } else {
-let resellController = self.context.sharedContext.makeStarGiftResellScreen(context: self.context, update: update, completion: { [weak self, weak controller] price in
+let resellController = self.context.sharedContext.makeStarGiftResellScreen(context: self.context, gift: gift, update: update, completion: { [weak self, weak controller] price in
 guard let self, let controller else {
 return
 }
@@ -1672,20 +1672,43 @@ public class TrimView: UIView {
 self.rightHandleView.addSubview(self.rightCapsuleView)
 self.addSubview(self.borderView)
 
-self.zoneView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleZoneHandlePan(_:))))
-self.leftHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleLeftHandlePan(_:))))
-self.rightHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleRightHandlePan(_:))))
+let zoneHandlePanGesture = UILongPressGestureRecognizer(target: self, action: #selector(self.handleZoneHandlePan(_:)))
+zoneHandlePanGesture.minimumPressDuration = 0.0
+zoneHandlePanGesture.allowableMovement = .infinity
+
+let leftHandlePanGesture = UILongPressGestureRecognizer(target: self, action: #selector(self.handleLeftHandlePan(_:)))
+leftHandlePanGesture.minimumPressDuration = 0.0
+leftHandlePanGesture.allowableMovement = .infinity
+
+let rightHandlePanGesture = UILongPressGestureRecognizer(target: self, action: #selector(self.handleRightHandlePan(_:)))
+rightHandlePanGesture.minimumPressDuration = 0.0
+rightHandlePanGesture.allowableMovement = .infinity
+
+self.zoneView.addGestureRecognizer(zoneHandlePanGesture)
+self.leftHandleView.addGestureRecognizer(leftHandlePanGesture)
+self.rightHandleView.addGestureRecognizer(rightHandlePanGesture)
 }
 
 required init?(coder: NSCoder) {
 fatalError("init(coder:) has not been implemented")
 }
 
-@objc private func handleZoneHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
+private var panStartLocation: CGPoint?
+
+@objc private func handleZoneHandlePan(_ gestureRecognizer: UILongPressGestureRecognizer) {
 guard let params = self.params else {
 return
 }
-let translation = gestureRecognizer.translation(in: self)
+let location = gestureRecognizer.location(in: self)
+if case .began = gestureRecognizer.state {
+self.panStartLocation = location
+}
+
+let translation = CGPoint(
+x: location.x - (self.panStartLocation?.x ?? 0.0),
+y: location.y - (self.panStartLocation?.y ?? 0.0)
+)
+
 let start = handleWidth / 2.0
 let end = self.frame.width - handleWidth / 2.0
@@ -1706,6 +1729,7 @@ public class TrimView: UIView {
 transition = .easeInOut(duration: 0.25)
 }
 case .ended, .cancelled:
+self.panStartLocation = nil
 self.isPanningTrimHandle = false
 self.trimUpdated(startValue, endValue, false, true)
 transition = .easeInOut(duration: 0.25)
@@ -1713,15 +1737,15 @@ public class TrimView: UIView {
 break
 }
 
-gestureRecognizer.setTranslation(.zero, in: self)
 self.updated(transition)
 }
 
-@objc private func handleLeftHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
+@objc private func handleLeftHandlePan(_ gestureRecognizer: UILongPressGestureRecognizer) {
 guard let params = self.params else {
 return
 }
 let location = gestureRecognizer.location(in: self)
 
 let start = handleWidth / 2.0
 let end = params.scrubberSize.width - handleWidth / 2.0
 let length = end - start
@@ -1746,6 +1770,7 @@ public class TrimView: UIView {
 transition = .easeInOut(duration: 0.25)
 }
 case .ended, .cancelled:
+self.panStartLocation = nil
 self.isPanningTrimHandle = false
 self.trimUpdated(startValue, endValue, false, true)
 transition = .easeInOut(duration: 0.25)
@@ -1755,7 +1780,7 @@ public class TrimView: UIView {
 self.updated(transition)
 }
 
-@objc private func handleRightHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
+@objc private func handleRightHandlePan(_ gestureRecognizer: UILongPressGestureRecognizer) {
 guard let params = self.params else {
 return
 }
@@ -1784,6 +1809,7 @@ public class TrimView: UIView {
 transition = .easeInOut(duration: 0.25)
 }
 case .ended, .cancelled:
+self.panStartLocation = nil
 self.isPanningTrimHandle = false
 self.trimUpdated(startValue, endValue, true, true)
 transition = .easeInOut(duration: 0.25)
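Note: the TrimView changes above swap UIPanGestureRecognizer for a zero-delay UILongPressGestureRecognizer so the trim handles react on touch-down; because a long-press recognizer has no translation(in:), the view stores the start location and derives the translation itself. A minimal standalone sketch of that pattern follows; the class and selector names are hypothetical and not part of this commit.

import UIKit

// Illustrative only: a long-press recognizer configured to behave like a pan
// that begins immediately on touch-down, with the translation computed manually.
final class DragTrackingView: UIView {
    private var panStartLocation: CGPoint?

    override init(frame: CGRect) {
        super.init(frame: frame)
        let gesture = UILongPressGestureRecognizer(target: self, action: #selector(self.handleDrag(_:)))
        gesture.minimumPressDuration = 0.0       // fire on touch-down instead of after a hold
        gesture.allowableMovement = .infinity    // never fail because the finger moved
        self.addGestureRecognizer(gesture)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    @objc private func handleDrag(_ gestureRecognizer: UILongPressGestureRecognizer) {
        let location = gestureRecognizer.location(in: self)
        switch gestureRecognizer.state {
        case .began:
            self.panStartLocation = location
        case .changed:
            // UILongPressGestureRecognizer has no translation(in:), so derive it
            // from the stored start location, as the handlers above do.
            let translation = CGPoint(
                x: location.x - (self.panStartLocation?.x ?? 0.0),
                y: location.y - (self.panStartLocation?.y ?? 0.0)
            )
            _ = translation // apply to whatever is being dragged
        default:
            self.panStartLocation = nil
        }
    }
}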
@@ -434,6 +434,7 @@ final class PeerInfoSelectionPanelNode: ASDisplayNode {
 }, openMessagePayment: {
 }, openBoostToUnrestrict: {
 }, updateRecordingTrimRange: { _, _, _, _ in
+}, dismissAllTooltips: {
 }, updateHistoryFilter: { _ in
 }, updateChatLocationThread: { _, _ in
 }, toggleChatSidebarMode: {
@@ -2580,8 +2581,7 @@ private func editingItems(data: PeerInfoScreenData?, boostStatus: ChannelBoostSt
 }
 
 if canSetupTopics {
-//TODO:localize
-let label = channel.flags.contains(.isForum) ? "Enabled" : "Disabled"
+let label = channel.flags.contains(.isForum) ? presentationData.strings.PeerInfo_OptionTopics_Enabled : presentationData.strings.PeerInfo_OptionTopics_Disabled
 items[.peerDataSettings]!.append(PeerInfoScreenDisclosureItem(id: ItemTopics, label: .text(label), text: presentationData.strings.PeerInfo_OptionTopics, icon: UIImage(bundleImageName: "Settings/Menu/Topics"), action: {
 if let topicsLimitedReason = topicsLimitedReason {
 interaction.displayTopicsLimited(topicsLimitedReason)
@@ -2709,8 +2709,7 @@ private func editingItems(data: PeerInfoScreenData?, boostStatus: ChannelBoostSt
 }
 
 if canSetupTopics {
-//TODO:localize
-items[.peerPublicSettings]!.append(PeerInfoScreenDisclosureItem(id: ItemTopics, label: .text("Disabled"), text: presentationData.strings.PeerInfo_OptionTopics, icon: UIImage(bundleImageName: "Settings/Menu/Topics"), action: {
+items[.peerPublicSettings]!.append(PeerInfoScreenDisclosureItem(id: ItemTopics, label: .text(presentationData.strings.PeerInfo_OptionTopics_Disabled), text: presentationData.strings.PeerInfo_OptionTopics, icon: UIImage(bundleImageName: "Settings/Menu/Topics"), action: {
 if let topicsLimitedReason = topicsLimitedReason {
 interaction.displayTopicsLimited(topicsLimitedReason)
 } else {
@@ -783,6 +783,7 @@ final class PeerSelectionControllerNode: ASDisplayNode {
 }, openMessagePayment: {
 }, openBoostToUnrestrict: {
 }, updateRecordingTrimRange: { _, _, _, _ in
+}, dismissAllTooltips: {
 }, updateHistoryFilter: { _ in
 }, updateChatLocationThread: { _, _ in
 }, toggleChatSidebarMode: {
@@ -596,58 +596,71 @@ public class ShareRootControllerImpl {
 //inForeground.set(false)
 self?.getExtensionContext()?.completeRequest(returningItems: nil, completionHandler: nil)
 }
-shareController.shareStory = { [weak self] in
-guard let self else {
-return
-}
-if let inputItems = self.getExtensionContext()?.inputItems, inputItems.count == 1, let item = inputItems[0] as? NSExtensionItem, let attachments = item.attachments {
-let sessionId = Int64.random(in: 1000000 ..< .max)
-
-let storiesPath = rootPath + "/share/stories/\(sessionId)"
-let _ = try? FileManager.default.createDirectory(atPath: storiesPath, withIntermediateDirectories: true, attributes: nil)
-var index = 0
-
-let dispatchGroup = DispatchGroup()
-
-for attachment in attachments {
-let fileIndex = index
-if attachment.hasItemConformingToTypeIdentifier(kUTTypeImage as String) {
-dispatchGroup.enter()
-attachment.loadFileRepresentation(forTypeIdentifier: kUTTypeImage as String, completionHandler: { url, _ in
-if let url, let imageData = try? Data(contentsOf: url) {
-let filePath = storiesPath + "/\(fileIndex).jpg"
-try? FileManager.default.removeItem(atPath: filePath)
-
-do {
-try imageData.write(to: URL(fileURLWithPath: filePath))
-} catch {
-print("Error: \(error)")
-}
-}
-dispatchGroup.leave()
-})
-} else if attachment.hasItemConformingToTypeIdentifier(kUTTypeMovie as String) {
-dispatchGroup.enter()
-attachment.loadFileRepresentation(forTypeIdentifier: kUTTypeMovie as String, completionHandler: { url, _ in
-if let url {
-let filePath = storiesPath + "/\(fileIndex).mp4"
-try? FileManager.default.removeItem(atPath: filePath)
-
-do {
-try FileManager.default.copyItem(at: url, to: URL(fileURLWithPath: filePath))
-} catch {
-print("Error: \(error)")
-}
-}
-dispatchGroup.leave()
-})
-}
-index += 1
+var canShareToStory = true
+if let inputItems = self?.getExtensionContext()?.inputItems, inputItems.count == 1, let item = inputItems[0] as? NSExtensionItem, let attachments = item.attachments {
+for attachment in attachments {
+if attachment.hasItemConformingToTypeIdentifier(kUTTypeImage as String) {
+} else if attachment.hasItemConformingToTypeIdentifier(kUTTypeMovie as String) {
+} else {
+canShareToStory = false
 }
-dispatchGroup.notify(queue: .main) {
-self.openUrl("tg://shareStory?session=\(sessionId)")
+}
+}
+
+if canShareToStory {
+shareController.shareStory = { [weak self] in
+guard let self else {
+return
+}
+if let inputItems = self.getExtensionContext()?.inputItems, inputItems.count == 1, let item = inputItems[0] as? NSExtensionItem, let attachments = item.attachments {
+let sessionId = Int64.random(in: 1000000 ..< .max)
+
+let storiesPath = rootPath + "/share/stories/\(sessionId)"
+let _ = try? FileManager.default.createDirectory(atPath: storiesPath, withIntermediateDirectories: true, attributes: nil)
+var index = 0
+
+let dispatchGroup = DispatchGroup()
+
+for attachment in attachments {
+let fileIndex = index
+if attachment.hasItemConformingToTypeIdentifier(kUTTypeImage as String) {
+dispatchGroup.enter()
+attachment.loadFileRepresentation(forTypeIdentifier: kUTTypeImage as String, completionHandler: { url, _ in
+if let url, let imageData = try? Data(contentsOf: url) {
+let filePath = storiesPath + "/\(fileIndex).jpg"
+try? FileManager.default.removeItem(atPath: filePath)
+
+do {
+try imageData.write(to: URL(fileURLWithPath: filePath))
+} catch {
+print("Error: \(error)")
+}
+}
+dispatchGroup.leave()
+})
+} else if attachment.hasItemConformingToTypeIdentifier(kUTTypeMovie as String) {
+dispatchGroup.enter()
+attachment.loadFileRepresentation(forTypeIdentifier: kUTTypeMovie as String, completionHandler: { url, _ in
+if let url {
+let filePath = storiesPath + "/\(fileIndex).mp4"
+try? FileManager.default.removeItem(atPath: filePath)
+
+do {
+try FileManager.default.copyItem(at: url, to: URL(fileURLWithPath: filePath))
+} catch {
+print("Error: \(error)")
+}
+}
+dispatchGroup.leave()
+})
+}
+index += 1
+}
+
+dispatchGroup.notify(queue: .main) {
+self.openUrl("tg://shareStory?session=\(sessionId)")
+}
 }
 }
 }
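Note: the rewritten share-extension block above first scans the attachments to decide whether story sharing is allowed, and only then installs shareController.shareStory, which copies every image or movie into a session directory and waits on a DispatchGroup before opening the tg://shareStory URL. A condensed, self-contained sketch of that copy step follows, assuming a caller-supplied directory and completion; both are placeholders, not the extension's real values.

import Foundation
import MobileCoreServices

// Illustrative only: check each provider's type, load it as a file, copy it into
// `directory`, and signal `completion` once every attachment has been handled.
func copyAttachments(_ attachments: [NSItemProvider], to directory: String, completion: @escaping () -> Void) {
    let dispatchGroup = DispatchGroup()
    var index = 0
    for attachment in attachments {
        let fileIndex = index
        index += 1
        let typeIdentifier: String
        let fileExtension: String
        if attachment.hasItemConformingToTypeIdentifier(kUTTypeImage as String) {
            typeIdentifier = kUTTypeImage as String
            fileExtension = "jpg"
        } else if attachment.hasItemConformingToTypeIdentifier(kUTTypeMovie as String) {
            typeIdentifier = kUTTypeMovie as String
            fileExtension = "mp4"
        } else {
            continue
        }
        dispatchGroup.enter()
        let _ = attachment.loadFileRepresentation(forTypeIdentifier: typeIdentifier, completionHandler: { url, _ in
            // The provided URL is only valid inside this handler, so copy immediately.
            if let url {
                let filePath = directory + "/\(fileIndex).\(fileExtension)"
                try? FileManager.default.removeItem(atPath: filePath)
                try? FileManager.default.copyItem(at: url, to: URL(fileURLWithPath: filePath))
            }
            dispatchGroup.leave()
        })
    }
    dispatchGroup.notify(queue: .main) {
        completion()
    }
}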
@@ -144,7 +144,7 @@ private final class SheetContent: CombinedComponent {
 
 minAmount = StarsAmount(value: 1, nanos: 0)
 maxAmount = withdrawConfiguration.maxPaidMediaAmount.flatMap { StarsAmount(value: $0, nanos: 0) }
-case let .starGiftResell(update):
+case let .starGiftResell(_, update):
 titleString = update ? environment.strings.Stars_SellGift_EditTitle : environment.strings.Stars_SellGift_Title
 amountTitle = environment.strings.Stars_SellGift_AmountTitle
 amountPlaceholder = environment.strings.Stars_SellGift_AmountPlaceholder
@@ -487,6 +487,36 @@ private final class SheetContent: CombinedComponent {
 }
 })
 }
 
+if case let .starGiftResell(giftToMatch, update) = self.mode {
+if update {
+if let resellStars = giftToMatch.resellStars {
+self.amount = StarsAmount(value: resellStars, nanos: 0)
+}
+} else {
+let _ = (context.engine.payments.cachedStarGifts()
+|> filter { $0 != nil }
+|> take(1)
+|> deliverOnMainQueue).start(next: { [weak self] gifts in
+guard let self, let gifts else {
+return
+}
+guard let matchingGift = gifts.first(where: { gift in
+if case let .generic(gift) = gift, gift.title == giftToMatch.title {
+return true
+} else {
+return false
+}
+}) else {
+return
+}
+if case let .generic(genericGift) = matchingGift, let minResaleStars = genericGift.availability?.minResaleStars {
+self.amount = StarsAmount(value: minResaleStars, nanos: 0)
+self.updated()
+}
+})
+}
+}
 }
 
 deinit {
@@ -592,7 +622,7 @@ public final class StarsWithdrawScreen: ViewControllerComponentContainer {
 case accountWithdraw
 case paidMedia(Int64?)
 case reaction(Int64?)
-case starGiftResell(Bool)
+case starGiftResell(StarGift.UniqueGift, Bool)
 case paidMessages(current: Int64, minValue: Int64, fractionAfterCommission: Int, kind: StarsWithdrawalScreenSubject.PaidMessageKind)
 }
 
@@ -1199,6 +1199,8 @@ public class VideoMessageCameraScreen: ViewController {
 self.currentLiveUploadData = nil
 }
 
+let _ = ApplicationSpecificNotice.incrementVideoMessagesPauseSuggestion(accountManager: self.context.sharedContext.accountManager, count: 3).startStandalone()
+
 self.pauseCameraCapture()
 
 self.results.append(result)
@@ -1251,22 +1253,35 @@ public class VideoMessageCameraScreen: ViewController {
 return result
 }
 
-fileprivate func maybePresentViewOnceTooltip() {
+fileprivate func maybePresentTooltips() {
 let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
-let _ = (ApplicationSpecificNotice.getVideoMessagesPlayOnceSuggestion(accountManager: context.sharedContext.accountManager)
-|> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
+let _ = (ApplicationSpecificNotice.getVideoMessagesPauseSuggestion(accountManager: self.context.sharedContext.accountManager)
+|> deliverOnMainQueue).startStandalone(next: { [weak self] pauseCounter in
 guard let self else {
 return
 }
-if counter >= 3 {
-return
+if pauseCounter >= 3 {
+let _ = (ApplicationSpecificNotice.getVideoMessagesPlayOnceSuggestion(accountManager: self.context.sharedContext.accountManager)
+|> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
+guard let self else {
+return
+}
+if counter >= 3 {
+return
+}
+Queue.mainQueue().after(0.3) {
+self.displayViewOnceTooltip(text: presentationData.strings.Chat_TapToPlayVideoMessageOnceTooltip, hasIcon: true)
+}
+let _ = ApplicationSpecificNotice.incrementVideoMessagesPlayOnceSuggestion(accountManager: self.context.sharedContext.accountManager).startStandalone()
+})
+} else {
+Queue.mainQueue().after(0.3) {
+self.displayPauseTooltip(text: presentationData.strings.Chat_PauseVideoMessageTooltip)
+}
+let _ = ApplicationSpecificNotice.incrementVideoMessagesPauseSuggestion(accountManager: self.context.sharedContext.accountManager).startStandalone()
 }
 
-Queue.mainQueue().after(0.3) {
-self.displayViewOnceTooltip(text: presentationData.strings.Chat_TapToPlayVideoMessageOnceTooltip, hasIcon: true)
-}
-
-let _ = ApplicationSpecificNotice.incrementVideoMessagesPlayOnceSuggestion(accountManager: self.context.sharedContext.accountManager).startStandalone()
 })
 }
 
@@ -1299,6 +1314,36 @@ public class VideoMessageCameraScreen: ViewController {
 )
 controller.present(tooltipController, in: .window(.root))
 }
 
+private func displayPauseTooltip(text: String) {
+guard let controller = self.controller, let sourceView = self.componentHost.findTaggedView(tag: viewOnceButtonTag) else {
+return
+}
+
+self.dismissAllTooltips()
+
+let absoluteFrame = sourceView.convert(sourceView.bounds, to: self.view)
+let location = CGRect(origin: CGPoint(x: absoluteFrame.midX - 20.0, y: absoluteFrame.midY + 53.0), size: CGSize())
+
+let tooltipController = TooltipScreen(
+account: context.account,
+sharedContext: context.sharedContext,
+text: .markdown(text: text),
+balancedTextLayout: true,
+constrainWidth: 240.0,
+style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
+arrowStyle: .small,
+icon: nil,
+location: .point(location, .right),
+displayDuration: .default,
+inset: 8.0,
+cornerRadius: 8.0,
+shouldDismissOnTouch: { _, _ in
+return .ignore
+}
+)
+controller.present(tooltipController, in: .window(.root))
+}
+
 fileprivate func dismissAllTooltips() {
 guard let controller = self.controller else {
@@ -1934,7 +1979,7 @@ public class VideoMessageCameraScreen: ViewController {
 self.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4))
 }
 
-self.node.maybePresentViewOnceTooltip()
+self.node.maybePresentTooltips()
 }
 
 public func discardVideo() {
@@ -4112,6 +4112,11 @@ extension ChatControllerImpl {
 return
 }
 self.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
+}, dismissAllTooltips: { [weak self] in
+guard let self else {
+return
+}
+self.dismissAllTooltips()
 }, updateHistoryFilter: { [weak self] update in
 guard let self else {
 return
@@ -276,6 +276,8 @@ extension ChatControllerImpl {
 audioRecorderValue.stop()
 }
 
+self.dismissAllTooltips()
+
 switch updatedAction {
 case .dismiss:
 self.recorderDataDisposable.set(nil)
@@ -297,12 +299,13 @@ extension ChatControllerImpl {
 if data.duration < 0.5 {
 strongSelf.recorderFeedback?.error()
 strongSelf.recorderFeedback = nil
+strongSelf.audioRecorder.set(.single(nil))
+strongSelf.recorderDataDisposable.set(nil)
 strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
 $0.updatedInputTextPanelState { panelState in
 return panelState.withUpdatedMediaRecordingState(nil)
 }
 })
-strongSelf.recorderDataDisposable.set(nil)
 } else if let waveform = data.waveform {
 if resource == nil {
 resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max), size: Int64(data.compressedData.count))
@@ -351,6 +354,7 @@ extension ChatControllerImpl {
 strongSelf.recorderFeedback?.error()
 strongSelf.recorderFeedback = nil
 strongSelf.audioRecorder.set(.single(nil))
+strongSelf.recorderDataDisposable.set(nil)
 } else {
 let randomId = Int64.random(in: Int64.min ... Int64.max)
 
@@ -496,15 +500,16 @@ extension ChatControllerImpl {
 })
 }
 
+//TODO:localize
 if let recordedMediaPreview = self.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let _ = audio.trimRange {
 self.present(
 textAlertController(
 context: self.context,
-title: "Trim to selected range?",
-text: "Audio outside that range will be discarded, and recording will start immediately.",
+title: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Title,
+text: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Text,
 actions: [
-TextAlertAction(type: .genericAction, title: "Cancel", action: {}),
-TextAlertAction(type: .defaultAction, title: "Proceed", action: {
+TextAlertAction(type: .genericAction, title: self.presentationData.strings.Common_Cancel, action: {}),
+TextAlertAction(type: .defaultAction, title: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Proceed, action: {
 proceed()
 })
 ]
@@ -525,7 +530,11 @@ extension ChatControllerImpl {
 })
 }
 
-self.videoRecorderValue?.lockVideoRecording()
+if let _ = self.audioRecorderValue {
+self.maybePresentAudioPauseTooltip()
+} else if let videoRecorderValue = self.videoRecorderValue {
+videoRecorderValue.lockVideoRecording()
+}
 }
 
 func deleteMediaRecording() {
@@ -541,6 +550,56 @@ extension ChatControllerImpl {
 $0.updatedInterfaceState { $0.withUpdatedMediaDraftState(nil) }
 })
 self.updateDownButtonVisibility()
 
+self.dismissAllTooltips()
+}
+
+private func maybePresentAudioPauseTooltip() {
+let _ = (ApplicationSpecificNotice.getVoiceMessagesPauseSuggestion(accountManager: self.context.sharedContext.accountManager)
+|> deliverOnMainQueue).startStandalone(next: { [weak self] pauseCounter in
+guard let self else {
+return
+}
+
+if pauseCounter >= 3 {
+return
+} else {
+Queue.mainQueue().after(0.3) {
+self.displayPauseTooltip(text: self.presentationData.strings.Chat_PauseVoiceMessageTooltip)
+}
+let _ = ApplicationSpecificNotice.incrementVoiceMessagesPauseSuggestion(accountManager: self.context.sharedContext.accountManager).startStandalone()
+}
+})
+}
+
+private func displayPauseTooltip(text: String) {
+guard let layout = self.validLayout else {
+return
+}
+
+self.dismissAllTooltips()
+
+let insets = layout.insets(options: [.input])
+let location = CGRect(origin: CGPoint(x: layout.size.width - layout.safeInsets.right - 42.0 - UIScreenPixel, y: layout.size.height - insets.bottom - 122.0), size: CGSize())
+
+let tooltipController = TooltipScreen(
+account: self.context.account,
+sharedContext: self.context.sharedContext,
+text: .markdown(text: text),
+balancedTextLayout: true,
+constrainWidth: 240.0,
+style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
+arrowStyle: .small,
+icon: nil,
+location: .point(location, .right),
+displayDuration: .default,
+inset: 8.0,
+cornerRadius: 8.0,
+shouldDismissOnTouch: { _, _ in
+return .ignore
+}
+)
+self.present(tooltipController, in: .window(.root))
 }
 
 private func withAudioRecorder(_ f: (ManagedAudioRecorder) -> Void) {
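Note: both pause tooltips added above follow the same capped-hint pattern: read a persisted counter, stop once it reaches 3, otherwise show the tip after a 0.3 s delay and increment the counter. A minimal analogue backed by UserDefaults rather than ApplicationSpecificNotice; the key name is made up for illustration.

import Foundation

// Illustrative only: show a one-off hint at most `limit` times across launches.
final class PauseHintGate {
    private let key = "chat.pauseHintShownCount" // hypothetical key
    private let limit = 3

    // Calls `show` only while the hint has been displayed fewer than `limit` times.
    func presentIfNeeded(show: @escaping () -> Void) {
        let counter = UserDefaults.standard.integer(forKey: key)
        guard counter < limit else {
            return
        }
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
            show()
        }
        UserDefaults.standard.set(counter + 1, forKey: key)
    }
}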
@@ -9111,6 +9111,9 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
 if let controller = controller as? QuickShareToastScreen {
 controller.dismissWithCommitAction()
 }
+if let controller = controller as? TooltipScreen, !controller.alwaysVisible {
+controller.dismiss()
+}
 })
 self.forEachController({ controller in
 if let controller = controller as? UndoOverlayController {
@@ -2225,7 +2225,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
 hideInfo = true
 }
 case .waitingForPreview:
-Queue.mainQueue().after(0.3, {
+Queue.mainQueue().after(0.5, {
 self.actionButtons.micButton.audioRecorder = nil
 })
 }
@@ -2924,6 +2924,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
 
 self.tooltipController?.dismiss()
 if self.viewOnce {
+self.interfaceInteraction?.dismissAllTooltips()
 self.displayViewOnceTooltip(text: interfaceState.strings.Chat_PlayVoiceMessageOnceTooltip)
 
 let _ = ApplicationSpecificNotice.incrementVoiceMessagesPlayOnceSuggestion(accountManager: context.sharedContext.accountManager, count: 3).startStandalone()
@@ -926,24 +926,50 @@ func openResolvedUrlImpl(
 source = subject
 }
 }
 
-let externalState = MediaEditorTransitionOutExternalState(
-storyTarget: nil,
-isForcedTarget: false,
-isPeerArchived: false,
-transitionOut: nil
-)
-let controller = context.sharedContext.makeStoryMediaEditorScreen(context: context, source: source, text: nil, link: nil, completion: { results, commit in
-let target: Stories.PendingTarget = results.first!.target
-externalState.storyTarget = target
-if let rootController = context.sharedContext.mainWindow?.viewController as? TelegramRootControllerInterface {
-rootController.proceedWithStoryUpload(target: target, results: results, existingMedia: nil, forwardInfo: nil, externalState: externalState, commit: commit)
+let _ = (context.engine.messages.checkStoriesUploadAvailability(target: .myStories)
+|> deliverOnMainQueue).start(next: { availability in
+if case let .available(remainingCount) = availability {
+let controller = context.sharedContext.makeStoryMediaEditorScreen(context: context, source: source, text: nil, link: nil, remainingCount: remainingCount, completion: { results, externalState, commit in
+let target: Stories.PendingTarget = results.first!.target
+externalState.storyTarget = target
+
+if let rootController = context.sharedContext.mainWindow?.viewController as? TelegramRootControllerInterface {
+rootController.popToRoot(animated: false)
+rootController.proceedWithStoryUpload(target: target, results: results, existingMedia: nil, forwardInfo: nil, externalState: externalState, commit: commit)
+}
+})
+if let navigationController {
+navigationController.pushViewController(controller)
+}
+} else {
+let subject: PremiumLimitSubject
+switch availability {
+case .expiringLimit:
+subject = .expiringStories
+case .weeklyLimit:
+subject = .storiesWeekly
+case .monthlyLimit:
+subject = .storiesMonthly
+default:
+subject = .expiringStories
+}
+var replaceImpl: ((ViewController) -> Void)?
+let controller = context.sharedContext.makePremiumLimitController(context: context, subject: subject, count: 10, forceDark: false, cancel: {
+}, action: {
+let controller = context.sharedContext.makePremiumIntroController(context: context, source: .stories, forceDark: true, dismissed: {
+})
+replaceImpl?(controller)
+return true
+})
+replaceImpl = { [weak controller] c in
+controller?.replace(with: c)
+}
+if let navigationController {
+navigationController.pushViewController(controller)
+}
 }
 })
-if let navigationController {
-navigationController.pushViewController(controller)
-}
 case let .startAttach(peerId, payload, choose):
 let presentError: (String) -> Void = { errorText in
 present(UndoOverlayController(presentationData: presentationData, content: .info(title: nil, text: errorText, timeout: nil, customUndoText: nil), elevatedLayout: true, animateInAsReplacement: false, action: { _ in
@@ -3572,31 +3572,49 @@ public final class SharedAccountContextImpl: SharedAccountContext {
 return editorController
 }
 
-public func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, completion: @escaping ([MediaEditorScreenResult], @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController {
-let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
+public func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, remainingCount: Int32, completion: @escaping ([MediaEditorScreenResult], MediaEditorTransitionOutExternalState, @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController {
+let editorSubject: Signal<MediaEditorScreenImpl.Subject?, NoError>
 if let image = source as? UIImage {
-subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight, fromCamera: false))
+editorSubject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight, fromCamera: false))
 } else if let path = source as? String {
-subject = .single(.video(videoPath: path, thumbnail: nil, mirror: false, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: PixelDimensions(width: 1080, height: 1920), duration: 0.0, videoPositionChanges: [], additionalVideoPosition: .bottomRight, fromCamera: false))
+editorSubject = .single(.video(videoPath: path, thumbnail: nil, mirror: false, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: PixelDimensions(width: 1080, height: 1920), duration: 0.0, videoPositionChanges: [], additionalVideoPosition: .bottomRight, fromCamera: false))
 } else if let subjects = source as? [MediaEditorScreenImpl.Subject] {
-subject = .single(.multiple(subjects))
+editorSubject = .single(.multiple(subjects))
 } else if let subjectValue = source as? MediaEditorScreenImpl.Subject {
-subject = .single(subjectValue)
+editorSubject = .single(subjectValue)
 } else {
-subject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
+editorSubject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
 }
 
+let externalState = MediaEditorTransitionOutExternalState(
+storyTarget: nil,
+isForcedTarget: false,
+isPeerArchived: false,
+transitionOut: nil
+)
+
 let editorController = MediaEditorScreenImpl(
 context: context,
-mode: .storyEditor(remainingCount: 1),
-subject: subject,
+mode: .storyEditor(remainingCount: remainingCount),
+subject: editorSubject,
 customTarget: nil,
 initialCaption: text.flatMap { NSAttributedString(string: $0) },
 initialLink: link,
 transitionIn: nil,
 transitionOut: { finished, isNew in
+if let externalTransitionOut = externalState.transitionOut {
+if finished, let transitionOut = externalTransitionOut(externalState.storyTarget, false), let destinationView = transitionOut.destinationView {
+return MediaEditorScreenImpl.TransitionOut(
+destinationView: destinationView,
+destinationRect: transitionOut.destinationRect,
+destinationCornerRadius: transitionOut.destinationCornerRadius,
+completion: transitionOut.completion
+)
+}
+}
 return nil
 }, completion: { results, commit in
-completion(results, commit)
+completion(results, externalState, commit)
 } as ([MediaEditorScreenImpl.Result], @escaping (@escaping () -> Void) -> Void) -> Void
 )
 return editorController
@@ -3708,8 +3726,8 @@ public final class SharedAccountContextImpl: SharedAccountContext {
 return StarsWithdrawScreen(context: context, mode: mode, completion: completion)
 }
 
-public func makeStarGiftResellScreen(context: AccountContext, update: Bool, completion: @escaping (Int64) -> Void) -> ViewController {
-return StarsWithdrawScreen(context: context, mode: .starGiftResell(update), completion: completion)
+public func makeStarGiftResellScreen(context: AccountContext, gift: StarGift.UniqueGift, update: Bool, completion: @escaping (Int64) -> Void) -> ViewController {
+return StarsWithdrawScreen(context: context, mode: .starGiftResell(gift, update), completion: completion)
 }
 
 public func makeStarsGiftScreen(context: AccountContext, message: EngineMessage) -> ViewController {
@@ -858,15 +858,17 @@ public final class WebAppController: ViewController, AttachmentContainable {
 }
 
 if previousLayout != nil && (previousLayout?.inputHeight ?? 0.0).isZero, let inputHeight = layout.inputHeight, inputHeight > 44.0, transition.isAnimated {
-webView.scrollToActiveElement(layout: layout, completion: { [weak self] contentOffset in
-self?.targetContentOffset = contentOffset
-}, transition: transition)
 Queue.mainQueue().after(0.4, {
 if let inputHeight = self.validLayout?.0.inputHeight, inputHeight > 44.0 {
+webView.scrollToActiveElement(layout: layout, completion: { [weak self] contentOffset in
+let _ = self
+// self?.targetContentOffset = contentOffset
+}, transition: transition)
+
 transition.updateFrame(view: webView, frame: webViewFrame)
-Queue.mainQueue().after(0.1) {
-self.targetContentOffset = nil
-}
+// Queue.mainQueue().after(0.1) {
+// self.targetContentOffset = nil
+// }
 }
 })
 } else {
@@ -1485,13 +1487,7 @@ public final class WebAppController: ViewController, AttachmentContainable {
 }
 }
 if let source {
-let externalState = MediaEditorTransitionOutExternalState(
-storyTarget: nil,
-isForcedTarget: false,
-isPeerArchived: false,
-transitionOut: nil
-)
-let controller = self.context.sharedContext.makeStoryMediaEditorScreen(context: self.context, source: source, text: text, link: linkUrl.flatMap { ($0, linkName) }, completion: { results, commit in
+let controller = self.context.sharedContext.makeStoryMediaEditorScreen(context: self.context, source: source, text: text, link: linkUrl.flatMap { ($0, linkName) }, remainingCount: 1, completion: { results, externalState, commit in
 let target: Stories.PendingTarget = results.first!.target
 externalState.storyTarget = target
 
@@ -249,7 +249,7 @@ final class WebAppWebView: WKWebView {
 if let result = result as? CGFloat {
 Queue.mainQueue().async {
 let convertedY = result - self.scrollView.contentOffset.y
-let viewportHeight = self.frame.height - (layout.inputHeight ?? 0.0) + 26.0
+let viewportHeight = self.frame.height
 if convertedY < 0.0 || (convertedY + 44.0) > viewportHeight {
 let targetOffset: CGFloat
 if convertedY < 0.0 {