mirror of https://github.com/Swiftgram/Telegram-iOS.git

commit c27cb4f532

Merge commit '40c4378bdeecb1b19e1d6224c6f3fb52280e8c8a'

# Conflicts:
#	Telegram/Telegram-iOS/en.lproj/Localizable.strings
@@ -10895,3 +10895,6 @@ Sorry for the inconvenience.";
 "Chat.PlayVideoMessageOnceTooltip" = "The recipient will be able to play it only once.";

 "PeerInfo.HiddenStatusBadge" = "when?";

+"Conversation.DiscardRecordedVoiceMessageDescription" = "Are you sure you want to discard\nyour voice message?";
+"Conversation.DiscardRecordedVoiceMessageAction" = "Discard";
@@ -92,6 +92,7 @@ final class CameraOutput: NSObject {
    private var previewConnection: AVCaptureConnection?

+   private var roundVideoFilter: CameraRoundVideoFilter?
    private let semaphore = DispatchSemaphore(value: 1)

    private let queue = DispatchQueue(label: "")
    private let metadataQueue = DispatchQueue(label: "")
@@ -301,6 +302,7 @@ final class CameraOutput: NSObject {
        }

        self.currentMode = mode
+       self.lastSampleTimestamp = nil

        let codecType: AVVideoCodecType
        if case .roundVideo = mode {
@@ -375,7 +377,7 @@ final class CameraOutput: NSObject {
        }

        return Signal { subscriber in
-           let timer = SwiftSignalKit.Timer(timeout: 0.02, repeat: true, completion: { [weak videoRecorder] in
+           let timer = SwiftSignalKit.Timer(timeout: 0.033, repeat: true, completion: { [weak videoRecorder] in
                let recordingData = CameraRecordingData(duration: videoRecorder?.duration ?? 0.0, filePath: outputFilePath)
                subscriber.putNext(recordingData)
            }, queue: Queue.mainQueue())
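For context, the hunk above only shows the changed timer line of the progress-polling signal. A hedged sketch of the complete pattern it belongs to, assuming a `VideoRecorder` type that exposes `duration` (the wrapping function name is hypothetical; `Signal`, `ActionDisposable` and `SwiftSignalKit.Timer` are the SwiftSignalKit primitives already used above):

    import SwiftSignalKit

    // Sketch only: poll recording progress at ~30 Hz (0.033 s), matching the
    // 30 fps round-video capture, instead of the previous 50 Hz (0.02 s).
    func recordingDataSignal(videoRecorder: VideoRecorder, outputFilePath: String) -> Signal<CameraRecordingData, NoError> {
        return Signal { subscriber in
            let timer = SwiftSignalKit.Timer(timeout: 0.033, repeat: true, completion: { [weak videoRecorder] in
                let recordingData = CameraRecordingData(duration: videoRecorder?.duration ?? 0.0, filePath: outputFilePath)
                subscriber.putNext(recordingData)
            }, queue: Queue.mainQueue())
            timer.start()
            return ActionDisposable {
                timer.invalidate()
            }
        }
    }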
@@ -405,17 +407,38 @@ final class CameraOutput: NSObject {
    }

    private weak var masterOutput: CameraOutput?

+   private var lastSampleTimestamp: CMTime?

    func processVideoRecording(_ sampleBuffer: CMSampleBuffer, fromAdditionalOutput: Bool) {
        guard let formatDescriptor = CMSampleBufferGetFormatDescription(sampleBuffer) else {
            return
        }
        let type = CMFormatDescriptionGetMediaType(formatDescriptor)

        if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
-           if case .roundVideo = self.currentMode {
-               if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, mirror: fromAdditionalOutput) {
-                   if case .front = self.currentPosition {
-                       if fromAdditionalOutput {
-                           videoRecorder.appendSampleBuffer(processedSampleBuffer)
-                       }
+           if case .roundVideo = self.currentMode, type == kCMMediaType_Video {
+               var transitionFactor: CGFloat = 0.0
+               let currentTimestamp = CACurrentMediaTime()
+               let duration: Double = 0.2
+               if case .front = self.currentPosition {
+                   transitionFactor = 1.0
+                   if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
+                       transitionFactor = max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
+                   }
+               } else {
+                   transitionFactor = 0.0
+                   if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
+                       transitionFactor = 1.0 - max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
+                   }
+               }
+               if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
+                   let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
+                   if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {

+                   } else {
-                       if !fromAdditionalOutput {
+                       if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
+                           videoRecorder.appendSampleBuffer(processedSampleBuffer)
+                           self.lastSampleTimestamp = presentationTime
+                       }
+                   }
            } else {
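The new recording path computes a crossfade factor from the time since the last camera flip: 1.0 means the front camera is fully shown, 0.0 the back camera, with a linear 0.2 s ramp in between. A minimal standalone sketch of that ramp, assuming only `lastSwitchTimestamp` from the code above (the free function itself is hypothetical):

    import CoreGraphics
    import QuartzCore

    // 0.0 = back camera only, 1.0 = front camera only; values in between
    // blend the two feeds for `duration` seconds after a switch.
    func transitionFactor(isFront: Bool, lastSwitchTimestamp: Double, duration: Double = 0.2) -> CGFloat {
        let elapsed = CACurrentMediaTime() - lastSwitchTimestamp
        guard lastSwitchTimestamp > 0.0, elapsed < duration else {
            return isFront ? 1.0 : 0.0 // no switch in progress
        }
        let progress = CGFloat(max(0.0, elapsed / duration))
        return isFront ? progress : 1.0 - progress
    }

Note the append condition above: frames are written only from the currently selected output (factor exactly 0.0 or 1.0) or from either output mid-transition, and `lastSampleTimestamp` drops any frame whose presentation time would go backwards.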
@@ -427,10 +450,12 @@ final class CameraOutput: NSObject {
        }
    }

-   private func processRoundVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, mirror: Bool) -> CMSampleBuffer? {
+   private func processRoundVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, additional: Bool, transitionFactor: CGFloat) -> CMSampleBuffer? {
        guard let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else {
            return nil
        }
        self.semaphore.wait()

        let mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription)
        let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any]
@@ -473,8 +499,10 @@ final class CameraOutput: NSObject {
        )

        if status == noErr, let newSampleBuffer {
            self.semaphore.signal()
            return newSampleBuffer
        }
        self.semaphore.signal()
        return nil
    }
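For reference, `status` and `newSampleBuffer` above come from rebuilding a `CMSampleBuffer` around the filtered pixel buffer. A hedged sketch of that rewrapping step (the helper is hypothetical; the real code also carries over the original format-description extensions):

    import CoreMedia
    import CoreVideo

    // Sketch: wrap a processed pixel buffer back into a sample buffer,
    // copying the source buffer's timing so A/V sync is preserved.
    func rewrap(pixelBuffer: CVPixelBuffer, like original: CMSampleBuffer) -> CMSampleBuffer? {
        var formatDescription: CMFormatDescription?
        CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescriptionOut: &formatDescription)
        guard let formatDescription else {
            return nil
        }
        var timingInfo = CMSampleTimingInfo()
        CMSampleBufferGetSampleTimingInfo(original, at: 0, timingInfoOut: &timingInfo)
        var newSampleBuffer: CMSampleBuffer?
        let status = CMSampleBufferCreateReadyWithImageBuffer(
            allocator: kCFAllocatorDefault,
            imageBuffer: pixelBuffer,
            formatDescription: formatDescription,
            sampleTiming: &timingInfo,
            sampleBufferOut: &newSampleBuffer
        )
        return status == noErr ? newSampleBuffer : nil
    }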
@@ -483,6 +511,7 @@ final class CameraOutput: NSObject {

    func markPositionChange(position: Camera.Position) {
        self.currentPosition = position
+       self.lastSwitchTimestamp = CACurrentMediaTime()

        if let videoRecorder = self.videoRecorder {
            videoRecorder.markPositionChange(position: position)
@@ -90,7 +90,9 @@ class CameraRoundVideoFilter {
    private let ciContext: CIContext

    private var resizeFilter: CIFilter?
+   private var overlayFilter: CIFilter?
    private var compositeFilter: CIFilter?
+   private var borderFilter: CIFilter?

    private var outputColorSpace: CGColorSpace?
    private var outputPixelBufferPool: CVPixelBufferPool?
@@ -99,8 +101,6 @@ class CameraRoundVideoFilter {

    private(set) var isPrepared = false

-   let semaphore = DispatchSemaphore(value: 1)
-
    init(ciContext: CIContext) {
        self.ciContext = ciContext
    }
@@ -121,36 +121,43 @@ class CameraRoundVideoFilter {
            context.setFillColor(UIColor.white.cgColor)
            context.fill(bounds)
            context.setBlendMode(.clear)
-           context.fillEllipse(in: bounds)
+           context.fillEllipse(in: bounds.insetBy(dx: -2.0, dy: -2.0))
        })!

        self.resizeFilter = CIFilter(name: "CILanczosScaleTransform")

+       self.overlayFilter = CIFilter(name: "CIColorMatrix")
        self.compositeFilter = CIFilter(name: "CISourceOverCompositing")
-       self.compositeFilter?.setValue(CIImage(image: circleImage), forKey: kCIInputImageKey)

+       self.borderFilter = CIFilter(name: "CISourceOverCompositing")
+       self.borderFilter?.setValue(CIImage(image: circleImage), forKey: kCIInputImageKey)

        self.isPrepared = true
    }

    func reset() {
        self.resizeFilter = nil
+       self.overlayFilter = nil
        self.compositeFilter = nil
+       self.borderFilter = nil
        self.outputColorSpace = nil
        self.outputPixelBufferPool = nil
        self.outputFormatDescription = nil
        self.inputFormatDescription = nil
        self.isPrepared = false
+       self.lastMainSourceImage = nil
+       self.lastAdditionalSourceImage = nil
    }

-   func render(pixelBuffer: CVPixelBuffer, mirror: Bool) -> CVPixelBuffer? {
-       self.semaphore.wait()
-
-       guard let resizeFilter = self.resizeFilter, let compositeFilter = self.compositeFilter, self.isPrepared else {
+   private var lastMainSourceImage: CIImage?
+   private var lastAdditionalSourceImage: CIImage?
+
+   func render(pixelBuffer: CVPixelBuffer, additional: Bool, transitionFactor: CGFloat) -> CVPixelBuffer? {
+       guard let resizeFilter = self.resizeFilter, let overlayFilter = self.overlayFilter, let compositeFilter = self.compositeFilter, let borderFilter = self.borderFilter, self.isPrepared else {
            return nil
        }

        var sourceImage = CIImage(cvImageBuffer: pixelBuffer)
-       sourceImage = sourceImage.oriented(mirror ? .leftMirrored : .right)
+       sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
        let scale = 400.0 / min(sourceImage.extent.width, sourceImage.extent.height)

        resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
@@ -161,18 +168,46 @@ class CameraRoundVideoFilter {
        } else {
            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
        }

        sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))

        sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))

-       compositeFilter.setValue(sourceImage, forKey: kCIInputBackgroundImageKey)
+       if additional {
+           self.lastAdditionalSourceImage = sourceImage
+       } else {
+           self.lastMainSourceImage = sourceImage
+       }

-       let finalImage = compositeFilter.outputImage
+       var effectiveSourceImage: CIImage
+       if transitionFactor == 0.0 {
+           effectiveSourceImage = !additional ? sourceImage : (self.lastMainSourceImage ?? sourceImage)
+       } else if transitionFactor == 1.0 {
+           effectiveSourceImage = additional ? sourceImage : (self.lastAdditionalSourceImage ?? sourceImage)
+       } else {
+           if let mainSourceImage = self.lastMainSourceImage, let additionalSourceImage = self.lastAdditionalSourceImage {
+               let overlayRgba: [CGFloat] = [0, 0, 0, transitionFactor]
+               let alphaVector: CIVector = CIVector(values: overlayRgba, count: 4)
+               overlayFilter.setValue(additionalSourceImage, forKey: kCIInputImageKey)
+               overlayFilter.setValue(alphaVector, forKey: "inputAVector")
+
+               compositeFilter.setValue(mainSourceImage, forKey: kCIInputBackgroundImageKey)
+               compositeFilter.setValue(overlayFilter.outputImage, forKey: kCIInputImageKey)
+               effectiveSourceImage = compositeFilter.outputImage ?? sourceImage
+           } else {
+               effectiveSourceImage = sourceImage
+           }
+       }
+
+       borderFilter.setValue(effectiveSourceImage, forKey: kCIInputBackgroundImageKey)
+
+       let finalImage = borderFilter.outputImage
        guard let finalImage else {
            return nil
        }

        if finalImage.extent.width != 400 {
            print("wtf: \(finalImage)")
        }

        var pbuf: CVPixelBuffer?
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
        guard let outputPixelBuffer = pbuf else {

@@ -181,8 +216,6 @@ class CameraRoundVideoFilter {

        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: CGSize(width: 400, height: 400)), colorSpace: outputColorSpace)

-       self.semaphore.signal()
-
        return outputPixelBuffer
    }
}
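The filter change above implements the dual-camera crossfade: `CIColorMatrix` scales the additional feed's alpha by `transitionFactor`, `CISourceOverCompositing` lays it over the main feed, and a second source-over pass (`borderFilter`) stamps the circular mask on top. A condensed sketch of just the blend, assuming two already-cropped square `CIImage`s (the function itself is hypothetical):

    import CoreGraphics
    import CoreImage

    // Blend `additional` over `main` at the given opacity: the same
    // CIColorMatrix + CISourceOverCompositing pairing used in the diff.
    func crossfade(main: CIImage, additional: CIImage, factor: CGFloat) -> CIImage? {
        guard let overlay = CIFilter(name: "CIColorMatrix"),
              let composite = CIFilter(name: "CISourceOverCompositing") else {
            return nil
        }
        // Scale only the alpha channel of the overlay image by `factor`.
        overlay.setValue(additional, forKey: kCIInputImageKey)
        overlay.setValue(CIVector(values: [0, 0, 0, factor], count: 4), forKey: "inputAVector")

        // Faded additional feed drawn source-over on the main feed.
        composite.setValue(main, forKey: kCIInputBackgroundImageKey)
        composite.setValue(overlay.outputImage, forKey: kCIInputImageKey)
        return composite.outputImage
    }

Caching `lastMainSourceImage`/`lastAdditionalSourceImage` matters because the two capture outputs deliver frames independently: mid-transition, whichever feed produced the current frame is blended against the most recent frame of the other.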
@@ -298,8 +298,12 @@ public func mediaContentKind(_ media: EngineMedia, message: EngineMessage? = nil
        switch expiredMedia.data {
        case .image:
            return .expiredImage
-       case .file, .videoMessage, .voiceMessage:
+       case .file:
            return .expiredVideo
+       case .voiceMessage:
+           return .expiredVoiceMessage
+       case .videoMessage:
+           return .expiredVideoMessage
        }
    case .image:
        return .image
@@ -1983,6 +1983,8 @@ public class CameraScreen: ViewController {
        self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .immediate)
        CATransaction.commit()

+       self.animatingDualCameraPositionSwitch = true
+
        self.additionalPreviewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
        self.additionalPreviewContainerView.layer.animateScale(from: 0.01, to: 1.0, duration: duration, timingFunction: timingFunction)

@@ -2009,7 +2011,6 @@ public class CameraScreen: ViewController {
            timingFunction: timingFunction
        )

-       self.animatingDualCameraPositionSwitch = true
        self.mainPreviewContainerView.layer.animateBounds(
            from: CGRect(origin: CGPoint(x: 0.0, y: floorToScreenPixels((self.mainPreviewContainerView.bounds.height - self.mainPreviewContainerView.bounds.width) / 2.0)), size: CGSize(width: self.mainPreviewContainerView.bounds.width, height: self.mainPreviewContainerView.bounds.width)),
            to: self.mainPreviewContainerView.bounds,

@@ -2533,11 +2534,13 @@ public class CameraScreen: ViewController {

        let additionalPreviewFrame = CGRect(origin: CGPoint(x: origin.x - circleSide / 2.0, y: origin.y - circleSide / 2.0), size: CGSize(width: circleSide, height: circleSide))

-       transition.setPosition(view: self.additionalPreviewContainerView, position: additionalPreviewFrame.center)
-       transition.setBounds(view: self.additionalPreviewContainerView, bounds: CGRect(origin: .zero, size: additionalPreviewFrame.size))
-       self.additionalPreviewContainerView.layer.cornerRadius = additionalPreviewFrame.width / 2.0
+       if !self.animatingDualCameraPositionSwitch {
+           transition.setPosition(view: self.additionalPreviewContainerView, position: additionalPreviewFrame.center)
+           transition.setBounds(view: self.additionalPreviewContainerView, bounds: CGRect(origin: .zero, size: additionalPreviewFrame.size))
+           self.additionalPreviewContainerView.layer.cornerRadius = additionalPreviewFrame.width / 2.0
+           transition.setScale(view: self.additionalPreviewContainerView, scale: isDualCameraEnabled ? 1.0 : 0.1)
+       }

-       transition.setScale(view: self.additionalPreviewContainerView, scale: isDualCameraEnabled ? 1.0 : 0.1)
        transition.setAlpha(view: self.additionalPreviewContainerView, alpha: isDualCameraEnabled ? 1.0 : 0.0)

        if dualCamUpdated && isDualCameraEnabled {
@@ -556,8 +556,6 @@ public final class MediaScrubberComponent: Component {
            transition: transition
        )

-       let _ = leftHandleFrame
-       let _ = rightHandleFrame
        let _ = ghostLeftHandleFrame
        let _ = ghostRightHandleFrame

@@ -585,12 +583,15 @@ public final class MediaScrubberComponent: Component {
        transition.setFrame(view: self.ghostTrimView, frame: ghostTrimViewFrame)
        transition.setAlpha(view: self.ghostTrimView, alpha: ghostTrimVisible ? 0.75 : 0.0)

-       // var containerLeftEdge = leftHandleFrame.maxX
-       // var containerRightEdge = rightHandleFrame.minX
-       // if self.isAudioSelected && component.duration > 0.0 {
-       //     containerLeftEdge = ghostLeftHandleFrame.maxX
-       //     containerRightEdge = ghostRightHandleFrame.minX
-       // }
+       if case .videoMessage = component.style {
+           for (_, trackView) in self.trackViews {
+               trackView.updateOpaqueEdges(
+                   left: leftHandleFrame.minX,
+                   right: rightHandleFrame.maxX,
+                   transition: transition
+               )
+           }
+       }

        let isDraggingTracks = self.trackViews.values.contains(where: { $0.isDragging })
        let isCursorHidden = isDraggingTracks || self.trimView.isPanningTrimHandle || self.ghostTrimView.isPanningTrimHandle

@@ -738,7 +739,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelegate {
    }

    @objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) {
-       guard let (track, _, _) = self.params else {
+       guard let (track, _, _, _) = self.params else {
            return
        }
        self.onSelection(track.id)

@@ -787,9 +788,42 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelegate {
    private var params: (
        track: MediaScrubberComponent.Track,
        isSelected: Bool,
+       availableSize: CGSize,
        duration: Double
    )?

+   private var leftOpaqueEdge: CGFloat?
+   private var rightOpaqueEdge: CGFloat?
+   func updateOpaqueEdges(
+       left: CGFloat,
+       right: CGFloat,
+       transition: Transition
+   ) {
+       self.leftOpaqueEdge = left
+       self.rightOpaqueEdge = right
+
+       if let params = self.params {
+           self.updateThumbnailContainers(
+               scrubberSize: CGSize(width: params.availableSize.width, height: 33.0),
+               availableSize: params.availableSize,
+               transition: transition
+           )
+       }
+   }
+
+   private func updateThumbnailContainers(
+       scrubberSize: CGSize,
+       availableSize: CGSize,
+       transition: Transition
+   ) {
+       let containerLeftEdge: CGFloat = self.leftOpaqueEdge ?? 0.0
+       let containerRightEdge: CGFloat = self.rightOpaqueEdge ?? availableSize.width
+
+       transition.setFrame(view: self.videoTransparentFramesContainer, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: scrubberSize.width, height: scrubberSize.height)))
+       transition.setFrame(view: self.videoOpaqueFramesContainer, frame: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height)))
+       transition.setBounds(view: self.videoOpaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height)))
+   }
+
    func update(
        context: AccountContext,
        style: MediaScrubberComponent.Style,

@@ -800,7 +834,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelegate {
        transition: Transition
    ) -> CGSize {
        let previousParams = self.params
-       self.params = (track, isSelected, duration)
+       self.params = (track, isSelected, availableSize, duration)

        let fullTrackHeight: CGFloat
        let framesCornerRadius: CGFloat

@@ -968,13 +1002,12 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelegate {
                }
            }
        }

-       let containerLeftEdge: CGFloat = 0.0
-       let containerRightEdge: CGFloat = availableSize.width
-
-       transition.setFrame(view: self.videoTransparentFramesContainer, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: scrubberSize.width, height: scrubberSize.height)))
-       transition.setFrame(view: self.videoOpaqueFramesContainer, frame: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height)))
-       transition.setBounds(view: self.videoOpaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: containerLeftEdge, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge, height: scrubberSize.height)))
+       self.updateThumbnailContainers(
+           scrubberSize: scrubberSize,
+           availableSize: availableSize,
+           transition: transition
+       )

        var frameAspectRatio = 0.66
        if let image = frames.first, image.size.height > 0.0 {
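`updateOpaqueEdges` drives the scrubber's trim highlight: a full-width container holds the semi-transparent frame strip, and a second container clips a fully opaque copy to the trimmed range; giving the clip container a bounds origin equal to its frame origin keeps the two strips pixel-aligned. A minimal sketch of that alignment trick, with hypothetical view names:

    import UIKit

    // Clip `opaque` to [leftEdge, rightEdge] while keeping its content
    // registered with the full-width `transparent` strip underneath.
    func updateOpaqueClip(transparent: UIView, opaque: UIView, leftEdge: CGFloat, rightEdge: CGFloat, size: CGSize) {
        transparent.frame = CGRect(origin: .zero, size: size)
        // Position the opaque window over the selected range...
        opaque.frame = CGRect(x: leftEdge, y: 0.0, width: rightEdge - leftEdge, height: size.height)
        // ...and shift its coordinate space by the same amount so its
        // subviews do not appear to move.
        opaque.bounds = CGRect(x: leftEdge, y: 0.0, width: rightEdge - leftEdge, height: size.height)
    }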
@@ -71,7 +71,7 @@ enum CameraScreenTransition {

private let viewOnceButtonTag = GenericComponentViewTag()

-private final class CameraScreenComponent: CombinedComponent {
+private final class VideoMessageCameraScreenComponent: CombinedComponent {
    typealias EnvironmentType = ViewControllerComponentContainer.Environment

    let context: AccountContext

@@ -109,7 +109,7 @@ private final class CameraScreenComponent: CombinedComponent {
        self.completion = completion
    }

-   static func ==(lhs: CameraScreenComponent, rhs: CameraScreenComponent) -> Bool {
+   static func ==(lhs: VideoMessageCameraScreenComponent, rhs: VideoMessageCameraScreenComponent) -> Bool {
        if lhs.context !== rhs.context {
            return false
        }
@@ -184,6 +184,9 @@ private final class CameraScreenComponent: CombinedComponent {
        if let self, let controller = getController() {
            self.startVideoRecording(pressing: !controller.scheduledLock)
            controller.scheduledLock = false
+           if controller.recordingStartTime == nil {
+               controller.recordingStartTime = CACurrentMediaTime()
+           }
        }
    })
    self.stopRecording.connect({ [weak self] _ in
@@ -241,7 +244,7 @@ private final class CameraScreenComponent: CombinedComponent {
        let duration = initialDuration + recordingData.duration
        if let self, let controller = self.getController() {
            controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1))
-           if recordingData.duration > 59.0 {
+           if duration > 59.0 {
                self.stopVideoRecording()
            }
            if isFirstRecording {
@@ -323,6 +326,10 @@ private final class CameraScreenComponent: CombinedComponent {
            showViewOnce = true
        }

+       if let controller = component.getController(), !controller.viewOnceAvailable {
+           showViewOnce = false
+       }
+
        if !component.isPreviewing {
            let flipButton = flipButton.update(
                component: CameraButton(
@@ -484,7 +491,6 @@ public class VideoMessageCameraScreen: ViewController {
        private var resultPreviewView: ResultPreviewView?

        private var cameraStateDisposable: Disposable?
-       private var changingPositionDisposable: Disposable?

        private let idleTimerExtensionDisposable = MetaDisposable()
@@ -603,7 +609,6 @@ public class VideoMessageCameraScreen: ViewController {

        deinit {
            self.cameraStateDisposable?.dispose()
-           self.changingPositionDisposable?.dispose()
            self.idleTimerExtensionDisposable.dispose()
        }
@@ -670,13 +675,6 @@ public class VideoMessageCameraScreen: ViewController {
            self.requestUpdateLayout(transition: .easeInOut(duration: 0.2))
        })

-       self.changingPositionDisposable = (camera.modeChange
-       |> deliverOnMainQueue).start(next: { [weak self] modeChange in
-           if let self {
-               let _ = self
-           }
-       })
-
        camera.focus(at: CGPoint(x: 0.5, y: 0.5), autoFocus: true)
        camera.startCapture()
@@ -799,8 +797,15 @@ public class VideoMessageCameraScreen: ViewController {

        override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
            let result = super.hitTest(point, with: event)
-           if let controller = self.controller, point.y > self.frame.height - controller.inputPanelFrame.height - 34.0 {
-               return nil
+           if let controller = self.controller, let layout = self.validLayout {
+               if point.y > layout.size.height - controller.inputPanelFrame.height - 34.0 {
+                   if layout.metrics.isTablet {
+                       if point.x < layout.size.width * 0.33 {
+                           return result
+                       }
+                   }
+                   return nil
+               }
            }
            return result
        }
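The reworked `hitTest` lets touches over the input panel area fall through to the chat underneath, with a tablet exception that keeps the leading third interactive. A reduced sketch of the same pass-through pattern (the view and property names are hypothetical):

    import UIKit

    final class PassThroughView: UIView {
        var panelHeight: CGFloat = 0.0
        var isTablet = false

        override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
            let result = super.hitTest(point, with: event)
            if point.y > bounds.height - panelHeight - 34.0 {
                if isTablet && point.x < bounds.width * 0.33 {
                    return result // leading third stays interactive on tablets
                }
                return nil // fall through to whatever is behind this view
            }
            return result
        }
    }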
@@ -944,7 +949,7 @@ public class VideoMessageCameraScreen: ViewController {
        let componentSize = self.componentHost.update(
            transition: transition,
            component: AnyComponent(
-               CameraScreenComponent(
+               VideoMessageCameraScreenComponent(
                    context: self.context,
                    cameraState: self.cameraState,
                    isPreviewing: self.previewState != nil || self.transitioningToPreview,
@@ -987,8 +992,12 @@ public class VideoMessageCameraScreen: ViewController {

        let availableHeight = layout.size.height - (layout.inputHeight ?? 0.0)
        let previewSide = min(369.0, layout.size.width - 24.0)
-       let previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+       let previewFrame: CGRect
+       if layout.metrics.isTablet {
+           previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.2 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+       } else {
+           previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+       }
        if !self.animatingIn {
            transition.setFrame(view: self.previewContainerView, frame: previewFrame)
        }
@@ -1054,6 +1063,7 @@ public class VideoMessageCameraScreen: ViewController {
    private let updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
    private let inputPanelFrame: CGRect
    fileprivate var allowLiveUpload: Bool
+   fileprivate var viewOnceAvailable: Bool

    fileprivate let completion: (EnqueueMessage?) -> Void
@@ -1145,13 +1155,20 @@ public class VideoMessageCameraScreen: ViewController {
            initialPlaceholder = self.camera?.transitionImage ?? .single(nil)
        }

+       var approximateDuration: Double
+       if let recordingStartTime = self.recordingStartTime {
+           approximateDuration = CACurrentMediaTime() - recordingStartTime
+       } else {
+           approximateDuration = 1.0
+       }
+
        let immediateResult: Signal<RecordedVideoData?, NoError> = initialPlaceholder
        |> take(1)
        |> mapToSignal { initialPlaceholder in
            return videoFrames(asset: nil, count: count, initialPlaceholder: initialPlaceholder)
            |> map { framesAndUpdateTimestamp in
                return RecordedVideoData(
-                   duration: 1.0,
+                   duration: approximateDuration,
                    frames: framesAndUpdateTimestamp.0,
                    framesUpdateTimestamp: framesAndUpdateTimestamp.1,
                    trimRange: nil
@@ -1194,14 +1211,15 @@ public class VideoMessageCameraScreen: ViewController {
    public init(
        context: AccountContext,
        updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?,
+       peerId: EnginePeer.Id,
        inputPanelFrame: CGRect,
-       allowLiveUpload: Bool,
        completion: @escaping (EnqueueMessage?) -> Void
    ) {
        self.context = context
        self.updatedPresentationData = updatedPresentationData
        self.inputPanelFrame = inputPanelFrame
-       self.allowLiveUpload = allowLiveUpload
+       self.allowLiveUpload = peerId.namespace != Namespaces.Peer.SecretChat
+       self.viewOnceAvailable = peerId.namespace == Namespaces.Peer.CloudUser && peerId != context.account.peerId
        self.completion = completion

        self.recordingStatus = RecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
@@ -1263,6 +1281,11 @@ public class VideoMessageCameraScreen: ViewController {
            }
        }

+       if duration < 1.0 {
+           self.completion(nil)
+           return
+       }
+
        let finalDuration: Double
        if let trimRange = self.node.previewState?.trimRange {
            finalDuration = trimRange.upperBound - trimRange.lowerBound
@@ -1348,6 +1371,7 @@ public class VideoMessageCameraScreen: ViewController {
        return true
    }

+   fileprivate var recordingStartTime: Double?
    fileprivate var scheduledLock = false
    public func lockVideoRecording() {
        if case .none = self.cameraState.recording {
@@ -689,6 +689,10 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
            return false
        }

+       if strongSelf.presentRecordedVoiceMessageDiscardAlert(action: action, performAction: false) {
+           return false
+       }
+
        return true
    }
@@ -743,6 +747,10 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                    strongSelf.openViewOnceMediaMessage(message)
                    return false
                }
+           } else if file.isVideo {
+               if !displayVoiceMessageDiscardAlert() {
+                   return false
+               }
            }
        }
        if let invoice = media as? TelegramMediaInvoice, let extendedMedia = invoice.extendedMedia {
@@ -15381,8 +15389,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
        let controller = VideoMessageCameraScreen(
            context: self.context,
            updatedPresentationData: self.updatedPresentationData,
+           peerId: peerId,
            inputPanelFrame: currentInputPanelFrame,
-           allowLiveUpload: peerId.namespace != Namespaces.Peer.SecretChat,
            completion: { [weak self] message in
                guard let self, let videoController = self.videoRecorderValue else {
                    return
@@ -15574,7 +15582,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
        self.recorderDataDisposable.set((videoRecorderValue.takenRecordedData()
        |> deliverOnMainQueue).startStrict(next: { [weak self] data in
            if let strongSelf = self, let data = data {
-               if data.duration < 0.5 {
+               if data.duration < 1.0 {
                    strongSelf.recorderFeedback?.error()
                    strongSelf.recorderFeedback = nil
                    strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
@@ -17359,6 +17367,25 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
        return false
    }

+   func presentRecordedVoiceMessageDiscardAlert(action: @escaping () -> Void = {}, alertAction: (() -> Void)? = nil, delay: Bool = false, performAction: Bool = true) -> Bool {
+       if let _ = self.presentationInterfaceState.recordedMediaPreview {
+           alertAction?()
+           Queue.mainQueue().after(delay ? 0.2 : 0.0) {
+               self.present(textAlertController(context: self.context, updatedPresentationData: self.updatedPresentationData, title: nil, text: self.presentationData.strings.Conversation_DiscardRecordedVoiceMessageDescription, actions: [TextAlertAction(type: .genericAction, title: self.presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .defaultAction, title: self.presentationData.strings.Conversation_DiscardRecordedVoiceMessageAction, action: { [weak self] in
+                   self?.stopMediaRecorder()
+                   Queue.mainQueue().after(0.1) {
+                       action()
+                   }
+               })]), in: .window(.root))
+           }
+
+           return true
+       } else if performAction {
+           action()
+       }
+       return false
+   }
+
    func presentAutoremoveSetup() {
        guard let peer = self.presentationInterfaceState.renderedPeer?.peer else {
            return
@@ -169,6 +169,7 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedContentSource {
            return nil
        }

+       let context = self.context
        self.idleTimerExtensionDisposable.set(self.context.sharedContext.applicationBindings.pushIdleTimerExtension())

        let isIncoming = self.message.effectivelyIncoming(self.context.account.peerId)
@@ -196,6 +197,7 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedContentSource {
        var tooltipSourceRect: CGRect = .zero

        if let sourceNode {
+           let videoWidth = min(404.0, chatNode.frame.width - 2.0)
            var bubbleWidth: CGFloat = 0.0

            if (isIncoming || "".isEmpty) {
@@ -230,7 +232,9 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedContentSource {
            })

            if let messageNode = node as? ChatMessageItemView, let copyContentNode = messageNode.getMessageContextSourceNode(stableId: self.message.stableId) {
-               self.initialAppearanceOffset = CGPoint(x: 0.0, y: width - 20.0 - copyContentNode.frame.height)
+               if isVideo {
+                   self.initialAppearanceOffset = CGPoint(x: 0.0, y: min(videoWidth, width - 20.0) - copyContentNode.frame.height)
+               }

                messageNode.frame.origin.y = sourceRect.origin.y
                chatNode.addSubnode(messageNode)
@@ -250,79 +254,100 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedContentSource {
            result = ContextControllerTakeViewInfo(containingItem: .node(sourceNode), contentAreaInScreenSpace: chatNode.convert(chatNode.frameForVisibleArea(), to: nil))
        }

-       tooltipSourceRect = CGRect(x: isIncoming ? 22.0 : chatNode.frame.width - bubbleWidth + 10.0, y: floorToScreenPixels((chatNode.frame.height - 75.0) / 2.0) - 43.0, width: 44.0, height: 44.0)
+       let mappedParentRect = chatNode.view.convert(chatNode.bounds, to: nil)
+       if isVideo {
+           tooltipSourceRect = CGRect(x: mappedParentRect.minX + (isIncoming ? videoWidth / 2.0 : chatNode.frame.width - videoWidth / 2.0), y: floorToScreenPixels((chatNode.frame.height - videoWidth) / 2.0) + 8.0, width: 0.0, height: 0.0)
+       } else {
+           tooltipSourceRect = CGRect(x: mappedParentRect.minX + (isIncoming ? 22.0 : chatNode.frame.width - bubbleWidth + 10.0), y: floorToScreenPixels((chatNode.frame.height - 75.0) / 2.0) - 43.0, width: 44.0, height: 44.0)
+       }
    }

-   if !isVideo {
-       let displayTooltip = { [weak self] in
-           guard let self else {
-               return
-           }
-           let absoluteFrame = tooltipSourceRect
-           let location = CGRect(origin: CGPoint(x: absoluteFrame.midX, y: absoluteFrame.maxY), size: CGSize())
-
-           let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
-           var tooltipText: String?
-           if isIncoming {
-               tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageTooltip
-           } else if let peer = self.message.peers[self.message.id.peerId] {
-               let peerName = EnginePeer(peer).compactDisplayTitle
-               tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageYourTooltip(peerName).string
-           }
-
-           if let tooltipText {
-               let tooltipController = TooltipScreen(
-                   account: self.context.account,
-                   sharedContext: self.context.sharedContext,
-                   text: .markdown(text: tooltipText),
-                   balancedTextLayout: true,
-                   constrainWidth: 240.0,
-                   style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
-                   arrowStyle: .small,
-                   icon: nil,
-                   location: .point(location, .bottom),
-                   displayDuration: .custom(3.0),
-                   inset: 8.0,
-                   cornerRadius: 11.0,
-                   shouldDismissOnTouch: { _, _ in
-                       return .ignore
-                   }
-               )
-               self.tooltipController = tooltipController
-               self.present(tooltipController)
-           }
-       }
-
-       if isIncoming {
-           let _ = (ApplicationSpecificNotice.getIncomingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager)
-           |> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
-               guard let self else {
-                   return
-               }
-               if counter >= 2 {
-                   return
-               }
-               Queue.mainQueue().after(0.3) {
-                   displayTooltip()
-               }
-               let _ = ApplicationSpecificNotice.incrementIncomingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager).startStandalone()
-           })
-       } else {
-           let _ = (ApplicationSpecificNotice.getOutgoingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager)
-           |> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
-               guard let self else {
-                   return
-               }
-               if counter >= 2 {
-                   return
-               }
-               Queue.mainQueue().after(0.3) {
-                   displayTooltip()
-               }
-               let _ = ApplicationSpecificNotice.incrementOutgoingVoiceMessagePlayOnceTip(accountManager: self.context.sharedContext.accountManager).startStandalone()
-           })
-       }
-   }
+   let displayTooltip = { [weak self] in
+       guard let self else {
+           return
+       }
+       let absoluteFrame = tooltipSourceRect
+       let location = CGRect(origin: CGPoint(x: absoluteFrame.midX, y: absoluteFrame.maxY), size: CGSize())
+
+       let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
+       var tooltipText: String?
+       if isIncoming {
+           if isVideo {
+               tooltipText = presentationData.strings.Chat_PlayOnceVideoMessageTooltip
+           } else {
+               tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageTooltip
+           }
+       } else if let peer = self.message.peers[self.message.id.peerId] {
+           let peerName = EnginePeer(peer).compactDisplayTitle
+           if isVideo {
+               tooltipText = presentationData.strings.Chat_PlayOnceVideoMessageYourTooltip(peerName).string
+           } else {
+               tooltipText = presentationData.strings.Chat_PlayOnceVoiceMessageYourTooltip(peerName).string
+           }
+       }
+
+       if let tooltipText {
+           let tooltipController = TooltipScreen(
+               account: self.context.account,
+               sharedContext: self.context.sharedContext,
+               text: .markdown(text: tooltipText),
+               balancedTextLayout: true,
+               constrainWidth: 240.0,
+               style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
+               arrowStyle: .small,
+               icon: nil,
+               location: .point(location, .bottom),
+               displayDuration: .custom(3.0),
+               inset: 8.0,
+               cornerRadius: 11.0,
+               shouldDismissOnTouch: { _, _ in
+                   return .ignore
+               }
+           )
+           self.tooltipController = tooltipController
+           self.present(tooltipController)
+       }
+   }

+   let tooltipStateSignal: Signal<Int32, NoError>
+   let updateTooltipState: () -> Void
+   if isVideo {
+       if isIncoming {
+           tooltipStateSignal = ApplicationSpecificNotice.getIncomingVideoMessagePlayOnceTip(accountManager: context.sharedContext.accountManager)
+           updateTooltipState = {
+               let _ = ApplicationSpecificNotice.incrementIncomingVideoMessagePlayOnceTip(accountManager: context.sharedContext.accountManager).startStandalone()
+           }
+       } else {
+           tooltipStateSignal = ApplicationSpecificNotice.getOutgoingVideoMessagePlayOnceTip(accountManager: context.sharedContext.accountManager)
+           updateTooltipState = {
+               let _ = ApplicationSpecificNotice.incrementOutgoingVideoMessagePlayOnceTip(accountManager: context.sharedContext.accountManager).startStandalone()
+           }
+       }
+   } else {
+       if isIncoming {
+           tooltipStateSignal = ApplicationSpecificNotice.getIncomingVoiceMessagePlayOnceTip(accountManager: context.sharedContext.accountManager)
+           updateTooltipState = {
+               let _ = ApplicationSpecificNotice.incrementIncomingVoiceMessagePlayOnceTip(accountManager: context.sharedContext.accountManager).startStandalone()
+           }
+       } else {
+           tooltipStateSignal = ApplicationSpecificNotice.getOutgoingVoiceMessagePlayOnceTip(accountManager: context.sharedContext.accountManager)
+           updateTooltipState = {
+               let _ = ApplicationSpecificNotice.incrementOutgoingVoiceMessagePlayOnceTip(accountManager: context.sharedContext.accountManager).startStandalone()
+           }
+       }
+   }
+
+   let _ = (tooltipStateSignal
+   |> deliverOnMainQueue).startStandalone(next: { counter in
+       if counter >= 2 {
+           return
+       }
+       Queue.mainQueue().after(0.3) {
+           displayTooltip()
+       }
+       updateTooltipState()
+   })

    return result
}
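Both media types now funnel into the same show-at-most-twice gate, parameterized over video/voice and incoming/outgoing counters. A generic sketch of that gate (the wrapper function is hypothetical; the getter/incrementer pairs are the `ApplicationSpecificNotice` calls from the diff):

    import SwiftSignalKit

    // Run `display` at most `limit` times across launches, using a persisted
    // counter exposed as a get signal plus an increment side effect.
    func showTooltipIfNeeded(
        getCount: Signal<Int32, NoError>,
        increment: @escaping () -> Void,
        limit: Int32 = 2,
        display: @escaping () -> Void
    ) {
        let _ = (getCount
        |> deliverOnMainQueue).startStandalone(next: { counter in
            if counter >= limit {
                return
            }
            Queue.mainQueue().after(0.3) {
                display()
            }
            increment()
        })
    }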
@@ -340,7 +365,7 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedContentSource {

        if let messageNodeCopy = self.messageNodeCopy, let sourceView = messageNodeCopy.supernode?.view, let contentNode = messageNodeCopy.getMessageContextSourceNode(stableId: nil)?.contentNode, let parentNode = contentNode.supernode?.supernode?.supernode {
            let dustEffectLayer = DustEffectLayer()
-           dustEffectLayer.position = sourceView.bounds.center
+           dustEffectLayer.position = sourceView.bounds.center.offsetBy(dx: (parentNode.frame.width - messageNodeCopy.frame.width), dy: 0.0)
            dustEffectLayer.bounds = CGRect(origin: CGPoint(), size: sourceView.bounds.size)
            dustEffectLayer.zPosition = 10.0
            parentNode.layer.addSublayer(dustEffectLayer)
@@ -348,7 +373,7 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedContentSource {
            guard let (image, subFrame) = messageNodeCopy.makeContentSnapshot() else {
                return nil
            }
-           var itemFrame = subFrame //messageNodeCopy.layer.convert(subFrame, to: dustEffectLayer)
+           var itemFrame = subFrame
            itemFrame.origin.y = floorToScreenPixels((sourceView.frame.height - subFrame.height) / 2.0)
            dustEffectLayer.addItem(frame: itemFrame, image: image)
            messageNodeCopy.removeFromSupernode()
@@ -415,9 +415,14 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
            prevTextInputPanelNode.viewOnceButton.isHidden = true
            prevTextInputPanelNode.viewOnce = false

+           self.recordMoreButton.isEnabled = false
            self.viewOnceButton.layer.animatePosition(from: prevTextInputPanelNode.viewOnceButton.position, to: self.viewOnceButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in
                prevTextInputPanelNode.viewOnceButton.isHidden = false
                prevTextInputPanelNode.viewOnceButton.update(isSelected: false, animated: false)
+
+               Queue.mainQueue().after(0.3) {
+                   self.recordMoreButton.isEnabled = true
+               }
            })

            self.recordMoreButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
@@ -492,10 +497,11 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
    }

    @objc func sendPressed() {
-       self.viewOnce = false
        self.tooltipController?.dismiss()

        self.interfaceInteraction?.sendRecordedMedia(false, self.viewOnce)
+
+       self.viewOnce = false
    }

    private weak var tooltipController: TooltipScreen?
@@ -548,7 +554,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
        )
        self.tooltipController = tooltipController

-       parentController.present(tooltipController, in: .window(.root))
+       parentController.present(tooltipController, in: .current)
    }

    @objc func waveformPressed() {
@@ -2776,7 +2776,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
        )
        self.tooltipController = tooltipController

-       parentController.present(tooltipController, in: .window(.root))
+       parentController.present(tooltipController, in: .current)
    }

    override func canHandleTransition(from prevInputPanelNode: ChatInputPanelNode?) -> Bool {
@@ -150,6 +150,7 @@ final class ManagedAudioRecorderContext {
    private let beganWithTone: (Bool) -> Void

    private var paused = true
+   private var manuallyPaused = false

    private let queue: Queue
    private let mediaManager: MediaManager
@@ -413,9 +414,11 @@ final class ManagedAudioRecorderContext {
        return Signal { subscriber in
            queue.async {
                if let strongSelf = self {
-                   strongSelf.hasAudioSession = false
-                   strongSelf.stop()
-                   strongSelf.recordingState.set(.stopped)
+                   if !strongSelf.manuallyPaused {
+                       strongSelf.hasAudioSession = false
+                       strongSelf.stop()
+                       strongSelf.recordingState.set(.stopped)
+                   }
                    subscriber.putCompletion()
                }
            }
@@ -450,13 +453,17 @@ final class ManagedAudioRecorderContext {
    func pause() {
        assert(self.queue.isCurrent())

-       self.paused = true
+       self.manuallyPaused = true
    }

    func resume() {
        assert(self.queue.isCurrent())

-       self.paused = false
+       if self.manuallyPaused {
+           self.manuallyPaused = false
+       } else if self.paused {
+           self.start()
+       }
    }

    func stop() {
@@ -500,7 +507,7 @@ final class ManagedAudioRecorderContext {
            free(buffer.mData)
        }

-       if !self.processSamples || self.paused {
+       if !self.processSamples || self.manuallyPaused {
            return
        }
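Separating `manuallyPaused` (the user's pause) from `paused` (capture not running, e.g. after an audio-session interruption) is what keeps an OS interruption from being treated as a user pause. A condensed sketch of the resulting state handling, with hypothetical names mirroring the diff:

    // User pause is tracked independently of session loss, so resume() only
    // restarts the hardware when the session (not the user) stopped capture.
    final class RecorderPauseState {
        private(set) var paused = true          // capture not running
        private(set) var manuallyPaused = false // explicit user pause

        func userPaused() {
            manuallyPaused = true // keep the audio session; just stop appending
        }

        func shouldTearDownOnSessionLoss() -> Bool {
            return !manuallyPaused // a user pause must survive interruptions
        }

        func resume(start: () -> Void) {
            if manuallyPaused {
                manuallyPaused = false // processing resumes on the next buffer
            } else if paused {
                start() // session was lost: restart capture
            }
        }
    }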