Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-15 21:45:19 +00:00)
Video message recording improvements
Commit a3e0c910f7 (parent d44b531a58)
@@ -206,7 +206,7 @@ public final class AnimationNode: ASDisplayNode {
     }
 
     public func preferredSize() -> CGSize? {
-        if let animationView = animationView(), let animation = animationView.animation {
+        if let animationView = self.animationView(), let animation = animationView.animation {
             return CGSize(width: animation.size.width * self.scale, height: animation.size.height * self.scale)
         } else {
             return nil
@@ -604,6 +604,10 @@ private final class CameraContext {
         return self.audioLevelPipe.signal()
     }
 
+    var transitionImage: Signal<UIImage?, NoError> {
+        return .single(self.mainDeviceContext?.output.transitionImage)
+    }
+
     @objc private func sessionInterruptionEnded(notification: NSNotification) {
     }
 
@@ -969,6 +973,20 @@ public final class Camera {
             }
         }
     }
 
+    public var transitionImage: Signal<UIImage?, NoError> {
+        return Signal { subscriber in
+            let disposable = MetaDisposable()
+            self.queue.async {
+                if let context = self.contextRef?.takeUnretainedValue() {
+                    disposable.set(context.transitionImage.start(next: { codes in
+                        subscriber.putNext(codes)
+                    }))
+                }
+            }
+            return disposable
+        }
+    }
+
     public enum ModeChange: Equatable {
         case none
         case position
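Note: the new Camera.transitionImage accessor uses the same queue-hopping wrapper as the class's other signals: hop to the camera queue, unwrap contextRef, and pipe the inner signal into the outer subscriber (the `codes` parameter name looks carried over from another signal wrapper; the values are optional images). A minimal consumer sketch, assuming SwiftSignalKit; `camera` and `transitionView` are hypothetical caller-owned values:

    // Hypothetical call site: use the recorder's last frame to crossfade
    // into the preview UI.
    let imageDisposable = camera.transitionImage.start(next: { image in
        Queue.mainQueue().async {
            transitionView.image = image // nil while nothing has been recorded yet
        }
    })
    // imageDisposable.dispose() once the transition has run.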
@@ -399,6 +399,10 @@ final class CameraOutput: NSObject {
         }
     }
 
+    var transitionImage: UIImage? {
+        return self.videoRecorder?.transitionImage
+    }
+
     private weak var masterOutput: CameraOutput?
     func processVideoRecording(_ sampleBuffer: CMSampleBuffer, fromAdditionalOutput: Bool) {
         if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
@@ -99,6 +99,8 @@ class CameraRoundVideoFilter {
 
     private(set) var isPrepared = false
 
+    let semaphore = DispatchSemaphore(value: 1)
+
     init(ciContext: CIContext) {
         self.ciContext = ciContext
     }
@@ -141,6 +143,8 @@ class CameraRoundVideoFilter {
     }
 
     func render(pixelBuffer: CVPixelBuffer, mirror: Bool) -> CVPixelBuffer? {
+        self.semaphore.wait()
+
         guard let resizeFilter = self.resizeFilter, let compositeFilter = self.compositeFilter, self.isPrepared else {
             return nil
         }
@@ -176,6 +180,9 @@ class CameraRoundVideoFilter {
         }
 
         self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: CGSize(width: 400, height: 400)), colorSpace: outputColorSpace)
+
+        self.semaphore.signal()
+
         return outputPixelBuffer
     }
 }
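Note: the three CameraRoundVideoFilter hunks form one change: a binary DispatchSemaphore now serializes render(pixelBuffer:mirror:) against preparation and teardown of the CoreImage filters, with wait() on entry and signal() before the successful return. As shown, the early `return nil` inside the guard would exit without signalling, so presumably the omitted context releases the semaphore there as well. A self-contained sketch of the same pattern, using defer so the release holds on every path:

    import Foundation

    // A semaphore with value 1 acts as a lock around shared filter state.
    final class SerializedFilter {
        private let semaphore = DispatchSemaphore(value: 1)
        private var isPrepared = false

        func prepare() {
            semaphore.wait()
            defer { semaphore.signal() }
            isPrepared = true
        }

        func render(_ input: Int) -> Int? {
            semaphore.wait()
            defer { semaphore.signal() } // released on every exit path
            guard isPrepared else {
                return nil
            }
            return input * 2 // stand-in for the real CoreImage work
        }
    }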
@@ -35,7 +35,7 @@ private final class VideoRecorderImpl {
     private var audioInput: AVAssetWriterInput?
 
     private let ciContext: CIContext
-    private var transitionImage: UIImage?
+    fileprivate var transitionImage: UIImage?
     private var savedTransitionImage = false
 
     private var pendingAudioSampleBuffers: [CMSampleBuffer] = []
@@ -533,4 +533,8 @@ public final class VideoRecorder {
             }
         }
     }
+
+    var transitionImage: UIImage? {
+        return self.impl.transitionImage
+    }
 }
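Note: this completes the last-frame plumbing: VideoRecorderImpl captures transitionImage while writing samples (now fileprivate so the wrapper can read it, which implies the two types share a file), VideoRecorder re-exposes it, CameraOutput reads it from its recorder, CameraContext lifts it into a signal, and Camera hops it onto the camera queue. A reduced sketch of the facade step, with illustrative names:

    import UIKit

    private final class RecorderImpl {
        fileprivate var transitionImage: UIImage? // set as frames are written
    }

    final class Recorder {
        private let impl = RecorderImpl()

        // Read-only facade over the impl's captured frame.
        var transitionImage: UIImage? {
            return self.impl.transitionImage
        }
    }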
@@ -1736,7 +1736,7 @@ public final class MediaEditor {
     }
 }
 
-public func videoFrames(asset: AVAsset, count: Int, mirror: Bool = false) -> Signal<([UIImage], Double), NoError> {
+public func videoFrames(asset: AVAsset?, count: Int, initialPlaceholder: UIImage? = nil, initialTimestamp: Double? = nil, mirror: Bool = false) -> Signal<([UIImage], Double), NoError> {
     func blurredImage(_ image: UIImage) -> UIImage? {
         guard let image = image.cgImage else {
             return nil
@@ -1769,55 +1769,82 @@ public func videoFrames(asset: AVAsset, count: Int, mirror: Bool = false) -> Signal<([UIImage], Double), NoError> {
     guard count > 0 else {
         return .complete()
     }
-    let scale = UIScreen.main.scale
-    let imageGenerator = AVAssetImageGenerator(asset: asset)
-    imageGenerator.maximumSize = CGSize(width: 48.0 * scale, height: 36.0 * scale)
-    imageGenerator.appliesPreferredTrackTransform = true
-    imageGenerator.requestedTimeToleranceBefore = .zero
-    imageGenerator.requestedTimeToleranceAfter = .zero
-    
-    var firstFrame: UIImage
-    if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) {
-        firstFrame = UIImage(cgImage: cgImage)
-        if let blurred = blurredImage(firstFrame) {
-            firstFrame = blurred
-        }
-    } else {
-        firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)!
-    }
-    return Signal { subscriber in
-        subscriber.putNext((Array(repeating: firstFrame, count: count), CACurrentMediaTime()))
-        var timestamps: [NSValue] = []
-        let duration = asset.duration.seconds
-        let interval = duration / Double(count)
-        for i in 0 ..< count {
-            timestamps.append(NSValue(time: CMTime(seconds: Double(i) * interval, preferredTimescale: CMTimeScale(1000))))
-        }
-        
-        var updatedFrames: [UIImage] = []
-        imageGenerator.generateCGImagesAsynchronously(forTimes: timestamps) { _, image, _, _, _ in
-            if let image {
-                updatedFrames.append(UIImage(cgImage: image, scale: 1.0, orientation: mirror ? .upMirrored : .up))
-                if updatedFrames.count == count {
-                    subscriber.putNext((updatedFrames, CACurrentMediaTime()))
-                    subscriber.putCompletion()
-                } else {
-                    var tempFrames = updatedFrames
-                    for _ in 0 ..< count - updatedFrames.count {
-                        tempFrames.append(firstFrame)
-                    }
-                    subscriber.putNext((tempFrames, CACurrentMediaTime()))
-                }
-            } else {
-                if let previous = updatedFrames.last {
-                    updatedFrames.append(previous)
-                }
-            }
-        }
-        
-        return ActionDisposable {
-            imageGenerator.cancelAllCGImageGeneration()
-        }
-    }
+    var firstFrame: UIImage
+    var imageGenerator: AVAssetImageGenerator?
+    if let asset {
+        let scale = UIScreen.main.scale
+        imageGenerator = AVAssetImageGenerator(asset: asset)
+        imageGenerator?.maximumSize = CGSize(width: 48.0 * scale, height: 36.0 * scale)
+        imageGenerator?.appliesPreferredTrackTransform = true
+        imageGenerator?.requestedTimeToleranceBefore = .zero
+        imageGenerator?.requestedTimeToleranceAfter = .zero
+    }
+    
+    if var initialPlaceholder {
+        initialPlaceholder = generateScaledImage(image: initialPlaceholder, size: initialPlaceholder.size.aspectFitted(CGSize(width: 144.0, height: 144.0)), scale: 1.0)!
+        if let blurred = blurredImage(initialPlaceholder) {
+            firstFrame = blurred
+        } else {
+            firstFrame = initialPlaceholder
+        }
+    } else if let imageGenerator {
+        if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) {
+            firstFrame = UIImage(cgImage: cgImage)
+            if let blurred = blurredImage(firstFrame) {
+                firstFrame = blurred
+            }
+        } else {
+            firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)!
+        }
+    } else {
+        firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)!
+    }
+    if let asset {
+        return Signal { subscriber in
+            subscriber.putNext((Array(repeating: firstFrame, count: count), initialTimestamp ?? CACurrentMediaTime()))
+            var timestamps: [NSValue] = []
+            let duration = asset.duration.seconds
+            let interval = duration / Double(count)
+            for i in 0 ..< count {
+                timestamps.append(NSValue(time: CMTime(seconds: Double(i) * interval, preferredTimescale: CMTimeScale(1000))))
+            }
+            
+            var updatedFrames: [UIImage] = []
+            imageGenerator?.generateCGImagesAsynchronously(forTimes: timestamps) { _, image, _, _, _ in
+                if let image {
+                    updatedFrames.append(UIImage(cgImage: image, scale: 1.0, orientation: mirror ? .upMirrored : .up))
+                    if updatedFrames.count == count {
+                        subscriber.putNext((updatedFrames, CACurrentMediaTime()))
+                        subscriber.putCompletion()
+                    } else {
+                        var tempFrames = updatedFrames
+                        for _ in 0 ..< count - updatedFrames.count {
+                            tempFrames.append(firstFrame)
+                        }
+                        subscriber.putNext((tempFrames, CACurrentMediaTime()))
+                    }
+                } else {
+                    if let previous = updatedFrames.last {
+                        updatedFrames.append(previous)
+                    }
+                }
+            }
+            
+            return ActionDisposable {
+                imageGenerator?.cancelAllCGImageGeneration()
+            }
+        }
+    } else {
+        var frames: [UIImage] = []
+        for _ in 0 ..< count {
+            frames.append(firstFrame)
+        }
+        return .single((frames, CACurrentMediaTime()))
+    }
 }
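Note: the reworked videoFrames decouples the scrubber's frame strip from asset availability. With asset == nil it emits count copies of the placeholder (scaled to fit 144x144 and blurred) and completes immediately; with an asset it emits the placeholder strip first and then the generated thumbnails, and initialTimestamp lets a second call reuse an earlier generation timestamp so the strip is treated as an update rather than a new generation. A hedged usage sketch, assuming SwiftSignalKit; `placeholder`, `composition`, `firstTimestamp`, and `scrubber` are hypothetical:

    // Phase 1: recording still finishing, no asset yet - placeholder strip.
    let immediate = videoFrames(asset: nil, count: 12, initialPlaceholder: placeholder)

    // Phase 2: the composition exists - real thumbnails, same timestamp so
    // the UI treats this as an update of the same generation.
    let refined = videoFrames(asset: composition, count: 12, initialTimestamp: firstTimestamp)

    let framesDisposable = (immediate |> then(refined)).start(next: { frames, generationTimestamp in
        scrubber.update(frames: frames, generationTimestamp: generationTimestamp)
    })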
@@ -1361,6 +1361,7 @@ final class MediaEditorScreenComponent: Component {
                     component: AnyComponent(MediaScrubberComponent(
                         context: component.context,
                         style: .editor,
+                        theme: environment.theme,
                         generationTimestamp: playerState.generationTimestamp,
                         position: playerState.position,
                         minDuration: minDuration,
@@ -74,6 +74,7 @@ public final class MediaScrubberComponent: Component {
 
     let context: AccountContext
     let style: Style
+    let theme: PresentationTheme
 
     let generationTimestamp: Double
 
@@ -92,6 +93,7 @@ public final class MediaScrubberComponent: Component {
     public init(
         context: AccountContext,
         style: Style,
+        theme: PresentationTheme,
         generationTimestamp: Double,
         position: Double,
         minDuration: Double,
@@ -105,6 +107,7 @@ public final class MediaScrubberComponent: Component {
     ) {
         self.context = context
         self.style = style
+        self.theme = theme
         self.generationTimestamp = generationTimestamp
         self.position = position
         self.minDuration = minDuration
@@ -121,6 +124,9 @@ public final class MediaScrubberComponent: Component {
         if lhs.context !== rhs.context {
            return false
         }
+        if lhs.theme !== rhs.theme {
+            return false
+        }
         if lhs.generationTimestamp != rhs.generationTimestamp {
             return false
         }
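Note: theme equality here is `!==` (reference identity) rather than value equality; the apparent assumption is that PresentationTheme is an immutable reference type that the app swaps wholesale on theme changes, so a pointer comparison is both sufficient and cheap. A compact sketch of the idiom with a stub theme type:

    // Identity-based Equatable, as in the hunk above.
    final class Theme {
        let accentColorHex: UInt32
        init(accentColorHex: UInt32) { self.accentColorHex = accentColorHex }
    }

    struct ScrubberConfig: Equatable {
        let theme: Theme

        static func == (lhs: ScrubberConfig, rhs: ScrubberConfig) -> Bool {
            if lhs.theme !== rhs.theme { // new instance == theme changed
                return false
            }
            return true
        }
    }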
@@ -524,6 +530,7 @@ public final class MediaScrubberComponent: Component {
             self.trimView.isHollow = self.selectedTrackId != lowestVideoId || self.isAudioOnly
             let (leftHandleFrame, rightHandleFrame) = self.trimView.update(
                 style: component.style,
+                theme: component.theme,
                 visualInsets: trimViewVisualInsets,
                 scrubberSize: CGSize(width: trackViewWidth, height: fullTrackHeight),
                 duration: mainTrimDuration,
@@ -537,6 +544,7 @@ public final class MediaScrubberComponent: Component {
 
             let (ghostLeftHandleFrame, ghostRightHandleFrame) = self.ghostTrimView.update(
                 style: component.style,
+                theme: component.theme,
                 visualInsets: .zero,
                 scrubberSize: CGSize(width: scrubberSize.width, height: collapsedTrackHeight),
                 duration: self.duration,
@@ -1300,6 +1308,7 @@ private class TrimView: UIView {
 
     func update(
         style: MediaScrubberComponent.Style,
+        theme: PresentationTheme,
         visualInsets: UIEdgeInsets,
         scrubberSize: CGSize,
         duration: Double,
@@ -1359,8 +1368,8 @@ private class TrimView: UIView {
             effectiveHandleWidth = 16.0
             fullTrackHeight = 33.0
             capsuleOffset = 8.0
-            color = UIColor(rgb: 0x3478f6)
-            highlightColor = UIColor(rgb: 0x3478f6)
+            color = theme.chat.inputPanel.panelControlAccentColor
+            highlightColor = theme.chat.inputPanel.panelControlAccentColor
 
         if isFirstTime {
             let handleImage = generateImage(CGSize(width: effectiveHandleWidth, height: fullTrackHeight), rotatedContext: { size, context in
@@ -173,7 +173,10 @@ private final class CameraScreenComponent: CombinedComponent {
             super.init()
 
             self.startRecording.connect({ [weak self] _ in
-                self?.startVideoRecording(pressing: true)
+                if let self, let controller = getController() {
+                    self.startVideoRecording(pressing: !controller.scheduledLock)
+                    controller.scheduledLock = false
+                }
             })
             self.stopRecording.connect({ [weak self] _ in
                 self?.stopVideoRecording()
@@ -508,6 +511,8 @@ public class VideoMessageCameraScreen: ViewController {
         }
         var previewStatePromise = Promise<PreviewState?>()
 
+        var transitioningToPreview = false
+
         init(controller: VideoMessageCameraScreen) {
             self.controller = controller
             self.context = controller.context
@@ -736,16 +741,12 @@ public class VideoMessageCameraScreen: ViewController {
             self.results.append(result)
             self.resultsPipe.putNext(result)
 
+            self.transitioningToPreview = false
+
             let composition = composition(with: self.results)
             controller.updatePreviewState({ _ in
                 return PreviewState(composition: composition, trimRange: nil)
             }, transition: .spring(duration: 0.4))
-            
-            // #if DEBUG
-            // if case let .video(video) = result {
-            // self.debugSaveResult(path: video.videoPath)
-            // }
-            // #endif
         }
 
         private func debugSaveResult(path: String) {
@@ -895,7 +896,7 @@ public class VideoMessageCameraScreen: ViewController {
                 CameraScreenComponent(
                     context: self.context,
                     cameraState: self.cameraState,
-                    isPreviewing: self.previewState != nil,
+                    isPreviewing: self.previewState != nil || self.transitioningToPreview,
                     getController: { [weak self] in
                         return self?.controller
                     },
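Note: the transitioningToPreview flag bridges the gap between the stop request and the composed result: stopVideoRecording() sets it (see the -1219 hunk below), the result handler in the -736 hunk clears it, and it is OR-ed into isPreviewing so the camera chrome switches to the preview layout immediately instead of waiting for the composition. A condensed, illustrative lifecycle sketch; names mirror the hunks:

    import AVFoundation

    var transitioningToPreview = false
    var previewState: AVComposition? = nil

    var isPreviewing: Bool {
        // Preview chrome appears as soon as stop is requested,
        // not only once the composed result exists.
        return previewState != nil || transitioningToPreview
    }

    func stopRequested() {
        transitioningToPreview = true   // set in stopVideoRecording()
    }

    func resultArrived(composition: AVComposition) {
        transitioningToPreview = false  // cleared when the recording lands
        previewState = composition
    }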
@@ -1065,30 +1066,63 @@ public class VideoMessageCameraScreen: ViewController {
 
     public func takenRecordedData() -> Signal<RecordedVideoData?, NoError> {
         let previewState = self.node.previewStatePromise.get()
-        return self.currentResults
-        |> take(1)
-        |> mapToSignal { results in
-            var totalDuration: Double = 0.0
-            for result in results {
-                if case let .video(video) = result {
-                    totalDuration += video.duration
-                }
-            }
-            let composition = composition(with: results)
-            return combineLatest(
-                queue: Queue.mainQueue(),
-                videoFrames(asset: composition, count: 12),
-                previewState
-            )
-            |> map { framesAndUpdateTimestamp, previewState in
-                return RecordedVideoData(
-                    duration: totalDuration,
-                    frames: framesAndUpdateTimestamp.0,
-                    framesUpdateTimestamp: framesAndUpdateTimestamp.1,
-                    trimRange: previewState?.trimRange
-                )
-            }
-        }
+        let count = 12
+        
+        let initialPlaceholder: Signal<UIImage?, NoError>
+        if let firstResult = self.node.results.first {
+            if case let .video(video) = firstResult {
+                initialPlaceholder = .single(video.thumbnail)
+            } else {
+                initialPlaceholder = .single(nil)
+            }
+        } else {
+            initialPlaceholder = self.camera?.transitionImage ?? .single(nil)
+        }
+        
+        let immediateResult: Signal<RecordedVideoData?, NoError> = initialPlaceholder
+        |> take(1)
+        |> mapToSignal { initialPlaceholder in
+            return videoFrames(asset: nil, count: count, initialPlaceholder: initialPlaceholder)
+            |> map { framesAndUpdateTimestamp in
+                return RecordedVideoData(
+                    duration: 1.0,
+                    frames: framesAndUpdateTimestamp.0,
+                    framesUpdateTimestamp: framesAndUpdateTimestamp.1,
+                    trimRange: nil
+                )
+            }
+        }
+        
+        return immediateResult
+        |> mapToSignal { immediateResult in
+            return .single(immediateResult)
+            |> then(
+                self.currentResults
+                |> take(1)
+                |> mapToSignal { results in
+                    var totalDuration: Double = 0.0
+                    for result in results {
+                        if case let .video(video) = result {
+                            totalDuration += video.duration
+                        }
+                    }
+                    let composition = composition(with: results)
+                    return combineLatest(
+                        queue: Queue.mainQueue(),
+                        videoFrames(asset: composition, count: count, initialTimestamp: immediateResult?.framesUpdateTimestamp),
+                        previewState
+                    )
+                    |> map { framesAndUpdateTimestamp, previewState in
+                        return RecordedVideoData(
+                            duration: totalDuration,
+                            frames: framesAndUpdateTimestamp.0,
+                            framesUpdateTimestamp: framesAndUpdateTimestamp.1,
+                            trimRange: previewState?.trimRange
+                        )
+                    }
+                }
+            )
+        }
     }
 
     public init(
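Note: takenRecordedData() now emits twice instead of once: first an immediate RecordedVideoData built from a placeholder image (the first result's thumbnail, or the camera's new transitionImage when no result has landed yet) with a nominal 1.0s duration and no trim range; then the real data once currentResults and the composition resolve. `.single(value) |> then(followUp)` is the SwiftSignalKit idiom for this emit-now, refine-later shape. A reduced, runnable sketch of the shape over plain Ints; the helpers are hypothetical stand-ins for the placeholder and composition paths:

    import SwiftSignalKit

    func quickValue() -> Signal<Int, NoError> {
        return .single(0) // cheap, resolves immediately
    }

    func refinedValue(after initial: Int) -> Signal<Int, NoError> {
        return .single(initial + 42) // pretend this is the expensive path
    }

    func data() -> Signal<Int, NoError> {
        return quickValue()
        |> take(1)
        |> mapToSignal { quick -> Signal<Int, NoError> in
            // Emit the quick value right away, then follow with the real one.
            return .single(quick)
            |> then(refinedValue(after: quick))
        }
    }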
@@ -1219,13 +1253,21 @@ public class VideoMessageCameraScreen: ViewController {
     private var waitingForNextResult = false
     public func stopVideoRecording() -> Bool {
         self.waitingForNextResult = true
+        self.node.transitioningToPreview = true
+        self.node.requestUpdateLayout(transition: .spring(duration: 0.4))
 
         self.node.stopRecording.invoke(Void())
 
         return true
     }
 
+    fileprivate var scheduledLock = false
     public func lockVideoRecording() {
-        self.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4))
+        if case .none = self.cameraState.recording {
+            self.scheduledLock = true
+        } else {
+            self.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4))
+        }
+
         self.node.maybePresentViewOnceTooltip()
     }
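Note: scheduledLock closes a race between the lock gesture and camera startup: slide-to-lock can arrive while cameraState.recording is still .none, where switching to .handsFree would act on nothing. The controller records the intent instead, and the startRecording handler in the CameraScreenComponent hunk above consumes it, starting with pressing: false (already locked) and clearing the flag. A condensed sketch of the handshake; helper names are illustrative:

    var scheduledLock = false
    var isRecording = false

    func switchToHandsFree() { /* e.g. updateCameraState(.handsFree) */ }
    func startVideoRecording(pressing: Bool) { /* pressing == false starts locked */ }

    func lockVideoRecording() {
        if isRecording {
            switchToHandsFree()     // normal path: lock the live recording
        } else {
            scheduledLock = true    // too early: remember the gesture
        }
    }

    func startRecordingTriggered() {
        startVideoRecording(pressing: !scheduledLock)
        scheduledLock = false
    }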
@@ -304,6 +304,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                 MediaScrubberComponent(
                     context: context,
                     style: .videoMessage,
+                    theme: interfaceState.theme,
                     generationTimestamp: 0,
                     position: 0,
                     minDuration: 1.0,
@@ -2094,7 +2094,6 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
                     size: audioRecordingCancelIndicator.bounds.size)
                 audioRecordingCancelIndicator.frame = audioRecordingCancelIndicatorFrame
                 if self.actionButtons.micButton.cancelTranslation > cancelTransformThreshold {
-                    //let progress = 1 - (self.actionButtons.micButton.cancelTranslation - cancelTransformThreshold) / 80
                     let progress: CGFloat = max(0.0, min(1.0, (audioRecordingCancelIndicatorFrame.minX - 100.0) / 10.0))
                     audioRecordingCancelIndicator.alpha = progress
                 } else {
@@ -2145,6 +2144,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
                 audioRecordingTimeNode.layer.animateAlpha(from: 0, to: 1, duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring)
             }
 
+            let dotFrame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: audioRecordingTimeNode.frame.midY - 20), size: CGSize(width: 40.0, height: 40))
+
             var animateDotAppearing = false
             let audioRecordingDotNode: AnimationNode
             if let currentAudioRecordingDotNode = self.audioRecordingDotNode, !currentAudioRecordingDotNode.didPlay {
@@ -2152,9 +2153,12 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
             } else {
                 self.audioRecordingDotNode?.removeFromSupernode()
                 audioRecordingDotNode = AnimationNode(animation: "BinRed")
+
                 self.audioRecordingDotNode = audioRecordingDotNode
                 self.audioRecordingDotNodeDismissed = false
                 self.clippingNode.insertSubnode(audioRecordingDotNode, belowSubnode: self.menuButton)
+                audioRecordingDotNode.frame = dotFrame
+
                 self.animatingBinNode?.removeFromSupernode()
                 self.animatingBinNode = nil
             }
@@ -2163,10 +2167,14 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
             if let mediaRecordingState = mediaRecordingState, case .waitingForPreview = mediaRecordingState {
                 animateDotAppearing = false
             }
 
-            audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: audioRecordingTimeNode.frame.midY - 20), size: CGSize(width: 40.0, height: 40))
-            if animateDotAppearing || animateCancelSlideIn {
+            audioRecordingDotNode.bounds = CGRect(origin: .zero, size: dotFrame.size)
+            audioRecordingDotNode.position = dotFrame.center
+
+            if animateDotAppearing {
+                Queue.mainQueue().justDispatch {
                     audioRecordingDotNode.layer.animateScale(from: 0.3, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false)
+                }
                 audioRecordingTimeNode.started = { [weak audioRecordingDotNode] in
                     if let audioRecordingDotNode = audioRecordingDotNode, audioRecordingDotNode.layer.animation(forKey: "recording") == nil {
                         audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 0), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in
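Note: replacing the frame assignment with bounds + position in the last two hunks matters because of the scale animation right below it: frame is derived from bounds, position, and the layer transform, and is undefined while a transform animation is in flight, so writing it there can fight the animation, whereas the primitives stay well-defined. A pure-UIKit illustration of the same idea:

    import UIKit

    // Position a view that is about to run a transform animation: set
    // bounds + center (stable under transforms) rather than frame.
    let dotFrame = CGRect(x: 10.0, y: 4.0, width: 40.0, height: 40.0)
    let dotView = UIView()
    dotView.bounds = CGRect(origin: .zero, size: dotFrame.size)
    dotView.center = CGPoint(x: dotFrame.midX, y: dotFrame.midY)

    let appear = CABasicAnimation(keyPath: "transform.scale")
    appear.fromValue = 0.3
    appear.toValue = 1.0
    appear.duration = 0.15
    dotView.layer.add(appear, forKey: "appear")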