Video message recording improvements

This commit is contained in:
Ilya Laktyushin 2024-01-12 18:17:14 +04:00
parent eab00ad5cf
commit e8ff9f603a
12 changed files with 464 additions and 65 deletions

View File

@ -505,7 +505,7 @@ private final class CameraContext {
}
}
public func startRecording() -> Signal<Double, NoError> {
public func startRecording() -> Signal<CameraRecordingData, NoError> {
guard let mainDeviceContext = self.mainDeviceContext else {
return .complete()
}
@ -779,7 +779,7 @@ public final class Camera {
}
}
public func startRecording() -> Signal<Double, NoError> {
public func startRecording() -> Signal<CameraRecordingData, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.queue.async {
@ -1024,3 +1024,8 @@ public final class CameraHolder {
self.previewView = previewView
}
}
/// Snapshot of an in-progress camera recording, emitted periodically by
/// `Camera.startRecording()` while video is being captured.
public struct CameraRecordingData {
    /// Elapsed recording time in seconds (0.0 until the recorder reports progress).
    public let duration: Double
    /// Path of the video file the recorder is writing; used to start live
    /// upload of the file while recording is still in progress.
    public let filePath: String
}

View File

@ -295,7 +295,7 @@ final class CameraOutput: NSObject {
private var currentMode: RecorderMode = .default
private var recordingCompletionPipe = ValuePipe<VideoCaptureResult>()
func startRecording(mode: RecorderMode, position: Camera.Position? = nil, orientation: AVCaptureVideoOrientation, additionalOutput: CameraOutput? = nil) -> Signal<Double, NoError> {
func startRecording(mode: RecorderMode, position: Camera.Position? = nil, orientation: AVCaptureVideoOrientation, additionalOutput: CameraOutput? = nil) -> Signal<CameraRecordingData, NoError> {
guard self.videoRecorder == nil else {
return .complete()
}
@ -376,7 +376,8 @@ final class CameraOutput: NSObject {
return Signal { subscriber in
let timer = SwiftSignalKit.Timer(timeout: 0.02, repeat: true, completion: { [weak videoRecorder] in
subscriber.putNext(videoRecorder?.duration ?? 0.0)
let recordingData = CameraRecordingData(duration: videoRecorder?.duration ?? 0.0, filePath: outputFilePath)
subscriber.putNext(recordingData)
}, queue: Queue.mainQueue())
timer.start()

View File

@ -147,12 +147,12 @@ public final class EntityVideoRecorder {
self.start = CACurrentMediaTime()
self.recordingDisposable.set((self.camera.startRecording()
|> deliverOnMainQueue).startStrict(next: { [weak self] duration in
|> deliverOnMainQueue).startStrict(next: { [weak self] recordingData in
guard let self else {
return
}
self.durationPromise.set(duration)
if duration >= self.maxDuration {
self.durationPromise.set(recordingData.duration)
if recordingData.duration >= self.maxDuration {
let onAutomaticStop = self.onAutomaticStop
self.stopRecording(save: true, completion: {
onAutomaticStop()

View File

@ -651,10 +651,10 @@ private final class CameraScreenComponent: CombinedComponent {
let startRecording = {
self.resultDisposable.set((camera.startRecording()
|> deliverOnMainQueue).start(next: { [weak self] duration in
|> deliverOnMainQueue).start(next: { [weak self] recordingData in
if let self, let controller = self.getController() {
controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1))
if duration > 59.0 {
controller.updateCameraState({ $0.updatedDuration(recordingData.duration) }, transition: .easeInOut(duration: 0.1))
if recordingData.duration > 59.0 {
self.stopVideoRecording()
}
}

View File

@ -37,6 +37,7 @@ swift_library(
"//submodules/TelegramNotices",
"//submodules/DeviceAccess",
"//submodules/TelegramUI/Components/MediaEditor",
"//submodules/LegacyMediaPickerUI",
],
visibility = [
"//visibility:public",

View File

@ -20,6 +20,13 @@ final class ResultPreviewView: UIView {
}
}
var onLoop: () -> Void = {}
// Mirrors the mute state onto the underlying AVPlayer; defaults to muted so
// looping preview playback starts silent until the user taps to unmute.
var isMuted = true {
    didSet {
        self.player.isMuted = self.isMuted
    }
}
init(composition: AVComposition) {
self.composition = composition
@ -42,6 +49,8 @@ final class ResultPreviewView: UIView {
}
self.player.pause()
self.seek(to: start, andPlay: true)
self.onLoop()
})
self.player.play()

View File

@ -25,6 +25,7 @@ import MediaEditor
import MediaResources
import LocalMediaResources
import ImageCompression
import LegacyMediaPickerUI
struct CameraState: Equatable {
enum Recording: Equatable {
@ -59,6 +60,7 @@ struct CameraState: Equatable {
struct PreviewState: Equatable {
let composition: AVComposition
let trimRange: Range<Double>?
let isMuted: Bool
}
enum CameraScreenTransition {
@ -75,6 +77,7 @@ private final class CameraScreenComponent: CombinedComponent {
let context: AccountContext
let cameraState: CameraState
let isPreviewing: Bool
let isMuted: Bool
let getController: () -> VideoMessageCameraScreen?
let present: (ViewController) -> Void
let push: (ViewController) -> Void
@ -86,6 +89,7 @@ private final class CameraScreenComponent: CombinedComponent {
context: AccountContext,
cameraState: CameraState,
isPreviewing: Bool,
isMuted: Bool,
getController: @escaping () -> VideoMessageCameraScreen?,
present: @escaping (ViewController) -> Void,
push: @escaping (ViewController) -> Void,
@ -96,6 +100,7 @@ private final class CameraScreenComponent: CombinedComponent {
self.context = context
self.cameraState = cameraState
self.isPreviewing = isPreviewing
self.isMuted = isMuted
self.getController = getController
self.present = present
self.push = push
@ -114,6 +119,9 @@ private final class CameraScreenComponent: CombinedComponent {
if lhs.isPreviewing != rhs.isPreviewing {
return false
}
if lhs.isMuted != rhs.isMuted {
return false
}
return true
}
@ -224,18 +232,21 @@ private final class CameraScreenComponent: CombinedComponent {
controller.node.dismissAllTooltips()
controller.updateCameraState({ $0.updatedRecording(pressing ? .holding : .handsFree).updatedDuration(initialDuration) }, transition: .spring(duration: 0.4))
let isFirstTime = !controller.node.cameraIsActive
let isFirstRecording = initialDuration.isZero
controller.node.resumeCameraCapture()
controller.node.withReadyCamera(isFirstTime: isFirstTime) {
controller.node.withReadyCamera(isFirstTime: !controller.node.cameraIsActive) {
self.resultDisposable.set((camera.startRecording()
|> deliverOnMainQueue).start(next: { [weak self] duration in
let duration = initialDuration + duration
|> deliverOnMainQueue).start(next: { [weak self] recordingData in
let duration = initialDuration + recordingData.duration
if let self, let controller = self.getController() {
controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1))
if duration > 59.0 {
if recordingData.duration > 59.0 {
self.stopVideoRecording()
}
if isFirstRecording {
controller.node.setupLiveUpload(filePath: recordingData.filePath)
}
}
}))
}
@ -457,6 +468,10 @@ public class VideoMessageCameraScreen: ViewController {
fileprivate var camera: Camera?
private let updateState: ActionSlot<CameraState>
fileprivate var liveUploadInterface: LegacyLiveUploadInterface?
private var currentLiveUploadPath: String?
fileprivate var currentLiveUploadData: LegacyLiveUploadInterfaceResult?
fileprivate let backgroundView: UIVisualEffectView
fileprivate let containerView: UIView
fileprivate let componentHost: ComponentView<ViewControllerComponentContainer.Environment>
@ -491,7 +506,7 @@ public class VideoMessageCameraScreen: ViewController {
fileprivate let startRecording = ActionSlot<Void>()
fileprivate let stopRecording = ActionSlot<Void>()
private let completion = ActionSlot<VideoMessageCameraScreen.CaptureResult>()
var cameraState: CameraState {
didSet {
if self.cameraState.isViewOnceEnabled != oldValue.isViewOnceEnabled {
@ -507,6 +522,7 @@ public class VideoMessageCameraScreen: ViewController {
var previewState: PreviewState? {
didSet {
self.previewStatePromise.set(.single(self.previewState))
self.resultPreviewView?.isMuted = self.previewState?.isMuted ?? true
}
}
var previewStatePromise = Promise<PreviewState?>()
@ -554,7 +570,7 @@ public class VideoMessageCameraScreen: ViewController {
)
self.previewState = nil
super.init()
self.backgroundColor = .clear
@ -605,6 +621,17 @@ public class VideoMessageCameraScreen: ViewController {
}
}
/// Starts a live (streaming) upload of the video file being recorded, so the
/// message can be sent faster once recording finishes.
///
/// Does nothing if live upload is disallowed by the controller (e.g. secret
/// chats) or an upload interface already exists for this session.
func setupLiveUpload(filePath: String) {
    guard self.liveUploadInterface == nil, let controller = self.controller, controller.allowLiveUpload else {
        return
    }
    let uploadInterface = LegacyLiveUploadInterface(context: self.context)
    // Attach to the file after a short delay — presumably to let the recorder
    // begin writing to it first (TODO(review): confirm the 1.5s is required).
    Queue.mainQueue().after(1.5, {
        uploadInterface.setup(withFileURL: URL(fileURLWithPath: filePath))
    })
    self.liveUploadInterface = uploadInterface
}
override func didLoad() {
super.didLoad()
@ -736,6 +763,14 @@ public class VideoMessageCameraScreen: ViewController {
return
}
if self.results.isEmpty {
if let liveUploadData = self.liveUploadInterface?.fileUpdated(true) as? LegacyLiveUploadInterfaceResult {
self.currentLiveUploadData = liveUploadData
}
} else {
self.currentLiveUploadData = nil
}
self.pauseCameraCapture()
self.results.append(result)
@ -745,7 +780,7 @@ public class VideoMessageCameraScreen: ViewController {
let composition = composition(with: self.results)
controller.updatePreviewState({ _ in
return PreviewState(composition: composition, trimRange: nil)
return PreviewState(composition: composition, trimRange: nil, isMuted: true)
}, transition: .spring(duration: 0.4))
}
@ -837,16 +872,32 @@ public class VideoMessageCameraScreen: ViewController {
}
/// Applies a new trim range to the result preview and stores it in the
/// controller's preview state (keeping the current mute setting).
///
/// - Parameters:
///   - start: Trim start, in seconds.
///   - end: Trim end, in seconds.
///   - updatedEnd: Whether the end handle (as opposed to the start) moved.
///   - apply: Whether the preview should commit the range now.
func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) {
    guard let controller = self.controller else {
        return
    }
    self.resultPreviewView?.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
    controller.updatePreviewState({ state in
        guard let state else {
            return nil
        }
        return PreviewState(composition: state.composition, trimRange: start ..< end, isMuted: state.isMuted)
    }, transition: .immediate)
}
/// Tap handler for the result preview: toggles the preview's mute state,
/// leaving composition and trim range untouched.
@objc func resultTapped() {
    guard let controller = self.controller else {
        return
    }
    controller.updatePreviewState({ state in
        guard let state else {
            return nil
        }
        return PreviewState(composition: state.composition, trimRange: state.trimRange, isMuted: !state.isMuted)
    }, transition: .easeInOut(duration: 0.2))
}
func requestUpdateLayout(transition: Transition) {
if let layout = self.validLayout {
self.containerLayoutUpdated(layout: layout, forceUpdate: true, transition: transition)
@ -888,7 +939,7 @@ public class VideoMessageCameraScreen: ViewController {
self.didAppear()
}
let backgroundFrame = CGRect(origin: .zero, size: CGSize(width: layout.size.width, height: layout.size.height - controller.inputPanelFrame.height - layout.intrinsicInsets.bottom))
let backgroundFrame = CGRect(origin: .zero, size: CGSize(width: layout.size.width, height: controller.inputPanelFrame.minY))
let componentSize = self.componentHost.update(
transition: transition,
@ -897,6 +948,7 @@ public class VideoMessageCameraScreen: ViewController {
context: self.context,
cameraState: self.cameraState,
isPreviewing: self.previewState != nil || self.transitioningToPreview,
isMuted: self.previewState?.isMuted ?? true,
getController: { [weak self] in
return self?.controller
},
@ -933,8 +985,9 @@ public class VideoMessageCameraScreen: ViewController {
transition.setPosition(view: self.containerView, position: backgroundFrame.center)
transition.setBounds(view: self.containerView, bounds: CGRect(origin: .zero, size: backgroundFrame.size))
let availableHeight = layout.size.height - (layout.inputHeight ?? 0.0)
let previewSide = min(369.0, layout.size.width - 24.0)
let previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(90.0, layout.size.height * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
let previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
if !self.animatingIn {
transition.setFrame(view: self.previewContainerView, frame: previewFrame)
@ -964,10 +1017,22 @@ public class VideoMessageCameraScreen: ViewController {
resultPreviewView = current
} else {
resultPreviewView = ResultPreviewView(composition: previewState.composition)
resultPreviewView.onLoop = { [weak self] in
if let self, let controller = self.controller {
controller.updatePreviewState({ state in
if let state {
return PreviewState(composition: state.composition, trimRange: state.trimRange, isMuted: true)
}
return nil
}, transition: .easeInOut(duration: 0.2))
}
}
self.previewContainerView.addSubview(resultPreviewView)
self.resultPreviewView = resultPreviewView
resultPreviewView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
resultPreviewView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.resultTapped)))
}
resultPreviewView.frame = previewInnerFrame
} else if let resultPreviewView = self.resultPreviewView {
@ -988,8 +1053,9 @@ public class VideoMessageCameraScreen: ViewController {
private let context: AccountContext
private let updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
private let inputPanelFrame: CGRect
fileprivate let completion: (EnqueueMessage) -> Void
fileprivate var allowLiveUpload: Bool
fileprivate let completion: (EnqueueMessage?) -> Void
private var audioSessionDisposable: Disposable?
@ -1129,11 +1195,13 @@ public class VideoMessageCameraScreen: ViewController {
context: AccountContext,
updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?,
inputPanelFrame: CGRect,
completion: @escaping (EnqueueMessage) -> Void
allowLiveUpload: Bool,
completion: @escaping (EnqueueMessage?) -> Void
) {
self.context = context
self.updatedPresentationData = updatedPresentationData
self.inputPanelFrame = inputPanelFrame
self.allowLiveUpload = allowLiveUpload
self.completion = completion
self.recordingStatus = RecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
@ -1166,6 +1234,11 @@ public class VideoMessageCameraScreen: ViewController {
}
public func sendVideoRecording() {
if case .none = self.cameraState.recording, self.node.results.isEmpty {
self.completion(nil)
return
}
if case .none = self.cameraState.recording {
} else {
self.waitingForNextResult = true
@ -1181,7 +1254,8 @@ public class VideoMessageCameraScreen: ViewController {
var videoPaths: [String] = []
var duration: Double = 0.0
var hasAdjustments = results.count > 1
for result in results {
if case let .video(video) = result {
videoPaths.append(video.videoPath)
@ -1192,14 +1266,16 @@ public class VideoMessageCameraScreen: ViewController {
let finalDuration: Double
if let trimRange = self.node.previewState?.trimRange {
finalDuration = trimRange.upperBound - trimRange.lowerBound
if finalDuration != duration {
hasAdjustments = true
}
} else {
finalDuration = duration
}
var resourceAdjustments: VideoMediaResourceAdjustments? = nil
let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: PixelDimensions(width: 400, height: 400), cropOffset: .zero, cropRect: CGRect(origin: .zero, size: CGSize(width: 400.0, height: 400.0)), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
var resourceAdjustments: VideoMediaResourceAdjustments? = nil
if let valuesData = try? JSONEncoder().encode(values) {
let data = MemoryBuffer(data: valuesData)
let digest = MemoryBuffer(data: data.md5Digest())
@ -1207,7 +1283,18 @@ public class VideoMessageCameraScreen: ViewController {
}
let resource: TelegramMediaResource
resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), paths: videoPaths, adjustments: resourceAdjustments)
let liveUploadData: LegacyLiveUploadInterfaceResult?
if let current = self.node.currentLiveUploadData {
liveUploadData = current
} else {
liveUploadData = self.node.liveUploadInterface?.fileUpdated(true) as? LegacyLiveUploadInterfaceResult
}
if !hasAdjustments, let liveUploadData, let data = try? Data(contentsOf: URL(fileURLWithPath: video.videoPath)) {
resource = LocalFileMediaResource(fileId: liveUploadData.id)
self.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
} else {
resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), paths: videoPaths, adjustments: resourceAdjustments)
}
var previewRepresentations: [TelegramMediaImageRepresentation] = []

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "addlink_16.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,236 @@
%PDF-1.7
1 0 obj
<< >>
endobj
2 0 obj
<< /Length 3 0 R >>
stream
/DeviceRGB CS
/DeviceRGB cs
q
1.000000 0.000000 -0.000000 1.000000 8.500000 7.040154 cm
0.000000 0.000000 0.000000 scn
-0.470226 1.930072 m
-0.729925 1.670374 -0.729925 1.249319 -0.470226 0.989621 c
-0.210527 0.729922 0.210527 0.729922 0.470226 0.989621 c
-0.470226 1.930072 l
h
6.470226 6.989621 m
6.729925 7.249319 6.729925 7.670374 6.470226 7.930072 c
6.210527 8.189772 5.789473 8.189772 5.529774 7.930072 c
6.470226 6.989621 l
h
0.470226 0.989621 m
6.470226 6.989621 l
5.529774 7.930072 l
-0.470226 1.930072 l
0.470226 0.989621 l
h
f
n
Q
q
1.000000 0.000000 -0.000000 1.000000 9.500000 8.170006 cm
0.000000 0.000000 0.000000 scn
0.000000 6.994994 m
-0.367269 6.994994 -0.665000 6.697264 -0.665000 6.329994 c
-0.665000 5.962725 -0.367269 5.664994 0.000000 5.664994 c
0.000000 6.994994 l
h
5.000000 6.329994 m
5.665000 6.329994 l
5.665000 6.697264 5.367270 6.994994 5.000000 6.994994 c
5.000000 6.329994 l
h
4.335000 1.329994 m
4.335000 0.962725 4.632730 0.664994 5.000000 0.664994 c
5.367270 0.664994 5.665000 0.962725 5.665000 1.329994 c
4.335000 1.329994 l
h
0.000000 5.664994 m
5.000000 5.664994 l
5.000000 6.994994 l
0.000000 6.994994 l
0.000000 5.664994 l
h
4.335000 6.329994 m
4.335000 1.329994 l
5.665000 1.329994 l
5.665000 6.329994 l
4.335000 6.329994 l
h
f
n
Q
q
1.000000 0.000000 -0.000000 1.000000 3.000000 1.668968 cm
0.000000 0.000000 0.000000 scn
4.000000 10.666032 m
4.367270 10.666032 4.665000 10.963762 4.665000 11.331032 c
4.665000 11.698301 4.367270 11.996032 4.000000 11.996032 c
4.000000 10.666032 l
h
10.665000 5.331032 m
10.665000 5.698301 10.367270 5.996032 10.000000 5.996032 c
9.632730 5.996032 9.335000 5.698301 9.335000 5.331032 c
10.665000 5.331032 l
h
8.907981 1.549019 m
9.209885 0.956499 l
8.907981 1.549019 l
h
9.782013 2.423051 m
10.374533 2.121147 l
9.782013 2.423051 l
h
1.092019 11.113045 m
0.790115 11.705564 l
1.092019 11.113045 l
h
0.217987 10.239013 m
-0.374532 10.540916 l
0.217987 10.239013 l
h
4.000000 11.996032 m
3.200000 11.996032 l
3.200000 10.666032 l
4.000000 10.666032 l
4.000000 11.996032 l
h
-0.665000 8.131032 m
-0.665000 4.531032 l
0.665000 4.531032 l
0.665000 8.131032 l
-0.665000 8.131032 l
h
3.200000 0.666032 m
6.800000 0.666032 l
6.800000 1.996032 l
3.200000 1.996032 l
3.200000 0.666032 l
h
10.665000 4.531032 m
10.665000 5.331032 l
9.335000 5.331032 l
9.335000 4.531032 l
10.665000 4.531032 l
h
6.800000 0.666032 m
7.349080 0.666032 7.800883 0.665515 8.167748 0.695489 c
8.542377 0.726097 8.886601 0.791779 9.209885 0.956499 c
8.606077 2.141538 l
8.501536 2.088272 8.351824 2.044960 8.059443 2.021071 c
7.759301 1.996549 7.371026 1.996032 6.800000 1.996032 c
6.800000 0.666032 l
h
9.335000 4.531032 m
9.335000 3.960006 9.334483 3.571731 9.309960 3.271588 c
9.286072 2.979208 9.242760 2.829495 9.189494 2.724955 c
10.374533 2.121147 l
10.539253 2.444430 10.604935 2.788655 10.635543 3.163283 c
10.665517 3.530149 10.665000 3.981952 10.665000 4.531032 c
9.335000 4.531032 l
h
9.209885 0.956499 m
9.711337 1.212002 10.119030 1.619695 10.374533 2.121147 c
9.189494 2.724955 l
9.061502 2.473758 8.857274 2.269529 8.606077 2.141538 c
9.209885 0.956499 l
h
-0.665000 4.531032 m
-0.665000 3.981952 -0.665517 3.530149 -0.635543 3.163283 c
-0.604935 2.788655 -0.539253 2.444430 -0.374532 2.121147 c
0.810506 2.724955 l
0.757240 2.829495 0.713928 2.979208 0.690040 3.271588 c
0.665517 3.571731 0.665000 3.960006 0.665000 4.531032 c
-0.665000 4.531032 l
h
3.200000 1.996032 m
2.628974 1.996032 2.240699 1.996549 1.940556 2.021071 c
1.648176 2.044960 1.498463 2.088272 1.393923 2.141538 c
0.790115 0.956499 l
1.113398 0.791779 1.457623 0.726097 1.832252 0.695489 c
2.199117 0.665515 2.650921 0.666032 3.200000 0.666032 c
3.200000 1.996032 l
h
-0.374532 2.121147 m
-0.119030 1.619695 0.288663 1.212002 0.790115 0.956499 c
1.393923 2.141538 l
1.142726 2.269529 0.938497 2.473758 0.810506 2.724955 c
-0.374532 2.121147 l
h
3.200000 11.996032 m
2.650921 11.996032 2.199117 11.996549 1.832252 11.966575 c
1.457623 11.935966 1.113398 11.870285 0.790115 11.705564 c
1.393923 10.520525 l
1.498463 10.573792 1.648176 10.617104 1.940556 10.640992 c
2.240699 10.665515 2.628974 10.666032 3.200000 10.666032 c
3.200000 11.996032 l
h
0.665000 8.131032 m
0.665000 8.702057 0.665517 9.090332 0.690040 9.390476 c
0.713928 9.682856 0.757240 9.832568 0.810506 9.937109 c
-0.374532 10.540916 l
-0.539253 10.217633 -0.604935 9.873408 -0.635543 9.498780 c
-0.665517 9.131915 -0.665000 8.680111 -0.665000 8.131032 c
0.665000 8.131032 l
h
0.790115 11.705564 m
0.288663 11.450062 -0.119030 11.042369 -0.374532 10.540916 c
0.810506 9.937109 l
0.938497 10.188306 1.142726 10.392534 1.393923 10.520525 c
0.790115 11.705564 l
h
f
n
Q
endstream
endobj
3 0 obj
4659
endobj
4 0 obj
<< /Annots []
/Type /Page
/MediaBox [ 0.000000 0.000000 16.000000 16.000000 ]
/Resources 1 0 R
/Contents 2 0 R
/Parent 5 0 R
>>
endobj
5 0 obj
<< /Kids [ 4 0 R ]
/Count 1
/Type /Pages
>>
endobj
6 0 obj
<< /Pages 5 0 R
/Type /Catalog
>>
endobj
xref
0 7
0000000000 65535 f
0000000010 00000 n
0000000034 00000 n
0000004749 00000 n
0000004772 00000 n
0000004945 00000 n
0000005019 00000 n
trailer
<< /ID [ (some) (id) ]
/Root 6 0 R
/Size 7
>>
startxref
5078
%%EOF

View File

@ -352,6 +352,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
var videoRecorder = Promise<VideoMessageCameraScreen?>()
var videoRecorderDisposable: Disposable?
var recorderDataDisposable = MetaDisposable()
var buttonKeyboardMessageDisposable: Disposable?
var cachedDataDisposable: Disposable?
var chatUnreadCountDisposable: Disposable?
@ -6599,6 +6601,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
self.stickerSettingsDisposable?.dispose()
self.searchQuerySuggestionState?.1.dispose()
self.preloadSavedMessagesChatsDisposable?.dispose()
self.recorderDataDisposable.dispose()
}
deallocate()
}
@ -15315,21 +15318,28 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
isScheduledMessages = true
}
let _ = peerId
let _ = isScheduledMessages
let controller = VideoMessageCameraScreen(
context: self.context,
updatedPresentationData: self.updatedPresentationData,
inputPanelFrame: currentInputPanelFrame,
allowLiveUpload: peerId.namespace != Namespaces.Peer.SecretChat,
completion: { [weak self] message in
guard let self, let videoController = self.videoRecorderValue else {
return
}
guard var message else {
self.recorderFeedback?.error()
self.recorderFeedback = nil
self.videoRecorder.set(.single(nil))
return
}
let replyMessageSubject = self.presentationInterfaceState.interfaceState.replyMessageSubject
let correlationId = Int64.random(in: 0 ..< Int64.max)
let message = message
message = message
.withUpdatedReplyToMessageId(replyMessageSubject?.subjectModel)
.withUpdatedCorrelationId(correlationId)
@ -15393,6 +15403,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
switch updatedAction {
case .dismiss:
self.recorderDataDisposable.set(nil)
self.chatDisplayNode.updateRecordedMediaDeleted(true)
self.audioRecorder.set(.single(nil))
case .preview, .pause:
@ -15404,8 +15415,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return panelState.withUpdatedMediaRecordingState(.waitingForPreview)
}
})
let _ = (audioRecorderValue.takenRecordedData()
|> deliverOnMainQueue).startStandalone(next: { [weak self] data in
self.recorderDataDisposable.set((audioRecorderValue.takenRecordedData()
|> deliverOnMainQueue).startStrict(next: { [weak self] data in
if let strongSelf = self, let data = data {
if data.duration < 0.5 {
strongSelf.recorderFeedback?.error()
@ -15415,6 +15426,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return panelState.withUpdatedMediaRecordingState(nil)
}
})
strongSelf.recorderDataDisposable.set(nil)
} else if let waveform = data.waveform {
let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max), size: Int64(data.compressedData.count))
@ -15427,13 +15439,14 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
})
strongSelf.recorderFeedback = nil
strongSelf.updateDownButtonVisibility()
strongSelf.recorderDataDisposable.set(nil)
}
}
})
}))
case let .send(viewOnce):
self.chatDisplayNode.updateRecordedMediaDeleted(false)
let _ = (audioRecorderValue.takenRecordedData()
|> deliverOnMainQueue).startStandalone(next: { [weak self] data in
self.recorderDataDisposable.set((audioRecorderValue.takenRecordedData()
|> deliverOnMainQueue).startStrict(next: { [weak self] data in
if let strongSelf = self, let data = data {
if data.duration < 0.5 {
strongSelf.recorderFeedback?.error()
@ -15481,24 +15494,27 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
strongSelf.recorderFeedback?.tap()
strongSelf.recorderFeedback = nil
strongSelf.recorderDataDisposable.set(nil)
}
}
})
}))
}
} else if let videoRecorderValue = self.videoRecorderValue {
if case .send = updatedAction {
self.chatDisplayNode.updateRecordedMediaDeleted(false)
videoRecorderValue.sendVideoRecording()
self.recorderDataDisposable.set(nil)
} else {
if case .dismiss = updatedAction {
self.chatDisplayNode.updateRecordedMediaDeleted(true)
self.recorderDataDisposable.set(nil)
}
switch updatedAction {
case .preview, .pause:
if videoRecorderValue.stopVideoRecording() {
let _ = (videoRecorderValue.takenRecordedData()
|> deliverOnMainQueue).startStandalone(next: { [weak self] data in
self.recorderDataDisposable.set((videoRecorderValue.takenRecordedData()
|> deliverOnMainQueue).startStrict(next: { [weak self] data in
if let strongSelf = self, let data = data {
if data.duration < 0.5 {
strongSelf.recorderFeedback?.error()
@ -15508,6 +15524,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return panelState.withUpdatedMediaRecordingState(nil)
}
})
strongSelf.recorderDataDisposable.set(nil)
strongSelf.videoRecorder.set(.single(nil))
} else {
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedRecordedMediaPreview(.video(
@ -15532,15 +15550,16 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
strongSelf.updateDownButtonVisibility()
}
}
})
}))
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
}
})
// self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
// $0.updatedInputTextPanelState { panelState in
// return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
// }
// })
}
default:
self.recorderDataDisposable.set(nil)
self.videoRecorder.set(.single(nil))
}
}

View File

@ -354,12 +354,17 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: 2 - UIScreenPixel), size: CGSize(width: 44.0, height: 44)))
self.binNode.frame = self.deleteButton.bounds
var viewOnceOffset: CGFloat = 0.0
if interfaceState.interfaceState.replyMessageSubject != nil {
viewOnceOffset = -35.0
}
let viewOnceSize = self.viewOnceButton.update(theme: interfaceState.theme)
let viewOnceButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -64.0 - 53.0), size: viewOnceSize)
let viewOnceButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -64.0 - 53.0 + viewOnceOffset), size: viewOnceSize)
transition.updateFrame(node: self.viewOnceButton, frame: viewOnceButtonFrame)
let recordMoreSize = self.recordMoreButton.update(theme: interfaceState.theme)
let recordMoreButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -64.0), size: recordMoreSize)
let recordMoreButtonFrame = CGRect(origin: CGPoint(x: width - rightInset - 44.0 - UIScreenPixel, y: -64.0 + viewOnceOffset), size: recordMoreSize)
transition.updateFrame(node: self.recordMoreButton, frame: recordMoreButtonFrame)
var isScheduledMessages = false
@ -479,6 +484,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
}
@objc func deletePressed() {
self.viewOnce = false
self.tooltipController?.dismiss()
self.mediaPlayer?.pause()

View File

@ -591,6 +591,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
private var extendedSearchLayout = false
var isMediaDeleted: Bool = false
private var recordingPaused = false
private let inputMenu: TextInputMenu
@ -2163,9 +2164,22 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
self.animatingBinNode = nil
}
var resumingRecording = false
animateDotAppearing = transition.isAnimated && !hideInfo
if let mediaRecordingState = mediaRecordingState, case .waitingForPreview = mediaRecordingState {
animateDotAppearing = false
if let mediaRecordingState = mediaRecordingState {
if case .waitingForPreview = mediaRecordingState {
self.recordingPaused = true
animateDotAppearing = false
} else {
if self.recordingPaused {
self.recordingPaused = false
resumingRecording = true
if (audioRecordingDotNode.layer.animationKeys() ?? []).isEmpty {
animateDotAppearing = true
}
}
}
}
audioRecordingDotNode.bounds = CGRect(origin: .zero, size: dotFrame.size)
@ -2174,21 +2188,30 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
if animateDotAppearing {
Queue.mainQueue().justDispatch {
audioRecordingDotNode.layer.animateScale(from: 0.3, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false)
}
audioRecordingTimeNode.started = { [weak audioRecordingDotNode] in
if let audioRecordingDotNode = audioRecordingDotNode, audioRecordingDotNode.layer.animation(forKey: "recording") == nil {
audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 0), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in
if finished {
let animation = CAKeyframeAnimation(keyPath: "opacity")
animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber]
animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber]
animation.duration = 0.5
animation.autoreverses = true
animation.repeatCount = Float.infinity
audioRecordingDotNode?.layer.add(animation, forKey: "recording")
}
})
let animateDot = { [weak audioRecordingDotNode] in
if let audioRecordingDotNode, audioRecordingDotNode.layer.animation(forKey: "recording") == nil {
audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 0), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in
if finished {
let animation = CAKeyframeAnimation(keyPath: "opacity")
animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber]
animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber]
animation.duration = 0.5
animation.autoreverses = true
animation.repeatCount = Float.infinity
audioRecordingDotNode?.layer.add(animation, forKey: "recording")
}
})
}
}
if resumingRecording {
animateDot()
} else {
audioRecordingTimeNode.started = {
animateDot()
}
}
}
self.attachmentButton.layer.animateAlpha(from: CGFloat(self.attachmentButton.layer.presentation()?.opacity ?? 1), to: 0, duration: 0.15, delay: 0, removeOnCompletion: false)