Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-06-16 05:55:20 +00:00

Commit b31e1dfb8b — "Apply patch" (parent d4bd039b6e)
@@ -19,6 +19,7 @@
 
 @property (nonatomic, copy) id (^requestActivityHolder)();
 @property (nonatomic, copy) void (^micLevel)(CGFloat level);
+@property (nonatomic, copy) void (^onDuration)(NSTimeInterval duration);
 @property (nonatomic, copy) void(^finishedWithVideo)(NSURL *videoURL, UIImage *previewImage, NSUInteger fileSize, NSTimeInterval duration, CGSize dimensions, id liveUploadData, TGVideoEditAdjustments *adjustments, bool, int32_t);
 @property (nonatomic, copy) void(^onDismiss)(bool isAuto);
 @property (nonatomic, copy) void(^onStop)(void);
@@ -683,6 +683,9 @@ typedef enum
     if (!_capturePipeline.isRecording)
         return false;
     
+    if (_capturePipeline.videoDuration < 0.33)
+        return false;
+    
     if ([self.view.window isKindOfClass:[TGVideoMessageCaptureControllerWindow class]]) {
         ((TGVideoMessageCaptureControllerWindow *)self.view.window).locked = false;
     }
@@ -1045,6 +1048,7 @@ typedef enum
     {
         [_controlsView recordingStarted];
         [_controlsView setDurationString:@"0:00,00"];
+        self.onDuration(0);
         
         _audioRecordingDurationSeconds = 0;
         _audioRecordingDurationMilliseconds = 0.0;
@@ -1078,6 +1082,7 @@ typedef enum
     }
     else
     {
+        self.onDuration(recordingDuration);
         _audioRecordingDurationSeconds = currentDurationSeconds;
         _audioRecordingDurationMilliseconds = currentDurationMilliseconds;
         [_controlsView setDurationString:[[NSString alloc] initWithFormat:@"%d:%02d,%02d", (int)_audioRecordingDurationSeconds / 60, (int)_audioRecordingDurationSeconds % 60, (int)_audioRecordingDurationMilliseconds]];
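
Note: with this change the capture controller reports the elapsed recording time through the new onDuration block, both at recording start (0) and on every timer tick, while the on-screen label keeps the minutes:seconds,hundredths format. A minimal Swift sketch of that same "%d:%02d,%02d" formatting, with a hypothetical helper name:

import Foundation

// Hypothetical helper mirroring the format string used above:
// minutes, then seconds, then hundredths of a second.
func formatRecordingDuration(_ duration: TimeInterval) -> String {
    let totalSeconds = Int(duration)
    let hundredths = Int((duration - TimeInterval(totalSeconds)) * 100.0)
    return String(format: "%d:%02d,%02d", totalSeconds / 60, totalSeconds % 60, hundredths)
}

// formatRecordingDuration(0)    == "0:00,00"
// formatRecordingDuration(75.5) == "1:15,50"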
@@ -160,7 +160,7 @@ static CGRect viewFrame(UIView *view)
     CGRect slideToCancelArrowFrame = viewFrame(_slideToCancelArrow);
     setViewFrame(_slideToCancelArrow, CGRectMake(CGFloor((self.frame.size.width - _slideToCancelLabel.frame.size.width) / 2.0f) - slideToCancelArrowFrame.size.width - 7.0f, CGFloor((self.frame.size.height - _slideToCancelLabel.frame.size.height) / 2.0f), slideToCancelArrowFrame.size.width, slideToCancelArrowFrame.size.height));
     _slideToCancelArrow.alpha = 0.0f;
-    [self addSubview:_slideToCancelArrow];
+    // [self addSubview:_slideToCancelArrow];
     
     _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(hideLeftOffset, 0.0f);
     _slideToCancelLabel.transform = CGAffineTransformMakeTranslation(hideLeftOffset, 0.0f);
@@ -185,11 +185,11 @@ static CGRect viewFrame(UIView *view)
     _recordDurationLabel.text = @"0:00,00";
     
     if (_recordIndicatorView.superview == nil)
-        [self addSubview:_recordIndicatorView];
+        // [self addSubview:_recordIndicatorView];
     [_recordIndicatorView.layer removeAllAnimations];
     
     if (_recordDurationLabel.superview == nil)
-        [self addSubview:_recordDurationLabel];
+        // [self addSubview:_recordDurationLabel];
     [_recordDurationLabel.layer removeAllAnimations];
     
     _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(300.0f, 0.0f);
@@ -211,7 +211,7 @@ static CGRect viewFrame(UIView *view)
     if (!isAlreadyLocked)
     {
         if (_slideToCancelLabel.superview == nil)
-            [self addSubview:_slideToCancelLabel];
+            // [self addSubview:_slideToCancelLabel];
         
         [UIView animateWithDuration:0.18 delay:0.0 options:animationCurveOption animations:^
         {
@@ -445,8 +445,7 @@ static CGRect viewFrame(UIView *view)
     
     [UIView animateWithDuration:0.2 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
     {
-        CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, -22.0f);
-        transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
+        CGAffineTransform transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
         _cancelButton.transform = transform;
         _cancelButton.alpha = 0.0f;
     } completion:nil];
@@ -16,27 +16,27 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration
         maxRandomness: 0.5,
         minSpeed: 0.2,
         maxSpeed: 0.6,
-        minScale: 0.56,
-        maxScale: 0.56,
-        scaleSpeed: 0
+        minScale: 0.45,
+        maxScale: 0.55,
+        scaleSpeed: 0.2
     )
     private let mediumBlob = BlobView(
         pointsCount: 8,
         minRandomness: 1,
-        maxRandomness: 2,
+        maxRandomness: 1,
         minSpeed: 3,
-        maxSpeed: 8,
-        minScale: 0.67,
-        maxScale: 0.8,
+        maxSpeed: 7,
+        minScale: 0.55,
+        maxScale: 0.9,
         scaleSpeed: 0.2
     )
     private let bigBlob = BlobView(
         pointsCount: 8,
         minRandomness: 1,
-        maxRandomness: 2,
+        maxRandomness: 1,
         minSpeed: 3,
-        maxSpeed: 8,
-        minScale: 0.67,
+        maxSpeed: 7,
+        minScale: 0.55,
         maxScale: 1,
         scaleSpeed: 0.2
     )
@@ -105,11 +105,16 @@ final class BlobView: UIView {
         didSet {
             speedLevel = max(level, speedLevel)
             scaleLevel = max(level, scaleLevel)
+            
+            if abs(scaleLevel - lastScaleLevel) > 0.4 {
+                animateToNewScale()
+            }
         }
     }
     
     private var speedLevel: CGFloat = 0
     private var scaleLevel: CGFloat = 0
+    private var lastScaleLevel: CGFloat = 0
     
     private let shapeLayer: CAShapeLayer = {
         let layer = CAShapeLayer()
@@ -183,12 +188,15 @@ final class BlobView: UIView {
     }
     
     func animateToNewScale() {
+        let isDownscale = lastScaleLevel > scaleLevel
+        lastScaleLevel = scaleLevel
+        
         shapeLayer.pop_removeAnimation(forKey: "scale")
         
         let currentScale = minScale + (maxScale - minScale) * scaleLevel
         let scaleAnimation = POPBasicAnimation(propertyNamed: kPOPLayerScaleXY)!
         scaleAnimation.toValue = CGPoint(x: currentScale, y: currentScale)
-        scaleAnimation.duration = CFTimeInterval(scaleSpeed)
+        scaleAnimation.duration = isDownscale ? 0.45 : CFTimeInterval(scaleSpeed)
         scaleAnimation.completionBlock = { [weak self] animation, finished in
             if finished {
                 self?.animateToNewScale()
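
Note: BlobView now remembers the last level it animated to (lastScaleLevel) and restarts the scale animation only when the incoming level differs by more than 0.4, with a fixed, slower 0.45 s duration when shrinking. A self-contained sketch of that gating logic, using plain Core Animation in place of POP (class and property names here are illustrative, not from the codebase):

import UIKit

// Threshold-gated scale animation: small mic-level jitter is ignored,
// growth animates at scaleSpeed, shrinking at a slower fixed 0.45 s.
final class LevelScaleAnimator {
    var minScale: CGFloat = 0.55
    var maxScale: CGFloat = 0.9
    var scaleSpeed: CGFloat = 0.2
    weak var layer: CALayer?

    private var scaleLevel: CGFloat = 0
    private var lastScaleLevel: CGFloat = 0

    func updateLevel(_ level: CGFloat) {
        scaleLevel = max(level, scaleLevel)
        // Only restart the animation on a significant change.
        if abs(scaleLevel - lastScaleLevel) > 0.4 {
            animateToNewScale()
        }
    }

    func animateToNewScale() {
        let isDownscale = lastScaleLevel > scaleLevel
        lastScaleLevel = scaleLevel

        let currentScale = minScale + (maxScale - minScale) * scaleLevel
        let animation = CABasicAnimation(keyPath: "transform.scale")
        animation.toValue = currentScale
        animation.duration = isDownscale ? 0.45 : CFTimeInterval(scaleSpeed)
        layer?.add(animation, forKey: "scale")
    }
}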
@@ -2557,6 +2557,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
         
         videoRecorder.onDismiss = {
             if let strongSelf = self {
+                strongSelf.beginMediaRecordingRequestId += 1
+                strongSelf.lockMediaRecordingRequestId = nil
                 strongSelf.videoRecorder.set(.single(nil))
             }
         }
@@ -7289,9 +7291,13 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
             self.audioRecorder.set(.single(nil))
         } else if let videoRecorderValue = self.videoRecorderValue {
             if case .send = updatedAction {
+                self.chatDisplayNode.updateRecordedMediaDeleted(false)
                 videoRecorderValue.completeVideo()
                 self.videoRecorder.set(.single(nil))
             } else {
+                if case .dismiss = updatedAction {
+                    self.chatDisplayNode.updateRecordedMediaDeleted(true)
+                }
                 if case .preview = updatedAction, videoRecorderValue.stopVideo() {
                     self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                         $0.updatedInputTextPanelState { panelState in
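
Note: the .preview branch above leans on Swift's combined pattern-match-plus-condition syntax: the body runs only if updatedAction matches .preview and stopVideo() returns true, so a failed stop falls through without touching the panel state. A generic illustration with hypothetical stand-in names:

// `if case PATTERN = value, CONDITION` requires both the match and the
// trailing boolean to hold before entering the branch.
enum RecorderAction {
    case send, dismiss, preview
}

func stopVideo() -> Bool {
    // Stand-in for videoRecorderValue.stopVideo(): reports whether the
    // recorder actually stopped and produced something to preview.
    return true
}

let updatedAction = RecorderAction.preview
if case .preview = updatedAction, stopVideo() {
    print("switch the input panel into preview mode")
}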
@@ -53,6 +53,28 @@ final class ChatTextInputAudioRecordingTimeNode: ASDisplayNode {
         }
     }
     
+    private var durationDisposable: MetaDisposable?
+    
+    var videoRecordingStatus: InstantVideoControllerRecordingStatus? {
+        didSet {
+            if self.videoRecordingStatus !== oldValue {
+                if self.durationDisposable == nil {
+                    durationDisposable = MetaDisposable()
+                }
+                
+                if let videoRecordingStatus = self.videoRecordingStatus {
+                    self.durationDisposable?.set(videoRecordingStatus.duration.start(next: { [weak self] duration in
+                        Queue.mainQueue().async { [weak self] in
+                            self?.timestamp = duration
+                        }
+                    }))
+                } else if self.audioRecorder == nil {
+                    self.durationDisposable?.set(nil)
+                }
+            }
+        }
+    }
+    
     private var theme: PresentationTheme
     
     init(theme: PresentationTheme) {
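
Note: the time node now mirrors video recordings the same way it mirrors audio ones, by subscribing to the recorder's duration signal through a MetaDisposable so that setting a new status (or nil) replaces the previous subscription. A minimal self-contained sketch of that replace-on-set pattern; SwiftSignalKit's real MetaDisposable also handles locking, this is just the shape:

// At most one inner disposable is live: setting a new one disposes the old.
protocol Disposable {
    func dispose()
}

final class MetaDisposableSketch: Disposable {
    private var current: Disposable?

    func set(_ disposable: Disposable?) {
        current?.dispose()   // cancel the previous subscription, if any
        current = disposable // retain the new one (nil just cancels)
    }

    func dispose() {
        set(nil)
    }
}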
@@ -238,7 +238,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
     }
     
     private lazy var micDecoration: (UIView & TGModernConversationInputMicButtonDecoration) = {
-        let blobView = VoiceBlobView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 180.0, height: 180.0)))
+        let blobView = VoiceBlobView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 220.0, height: 220.0)))
         blobView.setColor(self.theme.chat.inputPanel.actionControlFillColor)
         return blobView
     }()
@@ -908,17 +908,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
         var audioRecordingItemsAlpha: CGFloat = 1
         if let mediaRecordingState = interfaceState.inputTextPanelState.mediaRecordingState {
             audioRecordingItemsAlpha = 0
-            transition.updateAlpha(layer: self.textInputBackgroundNode.layer, alpha: 0.0)
-            if let textInputNode = self.textInputNode {
-                transition.updateAlpha(node: textInputNode, alpha: 0.0)
-            }
-            for (_, button) in self.accessoryItemButtons {
-                transition.updateAlpha(layer: button.layer, alpha: 0.0)
-            }
             
-            switch mediaRecordingState {
-            case let .audio(recorder, isLocked):
-                self.actionButtons.micButton.audioRecorder = recorder
             let audioRecordingInfoContainerNode: ASDisplayNode
             if let currentAudioRecordingInfoContainerNode = self.audioRecordingInfoContainerNode {
                 audioRecordingInfoContainerNode = currentAudioRecordingInfoContainerNode
@@ -928,6 +918,21 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 self.insertSubnode(audioRecordingInfoContainerNode, at: 0)
             }
             
+            var animateTimeSlideIn = false
+            let audioRecordingTimeNode: ChatTextInputAudioRecordingTimeNode
+            if let currentAudioRecordingTimeNode = self.audioRecordingTimeNode {
+                audioRecordingTimeNode = currentAudioRecordingTimeNode
+            } else {
+                audioRecordingTimeNode = ChatTextInputAudioRecordingTimeNode(theme: interfaceState.theme)
+                self.audioRecordingTimeNode = audioRecordingTimeNode
+                audioRecordingInfoContainerNode.addSubnode(audioRecordingTimeNode)
+                
+                if transition.isAnimated {
+                    animateTimeSlideIn = true
+                }
+            }
+            
+            
             var animateCancelSlideIn = false
             let audioRecordingCancelIndicator: ChatTextInputAudioRecordingCancelIndicator
             if let currentAudioRecordingCancelIndicator = self.audioRecordingCancelIndicator {
@@ -942,6 +947,37 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 self.insertSubnode(audioRecordingCancelIndicator, at: 0)
             }
             
+            let isLocked = mediaRecordingState.isLocked
+            var hideInfo = false
+            
+            switch mediaRecordingState {
+            case let .audio(recorder, _):
+                self.actionButtons.micButton.audioRecorder = recorder
+                audioRecordingTimeNode.audioRecorder = recorder
+            case let .video(status, _):
+                switch status {
+                case let .recording(recordingStatus):
+                    audioRecordingTimeNode.videoRecordingStatus = recordingStatus
+                    self.actionButtons.micButton.videoRecordingStatus = recordingStatus
+                    if isLocked {
+                        audioRecordingCancelIndicator.layer.animateAlpha(from: audioRecordingCancelIndicator.alpha, to: 0, duration: 0.15, delay: 0, removeOnCompletion: false)
+                    }
+                case .editing:
+                    audioRecordingTimeNode.videoRecordingStatus = nil
+                    self.actionButtons.micButton.videoRecordingStatus = nil
+                    hideMicButton = true
+                    hideInfo = true
+                }
+            }
+            
+            transition.updateAlpha(layer: self.textInputBackgroundNode.layer, alpha: 0.0)
+            if let textInputNode = self.textInputNode {
+                transition.updateAlpha(node: textInputNode, alpha: 0.0)
+            }
+            for (_, button) in self.accessoryItemButtons {
+                transition.updateAlpha(layer: button.layer, alpha: 0.0)
+            }
+            
             let cancelTransformThreshold: CGFloat = 8.0
             
             let indicatorTranslation = max(0.0, self.actionButtons.micButton.cancelTranslation - cancelTransformThreshold)
@@ -985,20 +1021,6 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 audioRecordingCancelIndicator.layer.add(slideJuggleAnimation, forKey: "slide_juggle")
             }
-            
-            var animateTimeSlideIn = false
-            let audioRecordingTimeNode: ChatTextInputAudioRecordingTimeNode
-            if let currentAudioRecordingTimeNode = self.audioRecordingTimeNode {
-                audioRecordingTimeNode = currentAudioRecordingTimeNode
-            } else {
-                audioRecordingTimeNode = ChatTextInputAudioRecordingTimeNode(theme: interfaceState.theme)
-                self.audioRecordingTimeNode = audioRecordingTimeNode
-                audioRecordingInfoContainerNode.addSubnode(audioRecordingTimeNode)
-                
-                if transition.isAnimated {
-                    animateTimeSlideIn = true
-                }
-            }
             
             let audioRecordingTimeSize = audioRecordingTimeNode.measure(CGSize(width: 200.0, height: 100.0))
             
             let cancelMinX = audioRecordingCancelIndicator.alpha > 0.5 ? audioRecordingCancelIndicator.frame.minX : width
@@ -1018,8 +1040,6 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 audioRecordingTimeNode.layer.animateAlpha(from: 0, to: 1, duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring)
             }
             
-            audioRecordingTimeNode.audioRecorder = recorder
-            
             var animateDotAppearing = false
             let audioRecordingDotNode: AnimationNode
             if let currentAudioRecordingDotNode = self.audioRecordingDotNode, !currentAudioRecordingDotNode.played {
@@ -1031,7 +1051,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 self.addSubnode(audioRecordingDotNode)
             }
             
-            animateDotAppearing = transition.isAnimated && !isLocked
+            animateDotAppearing = transition.isAnimated && !isLocked && !hideInfo
             
             audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - 44 + 1), size: CGSize(width: 40.0, height: 40))
             if animateDotAppearing {
@@ -1052,14 +1072,11 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 self.attachmentButton.layer.animateAlpha(from: 1, to: 0, duration: 0.15, delay: 0, removeOnCompletion: false)
                 self.attachmentButton.layer.animateScale(from: 1, to: 0.3, duration: 0.15, delay: 0, removeOnCompletion: false)
             }
-            case let .video(status, _):
-                switch status {
-                case let .recording(recordingStatus):
-                    self.actionButtons.micButton.videoRecordingStatus = recordingStatus
-                case .editing:
-                    self.actionButtons.micButton.videoRecordingStatus = nil
-                    hideMicButton = true
-                }
+            if hideInfo {
+                audioRecordingDotNode.layer.animateAlpha(from: audioRecordingDotNode.alpha, to: 0, duration: 0.15, delay: 0, removeOnCompletion: false)
+                audioRecordingTimeNode.layer.animateAlpha(from: audioRecordingTimeNode.alpha, to: 0, duration: 0.15, delay: 0, removeOnCompletion: false)
+                audioRecordingCancelIndicator.layer.animateAlpha(from: audioRecordingCancelIndicator.alpha, to: 0, duration: 0.15, delay: 0, removeOnCompletion: false)
+            }
             }
         } else {
             self.actionButtons.micButton.audioRecorder = nil
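
Note: the panel now computes isLocked once from mediaRecordingState and threads a hideInfo flag through the dot/time/cancel animations instead of branching on the enum twice. The real recording-state type is declared elsewhere in the codebase; a sketch of the shape this code appears to assume, including the shared isLocked accessor the diff relies on:

// Assumed shape of the recording-state enum consumed above (illustrative).
enum MediaRecordingStateSketch {
    case audio(recorder: AnyObject, isLocked: Bool)
    case video(status: VideoStatusSketch, isLocked: Bool)

    // Shared accessor so callers need not pattern-match just for the flag.
    var isLocked: Bool {
        switch self {
        case let .audio(_, isLocked):
            return isLocked
        case let .video(_, isLocked):
            return isLocked
        }
    }
}

enum VideoStatusSketch {
    case recording(AnyObject)
    case editing
}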
@@ -17,9 +17,11 @@ import AppBundle
 
 final class InstantVideoControllerRecordingStatus {
     let micLevel: Signal<Float, NoError>
+    let duration: Signal<TimeInterval, NoError>
     
-    init(micLevel: Signal<Float, NoError>) {
+    init(micLevel: Signal<Float, NoError>, duration: Signal<TimeInterval, NoError>) {
         self.micLevel = micLevel
+        self.duration = duration
     }
 }
 
@@ -30,12 +32,13 @@ final class InstantVideoController: LegacyController, StandalonePresentableContr
     var onStop: (() -> Void)?
     
     private let micLevelValue = ValuePromise<Float>(0.0)
+    private let durationValue = ValuePromise<TimeInterval>(0.0)
     let audioStatus: InstantVideoControllerRecordingStatus
     
     private var dismissedVideo = false
     
     override init(presentation: LegacyControllerPresentation, theme: PresentationTheme?, strings: PresentationStrings? = nil, initialLayout: ContainerViewLayout? = nil) {
-        self.audioStatus = InstantVideoControllerRecordingStatus(micLevel: self.micLevelValue.get())
+        self.audioStatus = InstantVideoControllerRecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
         
         super.init(presentation: presentation, theme: theme, initialLayout: initialLayout)
         
@@ -52,6 +55,9 @@ final class InstantVideoController: LegacyController, StandalonePresentableContr
         captureController.micLevel = { [weak self] (level: CGFloat) -> Void in
             self?.micLevelValue.set(Float(level))
         }
+        captureController.onDuration = { [weak self] duration in
+            self?.durationValue.set(duration)
+        }
         captureController.onDismiss = { [weak self] _ in
             self?.onDismiss?()
         }
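
Note: the controller bridges the legacy Objective-C onDuration callback into a SwiftSignalKit signal the same way it already bridges micLevel: a ValuePromise holds the latest value and exposes it as a Signal for InstantVideoControllerRecordingStatus. A simplified sketch of that bridge, with a hypothetical minimal promise type standing in for SwiftSignalKit's:

import Foundation

// Stored latest value plus subscriber callbacks; set() pushes updates.
final class ValuePromiseSketch<T> {
    private var value: T
    private var subscribers: [(T) -> Void] = []

    init(_ value: T) {
        self.value = value
    }

    func set(_ newValue: T) {
        value = newValue
        for subscriber in subscribers {
            subscriber(newValue)
        }
    }

    // Delivers the current value immediately, then every later update.
    func start(next: @escaping (T) -> Void) {
        subscribers.append(next)
        next(value)
    }
}

// Usage mirroring the diff: the legacy callback feeds the promise,
// and the recording-status consumer observes it.
let durationValue = ValuePromiseSketch<TimeInterval>(0.0)
durationValue.start(next: { duration in print("duration:", duration) })
durationValue.set(1.5) // e.g. from captureController.onDuration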