Various improvements

This commit is contained in:
Ilya Laktyushin 2025-05-24 16:38:39 +02:00
parent b0c6afdcee
commit ff29e58d4b
16 changed files with 359 additions and 65 deletions

View File

@ -1264,7 +1264,7 @@ final class AttachmentPanel: ASDisplayNode, ASScrollViewDelegate {
}, openStarsPurchase: { _ in
}, openMessagePayment: {
}, openBoostToUnrestrict: {
}, updateVideoTrimRange: { _, _, _, _ in
}, updateRecordingTrimRange: { _, _, _, _ in
}, updateHistoryFilter: { _ in
}, updateChatLocationThread: { _, _ in
}, toggleChatSidebarMode: {

View File

@ -175,7 +175,7 @@ public final class ChatPanelInterfaceInteraction {
public let toggleChatSidebarMode: () -> Void
public let updateDisplayHistoryFilterAsList: (Bool) -> Void
public let openBoostToUnrestrict: () -> Void
public let updateVideoTrimRange: (Double, Double, Bool, Bool) -> Void
public let updateRecordingTrimRange: (Double, Double, Bool, Bool) -> Void
public let requestLayout: (ContainedViewLayoutTransition) -> Void
public let chatController: () -> ViewController?
public let statuses: ChatPanelInterfaceInteractionStatuses?
@ -290,7 +290,7 @@ public final class ChatPanelInterfaceInteraction {
openStarsPurchase: @escaping (Int64?) -> Void,
openMessagePayment: @escaping () -> Void,
openBoostToUnrestrict: @escaping () -> Void,
updateVideoTrimRange: @escaping (Double, Double, Bool, Bool) -> Void,
updateRecordingTrimRange: @escaping (Double, Double, Bool, Bool) -> Void,
updateHistoryFilter: @escaping ((ChatPresentationInterfaceState.HistoryFilter?) -> ChatPresentationInterfaceState.HistoryFilter?) -> Void,
updateChatLocationThread: @escaping (Int64?, ChatControllerAnimateInnerChatSwitchDirection?) -> Void,
toggleChatSidebarMode: @escaping () -> Void,
@ -408,7 +408,7 @@ public final class ChatPanelInterfaceInteraction {
self.openStarsPurchase = openStarsPurchase
self.openMessagePayment = openMessagePayment
self.openBoostToUnrestrict = openBoostToUnrestrict
self.updateVideoTrimRange = updateVideoTrimRange
self.updateRecordingTrimRange = updateRecordingTrimRange
self.updateHistoryFilter = updateHistoryFilter
self.updateChatLocationThread = updateChatLocationThread
self.toggleChatSidebarMode = toggleChatSidebarMode
@ -535,7 +535,7 @@ public final class ChatPanelInterfaceInteraction {
}, openStarsPurchase: { _ in
}, openMessagePayment: {
}, openBoostToUnrestrict: {
}, updateVideoTrimRange: { _, _, _, _ in
}, updateRecordingTrimRange: { _, _, _, _ in
}, updateHistoryFilter: { _ in
}, updateChatLocationThread: { _, _ in
}, toggleChatSidebarMode: {

View File

@ -804,6 +804,12 @@ public class ContactsPeerItemNode: ItemListRevealOptionsItemNode {
rightInset -= 6.0 + rightLabelTextLayoutAndApplyValue.0.size.width
}
var searchAdIcon: UIImage?
if item.isAd, let icon = PresentationResourcesChatList.searchAdIcon(item.presentationData.theme, strings: item.presentationData.strings) {
searchAdIcon = icon
rightInset += icon.size.width + 12.0
}
let premiumConfiguration = PremiumConfiguration.with(appConfiguration: item.context.currentAppConfiguration.with { $0 })
var credibilityIcon: EmojiStatusComponent.Content?
@ -1824,7 +1830,7 @@ public class ContactsPeerItemNode: ItemListRevealOptionsItemNode {
adButton.addTarget(strongSelf, action: #selector(strongSelf.adButtonPressed), forControlEvents: .touchUpInside)
}
if updatedTheme != nil || adButton.image(for: .normal) == nil {
adButton.setImage(PresentationResourcesChatList.searchAdIcon(item.presentationData.theme, strings: item.presentationData.strings), for: .normal)
adButton.setImage(searchAdIcon, for: .normal)
}
if let icon = adButton.image(for: .normal) {
adButton.frame = CGRect(origin: CGPoint(x: params.width - 20.0 - icon.size.width - 13.0, y: 11.0), size: icon.size).insetBy(dx: -11.0, dy: -11.0)

View File

@ -7,6 +7,8 @@ NS_ASSUME_NONNULL_BEGIN
@interface TGOggOpusWriter : NSObject
- (bool)beginWithDataItem:(TGDataItem *)dataItem;
- (bool)beginAppendWithDataItem:(TGDataItem *)dataItem;
- (bool)writeFrame:(uint8_t * _Nullable)framePcmBytes frameByteCount:(NSUInteger)frameByteCount;
- (NSUInteger)encodedBytes;
- (NSTimeInterval)encodedDuration;
@ -14,7 +16,6 @@ NS_ASSUME_NONNULL_BEGIN
- (NSDictionary *)pause;
- (bool)resumeWithDataItem:(TGDataItem *)dataItem encoderState:(NSDictionary *)state;
@end
NS_ASSUME_NONNULL_END

View File

@ -110,6 +110,8 @@ static inline int writeOggPage(ogg_page *page, TGDataItem *fileItem)
opus_int32 lookahead;
}
@property (nonatomic) ogg_sync_state syncState;
@end
@implementation TGOggOpusWriter
@ -343,6 +345,174 @@ static inline int writeOggPage(ogg_page *page, TGDataItem *fileItem)
return true;
}
// Scans an existing Ogg Opus file so the writer can append to it:
// recovers the stream serial number, last packet number, granule positions
// and the Opus identification header. Returns false on malformed input.
- (bool)parseExistingOpusFile:(NSData *)data
{
    ogg_sync_init(&_syncState);
    char *buffer = ogg_sync_buffer(&_syncState, (long)data.length);
    memcpy(buffer, data.bytes, data.length);
    ogg_sync_wrote(&_syncState, (long)data.length);
    
    ogg_stream_state tempStream;
    ogg_page page;
    ogg_packet packet;
    bool headerParsed = false;
    bool foundStream = false;
    ogg_int64_t finalGranulePos = 0;
    
    while (ogg_sync_pageout(&_syncState, &page) == 1) {
        if (!foundStream) {
            // Lock onto the first stream we see; its serial number is reused
            // later so appended pages belong to the same logical stream.
            serialno = ogg_page_serialno(&page);
            if (ogg_stream_init(&tempStream, serialno) != 0) {
                ogg_sync_clear(&_syncState);
                return false;
            }
            foundStream = true;
        }
        if (ogg_page_serialno(&page) == serialno) {
            ogg_stream_pagein(&tempStream, &page);
            if (ogg_page_granulepos(&page) != -1) {
                finalGranulePos = ogg_page_granulepos(&page);
            }
            while (ogg_stream_packetout(&tempStream, &packet) == 1) {
                // Packet 0 is the "OpusHead" identification header.
                if (!headerParsed && packet.packetno == 0) {
                    if (![self parseOpusHeader:packet.packet length:packet.bytes]) {
                        ogg_stream_clear(&tempStream);
                        ogg_sync_clear(&_syncState);
                        return false;
                    }
                    headerParsed = true;
                }
                _packetId = (ogg_int32_t)packet.packetno;
                if (packet.granulepos != -1) {
                    enc_granulepos = packet.granulepos;
                    last_granulepos = packet.granulepos;
                    finalGranulePos = packet.granulepos;
                }
            }
        }
    }
    
    // Granule positions are in 48 kHz units; subtract the decoder pre-skip
    // before converting to the input sample rate. Only meaningful once the
    // header (and thus preskip/rate) has actually been parsed.
    if (headerParsed && finalGranulePos > header.preskip) {
        opus_int64 samples = finalGranulePos - header.preskip;
        total_samples = (samples * rate) / 48000;
    } else {
        total_samples = 0;
    }
    
    // Bug fix: only clear tempStream when it was initialized. Previously an
    // input with no readable pages reached ogg_stream_clear() on an
    // uninitialized ogg_stream_state, which is undefined behavior.
    if (foundStream) {
        ogg_stream_clear(&tempStream);
    }
    ogg_sync_clear(&_syncState);
    
    return headerParsed;
}
// Parses an "OpusHead" identification header (RFC 7845 §5.1, 19 bytes
// minimum) into the `header` ivar and derives the encoder coding rate
// from the original input sample rate. Returns false on malformed data.
- (bool)parseOpusHeader:(unsigned char *)data length:(long)length
{
    if (length < 19) {
        NSLog(@"Opus header too short");
        return false;
    }
    if (memcmp(data, "OpusHead", 8) != 0) {
        NSLog(@"Invalid Opus header signature");
        return false;
    }
    // All multi-byte fields are little-endian per RFC 7845.
    header.channels = data[9];
    header.preskip = data[10] | (data[11] << 8);
    // NOTE(review): data[15] << 24 promotes to signed int, so a top byte
    // >= 0x80 would overflow. Unreachable for real sample rates, but
    // confirm if arbitrary/untrusted files can reach this parser.
    header.input_sample_rate = data[12] | (data[13] << 8) | (data[14] << 16) | (data[15] << 24);
    header.gain = (signed short)(data[16] | (data[17] << 8));
    header.channel_mapping = data[18];
    if (header.channels == 0) {
        return false;
    }
    rate = header.input_sample_rate;
    // Bucket the input rate into the nearest Opus-supported coding rate.
    coding_rate = rate;
    if (rate > 24000)
        coding_rate = 48000;
    else if (rate > 16000)
        coding_rate = 24000;
    else if (rate > 12000)
        coding_rate = 16000;
    else if (rate > 8000)
        coding_rate = 12000;
    else
        coding_rate = 8000;
    // Writer only ever produces/continues a single Opus stream.
    header.nb_streams = 1;
    return true;
}
// Creates and configures a fresh Opus encoder to continue an existing
// file; parseExistingOpusFile: must have succeeded first (it fills
// header, rate, coding_rate and serialno).
- (bool)initializeEncoderForAppend
{
    // Continue byte accounting from the existing file size.
    bytes_written = _dataItem.data.length;
    
    inopt.channels = header.channels;
    inopt.rate = rate;
    inopt.gain = header.gain;
    inopt.samplesize = 16;
    inopt.endianness = 0;
    inopt.rawmode = 0;
    inopt.ignorelength = 0;
    inopt.copy_comments = 0;
    
    int result = OPUS_OK;
    _encoder = opus_encoder_create(coding_rate, header.channels, OPUS_APPLICATION_AUDIO, &result);
    if (result != OPUS_OK) {
        NSLog(@"Error cannot create encoder: %s", opus_strerror(result));
        return false;
    }
    
    // Fixed 30 kbit/s bitrate and 960-sample frames.
    bitrate = 30 * 1024;
    frame_size = 960;
    
    opus_encoder_ctl(_encoder, OPUS_SET_BITRATE(bitrate));
#ifdef OPUS_SET_LSB_DEPTH
    opus_encoder_ctl(_encoder, OPUS_SET_LSB_DEPTH(16));
#endif
    opus_encoder_ctl(_encoder, OPUS_GET_LOOKAHEAD(&lookahead));
    
    // Reuse the original serial number so appended pages belong to the
    // same logical Ogg stream as the existing data.
    if (ogg_stream_init(&os, serialno) == -1) {
        NSLog(@"Error: stream init failed");
        // NOTE(review): _encoder is not destroyed on this failure path —
        // confirm it is released elsewhere (dealloc/cleanup), else it leaks.
        return false;
    }
    
    // Worst-case encoded packet size per stream (as in opusenc).
    max_frame_bytes = (1275 * 3 + 7) * header.nb_streams;
    _packet = malloc(max_frame_bytes);
    
    return true;
}
// Prepares the writer to append to `dataItem`. An empty item is treated
// as starting a brand-new recording.
- (bool)beginAppendWithDataItem:(TGDataItem *)dataItem
{
    if (dataItem.data.length == 0) {
        return [self beginWithDataItem:dataItem];
    }
    _dataItem = dataItem;
    // Recover stream/header state from the existing bytes, then set up the
    // encoder to continue that stream (short-circuits on parse failure).
    return [self parseExistingOpusFile:_dataItem.data]
        && [self initializeEncoderForAppend];
}
- (bool)writeFrame:(uint8_t *)framePcmBytes frameByteCount:(NSUInteger)frameByteCount
{
// Main encoding loop (one frame per iteration)

View File

@ -765,6 +765,7 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, ASScrollViewDeleg
private let contentNode: ContentNode
private let wrappingScrollNode: ASScrollNode
private let scrollNodeContentNode: ASDisplayNode
private let contentContainerNode: ASDisplayNode
private let topContentContainerNode: SparseNode
private let shadowNode: ASImageNode
@ -824,6 +825,9 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, ASScrollViewDeleg
self.wrappingScrollNode.view.delaysContentTouches = false
self.wrappingScrollNode.view.canCancelContentTouches = true
self.scrollNodeContentNode = ASDisplayNode()
self.scrollNodeContentNode.clipsToBounds = true
switch controller.subject {
case let .peer(peer, threadId, temporary):
self.contentNode = QrContentNode(context: context, peer: peer, threadId: threadId, isStatic: false, temporary: temporary)
@ -909,12 +913,14 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, ASScrollViewDeleg
self.addSubnode(self.wrappingScrollNode)
self.wrappingScrollNode.addSubnode(self.contentNode)
self.wrappingScrollNode.addSubnode(self.scrollNodeContentNode)
self.wrappingScrollNode.addSubnode(self.shadowNode)
self.wrappingScrollNode.addSubnode(self.backgroundNode)
self.wrappingScrollNode.addSubnode(self.contentContainerNode)
self.wrappingScrollNode.addSubnode(self.topContentContainerNode)
self.scrollNodeContentNode.addSubnode(self.contentNode)
self.scrollNodeContentNode.addSubnode(self.shadowNode)
self.scrollNodeContentNode.addSubnode(self.backgroundNode)
self.scrollNodeContentNode.addSubnode(self.contentContainerNode)
self.scrollNodeContentNode.addSubnode(self.topContentContainerNode)
self.backgroundNode.addSubnode(self.effectNode)
self.backgroundNode.addSubnode(self.contentBackgroundNode)
@ -1401,32 +1407,60 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, ASScrollViewDeleg
public func animateIn() {
let offset = self.bounds.size.height - self.contentBackgroundNode.frame.minY
if let (layout, _) = self.containerLayout {
self.scrollNodeContentNode.cornerRadius = layout.deviceMetrics.screenCornerRadius
}
let transition = ContainedViewLayoutTransition.animated(duration: 0.4, curve: .spring)
let targetBounds = self.bounds
self.bounds = self.bounds.offsetBy(dx: 0.0, dy: -offset)
transition.animateView({
self.bounds = targetBounds
}, completion: { _ in
self.scrollNodeContentNode.cornerRadius = 0.0
})
}
public func animateOut(completion: (() -> Void)? = nil) {
public func animateOut(velocity: Double? = nil, completion: (() -> Void)? = nil) {
self.animatedOut = true
let offset = self.bounds.size.height - self.contentBackgroundNode.frame.minY
self.wrappingScrollNode.layer.animateBoundsOriginYAdditive(from: 0.0, to: -offset, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: false, completion: { _ in
completion?()
})
self.wrappingScrollNode.view.isScrollEnabled = false
let distance = self.bounds.size.height - self.contentBackgroundNode.frame.minY
if let velocity {
let initialVelocity: CGFloat = distance.isZero ? 0.0 : abs(velocity / distance)
self.wrappingScrollNode.layer.animateSpring(from: 0.0 as NSNumber, to: -distance as NSNumber, keyPath: "bounds.origin.y", duration: 0.45, delay: 0.0, initialVelocity: initialVelocity, damping: 124.0, removeOnCompletion: false, additive: true, completion: { _ in
completion?()
})
} else {
self.wrappingScrollNode.layer.animateBoundsOriginYAdditive(from: 0.0, to: -distance, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: false, completion: { _ in
completion?()
})
}
}
public func scrollViewDidEndDragging(_ scrollView: UIScrollView, willDecelerate decelerate: Bool) {
func scrollViewWillEndDragging(_ scrollView: UIScrollView, withVelocity velocity: CGPoint, targetContentOffset: UnsafeMutablePointer<CGPoint>) {
let contentOffset = scrollView.contentOffset
let additionalTopHeight = max(0.0, -contentOffset.y)
if additionalTopHeight >= 30.0 {
self.cancelButtonPressed()
self.animateOut(velocity: velocity.y, completion: {
self.controller?.dismiss(animated: false)
})
}
}
// Round the sheet content's corners while the user drags so a
// drag-to-dismiss visually matches the device's screen corner radius.
func scrollViewWillBeginDragging(_ scrollView: UIScrollView) {
    guard let (layout, _) = self.containerLayout else {
        return
    }
    self.scrollNodeContentNode.cornerRadius = layout.deviceMetrics.screenCornerRadius
}
// Restore square corners once scrolling settles without a dismissal.
func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
    self.scrollNodeContentNode.cornerRadius = 0.0
}
public func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) {
self.containerLayout = (layout, navigationBarHeight)
@ -1455,6 +1489,7 @@ private class ChatQrCodeScreenNode: ViewControllerTracingNode, ASScrollViewDeleg
transition.updateFrame(node: self.effectNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
transition.updateFrame(node: self.contentBackgroundNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
transition.updateFrame(node: self.wrappingScrollNode, frame: CGRect(origin: CGPoint(), size: layout.size))
transition.updateFrame(node: self.scrollNodeContentNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: layout.size.width, height: layout.size.height + 2000.0)))
let titleSize = self.titleNode.measure(CGSize(width: width - 90.0, height: titleHeight))
let titleFrame = CGRect(origin: CGPoint(x: floor((contentFrame.width - titleSize.width) / 2.0), y: 19.0 + UIScreenPixel), size: titleSize)

View File

@ -168,7 +168,7 @@ public final class ChatRecentActionsController: TelegramBaseController {
}, openStarsPurchase: { _ in
}, openMessagePayment: {
}, openBoostToUnrestrict: {
}, updateVideoTrimRange: { _, _, _, _ in
}, updateRecordingTrimRange: { _, _, _, _ in
}, updateHistoryFilter: { _ in
}, updateChatLocationThread: { _, _ in
}, toggleChatSidebarMode: {

View File

@ -186,6 +186,9 @@ private class TimerDatePickerView: UIDatePicker, TimerPickerView {
}
}
private let digitsCharacterSet = CharacterSet(charactersIn: "0123456789")
private let nondigitsCharacterSet = CharacterSet(charactersIn: "0123456789").inverted
private class TimerPickerItemView: UIView {
let valueLabel = UILabel()
let unitLabel = UILabel()
@ -207,6 +210,9 @@ private class TimerPickerItemView: UIView {
} else if components.count > 1 {
self.valueLabel.text = components[0]
self.unitLabel.text = components[1]
} else {
self.valueLabel.text = string.trimmingCharacters(in: nondigitsCharacterSet)
self.unitLabel.text = string.trimmingCharacters(in: digitsCharacterSet)
}
}

View File

@ -1070,11 +1070,10 @@ final class GiftStoreScreenComponent: Component {
var modelTitle = environment.strings.Gift_Store_Filter_Model
var backdropTitle = environment.strings.Gift_Store_Filter_Backdrop
var symbolTitle = environment.strings.Gift_Store_Filter_Symbol
var modelCount: Int32 = 0
var backdropCount: Int32 = 0
var symbolCount: Int32 = 0
if let filterAttributes = self.state?.starGiftsState?.filterAttributes {
var modelCount: Int32 = 0
var backdropCount: Int32 = 0
var symbolCount: Int32 = 0
for attribute in filterAttributes {
switch attribute {
case .model:
@ -1099,6 +1098,7 @@ final class GiftStoreScreenComponent: Component {
filterItems.append(FilterSelectorComponent.Item(
id: AnyHashable(FilterItemId.model),
index: Int(modelCount),
title: modelTitle,
action: { [weak self] view in
if let self {
@ -1110,6 +1110,7 @@ final class GiftStoreScreenComponent: Component {
))
filterItems.append(FilterSelectorComponent.Item(
id: AnyHashable(FilterItemId.backdrop),
index: Int(backdropCount),
title: backdropTitle,
action: { [weak self] view in
if let self {
@ -1121,6 +1122,7 @@ final class GiftStoreScreenComponent: Component {
))
filterItems.append(FilterSelectorComponent.Item(
id: AnyHashable(FilterItemId.symbol),
index: Int(symbolCount),
title: symbolTitle,
action: { [weak self] view in
if let self {

View File

@ -433,7 +433,7 @@ final class PeerInfoSelectionPanelNode: ASDisplayNode {
}, openStarsPurchase: { _ in
}, openMessagePayment: {
}, openBoostToUnrestrict: {
}, updateVideoTrimRange: { _, _, _, _ in
}, updateRecordingTrimRange: { _, _, _, _ in
}, updateHistoryFilter: { _ in
}, updateChatLocationThread: { _, _ in
}, toggleChatSidebarMode: {

View File

@ -782,7 +782,7 @@ final class PeerSelectionControllerNode: ASDisplayNode {
}, openStarsPurchase: { _ in
}, openMessagePayment: {
}, openBoostToUnrestrict: {
}, updateVideoTrimRange: { _, _, _, _ in
}, updateRecordingTrimRange: { _, _, _, _ in
}, updateHistoryFilter: { _ in
}, updateChatLocationThread: { _, _ in
}, toggleChatSidebarMode: {

View File

@ -4107,12 +4107,11 @@ extension ChatControllerImpl {
)
self.push(boostController)
})
}, updateVideoTrimRange: { [weak self] start, end, updatedEnd, apply in
if let videoRecorder = self?.videoRecorderValue {
videoRecorder.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
} else if let audioRecorder = self?.audioRecorderValue {
audioRecorder.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
}, updateRecordingTrimRange: { [weak self] start, end, updatedEnd, apply in
guard let self else {
return
}
self.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
}, updateHistoryFilter: { [weak self] update in
guard let self else {
return

View File

@ -309,6 +309,13 @@ extension ChatControllerImpl {
strongSelf.context.account.postbox.mediaBox.storeResourceData(resource!.id, data: data.compressedData)
}
let audioWaveform: AudioWaveform
if let recordedMediaPreview = strongSelf.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview {
audioWaveform = audio.waveform
} else {
audioWaveform = AudioWaveform(bitstream: waveform, bitsPerSample: 5)
}
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInterfaceState {
$0.withUpdatedMediaDraftState(.audio(
@ -316,7 +323,7 @@ extension ChatControllerImpl {
resource: resource!,
fileSize: Int32(data.compressedData.count),
duration: Int32(data.duration),
waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5),
waveform: audioWaveform,
trimRange: data.trimRange,
resumeData: data.resumeData
)
@ -465,29 +472,9 @@ extension ChatControllerImpl {
}
func resumeMediaRecorder() {
self.context.sharedContext.mediaManager.playlistControl(.playback(.pause), type: nil)
self.recorderDataDisposable.set(nil)
if let audioRecorderValue = self.audioRecorderValue {
audioRecorderValue.resume()
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorderValue, isLocked: true))
}.updatedInterfaceState { $0.withUpdatedMediaDraftState(nil) }
})
} else if let recordedMediaPreview = self.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview {
self.requestAudioRecorder(beginWithTone: false, existingDraft: audio)
if let audioRecorderValue = self.audioRecorderValue {
audioRecorderValue.resume()
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorderValue, isLocked: true))
}.updatedInterfaceState { $0.withUpdatedMediaDraftState(nil) }
})
}
}
self.context.sharedContext.mediaManager.playlistControl(.playback(.pause), type: nil)
if let videoRecorderValue = self.videoRecorderValue {
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
@ -496,6 +483,36 @@ extension ChatControllerImpl {
return panelState.withUpdatedMediaRecordingState(.video(status: .recording(InstantVideoControllerRecordingStatus(micLevel: recordingStatus.micLevel, duration: recordingStatus.duration)), isLocked: true))
}.updatedInterfaceState { $0.withUpdatedMediaDraftState(nil) }
})
} else {
let proceed = {
self.withAudioRecorder({ audioRecorder in
audioRecorder.resume()
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorder, isLocked: true))
}.updatedInterfaceState { $0.withUpdatedMediaDraftState(nil) }
})
})
}
if let recordedMediaPreview = self.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let _ = audio.trimRange {
self.present(
textAlertController(
context: self.context,
title: "Trim to selected range?",
text: "Audio outside that range will be discarded, and recording will start immediately.",
actions: [
TextAlertAction(type: .genericAction, title: "Cancel", action: {}),
TextAlertAction(type: .defaultAction, title: "Proceed", action: {
proceed()
})
]
), in: .window(.root)
)
} else {
proceed()
}
}
}
@ -526,6 +543,27 @@ extension ChatControllerImpl {
self.updateDownButtonVisibility()
}
// Runs `f` with the live audio recorder if one exists; otherwise tries to
// recreate a recorder from the saved audio draft and runs `f` on that.
// Does nothing when neither a recorder nor an audio draft is available.
private func withAudioRecorder(_ f: (ManagedAudioRecorder) -> Void) {
    if let activeRecorder = self.audioRecorderValue {
        f(activeRecorder)
        return
    }
    guard let draft = self.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = draft else {
        return
    }
    self.requestAudioRecorder(beginWithTone: false, existingDraft: audio)
    if let recreatedRecorder = self.audioRecorderValue {
        f(recreatedRecorder)
    }
}
// Routes a trim-range change to whichever recorder is active: the video
// recorder takes priority; otherwise it falls back to the audio recorder
// (recreating one from the draft if needed, via withAudioRecorder).
func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) {
    guard let videoRecorder = self.videoRecorderValue else {
        self.withAudioRecorder { audioRecorder in
            audioRecorder.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
        }
        return
    }
    videoRecorder.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
}
func sendMediaRecording(
silentPosting: Bool? = nil,
scheduleTime: Int32? = nil,

View File

@ -54,6 +54,8 @@ extension ChatControllerImpl: MFMessageComposeViewControllerDelegate {
}
params.progress?.set(.single(false))
var firstName = ""
var lastName = ""
let phoneNumber: String
if let peer, case let .user(user) = peer, let phone = user.phone {
phoneNumber = "+\(phone)"
@ -61,13 +63,18 @@ extension ChatControllerImpl: MFMessageComposeViewControllerDelegate {
phoneNumber = number
}
if case let .user(user) = peer {
firstName = user.firstName ?? ""
lastName = user.lastName ?? ""
}
var items: [ContextMenuItem] = []
items.append(
.action(ContextMenuActionItem(text: self.presentationData.strings.Chat_Context_Phone_AddToContacts, icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/AddUser"), color: theme.contextMenu.primaryColor) }, action: { [weak self] c, _ in
guard let self, let c else {
return
}
let basicData = DeviceContactBasicData(firstName: "", lastName: "", phoneNumbers: [
let basicData = DeviceContactBasicData(firstName: firstName, lastName: lastName, phoneNumbers: [
DeviceContactPhoneNumberData(label: "", value: phoneNumber)
])
let contactData = DeviceContactExtendedData(basicData: basicData, middleName: "", prefix: "", suffix: "", organization: "", jobTitle: "", department: "", emailAddresses: [], urls: [], addresses: [], birthdayDate: nil, socialProfiles: [], instantMessagingProfiles: [], note: "")

View File

@ -525,7 +525,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
self.trimView.trimUpdated = { [weak self] start, end, updatedEnd, apply in
if let self {
self.mediaPlayer?.pause()
self.interfaceInteraction?.updateVideoTrimRange(start, end, updatedEnd, apply)
self.interfaceInteraction?.updateRecordingTrimRange(start, end, updatedEnd, apply)
if apply {
if !updatedEnd {
self.mediaPlayer?.seek(timestamp: start, play: true)
@ -548,7 +548,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
}
self.trimView.frame = waveformBackgroundFrame
let playButtonSize = CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX, height: waveformBackgroundFrame.height)
let playButtonSize = CGSize(width: max(0.0, rightHandleFrame.minX - leftHandleFrame.maxX), height: waveformBackgroundFrame.height)
self.playButtonNode.update(size: playButtonSize, transition: transition)
transition.updateFrame(node: self.playButtonNode, frame: CGRect(origin: CGPoint(x: waveformBackgroundFrame.minX + leftHandleFrame.maxX, y: waveformBackgroundFrame.minY), size: playButtonSize))
case let .video(video):
@ -584,7 +584,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
positionUpdated: { _, _ in },
trackTrimUpdated: { [weak self] _, start, end, updatedEnd, apply in
if let self {
self.interfaceInteraction?.updateVideoTrimRange(start, end, updatedEnd, apply)
self.interfaceInteraction?.updateRecordingTrimRange(start, end, updatedEnd, apply)
}
},
trackOffsetUpdated: { _, _, _ in },
@ -825,14 +825,16 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
let _ = (mediaPlayer.status
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] status in
guard let self else {
guard let self, let mediaPlayer = self.mediaPlayer else {
return
}
if case .playing = status.status {
self.mediaPlayer?.pause()
mediaPlayer.pause()
} else if status.timestamp <= trimRange.lowerBound {
self.mediaPlayer?.seek(timestamp: trimRange.lowerBound, play: true)
mediaPlayer.seek(timestamp: trimRange.lowerBound, play: true)
} else {
mediaPlayer.play()
}
})
} else {

View File

@ -9,6 +9,7 @@ import AccountContext
import OpusBinding
import ChatPresentationInterfaceState
import AudioWaveform
import FFMpegBinding
private let kOutputBus: UInt32 = 0
private let kInputBus: UInt32 = 1
@ -147,6 +148,8 @@ final class ManagedAudioRecorderContext {
private let id: Int32
private let micLevel: ValuePromise<Float>
private let recordingState: ValuePromise<AudioRecordingState>
private let previewState: ValuePromise<AudioPreviewState>
private let beginWithTone: Bool
private let beganWithTone: (Bool) -> Void
@ -156,8 +159,8 @@ final class ManagedAudioRecorderContext {
private let queue: Queue
private let mediaManager: MediaManager
private let oggWriter: TGOggOpusWriter
private let dataItem: TGDataItem
private var oggWriter: TGOggOpusWriter
private var dataItem: TGDataItem
private var audioBuffer = Data()
private let audioUnit = Atomic<AudioUnit?>(value: nil)
@ -193,6 +196,7 @@ final class ManagedAudioRecorderContext {
pushIdleTimerExtension: @escaping () -> Disposable,
micLevel: ValuePromise<Float>,
recordingState: ValuePromise<AudioRecordingState>,
previewState: ValuePromise<AudioPreviewState>,
beginWithTone: Bool,
beganWithTone: @escaping (Bool) -> Void
) {
@ -204,6 +208,7 @@ final class ManagedAudioRecorderContext {
self.beganWithTone = beganWithTone
self.recordingState = recordingState
self.previewState = previewState
self.queue = queue
self.mediaManager = mediaManager
@ -487,6 +492,29 @@ final class ManagedAudioRecorderContext {
// Resumes recording. If the user trimmed the recording so that its end
// falls before the already-encoded duration, the trim is first baked into
// the file: finalize the current Ogg stream, trim it with FFmpeg via temp
// files, then re-open the trimmed data in append mode.
func resume() {
    assert(self.queue.isCurrent())
    
    if let trimRange = self.trimRange, trimRange.upperBound < self.oggWriter.encodedDuration() {
        // writeFrame(nil, 0) presumably flushes/finalizes the writer so the
        // data item holds a complete Ogg file — TODO confirm against
        // TGOggOpusWriter's writeFrame implementation.
        if self.oggWriter.writeFrame(nil, frameByteCount: 0), let data = self.dataItem.data() {
            let tempSourceFile = EngineTempBox.shared.tempFile(fileName: "audio.ogg")
            let tempDestinationFile = EngineTempBox.shared.tempFile(fileName: "audio.ogg")
            try? data.write(to: URL(fileURLWithPath: tempSourceFile.path))
            
            FFMpegOpusTrimmer.trim(tempSourceFile.path, to: tempDestinationFile.path, start: trimRange.lowerBound, end: trimRange.upperBound)
            
            // If trimming produced readable output, swap in the trimmed data
            // and restart the writer in append mode on top of it.
            // NOTE(review): the beginAppend(with:) result is ignored here —
            // confirm a parse failure is acceptable to swallow at this point.
            if let trimmedData = try? Data(contentsOf: URL(fileURLWithPath: tempDestinationFile.path), options: []) {
                self.dataItem = TGDataItem(data: trimmedData)
                self.oggWriter = TGOggOpusWriter()
                self.oggWriter.beginAppend(with: self.dataItem)
            }
            
            EngineTempBox.shared.dispose(tempSourceFile)
            EngineTempBox.shared.dispose(tempDestinationFile)
            
            // The trim is now part of the data itself; clear it and publish
            // the cleared preview state (trimRange is nil here by design).
            self.trimRange = nil
            self.previewState.set(AudioPreviewState(trimRange: self.trimRange))
        }
    }
    self.start()
}
@ -755,7 +783,7 @@ final class ManagedAudioRecorderImpl: ManagedAudioRecorder {
) {
self.beginWithTone = beginWithTone
self.queue.async {
let context = ManagedAudioRecorderContext(queue: self.queue, mediaManager: mediaManager, resumeData: resumeData, pushIdleTimerExtension: pushIdleTimerExtension, micLevel: self.micLevelValue, recordingState: self.recordingStateValue, beginWithTone: beginWithTone, beganWithTone: beganWithTone)
let context = ManagedAudioRecorderContext(queue: self.queue, mediaManager: mediaManager, resumeData: resumeData, pushIdleTimerExtension: pushIdleTimerExtension, micLevel: self.micLevelValue, recordingState: self.recordingStateValue, previewState: self.previewStateValue, beginWithTone: beginWithTone, beganWithTone: beganWithTone)
self.contextRef = Unmanaged.passRetained(context)
}
}