From d4247feacbeb9aab867953df2d703a09d9af56e6 Mon Sep 17 00:00:00 2001
From: Ilya Laktyushin
Date: Sun, 11 Aug 2019 00:11:06 +0300
Subject: [PATCH 1/5] Once again improved Siri contact resolving

---
 SiriIntents/IntentContacts.swift | 11 ++++++-
 SiriIntents/IntentHandler.swift  | 51 +++++++++++++++-----------------
 2 files changed, 34 insertions(+), 28 deletions(-)

diff --git a/SiriIntents/IntentContacts.swift b/SiriIntents/IntentContacts.swift
index cfe9f540bf..65cefa824b 100644
--- a/SiriIntents/IntentContacts.swift
+++ b/SiriIntents/IntentContacts.swift
@@ -110,5 +110,14 @@ func personWithUser(stableId: String, user: TelegramUser) -> INPerson {
     var nameComponents = PersonNameComponents()
     nameComponents.givenName = user.firstName
     nameComponents.familyName = user.lastName
-    return INPerson(personHandle: INPersonHandle(value: stableId, type: .unknown), nameComponents: nameComponents, displayName: user.debugDisplayTitle, image: nil, contactIdentifier: stableId, customIdentifier: "tg\(user.id.toInt64())")
+    let personHandle: INPersonHandle
+    if let phone = user.phone {
+        personHandle = INPersonHandle(value: formatPhoneNumber(phone), type: .phoneNumber)
+    } else if let username = user.username {
+        personHandle = INPersonHandle(value: "@\(username)", type: .unknown)
+    } else {
+        personHandle = INPersonHandle(value: user.displayTitle, type: .unknown)
+    }
+    
+    return INPerson(personHandle: personHandle, nameComponents: nameComponents, displayName: user.debugDisplayTitle, image: nil, contactIdentifier: stableId, customIdentifier: "tg\(user.id.toInt64())")
 }

diff --git a/SiriIntents/IntentHandler.swift b/SiriIntents/IntentHandler.swift
index 27479e02d0..50daf8ba39 100644
--- a/SiriIntents/IntentHandler.swift
+++ b/SiriIntents/IntentHandler.swift
@@ -202,11 +202,6 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag
                 return
             }
             
-            if filteredPersons.count > 1 {
-                completion([.disambiguation(filteredPersons)])
-                return
-            }
-            
             var allPersonsAlreadyMatched = true
             for person in filteredPersons {
                 if !(person.customIdentifier ?? "").hasPrefix("tg") {
@@ -215,7 +210,7 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag
                 }
             }
             
-            if allPersonsAlreadyMatched {
+            if allPersonsAlreadyMatched && filteredPersons.count == 1 {
                 completion([.success(filteredPersons[0])])
                 return
             }
@@ -239,29 +234,31 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag
         let account = self.accountPromise.get()
         
         let signal = matchingDeviceContacts(stableIds: stableIds)
-        |> take(1)
-        |> mapToSignal { matchedContacts in
-            return account
-            |> introduceError(IntentContactsError.self)
-            |> mapToSignal { account -> Signal<[(String, TelegramUser)], IntentContactsError> in
-                if let account = account {
-                    return matchingCloudContacts(postbox: account.postbox, contacts: matchedContacts)
-                    |> introduceError(IntentContactsError.self)
-                } else {
-                    return .fail(.generic)
-                }
-            }
+            |> take(1)
+            |> mapToSignal { matchedContacts in
+                return account
+                |> introduceError(IntentContactsError.self)
+                |> mapToSignal { account -> Signal<[(String, TelegramUser)], IntentContactsError> in
+                    if let account = account {
+                        return matchingCloudContacts(postbox: account.postbox, contacts: matchedContacts)
+                        |> introduceError(IntentContactsError.self)
+                    } else {
+                        return .fail(.generic)
+                    }
+                }
         }
         
         self.resolvePersonsDisposable.set((signal
-        |> deliverOnMainQueue).start(next: { peers in
-            if peers.isEmpty {
-                completion([.needsValue])
-            } else {
-                completion(peers.map { .success(personWithUser(stableId: $0, user: $1)) })
-            }
-        }, error: { error in
-            completion([.skip])
-        }))
+            |> deliverOnMainQueue).start(next: { peers in
+                if peers.isEmpty {
+                    completion([.noResult])
+                } else if peers.count == 1 {
+                    completion(peers.map { .success(personWithUser(stableId: $0, user: $1)) })
+                } else {
+                    completion([.disambiguation(peers.map { (personWithUser(stableId: $0, user: $1)) })])
+                }
+            }, error: { error in
+                completion([.skip])
+            }))
     }
     
     // MARK: - INSendMessageIntentHandling
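
Patch 1 reorders Siri recipient resolution: instead of disambiguating on raw device contacts up front, the handler first matches them against Telegram cloud contacts and only then reports success (exactly one match), disambiguation (several), or no result. It also builds the INPersonHandle from the most specific datum available. A minimal Swift sketch of that precedence; bestHandle is an illustrative helper, not part of the codebase, and it omits the app's own formatPhoneNumber(_:) normalization:

    import Intents

    // Pick the most specific handle for a user: a phone number first,
    // then an @username, then the bare display name as a last resort.
    func bestHandle(phone: String?, username: String?, displayTitle: String) -> INPersonHandle {
        if let phone = phone {
            // The patch runs the number through formatPhoneNumber(_:) here.
            return INPersonHandle(value: phone, type: .phoneNumber)
        } else if let username = username {
            return INPersonHandle(value: "@\(username)", type: .unknown)
        }
        return INPersonHandle(value: displayTitle, type: .unknown)
    }
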
"").hasPrefix("tg") { @@ -215,7 +210,7 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag } } - if allPersonsAlreadyMatched { + if allPersonsAlreadyMatched && filteredPersons.count == 1 { completion([.success(filteredPersons[0])]) return } @@ -239,29 +234,31 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag let account = self.accountPromise.get() let signal = matchingDeviceContacts(stableIds: stableIds) - |> take(1) - |> mapToSignal { matchedContacts in - return account - |> introduceError(IntentContactsError.self) - |> mapToSignal { account -> Signal<[(String, TelegramUser)], IntentContactsError> in - if let account = account { - return matchingCloudContacts(postbox: account.postbox, contacts: matchedContacts) - |> introduceError(IntentContactsError.self) - } else { - return .fail(.generic) - } - } + |> take(1) + |> mapToSignal { matchedContacts in + return account + |> introduceError(IntentContactsError.self) + |> mapToSignal { account -> Signal<[(String, TelegramUser)], IntentContactsError> in + if let account = account { + return matchingCloudContacts(postbox: account.postbox, contacts: matchedContacts) + |> introduceError(IntentContactsError.self) + } else { + return .fail(.generic) + } + } } self.resolvePersonsDisposable.set((signal - |> deliverOnMainQueue).start(next: { peers in - if peers.isEmpty { - completion([.needsValue]) - } else { - completion(peers.map { .success(personWithUser(stableId: $0, user: $1)) }) - } - }, error: { error in - completion([.skip]) - })) + |> deliverOnMainQueue).start(next: { peers in + if peers.isEmpty { + completion([.noResult]) + } else if peers.count == 1 { + completion(peers.map { .success(personWithUser(stableId: $0, user: $1)) }) + } else { + completion([.disambiguation(peers.map { (personWithUser(stableId: $0, user: $1)) })]) + } + }, error: { error in + completion([.skip]) + })) } // MARK: - INSendMessageIntentHandling From 1540c1105c82c8596318ca034c1718d3a680b8d0 Mon Sep 17 00:00:00 2001 From: Ilya Laktyushin Date: Sun, 11 Aug 2019 23:41:46 +0300 Subject: [PATCH 2/5] SiriIntents: match added Telegram contacts without phone number --- SiriIntents/IntentContacts.swift | 72 ++++++++++--------- .../Bridge Audio/TGBridgeAudioEncoder.m | 2 +- 2 files changed, 38 insertions(+), 36 deletions(-) diff --git a/SiriIntents/IntentContacts.swift b/SiriIntents/IntentContacts.swift index 65cefa824b..fcfd7edbba 100644 --- a/SiriIntents/IntentContacts.swift +++ b/SiriIntents/IntentContacts.swift @@ -10,18 +10,33 @@ struct MatchingDeviceContact { let firstName: String let lastName: String let phoneNumbers: [String] + let peerId: PeerId? } enum IntentContactsError { case generic } +private let phonebookUsernamePathPrefix = "@id" +private let phonebookUsernamePrefix = "https://t.me/" + phonebookUsernamePathPrefix + +private func parseAppSpecificContactReference(_ value: String) -> PeerId? { + if !value.hasPrefix(phonebookUsernamePrefix) { + return nil + } + let idString = String(value[value.index(value.startIndex, offsetBy: phonebookUsernamePrefix.count)...]) + if let id = Int32(idString) { + return PeerId(namespace: Namespaces.Peer.CloudUser, id: id) + } + return nil +} + func matchingDeviceContacts(stableIds: [String]) -> Signal<[MatchingDeviceContact], IntentContactsError> { guard CNContactStore.authorizationStatus(for: .contacts) == .authorized else { return .fail(.generic) } let store = CNContactStore() - guard let contacts = try? 
         return .fail(.generic)
     }
 
@@ -34,7 +49,14 @@ func matchingDeviceContacts(stableIds: [String]) -> Signal<[MatchingDeviceContac
             }
         })
         
-        return MatchingDeviceContact(stableId: contact.identifier, firstName: contact.givenName, lastName: contact.familyName, phoneNumbers: phoneNumbers)
+        var contactPeerId: PeerId?
+        for address in contact.urlAddresses {
+            if address.label == "Telegram", let peerId = parseAppSpecificContactReference(address.value as String) {
+                contactPeerId = peerId
+            }
+        }
+        
+        return MatchingDeviceContact(stableId: contact.identifier, firstName: contact.givenName, lastName: contact.familyName, phoneNumbers: phoneNumbers, peerId: contactPeerId)
     }))
 }
 
@@ -52,44 +74,24 @@ func matchingCloudContacts(postbox: Postbox, contacts: [MatchingDeviceContact])
     return postbox.transaction { transaction -> [(String, TelegramUser)] in
         var result: [(String, TelegramUser)] = []
         outer: for peerId in transaction.getContactPeerIds() {
-            if let peer = transaction.getPeer(peerId) as? TelegramUser, let peerPhoneNumber = peer.phone {
-                for contact in contacts {
-                    for phoneNumber in contact.phoneNumbers {
-                        if matchPhoneNumbers(phoneNumber, peerPhoneNumber) {
+            if let peer = transaction.getPeer(peerId) as? TelegramUser {
+                if let peerPhoneNumber = peer.phone {
+                    for contact in contacts {
+                        for phoneNumber in contact.phoneNumbers {
+                            if matchPhoneNumbers(phoneNumber, peerPhoneNumber) {
+                                result.append((contact.stableId, peer))
+                                continue outer
+                            }
+                        }
+                    }
+                } else {
+                    for contact in contacts {
+                        if let contactPeerId = contact.peerId, contactPeerId == peerId {
                             result.append((contact.stableId, peer))
                             continue outer
                         }
                     }
                 }
-//            var parsedPhoneNumbers: [String: ParsedPhoneNumber] = [:]
-//            let parsedPeerPhoneNumber: ParsedPhoneNumber?
-//            if let number = parsedPhoneNumbers[peerPhoneNumber] {
-//                parsedPeerPhoneNumber = number
-//            } else if let number = ParsedPhoneNumber(string: peerPhoneNumber) {
-//                parsedPeerPhoneNumber = number
-//                parsedPhoneNumbers[peerPhoneNumber] = number
-//            } else {
-//                parsedPeerPhoneNumber = nil
-//            }
-//
-//            for contact in contacts {
-//                for phoneNumber in contact.phoneNumbers {
-//                    let parsedPhoneNumber: ParsedPhoneNumber?
-//                    if let number = parsedPhoneNumbers[phoneNumber] {
-//                        parsedPhoneNumber = number
-//                    } else if let number = ParsedPhoneNumber(string: phoneNumber) {
-//                        parsedPhoneNumber = number
-//                        parsedPhoneNumbers[phoneNumber] = number
-//                    } else {
-//                        parsedPhoneNumber = nil
-//                    }
-//
-//                    if parsedPeerPhoneNumber == parsedPhoneNumber {
-//                        result.append((contact.stableId, peer))
-//                        continue outer
-//                    }
-//                }
-//            }
             }
         }
         return result

diff --git a/submodules/TelegramUI/TelegramUI/Bridge Audio/TGBridgeAudioEncoder.m b/submodules/TelegramUI/TelegramUI/Bridge Audio/TGBridgeAudioEncoder.m
index af5ba77bf5..b96776b012 100644
--- a/submodules/TelegramUI/TelegramUI/Bridge Audio/TGBridgeAudioEncoder.m
+++ b/submodules/TelegramUI/TelegramUI/Bridge Audio/TGBridgeAudioEncoder.m
@@ -12,7 +12,7 @@
 
 static const char *AMQueueSpecific = "AMQueueSpecific";
 
-const NSInteger TGBridgeAudioEncoderSampleRate = 16000;
+const NSInteger TGBridgeAudioEncoderSampleRate = 48000;
 
 typedef enum {
     ATQueuePriorityLow,
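
Patch 2 lets SiriIntents match contacts that have no phone number at all: contacts the app itself added to the address book carry a Telegram-labelled URL of the form "https://t.me/@id<user-id>", and parseAppSpecificContactReference recovers the PeerId from it. A standalone sketch of the same parsing, assuming only the URL convention shown in the diff (userId is an illustrative name):

    import Foundation

    // The app-specific reference format the diff parses.
    let referencePrefix = "https://t.me/@id"

    // Returns the numeric Telegram user id encoded in an app-specific
    // contact URL, or nil if the URL does not follow the convention.
    func userId(fromContactReference value: String) -> Int32? {
        guard value.hasPrefix(referencePrefix) else { return nil }
        return Int32(value.dropFirst(referencePrefix.count))
    }
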
From 6fa80ac81f0cc108e76404b4456d8bdaa1fad716 Mon Sep 17 00:00:00 2001
From: Ilya Laktyushin
Date: Mon, 12 Aug 2019 00:37:40 +0300
Subject: [PATCH 3/5] Fixed proxy secret base64 decoding

---
 submodules/MtProtoKit/MTProtoKit/MTApiEnvironment.m | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/submodules/MtProtoKit/MTProtoKit/MTApiEnvironment.m b/submodules/MtProtoKit/MTProtoKit/MTApiEnvironment.m
index c12d36c60a..fe5777904a 100644
--- a/submodules/MtProtoKit/MTProtoKit/MTApiEnvironment.m
+++ b/submodules/MtProtoKit/MTProtoKit/MTApiEnvironment.m
@@ -225,6 +225,8 @@ static NSData *base64_decode(NSString *str) {
     if (hexData == nil) {
         NSString *finalString = @"";
         finalString = [finalString stringByAppendingString:[string stringByTrimmingCharactersInSet:[NSCharacterSet characterSetWithCharactersInString:@"="]]];
+        finalString = [finalString stringByReplacingOccurrencesOfString:@"-" withString:@"+"];
+        finalString = [finalString stringByReplacingOccurrencesOfString:@"_" withString:@"/"];
         while (finalString.length % 4 != 0) {
             finalString = [finalString stringByAppendingString:@"="];
         }
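
Patch 3 makes the proxy-secret decoder accept URL-safe base64: "-" and "_" are mapped back to "+" and "/", and the stripped "=" padding is restored to a multiple of four before decoding. The same normalization in Swift, as a sketch; Foundation's Data(base64Encoded:) stands in for the patched Objective-C decoder:

    import Foundation

    // Normalize a URL-safe, unpadded base64 string and decode it.
    func decodeURLSafeBase64(_ string: String) -> Data? {
        var normalized = string
            .replacingOccurrences(of: "-", with: "+")
            .replacingOccurrences(of: "_", with: "/")
        // Restore the '=' padding the encoder stripped.
        while normalized.count % 4 != 0 {
            normalized += "="
        }
        return Data(base64Encoded: normalized)
    }
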
From 41c468c139a411b093d01930da04afcad680ca1e Mon Sep 17 00:00:00 2001
From: Ilya Laktyushin
Date: Mon, 12 Aug 2019 01:10:30 +0300
Subject: [PATCH 4/5] Fixed accent color limitations

---
 .../TelegramUI/ThemeSettingsController.swift | 33 +++++++++----------
 1 file changed, 16 insertions(+), 17 deletions(-)

diff --git a/submodules/TelegramUI/TelegramUI/ThemeSettingsController.swift b/submodules/TelegramUI/TelegramUI/ThemeSettingsController.swift
index a962138eea..77d5c4e32b 100644
--- a/submodules/TelegramUI/TelegramUI/ThemeSettingsController.swift
+++ b/submodules/TelegramUI/TelegramUI/ThemeSettingsController.swift
@@ -65,7 +65,7 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
     case fontSize(PresentationTheme, PresentationFontSize)
     case chatPreview(PresentationTheme, PresentationTheme, TelegramWallpaper, PresentationFontSize, PresentationStrings, PresentationDateTimeFormat, PresentationPersonNameOrder)
     case wallpaper(PresentationTheme, String)
-    case accentColor(PresentationTheme, String, PresentationThemeAccentColor?)
+    case accentColor(PresentationTheme, PresentationThemeReference, String, PresentationThemeAccentColor?)
     case autoNightTheme(PresentationTheme, String, String)
     case themeItem(PresentationTheme, PresentationStrings, [PresentationThemeReference], PresentationThemeReference, [Int64: PresentationThemeAccentColor], PresentationThemeAccentColor?, Bool)
     case iconHeader(PresentationTheme, String)
@@ -137,8 +137,8 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
             } else {
                 return false
             }
-        case let .accentColor(lhsTheme, lhsText, lhsColor):
-            if case let .accentColor(rhsTheme, rhsText, rhsColor) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsColor == rhsColor {
+        case let .accentColor(lhsTheme, lhsCurrentTheme, lhsText, lhsColor):
+            if case let .accentColor(rhsTheme, rhsCurrentTheme, rhsText, rhsColor) = rhs, lhsTheme === rhsTheme, lhsCurrentTheme == rhsCurrentTheme, lhsText == rhsText, lhsColor == rhsColor {
                 return true
             } else {
                 return false
             }
@@ -230,21 +230,20 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
             return ItemListDisclosureItem(theme: theme, title: text, label: "", sectionId: self.section, style: .blocks, action: {
                 arguments.openWallpaperSettings()
             })
-        case let .accentColor(theme, _, color):
+        case let .accentColor(theme, currentTheme, _, color):
+            var defaultColor = PresentationThemeAccentColor(baseColor: .blue, value: 0.5)
             var colors = PresentationThemeBaseColor.allCases
-            if theme.overallDarkAppearance {
-                colors = colors.filter { $0 != .black }
+            if case let .builtin(name) = currentTheme {
+                if name == .night || name == .nightAccent {
+                    colors = colors.filter { $0 != .black }
+                }
+                if name == .night {
+                    colors = colors.filter { $0 != .gray }
+                    defaultColor = PresentationThemeAccentColor(baseColor: .white, value: 0.5)
+                } else {
+                    colors = colors.filter { $0 != .white }
+                }
             }
-            
-            let defaultColor: PresentationThemeAccentColor
-            if case let .builtin(name) = theme.name, name == .night {
-                colors = colors.filter { $0 != .gray }
-                defaultColor = PresentationThemeAccentColor(baseColor: .white, value: 0.5)
-            } else {
-                colors = colors.filter { $0 != .white }
-                defaultColor = PresentationThemeAccentColor(baseColor: .blue, value: 0.5)
-            }
-            
             return ThemeSettingsAccentColorItem(theme: theme, sectionId: self.section, colors: colors, currentColor: color ?? 
defaultColor, updated: { color in arguments.selectAccentColor(color) }, toggleSlider: { baseColor in @@ -306,7 +305,7 @@ private func themeSettingsControllerEntries(presentationData: PresentationData, entries.append(.themeItem(presentationData.theme, presentationData.strings, availableThemes, themeReference, themeSpecificAccentColors, themeSpecificAccentColors[themeReference.index], displayColorSlider)) if theme.name != .builtin(.dayClassic) { - entries.append(.accentColor(presentationData.theme, strings.Appearance_AccentColor, themeSpecificAccentColors[themeReference.index])) + entries.append(.accentColor(presentationData.theme, themeReference, strings.Appearance_AccentColor, themeSpecificAccentColors[themeReference.index])) } entries.append(.wallpaper(presentationData.theme, strings.Settings_ChatBackground)) From b4541f7495e8b6f45d9bb8e2ef733c86f04df8e6 Mon Sep 17 00:00:00 2001 From: Ilya Laktyushin Date: Mon, 12 Aug 2019 05:22:13 +0300 Subject: [PATCH 5/5] Fixed opus encoding completion --- .../Bridge Audio/TGBridgeAudioEncoder.m | 254 +++++++++--------- .../TelegramUI/ManagedAudioRecorder.swift | 112 ++++---- .../TelegramUI/third-party/opusenc/opusenc.m | 47 ++-- 3 files changed, 201 insertions(+), 212 deletions(-) diff --git a/submodules/TelegramUI/TelegramUI/Bridge Audio/TGBridgeAudioEncoder.m b/submodules/TelegramUI/TelegramUI/Bridge Audio/TGBridgeAudioEncoder.m index b96776b012..0177caaffa 100644 --- a/submodules/TelegramUI/TelegramUI/Bridge Audio/TGBridgeAudioEncoder.m +++ b/submodules/TelegramUI/TelegramUI/Bridge Audio/TGBridgeAudioEncoder.m @@ -81,13 +81,13 @@ typedef enum { NSDictionary *outputSettings = @ { - AVFormatIDKey: @(kAudioFormatLinearPCM), - AVSampleRateKey: @(TGBridgeAudioEncoderSampleRate), - AVNumberOfChannelsKey: @1, - AVLinearPCMBitDepthKey: @16, - AVLinearPCMIsFloatKey: @false, - AVLinearPCMIsBigEndianKey: @false, - AVLinearPCMIsNonInterleaved: @false + AVFormatIDKey: @(kAudioFormatLinearPCM), + AVSampleRateKey: @(TGBridgeAudioEncoderSampleRate), + AVNumberOfChannelsKey: @1, + AVLinearPCMBitDepthKey: @16, + AVLinearPCMIsFloatKey: @false, + AVLinearPCMIsBigEndianKey: @false, + AVLinearPCMIsNonInterleaved: @false }; _readerOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:asset.tracks audioSettings:outputSettings]; @@ -114,9 +114,9 @@ typedef enum { static ATQueue *queue = nil; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^ - { - queue = [[ATQueue alloc] initWithName:@"org.telegram.opusAudioEncoderQueue"]; - }); + { + queue = [[ATQueue alloc] initWithName:@"org.telegram.opusAudioEncoderQueue"]; + }); return queue; } @@ -126,74 +126,74 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100 - (void)startWithCompletion:(void (^)(NSString *, int32_t))completion { [[TGBridgeAudioEncoder processingQueue] dispatch:^ - { - _oggWriter = [[TGOggOpusWriter alloc] init]; - if (![_oggWriter beginWithDataItem:_tempFileItem]) - { - [self cleanup]; - return; - } - - [_assetReader startReading]; - - while (_assetReader.status != AVAssetReaderStatusCompleted) - { - if (_assetReader.status == AVAssetReaderStatusReading) - { - CMSampleBufferRef nextBuffer = [_readerOutput copyNextSampleBuffer]; - if (nextBuffer) - { - AudioBufferList abl; - CMBlockBufferRef blockBuffer; - CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(nextBuffer, NULL, &abl, sizeof(abl), NULL, NULL, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer); - - [[TGBridgeAudioEncoder processingQueue] dispatch:^ - { - 
[self _processBuffer:&abl.mBuffers[0]]; - - CFRelease(nextBuffer); - CFRelease(blockBuffer); - }]; - } - else - { - [[TGBridgeAudioEncoder processingQueue] dispatch:^ - { - if (_tailLength > 0) { - [_oggWriter writeFrame:(uint8_t *)_audioBuffer.bytes frameByteCount:(NSUInteger)_tailLength]; - } - }]; - break; - } - } - } - - [[TGBridgeAudioEncoder processingQueue] dispatch:^ - { - TGFileDataItem *dataItemResult = nil; - NSTimeInterval durationResult = 0.0; - - NSUInteger totalBytes = 0; - - if (_assetReader.status == AVAssetReaderStatusCompleted) - { - NSLog(@"finished"); - if (_oggWriter != nil) - { - dataItemResult = _tempFileItem; - durationResult = [_oggWriter encodedDuration]; - totalBytes = [_oggWriter encodedBytes]; - } - - [self cleanup]; - } - - //TGLog(@"[TGBridgeAudioEncoder#%x convert time: %f ms]", self, (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0); - - if (completion != nil) - completion(dataItemResult.path, (int32_t)durationResult); - }]; - }]; + { + _oggWriter = [[TGOggOpusWriter alloc] init]; + if (![_oggWriter beginWithDataItem:_tempFileItem]) + { + [self cleanup]; + return; + } + + [_assetReader startReading]; + + while (_assetReader.status != AVAssetReaderStatusCompleted) + { + if (_assetReader.status == AVAssetReaderStatusReading) + { + CMSampleBufferRef nextBuffer = [_readerOutput copyNextSampleBuffer]; + if (nextBuffer) + { + AudioBufferList abl; + CMBlockBufferRef blockBuffer; + CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(nextBuffer, NULL, &abl, sizeof(abl), NULL, NULL, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer); + + [[TGBridgeAudioEncoder processingQueue] dispatch:^ + { + [self _processBuffer:&abl.mBuffers[0]]; + + CFRelease(nextBuffer); + CFRelease(blockBuffer); + }]; + } + else + { + [[TGBridgeAudioEncoder processingQueue] dispatch:^ + { + if (_tailLength > 0) { + [_oggWriter writeFrame:(uint8_t *)_audioBuffer.bytes frameByteCount:(NSUInteger)_tailLength]; + } + }]; + break; + } + } + } + + [[TGBridgeAudioEncoder processingQueue] dispatch:^ + { + TGFileDataItem *dataItemResult = nil; + NSTimeInterval durationResult = 0.0; + + NSUInteger totalBytes = 0; + + if (_assetReader.status == AVAssetReaderStatusCompleted) + { + NSLog(@"finished"); + if (_oggWriter != nil && [_oggWriter writeFrame:NULL frameByteCount:0]) + { + dataItemResult = _tempFileItem; + durationResult = [_oggWriter encodedDuration]; + totalBytes = [_oggWriter encodedBytes]; + } + + [self cleanup]; + } + + //TGLog(@"[TGBridgeAudioEncoder#%x convert time: %f ms]", self, (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0); + + if (completion != nil) + completion(dataItemResult.path, (int32_t)durationResult); + }]; + }]; } - (void)_processBuffer:(AudioBuffer const *)buffer @@ -306,11 +306,11 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100 [_queue dispatch:^ - { - _fileName = filePath; - _length = [[[NSFileManager defaultManager] attributesOfItemAtPath:_fileName error:nil][NSFileSize] unsignedIntegerValue]; - _fileExists = [[NSFileManager defaultManager] fileExistsAtPath:_fileName]; - }]; + { + _fileName = filePath; + _length = [[[NSFileManager defaultManager] attributesOfItemAtPath:_fileName error:nil][NSFileSize] unsignedIntegerValue]; + _fileExists = [[NSFileManager defaultManager] fileExistsAtPath:_fileName]; + }]; } return self; } @@ -322,38 +322,38 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100 - (void)moveToPath:(NSString *)path { [_queue dispatch:^ - { - [[NSFileManager 
defaultManager] moveItemAtPath:_fileName toPath:path error:nil]; - _fileName = path; - }]; + { + [[NSFileManager defaultManager] moveItemAtPath:_fileName toPath:path error:nil]; + _fileName = path; + }]; } - (void)remove { [_queue dispatch:^ - { - [[NSFileManager defaultManager] removeItemAtPath:_fileName error:nil]; - }]; + { + [[NSFileManager defaultManager] removeItemAtPath:_fileName error:nil]; + }]; } - (void)appendData:(NSData *)data { [_queue dispatch:^ - { - if (!_fileExists) - { - [[NSFileManager defaultManager] createFileAtPath:_fileName contents:nil attributes:nil]; - _fileExists = true; - } - NSFileHandle *file = [NSFileHandle fileHandleForUpdatingAtPath:_fileName]; - [file seekToEndOfFile]; - [file writeData:data]; - [file synchronizeFile]; - [file closeFile]; - _length += data.length; - - [_data appendData:data]; - }]; + { + if (!_fileExists) + { + [[NSFileManager defaultManager] createFileAtPath:_fileName contents:nil attributes:nil]; + _fileExists = true; + } + NSFileHandle *file = [NSFileHandle fileHandleForUpdatingAtPath:_fileName]; + [file seekToEndOfFile]; + [file writeData:data]; + [file synchronizeFile]; + [file closeFile]; + _length += data.length; + + [_data appendData:data]; + }]; } - (NSData *)readDataAtOffset:(NSUInteger)offset length:(NSUInteger)length @@ -361,14 +361,14 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100 __block NSData *data = nil; [_queue dispatch:^ - { - NSFileHandle *file = [NSFileHandle fileHandleForUpdatingAtPath:_fileName]; - [file seekToFileOffset:(unsigned long long)offset]; - data = [file readDataOfLength:length]; - if (data.length != length) - //TGLog(@"Read data length mismatch"); - [file closeFile]; - } synchronous:true]; + { + NSFileHandle *file = [NSFileHandle fileHandleForUpdatingAtPath:_fileName]; + [file seekToFileOffset:(unsigned long long)offset]; + data = [file readDataOfLength:length]; + if (data.length != length) + //TGLog(@"Read data length mismatch"); + [file closeFile]; + } synchronous:true]; return data; } @@ -377,9 +377,9 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100 { __block NSUInteger result = 0; [_queue dispatch:^ - { - result = _length; - } synchronous:true]; + { + result = _length; + } synchronous:true]; return result; } @@ -420,11 +420,11 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100 static ATQueue *queue = nil; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^ - { - queue = [[ATQueue alloc] init]; - queue->_nativeQueue = dispatch_get_main_queue(); - queue->_isMainQueue = true; - }); + { + queue = [[ATQueue alloc] init]; + queue->_nativeQueue = dispatch_get_main_queue(); + queue->_isMainQueue = true; + }); return queue; } @@ -434,9 +434,9 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100 static ATQueue *queue = nil; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^ - { - queue = [[ATQueue alloc] initWithNativeQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)]; - }); + { + queue = [[ATQueue alloc] initWithNativeQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)]; + }); return queue; } @@ -446,9 +446,9 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100 static ATQueue *queue = nil; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^ - { - queue = [[ATQueue alloc] initWithNativeQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0)]; - }); + { + queue = 
[[ATQueue alloc] initWithNativeQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0)]; + }); return queue; } diff --git a/submodules/TelegramUI/TelegramUI/ManagedAudioRecorder.swift b/submodules/TelegramUI/TelegramUI/ManagedAudioRecorder.swift index c6fafa8e81..dc8217c1de 100644 --- a/submodules/TelegramUI/TelegramUI/ManagedAudioRecorder.swift +++ b/submodules/TelegramUI/TelegramUI/ManagedAudioRecorder.swift @@ -319,7 +319,7 @@ final class ManagedAudioRecorderContext { self.idleTimerExtensionDisposable = (Signal { subscriber in return pushIdleTimerExtension() - } |> delay(5.0, queue: queue)).start() + } |> delay(5.0, queue: queue)).start() } deinit { @@ -406,19 +406,19 @@ final class ManagedAudioRecorderContext { strongSelf.audioSessionAcquired(headset: state.isHeadsetConnected) } } - }, deactivate: { [weak self] in - return Signal { subscriber in - queue.async { - if let strongSelf = self { - strongSelf.hasAudioSession = false - strongSelf.stop() - strongSelf.recordingState.set(.stopped) - subscriber.putCompletion() - } + }, deactivate: { [weak self] in + return Signal { subscriber in + queue.async { + if let strongSelf = self { + strongSelf.hasAudioSession = false + strongSelf.stop() + strongSelf.recordingState.set(.stopped) + subscriber.putCompletion() } - - return EmptyDisposable } + + return EmptyDisposable + } }) } } @@ -592,53 +592,57 @@ final class ManagedAudioRecorderContext { } func takeData() -> RecordedAudioData? { - var scaledSamplesMemory = malloc(100 * 2)! - var scaledSamples: UnsafeMutablePointer = scaledSamplesMemory.assumingMemoryBound(to: Int16.self) - defer { - free(scaledSamplesMemory) - } - memset(scaledSamples, 0, 100 * 2); - var waveform: Data? - - let count = self.compressedWaveformSamples.count / 2 - self.compressedWaveformSamples.withUnsafeMutableBytes { (samples: UnsafeMutablePointer) -> Void in - for i in 0 ..< count { - let sample = samples[i] - let index = i * 100 / count - if (scaledSamples[index] < sample) { - scaledSamples[index] = sample; + if self.oggWriter.writeFrame(nil, frameByteCount: 0) { + var scaledSamplesMemory = malloc(100 * 2)! + var scaledSamples: UnsafeMutablePointer = scaledSamplesMemory.assumingMemoryBound(to: Int16.self) + defer { + free(scaledSamplesMemory) + } + memset(scaledSamples, 0, 100 * 2); + var waveform: Data? 
+ + let count = self.compressedWaveformSamples.count / 2 + self.compressedWaveformSamples.withUnsafeMutableBytes { (samples: UnsafeMutablePointer) -> Void in + for i in 0 ..< count { + let sample = samples[i] + let index = i * 100 / count + if (scaledSamples[index] < sample) { + scaledSamples[index] = sample; + } } - } - - var peak: Int16 = 0 - var sumSamples: Int64 = 0 - for i in 0 ..< 100 { - let sample = scaledSamples[i] - if peak < sample { - peak = sample + + var peak: Int16 = 0 + var sumSamples: Int64 = 0 + for i in 0 ..< 100 { + let sample = scaledSamples[i] + if peak < sample { + peak = sample + } + sumSamples += Int64(sample) } - sumSamples += Int64(sample) - } - var calculatedPeak: UInt16 = 0 - calculatedPeak = UInt16((Double(sumSamples) * 1.8 / 100.0)) - - if calculatedPeak < 2500 { - calculatedPeak = 2500 + var calculatedPeak: UInt16 = 0 + calculatedPeak = UInt16((Double(sumSamples) * 1.8 / 100.0)) + + if calculatedPeak < 2500 { + calculatedPeak = 2500 + } + + for i in 0 ..< 100 { + let sample: UInt16 = UInt16(Int64(scaledSamples[i])) + let minPeak = min(Int64(sample), Int64(calculatedPeak)) + let resultPeak = minPeak * 31 / Int64(calculatedPeak) + scaledSamples[i] = Int16(clamping: min(31, resultPeak)) + } + + let resultWaveform = AudioWaveform(samples: Data(bytes: scaledSamplesMemory, count: 100 * 2), peak: 31) + let bitstream = resultWaveform.makeBitstream() + waveform = AudioWaveform(bitstream: bitstream, bitsPerSample: 5).makeBitstream() } - for i in 0 ..< 100 { - let sample: UInt16 = UInt16(Int64(scaledSamples[i])) - let minPeak = min(Int64(sample), Int64(calculatedPeak)) - let resultPeak = minPeak * 31 / Int64(calculatedPeak) - scaledSamples[i] = Int16(clamping: min(31, resultPeak)) - } - - let resultWaveform = AudioWaveform(samples: Data(bytes: scaledSamplesMemory, count: 100 * 2), peak: 31) - let bitstream = resultWaveform.makeBitstream() - waveform = AudioWaveform(bitstream: bitstream, bitsPerSample: 5).makeBitstream() + return RecordedAudioData(compressedData: self.dataItem.data(), duration: self.oggWriter.encodedDuration(), waveform: waveform) + } else { + return nil } - - return RecordedAudioData(compressedData: self.dataItem.data(), duration: self.oggWriter.encodedDuration(), waveform: waveform) } } diff --git a/submodules/TelegramUI/third-party/opusenc/opusenc.m b/submodules/TelegramUI/third-party/opusenc/opusenc.m index 2cd7fdea65..d29eaa10ad 100644 --- a/submodules/TelegramUI/third-party/opusenc/opusenc.m +++ b/submodules/TelegramUI/third-party/opusenc/opusenc.m @@ -426,39 +426,24 @@ static inline int writeOggPage(ogg_page *page, TGDataItem *fileItem) pages_out++; } - op.packet = (unsigned char *)_packet; - op.bytes = nbBytes; - op.b_o_s = 0; - op.granulepos = enc_granulepos; - if (op.e_o_s) - { - /* We compute the final GP as ceil(len*48k/input_rate). When a resampling - decoder does the matching floor(len*input/48k) conversion the length will - be exactly the same as the input. - */ - op.granulepos = ((total_samples * 48000 + rate - 1) / rate) + header.preskip; - } - op.packetno = 2 + _packetId; - ogg_stream_packetin(&os, &op); - last_segments += size_segments; + if (framePcmBytes != NULL) { + op.packet = (unsigned char *)_packet; + op.bytes = nbBytes; + op.b_o_s = 0; + op.granulepos = enc_granulepos; - /* The downside of early reading is if the input is an exact - multiple of the frame_size you'll get an extra frame that needs - to get cropped off. The downside of late reading is added delay. 
- If your ogg_delay is 120ms or less we'll assume you want the - low delay behavior. - */ - /*if ((!op.e_o_s) && max_ogg_delay > 5760) - { - nb_samples = inopt.read_samples(inopt.readdata, input, frame_size); - total_samples += nb_samples; - if (nb_samples < frame_size) - eos = 1; - if (nb_samples == 0) - op.e_o_s = 1; + if (op.e_o_s) + { + /* We compute the final GP as ceil(len*48k/input_rate). When a resampling + decoder does the matching floor(len*input/48k) conversion the length will + be exactly the same as the input. + */ + op.granulepos = ((total_samples * 48000 + rate - 1) / rate) + header.preskip; + } + op.packetno = 2 + _packetId; + ogg_stream_packetin(&os, &op); + last_segments += size_segments; } - else - nb_samples = -1;*/ // If the stream is over or we're sure that the delayed flush will fire, go ahead and flush now to avoid adding delay while ((op.e_o_s || (enc_granulepos + (frame_size * 48000 / coding_rate) - last_granulepos > max_ogg_delay) ||
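
Patch 5's key point, visible across all three files, is that an empty frame now acts as an explicit end-of-stream marker: writeFrame:NULL frameByteCount:0 makes the opus encoder flush its delayed data and emit the final ogg pages, and both TGBridgeAudioEncoder and ManagedAudioRecorder only report a result when that final flush succeeds. A sketch of the completion pattern in Swift, reusing the patch's own TGOggOpusWriter and TGDataItem types (finishEncoding is an illustrative name):

    // Flush the encoder before reading the result; a nil frame with a
    // zero byte count is the end-of-stream signal the patch introduces.
    func finishEncoding(writer: TGOggOpusWriter, dataItem: TGDataItem) -> (data: Data, duration: Double)? {
        guard writer.writeFrame(nil, frameByteCount: 0) else {
            return nil // the final flush failed, so report no result
        }
        return (dataItem.data(), writer.encodedDuration())
    }
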