Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-12-04 05:26:48 +00:00

commit 1d2c593944
Merge branch 'beta'
@@ -10,18 +10,33 @@ struct MatchingDeviceContact {
     let firstName: String
     let lastName: String
     let phoneNumbers: [String]
+    let peerId: PeerId?
 }
 
 enum IntentContactsError {
     case generic
 }
 
+private let phonebookUsernamePathPrefix = "@id"
+private let phonebookUsernamePrefix = "https://t.me/" + phonebookUsernamePathPrefix
+
+private func parseAppSpecificContactReference(_ value: String) -> PeerId? {
+    if !value.hasPrefix(phonebookUsernamePrefix) {
+        return nil
+    }
+    let idString = String(value[value.index(value.startIndex, offsetBy: phonebookUsernamePrefix.count)...])
+    if let id = Int32(idString) {
+        return PeerId(namespace: Namespaces.Peer.CloudUser, id: id)
+    }
+    return nil
+}
+
 func matchingDeviceContacts(stableIds: [String]) -> Signal<[MatchingDeviceContact], IntentContactsError> {
     guard CNContactStore.authorizationStatus(for: .contacts) == .authorized else {
         return .fail(.generic)
     }
     let store = CNContactStore()
-    guard let contacts = try? store.unifiedContacts(matching: CNContact.predicateForContacts(withIdentifiers: stableIds), keysToFetch: [CNContactFormatter.descriptorForRequiredKeys(for: .fullName), CNContactPhoneNumbersKey as CNKeyDescriptor]) else {
+    guard let contacts = try? store.unifiedContacts(matching: CNContact.predicateForContacts(withIdentifiers: stableIds), keysToFetch: [CNContactFormatter.descriptorForRequiredKeys(for: .fullName), CNContactPhoneNumbersKey as CNKeyDescriptor, CNContactUrlAddressesKey as CNKeyDescriptor]) else {
         return .fail(.generic)
     }
 
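For context, the reference format introduced above round-trips like this; a minimal standalone sketch, with Int32 standing in for the Postbox PeerId type:

    // Mirrors parseAppSpecificContactReference with simplified types.
    let pathPrefix = "@id"
    let fullPrefix = "https://t.me/" + pathPrefix

    func parseReference(_ value: String) -> Int32? {
        guard value.hasPrefix(fullPrefix) else { return nil }
        // Int32(_:) rejects non-numeric ids such as "https://t.me/@idabc".
        return Int32(value.dropFirst(fullPrefix.count))
    }

    parseReference("https://t.me/@id12345") // 12345
    parseReference("https://t.me/someuser") // nil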
@@ -34,7 +49,14 @@ func matchingDeviceContacts(stableIds: [String]) -> Signal<[MatchingDeviceContac
             }
         })
 
-        return MatchingDeviceContact(stableId: contact.identifier, firstName: contact.givenName, lastName: contact.familyName, phoneNumbers: phoneNumbers)
+        var contactPeerId: PeerId?
+        for address in contact.urlAddresses {
+            if address.label == "Telegram", let peerId = parseAppSpecificContactReference(address.value as String) {
+                contactPeerId = peerId
+            }
+        }
+
+        return MatchingDeviceContact(stableId: contact.identifier, firstName: contact.givenName, lastName: contact.familyName, phoneNumbers: phoneNumbers, peerId: contactPeerId)
     }))
 }
 
@@ -52,44 +74,24 @@ func matchingCloudContacts(postbox: Postbox, contacts: [MatchingDeviceContact])
     return postbox.transaction { transaction -> [(String, TelegramUser)] in
         var result: [(String, TelegramUser)] = []
         outer: for peerId in transaction.getContactPeerIds() {
-            if let peer = transaction.getPeer(peerId) as? TelegramUser, let peerPhoneNumber = peer.phone {
-                for contact in contacts {
-                    for phoneNumber in contact.phoneNumbers {
-                        if matchPhoneNumbers(phoneNumber, peerPhoneNumber) {
+            if let peer = transaction.getPeer(peerId) as? TelegramUser {
+                if let peerPhoneNumber = peer.phone {
+                    for contact in contacts {
+                        for phoneNumber in contact.phoneNumbers {
+                            if matchPhoneNumbers(phoneNumber, peerPhoneNumber) {
+                                result.append((contact.stableId, peer))
+                                continue outer
+                            }
+                        }
+                    }
+                } else {
+                    for contact in contacts {
+                        if let contactPeerId = contact.peerId, contactPeerId == peerId {
                             result.append((contact.stableId, peer))
                             continue outer
                         }
                     }
                 }
-                // var parsedPhoneNumbers: [String: ParsedPhoneNumber] = [:]
-                // let parsedPeerPhoneNumber: ParsedPhoneNumber?
-                // if let number = parsedPhoneNumbers[peerPhoneNumber] {
-                //     parsedPeerPhoneNumber = number
-                // } else if let number = ParsedPhoneNumber(string: peerPhoneNumber) {
-                //     parsedPeerPhoneNumber = number
-                //     parsedPhoneNumbers[peerPhoneNumber] = number
-                // } else {
-                //     parsedPeerPhoneNumber = nil
-                // }
-                //
-                // for contact in contacts {
-                //     for phoneNumber in contact.phoneNumbers {
-                //         let parsedPhoneNumber: ParsedPhoneNumber?
-                //         if let number = parsedPhoneNumbers[phoneNumber] {
-                //             parsedPhoneNumber = number
-                //         } else if let number = ParsedPhoneNumber(string: phoneNumber) {
-                //             parsedPhoneNumber = number
-                //             parsedPhoneNumbers[phoneNumber] = number
-                //         } else {
-                //             parsedPhoneNumber = nil
-                //         }
-                //
-                //         if parsedPeerPhoneNumber == parsedPhoneNumber {
-                //             result.append((contact.stableId, peer))
-                //             continue outer
-                //         }
-                //     }
-                // }
             }
         }
         return result
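The rewritten matching now runs in two stages: by phone number when the Telegram user exposes one, otherwise by the peer id recovered from the device contact's URL addresses. A minimal sketch of that fallback, using hypothetical plain types in place of TelegramUser and the postbox transaction:

    struct DeviceContact { let stableId: String; let phoneNumbers: [String]; let peerId: Int64? }
    struct CloudUser { let peerId: Int64; let phone: String? }

    func firstMatch(for user: CloudUser, in contacts: [DeviceContact],
                    samePhone: (String, String) -> Bool) -> DeviceContact? {
        if let phone = user.phone {
            // Phone-backed users are matched by number, as before.
            return contacts.first { contact in
                contact.phoneNumbers.contains { samePhone($0, phone) }
            }
        } else {
            // Phoneless users can still match via the stored app-specific peer id.
            return contacts.first { $0.peerId == user.peerId }
        }
    }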
@@ -110,5 +112,14 @@ func personWithUser(stableId: String, user: TelegramUser) -> INPerson {
     var nameComponents = PersonNameComponents()
     nameComponents.givenName = user.firstName
     nameComponents.familyName = user.lastName
-    return INPerson(personHandle: INPersonHandle(value: stableId, type: .unknown), nameComponents: nameComponents, displayName: user.debugDisplayTitle, image: nil, contactIdentifier: stableId, customIdentifier: "tg\(user.id.toInt64())")
+    let personHandle: INPersonHandle
+    if let phone = user.phone {
+        personHandle = INPersonHandle(value: formatPhoneNumber(phone), type: .phoneNumber)
+    } else if let username = user.username {
+        personHandle = INPersonHandle(value: "@\(username)", type: .unknown)
+    } else {
+        personHandle = INPersonHandle(value: user.displayTitle, type: .unknown)
+    }
+    
+    return INPerson(personHandle: personHandle, nameComponents: nameComponents, displayName: user.debugDisplayTitle, image: nil, contactIdentifier: stableId, customIdentifier: "tg\(user.id.toInt64())")
 }
@@ -202,11 +202,6 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag
             return
         }
 
-        if filteredPersons.count > 1 {
-            completion([.disambiguation(filteredPersons)])
-            return
-        }
-
         var allPersonsAlreadyMatched = true
         for person in filteredPersons {
             if !(person.customIdentifier ?? "").hasPrefix("tg") {
@@ -215,7 +210,7 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag
             }
         }
 
-        if allPersonsAlreadyMatched {
+        if allPersonsAlreadyMatched && filteredPersons.count == 1 {
             completion([.success(filteredPersons[0])])
             return
         }
@@ -239,29 +234,31 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag
         let account = self.accountPromise.get()
 
         let signal = matchingDeviceContacts(stableIds: stableIds)
         |> take(1)
         |> mapToSignal { matchedContacts in
             return account
             |> introduceError(IntentContactsError.self)
             |> mapToSignal { account -> Signal<[(String, TelegramUser)], IntentContactsError> in
                 if let account = account {
                     return matchingCloudContacts(postbox: account.postbox, contacts: matchedContacts)
                     |> introduceError(IntentContactsError.self)
                 } else {
                     return .fail(.generic)
                 }
             }
         }
         self.resolvePersonsDisposable.set((signal
         |> deliverOnMainQueue).start(next: { peers in
             if peers.isEmpty {
-                completion([.needsValue])
-            } else {
+                completion([.noResult])
+            } else if peers.count == 1 {
                 completion(peers.map { .success(personWithUser(stableId: $0, user: $1)) })
-            }
-        }, error: { error in
-            completion([.skip])
-        }))
+            } else {
+                completion([.disambiguation(peers.map { (personWithUser(stableId: $0, user: $1)) })])
+            }
+        }, error: { error in
+            completion([.skip])
+        }))
     }
 
     // MARK: - INSendMessageIntentHandling
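The resolution completion now distinguishes three outcomes instead of two: no match, a unique match, and several matches that need user disambiguation. Sketched with a stand-in enum rather than the real Intents resolution result type:

    enum Resolution { case noResult, success(String), disambiguation([String]) }

    func resolve(_ names: [String]) -> [Resolution] {
        if names.isEmpty {
            return [.noResult]               // nothing matched
        } else if names.count == 1 {
            return [.success(names[0])]      // exactly one match
        } else {
            return [.disambiguation(names)]  // let the user pick
        }
    }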
@@ -225,6 +225,8 @@ static NSData *base64_decode(NSString *str) {
     if (hexData == nil) {
         NSString *finalString = @"";
         finalString = [finalString stringByAppendingString:[string stringByTrimmingCharactersInSet:[NSCharacterSet characterSetWithCharactersInString:@"="]]];
+        finalString = [finalString stringByReplacingOccurrencesOfString:@"-" withString:@"+"];
+        finalString = [finalString stringByReplacingOccurrencesOfString:@"_" withString:@"/"];
         while (finalString.length % 4 != 0) {
             finalString = [finalString stringByAppendingString:@"="];
         }
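The two added lines make the decoder accept URL-safe base64 (RFC 4648 section 5) by mapping "-" back to "+" and "_" back to "/" before padding is restored. The same normalization in Swift, as a sketch:

    import Foundation

    func decodeURLSafeBase64(_ string: String) -> Data? {
        var s = string
            .replacingOccurrences(of: "-", with: "+")
            .replacingOccurrences(of: "_", with: "/")
        // Restore "=" padding up to a multiple of four characters.
        while s.count % 4 != 0 {
            s += "="
        }
        return Data(base64Encoded: s)
    }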
@@ -12,7 +12,7 @@
 
 static const char *AMQueueSpecific = "AMQueueSpecific";
 
-const NSInteger TGBridgeAudioEncoderSampleRate = 16000;
+const NSInteger TGBridgeAudioEncoderSampleRate = 48000;
 
 typedef enum {
     ATQueuePriorityLow,
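48000 Hz is one of Opus's native sample rates, so raising the bridge encoder from 16 kHz avoids a resampling step. Sizes derived from the constant scale accordingly; the packet-size expression is only visible truncated in the hunk headers below, so the figures here are illustrative:

    let sampleRate = 48000                 // was 16000
    let samplesPer10ms = sampleRate / 100  // 480 samples per 10 ms Opus frame
    let bytesPer10ms = samplesPer10ms * 2  // 960 bytes of 16-bit mono PCM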
@@ -81,13 +81,13 @@ typedef enum {
 
     NSDictionary *outputSettings = @
     {
         AVFormatIDKey: @(kAudioFormatLinearPCM),
         AVSampleRateKey: @(TGBridgeAudioEncoderSampleRate),
         AVNumberOfChannelsKey: @1,
         AVLinearPCMBitDepthKey: @16,
         AVLinearPCMIsFloatKey: @false,
         AVLinearPCMIsBigEndianKey: @false,
         AVLinearPCMIsNonInterleaved: @false
     };
 
     _readerOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:asset.tracks audioSettings:outputSettings];
@@ -114,9 +114,9 @@ typedef enum {
     static ATQueue *queue = nil;
     static dispatch_once_t onceToken;
     dispatch_once(&onceToken, ^
     {
         queue = [[ATQueue alloc] initWithName:@"org.telegram.opusAudioEncoderQueue"];
     });
 
     return queue;
 }
@@ -126,74 +126,74 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100
 - (void)startWithCompletion:(void (^)(NSString *, int32_t))completion
 {
     [[TGBridgeAudioEncoder processingQueue] dispatch:^
     {
         _oggWriter = [[TGOggOpusWriter alloc] init];
         if (![_oggWriter beginWithDataItem:_tempFileItem])
         {
             [self cleanup];
             return;
         }
         
         [_assetReader startReading];
         
         while (_assetReader.status != AVAssetReaderStatusCompleted)
         {
             if (_assetReader.status == AVAssetReaderStatusReading)
             {
                 CMSampleBufferRef nextBuffer = [_readerOutput copyNextSampleBuffer];
                 if (nextBuffer)
                 {
                     AudioBufferList abl;
                     CMBlockBufferRef blockBuffer;
                     CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(nextBuffer, NULL, &abl, sizeof(abl), NULL, NULL, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer);
                     
                     [[TGBridgeAudioEncoder processingQueue] dispatch:^
                     {
                         [self _processBuffer:&abl.mBuffers[0]];
                         
                         CFRelease(nextBuffer);
                         CFRelease(blockBuffer);
                     }];
                 }
                 else
                 {
                     [[TGBridgeAudioEncoder processingQueue] dispatch:^
                     {
                         if (_tailLength > 0) {
                             [_oggWriter writeFrame:(uint8_t *)_audioBuffer.bytes frameByteCount:(NSUInteger)_tailLength];
                         }
                     }];
                     break;
                 }
             }
         }
         
         [[TGBridgeAudioEncoder processingQueue] dispatch:^
         {
             TGFileDataItem *dataItemResult = nil;
             NSTimeInterval durationResult = 0.0;
             
             NSUInteger totalBytes = 0;
             
             if (_assetReader.status == AVAssetReaderStatusCompleted)
             {
                 NSLog(@"finished");
-                if (_oggWriter != nil)
+                if (_oggWriter != nil && [_oggWriter writeFrame:NULL frameByteCount:0])
                 {
                     dataItemResult = _tempFileItem;
                     durationResult = [_oggWriter encodedDuration];
                     totalBytes = [_oggWriter encodedBytes];
                 }
                 
                 [self cleanup];
             }
             
             //TGLog(@"[TGBridgeAudioEncoder#%x convert time: %f ms]", self, (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0);
             
             if (completion != nil)
                 completion(dataItemResult.path, (int32_t)durationResult);
         }];
     }];
 }
 
 - (void)_processBuffer:(AudioBuffer const *)buffer
@@ -306,11 +306,11 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100
 
 
         [_queue dispatch:^
         {
             _fileName = filePath;
             _length = [[[NSFileManager defaultManager] attributesOfItemAtPath:_fileName error:nil][NSFileSize] unsignedIntegerValue];
             _fileExists = [[NSFileManager defaultManager] fileExistsAtPath:_fileName];
         }];
     }
     return self;
 }
@@ -322,38 +322,38 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100
 - (void)moveToPath:(NSString *)path
 {
     [_queue dispatch:^
     {
         [[NSFileManager defaultManager] moveItemAtPath:_fileName toPath:path error:nil];
         _fileName = path;
     }];
 }
 
 - (void)remove
 {
     [_queue dispatch:^
     {
         [[NSFileManager defaultManager] removeItemAtPath:_fileName error:nil];
     }];
 }
 
 - (void)appendData:(NSData *)data
 {
     [_queue dispatch:^
     {
         if (!_fileExists)
         {
             [[NSFileManager defaultManager] createFileAtPath:_fileName contents:nil attributes:nil];
             _fileExists = true;
         }
         NSFileHandle *file = [NSFileHandle fileHandleForUpdatingAtPath:_fileName];
         [file seekToEndOfFile];
         [file writeData:data];
         [file synchronizeFile];
         [file closeFile];
         _length += data.length;
         
         [_data appendData:data];
     }];
 }
 
 - (NSData *)readDataAtOffset:(NSUInteger)offset length:(NSUInteger)length
@@ -361,14 +361,14 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100
     __block NSData *data = nil;
 
     [_queue dispatch:^
     {
         NSFileHandle *file = [NSFileHandle fileHandleForUpdatingAtPath:_fileName];
         [file seekToFileOffset:(unsigned long long)offset];
         data = [file readDataOfLength:length];
         if (data.length != length)
             //TGLog(@"Read data length mismatch");
         [file closeFile];
     } synchronous:true];
 
     return data;
 }
@@ -377,9 +377,9 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100
 {
     __block NSUInteger result = 0;
     [_queue dispatch:^
     {
         result = _length;
     } synchronous:true];
 
     return result;
 }
@@ -420,11 +420,11 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100
     static ATQueue *queue = nil;
     static dispatch_once_t onceToken;
     dispatch_once(&onceToken, ^
     {
         queue = [[ATQueue alloc] init];
         queue->_nativeQueue = dispatch_get_main_queue();
         queue->_isMainQueue = true;
     });
 
     return queue;
 }
@@ -434,9 +434,9 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100
     static ATQueue *queue = nil;
     static dispatch_once_t onceToken;
     dispatch_once(&onceToken, ^
     {
         queue = [[ATQueue alloc] initWithNativeQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
     });
 
     return queue;
 }
@@ -446,9 +446,9 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100
     static ATQueue *queue = nil;
     static dispatch_once_t onceToken;
     dispatch_once(&onceToken, ^
     {
         queue = [[ATQueue alloc] initWithNativeQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0)];
     });
 
     return queue;
 }
@@ -314,7 +314,7 @@ final class ManagedAudioRecorderContext {
 
         self.idleTimerExtensionDisposable = (Signal<Void, NoError> { subscriber in
             return pushIdleTimerExtension()
         } |> delay(5.0, queue: queue)).start()
     }
 
     deinit {
@@ -401,19 +401,19 @@ final class ManagedAudioRecorderContext {
                     strongSelf.audioSessionAcquired(headset: state.isHeadsetConnected)
                 }
             }
         }, deactivate: { [weak self] in
             return Signal { subscriber in
                 queue.async {
                     if let strongSelf = self {
                         strongSelf.hasAudioSession = false
                         strongSelf.stop()
                         strongSelf.recordingState.set(.stopped)
                         subscriber.putCompletion()
-                    }
                     }
-                    
-                    return EmptyDisposable
                 }
+                
+                return EmptyDisposable
+            }
             })
         }
     }
@@ -587,53 +587,57 @@ final class ManagedAudioRecorderContext {
     }
 
     func takeData() -> RecordedAudioData? {
-        var scaledSamplesMemory = malloc(100 * 2)!
-        var scaledSamples: UnsafeMutablePointer<Int16> = scaledSamplesMemory.assumingMemoryBound(to: Int16.self)
-        defer {
-            free(scaledSamplesMemory)
-        }
-        memset(scaledSamples, 0, 100 * 2);
-        var waveform: Data?
-        
-        let count = self.compressedWaveformSamples.count / 2
-        self.compressedWaveformSamples.withUnsafeMutableBytes { (samples: UnsafeMutablePointer<Int16>) -> Void in
-            for i in 0 ..< count {
-                let sample = samples[i]
-                let index = i * 100 / count
-                if (scaledSamples[index] < sample) {
-                    scaledSamples[index] = sample;
-                }
-            }
-            
-            var peak: Int16 = 0
-            var sumSamples: Int64 = 0
-            for i in 0 ..< 100 {
-                let sample = scaledSamples[i]
-                if peak < sample {
-                    peak = sample
-                }
-                sumSamples += Int64(sample)
-            }
-            var calculatedPeak: UInt16 = 0
-            calculatedPeak = UInt16((Double(sumSamples) * 1.8 / 100.0))
-            
-            if calculatedPeak < 2500 {
-                calculatedPeak = 2500
-            }
-            
-            for i in 0 ..< 100 {
-                let sample: UInt16 = UInt16(Int64(scaledSamples[i]))
-                let minPeak = min(Int64(sample), Int64(calculatedPeak))
-                let resultPeak = minPeak * 31 / Int64(calculatedPeak)
-                scaledSamples[i] = Int16(clamping: min(31, resultPeak))
-            }
-            
-            let resultWaveform = AudioWaveform(samples: Data(bytes: scaledSamplesMemory, count: 100 * 2), peak: 31)
-            let bitstream = resultWaveform.makeBitstream()
-            waveform = AudioWaveform(bitstream: bitstream, bitsPerSample: 5).makeBitstream()
-        }
-        
-        return RecordedAudioData(compressedData: self.dataItem.data(), duration: self.oggWriter.encodedDuration(), waveform: waveform)
+        if self.oggWriter.writeFrame(nil, frameByteCount: 0) {
+            var scaledSamplesMemory = malloc(100 * 2)!
+            var scaledSamples: UnsafeMutablePointer<Int16> = scaledSamplesMemory.assumingMemoryBound(to: Int16.self)
+            defer {
+                free(scaledSamplesMemory)
+            }
+            memset(scaledSamples, 0, 100 * 2);
+            var waveform: Data?
+            
+            let count = self.compressedWaveformSamples.count / 2
+            self.compressedWaveformSamples.withUnsafeMutableBytes { (samples: UnsafeMutablePointer<Int16>) -> Void in
+                for i in 0 ..< count {
+                    let sample = samples[i]
+                    let index = i * 100 / count
+                    if (scaledSamples[index] < sample) {
+                        scaledSamples[index] = sample;
+                    }
+                }
+                
+                var peak: Int16 = 0
+                var sumSamples: Int64 = 0
+                for i in 0 ..< 100 {
+                    let sample = scaledSamples[i]
+                    if peak < sample {
+                        peak = sample
+                    }
+                    sumSamples += Int64(sample)
+                }
+                var calculatedPeak: UInt16 = 0
+                calculatedPeak = UInt16((Double(sumSamples) * 1.8 / 100.0))
+                
+                if calculatedPeak < 2500 {
+                    calculatedPeak = 2500
+                }
+                
+                for i in 0 ..< 100 {
+                    let sample: UInt16 = UInt16(Int64(scaledSamples[i]))
+                    let minPeak = min(Int64(sample), Int64(calculatedPeak))
+                    let resultPeak = minPeak * 31 / Int64(calculatedPeak)
+                    scaledSamples[i] = Int16(clamping: min(31, resultPeak))
+                }
+                
+                let resultWaveform = AudioWaveform(samples: Data(bytes: scaledSamplesMemory, count: 100 * 2), peak: 31)
+                let bitstream = resultWaveform.makeBitstream()
+                waveform = AudioWaveform(bitstream: bitstream, bitsPerSample: 5).makeBitstream()
+            }
+            
+            return RecordedAudioData(compressedData: self.dataItem.data(), duration: self.oggWriter.encodedDuration(), waveform: waveform)
+        } else {
+            return nil
+        }
     }
 }
 
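takeData() compresses the recording's sample history into 100 columns of 5 bits each (values 0...31) for the waveform preview. A standalone sketch of that quantization, operating on a plain [Int16] instead of the malloc'd buffer used above:

    func quantizeWaveform(_ samples: [Int16]) -> [Int16] {
        var scaled = [Int16](repeating: 0, count: 100)
        // Keep the loudest sample that falls into each of the 100 buckets.
        for (i, sample) in samples.enumerated() {
            let index = i * 100 / samples.count
            scaled[index] = max(scaled[index], sample)
        }
        // The peak is derived from the mean (scaled by 1.8) and floored at
        // 2500, then each bucket is mapped into the 5-bit range 0...31.
        let sum = scaled.reduce(Int64(0)) { $0 + Int64($1) }
        let peak = max(Int64(Double(sum) * 1.8 / 100.0), 2500)
        return scaled.map { Int16(clamping: min(31, min(Int64($0), peak) * 31 / peak)) }
    }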
@@ -65,7 +65,7 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
     case fontSize(PresentationTheme, PresentationFontSize)
     case chatPreview(PresentationTheme, PresentationTheme, TelegramWallpaper, PresentationFontSize, PresentationStrings, PresentationDateTimeFormat, PresentationPersonNameOrder)
     case wallpaper(PresentationTheme, String)
-    case accentColor(PresentationTheme, String, PresentationThemeAccentColor?)
+    case accentColor(PresentationTheme, PresentationThemeReference, String, PresentationThemeAccentColor?)
     case autoNightTheme(PresentationTheme, String, String)
     case themeItem(PresentationTheme, PresentationStrings, [PresentationThemeReference], PresentationThemeReference, [Int64: PresentationThemeAccentColor], PresentationThemeAccentColor?)
     case iconHeader(PresentationTheme, String)
@@ -137,8 +137,8 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
             } else {
                 return false
             }
-        case let .accentColor(lhsTheme, lhsText, lhsColor):
-            if case let .accentColor(rhsTheme, rhsText, rhsColor) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsColor == rhsColor {
+        case let .accentColor(lhsTheme, lhsCurrentTheme, lhsText, lhsColor):
+            if case let .accentColor(rhsTheme, rhsCurrentTheme, rhsText, rhsColor) = rhs, lhsTheme === rhsTheme, lhsCurrentTheme == rhsCurrentTheme, lhsText == rhsText, lhsColor == rhsColor {
                 return true
             } else {
                 return false
@@ -230,21 +230,20 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
             return ItemListDisclosureItem(theme: theme, title: text, label: "", sectionId: self.section, style: .blocks, action: {
                 arguments.openWallpaperSettings()
             })
-        case let .accentColor(theme, _, color):
+        case let .accentColor(theme, currentTheme, _, color):
+            var defaultColor = PresentationThemeAccentColor(baseColor: .blue, value: 0.5)
             var colors = PresentationThemeBaseColor.allCases
-            if theme.overallDarkAppearance {
-                colors = colors.filter { $0 != .black }
+            if case let .builtin(name) = currentTheme {
+                if name == .night || name == .nightAccent {
+                    colors = colors.filter { $0 != .black }
+                }
+                if name == .night {
+                    colors = colors.filter { $0 != .gray }
+                    defaultColor = PresentationThemeAccentColor(baseColor: .white, value: 0.5)
+                } else {
+                    colors = colors.filter { $0 != .white }
+                }
             }
-            
-            let defaultColor: PresentationThemeAccentColor
-            if case let .builtin(name) = theme.name, name == .night {
-                colors = colors.filter { $0 != .gray }
-                defaultColor = PresentationThemeAccentColor(baseColor: .white, value: 0.5)
-            } else {
-                colors = colors.filter { $0 != .white }
-                defaultColor = PresentationThemeAccentColor(baseColor: .blue, value: 0.5)
-            }
-            
             return ThemeSettingsAccentColorItem(theme: theme, sectionId: self.section, colors: colors, currentColor: color ?? defaultColor, updated: { color in
                 arguments.selectAccentColor(color)
             }, tag: ThemeSettingsEntryTag.accentColor)
@@ -297,7 +296,7 @@ private func themeSettingsControllerEntries(presentationData: PresentationData,
     entries.append(.themeItem(presentationData.theme, presentationData.strings, availableThemes, themeReference, themeSpecificAccentColors, themeSpecificAccentColors[themeReference.index]))
 
     if theme.name != .builtin(.dayClassic) {
-        entries.append(.accentColor(presentationData.theme, strings.Appearance_AccentColor, themeSpecificAccentColors[themeReference.index]))
+        entries.append(.accentColor(presentationData.theme, themeReference, strings.Appearance_AccentColor, themeSpecificAccentColors[themeReference.index]))
     }
 
     entries.append(.wallpaper(presentationData.theme, strings.Settings_ChatBackground))
@@ -426,39 +426,24 @@ static inline int writeOggPage(ogg_page *page, TGDataItem *fileItem)
         pages_out++;
     }
     
-    op.packet = (unsigned char *)_packet;
-    op.bytes = nbBytes;
-    op.b_o_s = 0;
-    op.granulepos = enc_granulepos;
-    if (op.e_o_s)
-    {
-        /* We compute the final GP as ceil(len*48k/input_rate). When a resampling
-         decoder does the matching floor(len*input/48k) conversion the length will
-         be exactly the same as the input.
-         */
-        op.granulepos = ((total_samples * 48000 + rate - 1) / rate) + header.preskip;
-    }
-    op.packetno = 2 + _packetId;
-    ogg_stream_packetin(&os, &op);
-    last_segments += size_segments;
-    
-    /* The downside of early reading is if the input is an exact
-     multiple of the frame_size you'll get an extra frame that needs
-     to get cropped off. The downside of late reading is added delay.
-     If your ogg_delay is 120ms or less we'll assume you want the
-     low delay behavior.
-     */
-    /*if ((!op.e_o_s) && max_ogg_delay > 5760)
-    {
-        nb_samples = inopt.read_samples(inopt.readdata, input, frame_size);
-        total_samples += nb_samples;
-        if (nb_samples < frame_size)
-            eos = 1;
-        if (nb_samples == 0)
-            op.e_o_s = 1;
-    }
-    else
-        nb_samples = -1;*/
+    if (framePcmBytes != NULL) {
+        op.packet = (unsigned char *)_packet;
+        op.bytes = nbBytes;
+        op.b_o_s = 0;
+        op.granulepos = enc_granulepos;
+        
+        if (op.e_o_s)
+        {
+            /* We compute the final GP as ceil(len*48k/input_rate). When a resampling
+             decoder does the matching floor(len*input/48k) conversion the length will
+             be exactly the same as the input.
+             */
+            op.granulepos = ((total_samples * 48000 + rate - 1) / rate) + header.preskip;
+        }
+        op.packetno = 2 + _packetId;
+        ogg_stream_packetin(&os, &op);
+        last_segments += size_segments;
+    }
     
     // If the stream is over or we're sure that the delayed flush will fire, go ahead and flush now to avoid adding delay
     while ((op.e_o_s || (enc_granulepos + (frame_size * 48000 / coding_rate) - last_granulepos > max_ogg_delay) ||
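The granulepos line kept inside the new framePcmBytes guard computes the final page position as ceil(total_samples * 48000 / rate) plus the Opus pre-skip. A worked example with assumed values:

    let totalSamples: Int64 = 123_456  // samples read at the input rate
    let rate: Int64 = 48000            // input sample rate
    let preskip: Int64 = 312           // assumed pre-skip value
    // Integer ceiling division: (n * 48000 + rate - 1) / rate.
    let granulepos = ((totalSamples * 48000 + rate - 1) / rate) + preskip
    // With rate == 48000 this is exactly totalSamples + preskip = 123_768.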