Merge branch 'beta'

Ilya Laktyushin 2019-08-12 14:42:17 +03:00
commit 1d2c593944
7 changed files with 291 additions and 293 deletions

View File

@@ -10,18 +10,33 @@ struct MatchingDeviceContact {
let firstName: String
let lastName: String
let phoneNumbers: [String]
let peerId: PeerId?
}
enum IntentContactsError {
case generic
}
private let phonebookUsernamePathPrefix = "@id"
private let phonebookUsernamePrefix = "https://t.me/" + phonebookUsernamePathPrefix
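// Parses an app-specific contact reference of the form "https://t.me/@id<numeric id>" into a CloudUser PeerId; any other value yields nil.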
private func parseAppSpecificContactReference(_ value: String) -> PeerId? {
if !value.hasPrefix(phonebookUsernamePrefix) {
return nil
}
let idString = String(value[value.index(value.startIndex, offsetBy: phonebookUsernamePrefix.count)...])
if let id = Int32(idString) {
return PeerId(namespace: Namespaces.Peer.CloudUser, id: id)
}
return nil
}
func matchingDeviceContacts(stableIds: [String]) -> Signal<[MatchingDeviceContact], IntentContactsError> {
guard CNContactStore.authorizationStatus(for: .contacts) == .authorized else {
return .fail(.generic)
}
let store = CNContactStore()
guard let contacts = try? store.unifiedContacts(matching: CNContact.predicateForContacts(withIdentifiers: stableIds), keysToFetch: [CNContactFormatter.descriptorForRequiredKeys(for: .fullName), CNContactPhoneNumbersKey as CNKeyDescriptor]) else {
guard let contacts = try? store.unifiedContacts(matching: CNContact.predicateForContacts(withIdentifiers: stableIds), keysToFetch: [CNContactFormatter.descriptorForRequiredKeys(for: .fullName), CNContactPhoneNumbersKey as CNKeyDescriptor, CNContactUrlAddressesKey as CNKeyDescriptor]) else {
return .fail(.generic)
}
@@ -34,7 +49,14 @@ func matchingDeviceContacts(stableIds: [String]) -> Signal<[MatchingDeviceContac
}
})
return MatchingDeviceContact(stableId: contact.identifier, firstName: contact.givenName, lastName: contact.familyName, phoneNumbers: phoneNumbers)
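// Scan the contact's URL addresses for a "Telegram"-labeled entry carrying an app-specific peer reference.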
var contactPeerId: PeerId?
for address in contact.urlAddresses {
if address.label == "Telegram", let peerId = parseAppSpecificContactReference(address.value as String) {
contactPeerId = peerId
}
}
return MatchingDeviceContact(stableId: contact.identifier, firstName: contact.givenName, lastName: contact.familyName, phoneNumbers: phoneNumbers, peerId: contactPeerId)
}))
}
@@ -52,7 +74,8 @@ func matchingCloudContacts(postbox: Postbox, contacts: [MatchingDeviceContact])
return postbox.transaction { transaction -> [(String, TelegramUser)] in
var result: [(String, TelegramUser)] = []
outer: for peerId in transaction.getContactPeerIds() {
if let peer = transaction.getPeer(peerId) as? TelegramUser, let peerPhoneNumber = peer.phone {
if let peer = transaction.getPeer(peerId) as? TelegramUser {
if let peerPhoneNumber = peer.phone {
for contact in contacts {
for phoneNumber in contact.phoneNumbers {
if matchPhoneNumbers(phoneNumber, peerPhoneNumber) {
@@ -61,35 +84,14 @@ func matchingCloudContacts(postbox: Postbox, contacts: [MatchingDeviceContact])
}
}
}
// var parsedPhoneNumbers: [String: ParsedPhoneNumber] = [:]
// let parsedPeerPhoneNumber: ParsedPhoneNumber?
// if let number = parsedPhoneNumbers[peerPhoneNumber] {
// parsedPeerPhoneNumber = number
// } else if let number = ParsedPhoneNumber(string: peerPhoneNumber) {
// parsedPeerPhoneNumber = number
// parsedPhoneNumbers[peerPhoneNumber] = number
// } else {
// parsedPeerPhoneNumber = nil
// }
//
// for contact in contacts {
// for phoneNumber in contact.phoneNumbers {
// let parsedPhoneNumber: ParsedPhoneNumber?
// if let number = parsedPhoneNumbers[phoneNumber] {
// parsedPhoneNumber = number
// } else if let number = ParsedPhoneNumber(string: phoneNumber) {
// parsedPhoneNumber = number
// parsedPhoneNumbers[phoneNumber] = number
// } else {
// parsedPhoneNumber = nil
// }
//
// if parsedPeerPhoneNumber == parsedPhoneNumber {
// result.append((contact.stableId, peer))
// continue outer
// }
// }
// }
} else {
for contact in contacts {
if let contactPeerId = contact.peerId, contactPeerId == peerId {
result.append((contact.stableId, peer))
continue outer
}
}
}
}
}
return result
@@ -110,5 +112,14 @@ func personWithUser(stableId: String, user: TelegramUser) -> INPerson {
var nameComponents = PersonNameComponents()
nameComponents.givenName = user.firstName
nameComponents.familyName = user.lastName
return INPerson(personHandle: INPersonHandle(value: stableId, type: .unknown), nameComponents: nameComponents, displayName: user.debugDisplayTitle, image: nil, contactIdentifier: stableId, customIdentifier: "tg\(user.id.toInt64())")
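// Prefer a phone number handle; otherwise fall back to the username, then to the plain display title.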
let personHandle: INPersonHandle
if let phone = user.phone {
personHandle = INPersonHandle(value: formatPhoneNumber(phone), type: .phoneNumber)
} else if let username = user.username {
personHandle = INPersonHandle(value: "@\(username)", type: .unknown)
} else {
personHandle = INPersonHandle(value: user.displayTitle, type: .unknown)
}
return INPerson(personHandle: personHandle, nameComponents: nameComponents, displayName: user.debugDisplayTitle, image: nil, contactIdentifier: stableId, customIdentifier: "tg\(user.id.toInt64())")
}

View File

@@ -202,11 +202,6 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag
return
}
if filteredPersons.count > 1 {
completion([.disambiguation(filteredPersons)])
return
}
var allPersonsAlreadyMatched = true
for person in filteredPersons {
if !(person.customIdentifier ?? "").hasPrefix("tg") {
@@ -215,7 +210,7 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag
}
}
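// Short-circuit with a success only when every candidate is already matched and exactly one remains; otherwise fall through to the asynchronous resolution below.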
if allPersonsAlreadyMatched {
if allPersonsAlreadyMatched && filteredPersons.count == 1 {
completion([.success(filteredPersons[0])])
return
}
@@ -255,9 +250,11 @@ class IntentHandler: INExtension, INSendMessageIntentHandling, INSearchForMessag
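// Resolve the remaining candidates and map the outcome to no result, a single success, or a disambiguation list.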
self.resolvePersonsDisposable.set((signal
|> deliverOnMainQueue).start(next: { peers in
if peers.isEmpty {
completion([.needsValue])
} else {
completion([.noResult])
} else if peers.count == 1 {
completion(peers.map { .success(personWithUser(stableId: $0, user: $1)) })
} else {
completion([.disambiguation(peers.map { (personWithUser(stableId: $0, user: $1)) })])
}
}, error: { error in
completion([.skip])

View File

@@ -225,6 +225,8 @@ static NSData *base64_decode(NSString *str) {
if (hexData == nil) {
NSString *finalString = @"";
finalString = [finalString stringByAppendingString:[string stringByTrimmingCharactersInSet:[NSCharacterSet characterSetWithCharactersInString:@"="]]];
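// Map the URL-safe base64 alphabet ("-" and "_") back to the standard "+" and "/" before restoring the "=" padding.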
finalString = [finalString stringByReplacingOccurrencesOfString:@"-" withString:@"+"];
finalString = [finalString stringByReplacingOccurrencesOfString:@"_" withString:@"/"];
while (finalString.length % 4 != 0) {
finalString = [finalString stringByAppendingString:@"="];
}

View File

@@ -12,7 +12,7 @@
static const char *AMQueueSpecific = "AMQueueSpecific";
const NSInteger TGBridgeAudioEncoderSampleRate = 16000;
const NSInteger TGBridgeAudioEncoderSampleRate = 48000;
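// 48 kHz is the full-band Opus rate; encoderPacketSizeInBytes (TGBridgeAudioEncoderSampleRate / 100) is derived from this value.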
typedef enum {
ATQueuePriorityLow,
@@ -178,7 +178,7 @@ static const int encoderPacketSizeInBytes = TGBridgeAudioEncoderSampleRate / 100
if (_assetReader.status == AVAssetReaderStatusCompleted)
{
NSLog(@"finished");
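// A NULL frame with zero bytes finalizes the Ogg stream; only report the encoded data and duration if that last write succeeds.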
if (_oggWriter != nil)
if (_oggWriter != nil && [_oggWriter writeFrame:NULL frameByteCount:0])
{
dataItemResult = _tempFileItem;
durationResult = [_oggWriter encodedDuration];

View File

@@ -587,6 +587,7 @@ final class ManagedAudioRecorderContext {
}
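// Flushes any buffered audio into the Ogg stream by writing a nil frame; returns nil if that final write fails.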
func takeData() -> RecordedAudioData? {
if self.oggWriter.writeFrame(nil, frameByteCount: 0) {
var scaledSamplesMemory = malloc(100 * 2)!
var scaledSamples: UnsafeMutablePointer<Int16> = scaledSamplesMemory.assumingMemoryBound(to: Int16.self)
defer {
@@ -634,6 +635,9 @@
}
return RecordedAudioData(compressedData: self.dataItem.data(), duration: self.oggWriter.encodedDuration(), waveform: waveform)
} else {
return nil
}
}
}

View File

@@ -65,7 +65,7 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
case fontSize(PresentationTheme, PresentationFontSize)
case chatPreview(PresentationTheme, PresentationTheme, TelegramWallpaper, PresentationFontSize, PresentationStrings, PresentationDateTimeFormat, PresentationPersonNameOrder)
case wallpaper(PresentationTheme, String)
case accentColor(PresentationTheme, String, PresentationThemeAccentColor?)
case accentColor(PresentationTheme, PresentationThemeReference, String, PresentationThemeAccentColor?)
case autoNightTheme(PresentationTheme, String, String)
case themeItem(PresentationTheme, PresentationStrings, [PresentationThemeReference], PresentationThemeReference, [Int64: PresentationThemeAccentColor], PresentationThemeAccentColor?)
case iconHeader(PresentationTheme, String)
@@ -137,8 +137,8 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
} else {
return false
}
case let .accentColor(lhsTheme, lhsText, lhsColor):
if case let .accentColor(rhsTheme, rhsText, rhsColor) = rhs, lhsTheme === rhsTheme, lhsText == rhsText, lhsColor == rhsColor {
case let .accentColor(lhsTheme, lhsCurrentTheme, lhsText, lhsColor):
if case let .accentColor(rhsTheme, rhsCurrentTheme, rhsText, rhsColor) = rhs, lhsTheme === rhsTheme, lhsCurrentTheme == rhsCurrentTheme, lhsText == rhsText, lhsColor == rhsColor {
return true
} else {
return false
@@ -230,21 +230,20 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
return ItemListDisclosureItem(theme: theme, title: text, label: "", sectionId: self.section, style: .blocks, action: {
arguments.openWallpaperSettings()
})
case let .accentColor(theme, _, color):
case let .accentColor(theme, currentTheme, _, color):
var defaultColor = PresentationThemeAccentColor(baseColor: .blue, value: 0.5)
var colors = PresentationThemeBaseColor.allCases
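// Built-in night themes exclude black; the night theme additionally excludes gray and defaults to white, while other built-in themes exclude white and keep the blue default.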
if theme.overallDarkAppearance {
if case let .builtin(name) = currentTheme {
if name == .night || name == .nightAccent {
colors = colors.filter { $0 != .black }
}
let defaultColor: PresentationThemeAccentColor
if case let .builtin(name) = theme.name, name == .night {
if name == .night {
colors = colors.filter { $0 != .gray }
defaultColor = PresentationThemeAccentColor(baseColor: .white, value: 0.5)
} else {
colors = colors.filter { $0 != .white }
defaultColor = PresentationThemeAccentColor(baseColor: .blue, value: 0.5)
}
}
return ThemeSettingsAccentColorItem(theme: theme, sectionId: self.section, colors: colors, currentColor: color ?? defaultColor, updated: { color in
arguments.selectAccentColor(color)
}, tag: ThemeSettingsEntryTag.accentColor)
@@ -297,7 +296,7 @@ private func themeSettingsControllerEntries(presentationData: PresentationData,
entries.append(.themeItem(presentationData.theme, presentationData.strings, availableThemes, themeReference, themeSpecificAccentColors, themeSpecificAccentColors[themeReference.index]))
if theme.name != .builtin(.dayClassic) {
entries.append(.accentColor(presentationData.theme, strings.Appearance_AccentColor, themeSpecificAccentColors[themeReference.index]))
entries.append(.accentColor(presentationData.theme, themeReference, strings.Appearance_AccentColor, themeSpecificAccentColors[themeReference.index]))
}
entries.append(.wallpaper(presentationData.theme, strings.Settings_ChatBackground))

View File

@@ -426,10 +426,12 @@ static inline int writeOggPage(ogg_page *page, TGDataItem *fileItem)
pages_out++;
}
if (framePcmBytes != NULL) {
op.packet = (unsigned char *)_packet;
op.bytes = nbBytes;
op.b_o_s = 0;
op.granulepos = enc_granulepos;
if (op.e_o_s)
{
/* We compute the final GP as ceil(len*48k/input_rate). When a resampling
@@ -441,24 +443,7 @@ static inline int writeOggPage(ogg_page *page, TGDataItem *fileItem)
op.packetno = 2 + _packetId;
ogg_stream_packetin(&os, &op);
last_segments += size_segments;
/* The downside of early reading is if the input is an exact
multiple of the frame_size you'll get an extra frame that needs
to get cropped off. The downside of late reading is added delay.
If your ogg_delay is 120ms or less we'll assume you want the
low delay behavior.
*/
/*if ((!op.e_o_s) && max_ogg_delay > 5760)
{
nb_samples = inopt.read_samples(inopt.readdata, input, frame_size);
total_samples += nb_samples;
if (nb_samples < frame_size)
eos = 1;
if (nb_samples == 0)
op.e_o_s = 1;
}
else
nb_samples = -1;*/
// If the stream is over or we're sure that the delayed flush will fire, go ahead and flush now to avoid adding delay
while ((op.e_o_s || (enc_granulepos + (frame_size * 48000 / coding_rate) - last_granulepos > max_ogg_delay) ||