Swiftgram/submodules/TelegramUI/TelegramUI/AudioRecordningToneData.swift
Peter b317aab568 Add 'submodules/TelegramUI/' from commit 'fa3ac0b61a27c8dd3296518a15891a6f9750cbf2'
git-subtree-dir: submodules/TelegramUI
git-subtree-mainline: 5c1613d1048026b9e00a6ce753775cef87eb53fa
git-subtree-split: fa3ac0b61a27c8dd3296518a15891a6f9750cbf2
2019-06-11 19:00:46 +01:00

56 lines
1.8 KiB
Swift

import Foundation
import AVFoundation
/// Decodes the bundled "begin_record.caf" resource into raw PCM bytes
/// (16-bit signed integer, interleaved, little-endian, 44.1 kHz — see
/// `outputSettings`), suitable for feeding straight to an audio output.
///
/// - Returns: The decoded sample data, or `nil` if the resource is missing
///   or any stage of reading/decoding fails.
private func loadAudioRecordingToneData() -> Data? {
    // Target format: 16-bit signed-integer LPCM, interleaved, little-endian, 44.1 kHz.
    let outputSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatLinearPCM as NSNumber,
        AVSampleRateKey: 44100.0 as NSNumber,
        AVLinearPCMBitDepthKey: 16 as NSNumber,
        AVLinearPCMIsNonInterleaved: false as NSNumber,
        AVLinearPCMIsFloatKey: false as NSNumber,
        AVLinearPCMIsBigEndianKey: false as NSNumber
    ]
    guard let url = Bundle.main.url(forResource: "begin_record", withExtension: "caf") else {
        return nil
    }
    let asset = AVURLAsset(url: url)
    guard let assetReader = try? AVAssetReader(asset: asset) else {
        return nil
    }
    let readerOutput = AVAssetReaderAudioMixOutput(audioTracks: asset.tracks, audioSettings: outputSettings)
    if !assetReader.canAdd(readerOutput) {
        return nil
    }
    assetReader.add(readerOutput)
    if !assetReader.startReading() {
        return nil
    }
    var data = Data()
    while assetReader.status == .reading {
        guard let nextBuffer = readerOutput.copyNextSampleBuffer() else {
            // No more buffers; the reader's final status decides success below.
            break
        }
        var abl = AudioBufferList()
        // `blockBuffer` keeps the backing storage alive (ARC-managed) while we
        // read through the pointers written into `abl`.
        var blockBuffer: CMBlockBuffer? = nil
        let status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(nextBuffer, nil, &abl, MemoryLayout<AudioBufferList>.size, nil, nil, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer)
        // Fix: the original ignored this OSStatus; on failure `abl` is not
        // populated and its buffer pointer must not be dereferenced.
        guard status == noErr else {
            return nil
        }
        let size = Int(CMSampleBufferGetTotalSampleSize(nextBuffer))
        if size != 0, let mData = abl.mBuffers.mData {
            data.append(Data(bytes: mData, count: size))
        }
    }
    // Fix: the original returned whatever had accumulated even if the reader
    // ended in .failed or .cancelled, silently yielding truncated audio.
    // Only a reader that reached .completed produced valid, complete data.
    guard assetReader.status == .completed else {
        return nil
    }
    return data
}
/// Decoded PCM bytes for the "begin record" tone (16-bit interleaved LPCM at
/// 44.1 kHz, per `loadAudioRecordingToneData`), or `nil` when the bundled
/// "begin_record.caf" resource is missing or fails to decode.
/// NOTE(review): a file-scope `let` global in Swift is initialized lazily on
/// first access, so the decode cost is paid at first use, not at launch —
/// confirm that latency is acceptable at the call sites.
let audioRecordingToneData: Data? = loadAudioRecordingToneData()