Swiftgram/TelegramUI/AudioRecordningToneData.swift

import Foundation
import AVFoundation

private func loadAudioRecordingToneData() -> Data? {
    // Decode the bundled tone to 16-bit, interleaved, little-endian linear PCM at 44.1 kHz.
    let outputSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatLinearPCM as NSNumber,
        AVSampleRateKey: 44100.0 as NSNumber,
        AVLinearPCMBitDepthKey: 16 as NSNumber,
        AVLinearPCMIsNonInterleaved: false as NSNumber,
        AVLinearPCMIsFloatKey: false as NSNumber,
        AVLinearPCMIsBigEndianKey: false as NSNumber
    ]
    
    guard let url = Bundle.main.url(forResource: "begin_record", withExtension: "caf") else {
        return nil
    }
    
    let asset = AVURLAsset(url: url)
    guard let assetReader = try? AVAssetReader(asset: asset) else {
        return nil
    }
    
    let readerOutput = AVAssetReaderAudioMixOutput(audioTracks: asset.tracks, audioSettings: outputSettings)
    if !assetReader.canAdd(readerOutput) {
        return nil
    }
    assetReader.add(readerOutput)
    
    if !assetReader.startReading() {
        return nil
    }
    
    // Drain the reader, appending the raw PCM bytes of each sample buffer.
    var data = Data()
    while assetReader.status == .reading {
        if let nextBuffer = readerOutput.copyNextSampleBuffer() {
            var abl = AudioBufferList()
            var blockBuffer: CMBlockBuffer? = nil
            CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(nextBuffer, nil, &abl, MemoryLayout<AudioBufferList>.size, nil, nil, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer)
            let size = Int(CMSampleBufferGetTotalSampleSize(nextBuffer))
            if size != 0, let mData = abl.mBuffers.mData {
                data.append(Data(bytes: mData, count: size))
            }
        } else {
            break
        }
    }
    return data
}

// Lazily initialized on first access; nil if the bundled tone can't be located or decoded.
let audioRecordingToneData: Data? = loadAudioRecordingToneData()
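
// Usage sketch (illustrative only, not part of the Telegram source): one way the decoded
// bytes could be wrapped in an AVAudioPCMBuffer for playback, e.g. on an AVAudioPlayerNode.
// The sample rate and 16-bit interleaved integer format match the outputSettings above;
// the mono channel count and the helper name `audioRecordingToneBuffer` are assumptions.
private func audioRecordingToneBuffer() -> AVAudioPCMBuffer? {
    guard let data = audioRecordingToneData, !data.isEmpty else {
        return nil
    }
    // 16-bit interleaved integer PCM at 44.1 kHz, as produced by loadAudioRecordingToneData().
    guard let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 44100.0, channels: 1, interleaved: true) else {
        return nil
    }
    let frameCount = AVAudioFrameCount(data.count / MemoryLayout<Int16>.size)
    guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount) else {
        return nil
    }
    buffer.frameLength = frameCount
    // Copy the raw PCM bytes into the buffer's (interleaved) sample storage.
    data.withUnsafeBytes { (bytes: UnsafeRawBufferPointer) in
        if let src = bytes.baseAddress, let dst = buffer.int16ChannelData?[0] {
            memcpy(dst, src, data.count)
        }
    }
    return buffer
}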