I am converting this code from Swift to Objective-C:
func toPCMBuffer(data: NSData) -> AVAudioPCMBuffer {
    let audioFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatFloat32, sampleRate: 8000, channels: 1, interleaved: false) // given NSData audio format
    let PCMBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: UInt32(data.length) / audioFormat.streamDescription.pointee.mBytesPerFrame)
    PCMBuffer.frameLength = PCMBuffer.frameCapacity
    let channels = UnsafeBufferPointer(start: PCMBuffer.floatChannelData, count: Int(PCMBuffer.format.channelCount))
    data.getBytes(UnsafeMutableRawPointer(channels[0]), length: data.length)
    return PCMBuffer
}
Here is my Objective-C conversion so far:
- (AVAudioPCMBuffer *)toPCMBuffer:(NSData *)data {
    AVAudioFormat *audioFormat = [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32 sampleRate:8000 channels:1 interleaved:NO];
    AVAudioPCMBuffer *PCMBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:audioFormat frameCapacity:data.length / audioFormat.streamDescription->mBytesPerFrame];
    PCMBuffer.frameLength = PCMBuffer.frameCapacity;
    float *channels = malloc(PCMBuffer.format.channelCount * sizeof(float)); // remember to free eventually
    memcpy(channels, PCMBuffer.floatChannelData, PCMBuffer.format.channelCount * sizeof(float));
    [data getBytes:channels length:data.length];
    return PCMBuffer;
}
But I am not sure this line of the conversion is correct:
AVAudioPCMBuffer *PCMBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:audioFormat frameCapacity:data.length / audioFormat.streamDescription->mBytesPerFrame];
I don't know how to express the Swift conversion UInt32(data.length) in Objective-C.
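My guess is that the Swift UInt32(...) initializer just becomes a plain C cast, since frameCapacity: takes an AVAudioFrameCount (a typedef for uint32_t), but I'm not sure this is right:

AVAudioFrameCount frameCapacity = (AVAudioFrameCount)(data.length / audioFormat.streamDescription->mBytesPerFrame); // cast NSUInteger down to uint32_t
AVAudioPCMBuffer *PCMBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:audioFormat frameCapacity:frameCapacity];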
And second: how do I translate UnsafeMutableRawPointer(channels[0]) to Objective-C? Is this the right call?
[data getBytes:channels length:data.length];
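What I currently suspect (but am not sure about) is that channels[0] in the Swift code is just the first per-channel pointer returned by floatChannelData, so instead of the malloc/memcpy above the Objective-C might simply be:

float *const *channels = PCMBuffer.floatChannelData; // per-channel sample pointers owned by the buffer itself
[data getBytes:channels[0] length:data.length];      // copy the raw float samples into channel 0

Is that what the Swift UnsafeMutableRawPointer(channels[0]) corresponds to?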