I'm rendering audio from input received in code:
...
let bufferRenderSyncer = AVSampleBufferRenderSynchronizer()
let bufferRenderer = AVSampleBufferAudioRenderer()
...
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // Enqueue each captured buffer and start playback once the first buffer arrives.
    bufferRenderer.enqueue(sampleBuffer)
    if bufferRenderSyncer.rate == 0 {
        bufferRenderSyncer.setRate(1, time: sampleBuffer.presentationTimeStamp)
    }
}
...
It works fine with or without headphones: I speak into the microphone on the iPhone or on the headset and hear myself through the corresponding speaker (i.e. the iPhone's or the headset's).
But I need to use the headset's microphone while being able to switch the output between the headset, the built-in speaker, or other connected headphones (AirPods).
I haven't found anything explaining how to change the (default) output device with bufferRenderer. Any suggestions?
To change the output device when using AVSampleBufferAudioRenderer on macOS, we need to manually update the renderer's audioOutputDeviceUniqueID property.
Take a look at this macOS-only instance property of AVSampleBufferAudioRenderer:
By default, AVSampleBufferAudioRenderer uses the system's default audio output device (that is, audioOutputDeviceUniqueID is nil).
However, if the default output device changes (for example, when the user plugs in headphones or selects a different output device), the renderer does not automatically switch to the new device. Manually setting audioOutputDeviceUniqueID to the new device's unique ID ensures that audio keeps playing through the correct output.
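As a minimal sketch of that fix (the UID string here is a placeholder for illustration only; a real unique ID is queried from Core Audio, as the listener further down does), switching the renderer from the snippet above is just a property assignment:

// Placeholder UID purely for illustration; real unique IDs come from Core Audio.
let newDeviceUID = "ExampleDeviceUID"
bufferRenderer.audioOutputDeviceUniqueID = newDeviceUID // setting nil reverts to the system default output device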
I happen to be developing a video player for Apple platforms. On iOS/iPadOS/tvOS we have the AVAudioSession API to handle audio route changes (a quick sketch of that follows for comparison), but there is no such API on macOS.
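This iOS-side sketch is only a rough illustration under my own assumptions (the observer class name is mine); since there is no per-renderer device selection on iOS, the renderer's output follows the session's current route, so you normally just react to the notification:

import AVFoundation

/// Hypothetical iOS-only observer that reacts to AVAudioSession route changes.
final class RouteChangeObserver {
    private var token: NSObjectProtocol?

    init() {
        token = NotificationCenter.default.addObserver(forName: AVAudioSession.routeChangeNotification,
                                                       object: nil,
                                                       queue: .main) { notification in
            guard let rawReason = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt,
                  let reason = AVAudioSession.RouteChangeReason(rawValue: rawReason) else { return }
            let outputs = AVAudioSession.sharedInstance().currentRoute.outputs.map(\.portName)
            debugPrint("Route changed (reason \(reason.rawValue)): now playing through \(outputs)")
        }
    }

    deinit {
        if let token { NotificationCenter.default.removeObserver(token) }
    }
}

On macOS there is no AVAudioSession, so the code below is what I use to switch the audio output device: it listens for changes to the system's default output device and updates audioOutputDeviceUniqueID accordingly.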
import CoreAudio
import AudioToolbox
import AVFoundation
/// An event handler for audio route change events on macOS.
class AudioRouteChangeHandler {
    private var audioRenderer: AVSampleBufferAudioRenderer
    private var callback: AudioObjectPropertyListenerBlock!
    private let queue: DispatchQueue

    init(renderer: AVSampleBufferAudioRenderer, queue: DispatchQueue) {
        self.audioRenderer = renderer
        self.queue = queue
        self.callback = { [weak self] _, _ in
            guard let self else { return }
            self.handleAudioRouteChange()
        }
        registerAudioRouteChangeListener()
    }
    private func registerAudioRouteChangeListener() {
        var propertyAddress = AudioObjectPropertyAddress(
            mSelector: kAudioHardwarePropertyDefaultOutputDevice,
            mScope: kAudioObjectPropertyScopeGlobal,
            mElement: kAudioObjectPropertyElementMain
        )
        let systemObjectID = AudioObjectID(kAudioObjectSystemObject)
        // Listen for changes to the system's default output device on the given queue.
        let status = AudioObjectAddPropertyListenerBlock(systemObjectID,
                                                         &propertyAddress,
                                                         queue,
                                                         callback)
        if status != noErr {
            debugPrint("Error adding default output device listener: \(status)")
        }
    }
    deinit {
        var propertyAddress = AudioObjectPropertyAddress(
            mSelector: kAudioHardwarePropertyDefaultOutputDevice,
            mScope: kAudioObjectPropertyScopeGlobal,
            mElement: kAudioObjectPropertyElementMain
        )
        let systemObjectID = AudioObjectID(kAudioObjectSystemObject)
        AudioObjectRemovePropertyListenerBlock(systemObjectID, &propertyAddress, queue, callback)
    }
}
extension AudioRouteChangeHandler {
    func handleAudioRouteChange() {
        debugPrint("Audio route changed")
        // Retrieve the new default output device's UID
        if let deviceUID = getDefaultOutputDeviceUID() {
            // Update the audioRenderer's audioOutputDeviceUniqueID
            updateAudioRendererDeviceUID(deviceUID)
        } else {
            debugPrint("Failed to get default output device UID")
        }
    }

    func getDefaultOutputDeviceUID() -> String? {
        var deviceID = AudioDeviceID(0)
        var propertyAddress = AudioObjectPropertyAddress(
            mSelector: kAudioHardwarePropertyDefaultOutputDevice,
            mScope: kAudioObjectPropertyScopeGlobal,
            mElement: kAudioObjectPropertyElementMain
        )
        var propertySize = UInt32(MemoryLayout<AudioDeviceID>.size)
        let systemObjectID = AudioObjectID(kAudioObjectSystemObject)
        let status = AudioObjectGetPropertyData(
            systemObjectID,
            &propertyAddress,
            0,
            nil,
            &propertySize,
            &deviceID
        )
        if status != noErr {
            debugPrint("Error getting default output device ID: \(status)")
            return nil
        }

        // Get the device UID
        var deviceUID: CFString = "" as CFString
        propertyAddress.mSelector = kAudioDevicePropertyDeviceUID
        propertyAddress.mScope = kAudioObjectPropertyScopeGlobal
        propertySize = UInt32(MemoryLayout<CFString?>.size)
        let statusUID = AudioObjectGetPropertyData(
            deviceID,
            &propertyAddress,
            0,
            nil,
            &propertySize,
            &deviceUID
        )
        if statusUID != noErr {
            debugPrint("Error getting device UID: \(statusUID)")
            return nil
        }
        return deviceUID as String
    }

    func updateAudioRendererDeviceUID(_ deviceUID: String) {
        // Update the audioOutputDeviceUniqueID property
        audioRenderer.audioOutputDeviceUniqueID = deviceUID
    }
}
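A minimal usage sketch with assumed names for the renderer and the queue; keep a strong reference to the handler for as long as the renderer is in use, since deinit removes the listener:

// Assumed wiring: the same renderer that is attached to the synchronizer.
let bufferRenderer = AVSampleBufferAudioRenderer()
let routeQueue = DispatchQueue(label: "audio.route.changes") // hypothetical queue label
let routeHandler = AudioRouteChangeHandler(renderer: bufferRenderer, queue: routeQueue)

// Optionally pin the renderer to the current default device right away,
// instead of waiting for the first route-change callback.
if let uid = routeHandler.getDefaultOutputDeviceUID() {
    bufferRenderer.audioOutputDeviceUniqueID = uid
}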