I have a macOS Swift app that captures audio from the user's microphone. I'm trying to use the new (macOS 14+) API on AVAudioApplication that lets the user mute the audio input with a gesture (pressing the stem of their AirPods).
According to WWDC, there are two "levels" at which you can handle this: receive a notification and deal with it at the application level, or use the lower-level CoreAudio API. Here I'm trying to do the former.
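For context, the app-level registration on its own is only a few lines. Here is a minimal sketch (assuming macOS 14+ and import AVFAudio; the function name registerForMuteGesture is made up):
import AVFAudio

@available(macOS 14.0, *)
func registerForMuteGesture() -> NSObjectProtocol {
    // Option 1: observe the notification posted when the input mute state changes.
    // Keep the returned token alive for as long as you want to receive it.
    let token = NotificationCenter.default.addObserver(
        forName: AVAudioApplication.inputMuteStateChangeNotification,
        object: nil,
        queue: .main
    ) { notification in
        print("Input mute state changed:", notification)
    }

    // Option 2: install a handler so the app can accept (or reject) the mute request.
    try? AVAudioApplication.shared.setInputMuteStateChangeHandler { isMuted in
        print("System asked to set input mute to", isMuted)
        return true // report that the app applied the requested state
    }

    return token
}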
Here is my sample code (the relevant part is just Manager; the rest is mostly boilerplate for getting microphone input via CoreAudio, so that this works as a minimal reproducible example).
import SwiftUI
import Combine
import AVFAudio
import CoreAudio
import AudioToolbox

class Manager: ObservableObject {
private var controller: AudioInputController?
private var cancellable: AnyCancellable?
init() {
cancellable = NotificationCenter.default.publisher(for: AVAudioApplication.inputMuteStateChangeNotification)
.sink { notification in
print("Notification", notification)
}
do {
try AVAudioApplication.shared.setInputMuteStateChangeHandler { isMuted in
print("Mute state", isMuted, Date())
return true
}
} catch {
assertionFailure()
print("Error setting up handler", error)
}
controller = AudioInputController()!
controller?.start()
}
}
struct ContentView: View {
@StateObject private var manager = Manager()
var body: some View {
VStack {
Image(systemName: "globe")
.imageScale(.large)
.foregroundStyle(.tint)
}
.padding()
}
}
func getDefaultAudioDeviceID() -> AudioDeviceID? {
var deviceID = AudioDeviceID()
var dataSize = UInt32(MemoryLayout<AudioDeviceID>.size)
var propertyAddress = AudioObjectPropertyAddress(
mSelector: kAudioHardwarePropertyDefaultInputDevice,
mScope: kAudioObjectPropertyScopeGlobal,
mElement: kAudioObjectPropertyElementMain
)
let status = AudioObjectGetPropertyData(
AudioObjectID(kAudioObjectSystemObject),
&propertyAddress,
0,
nil,
&dataSize,
&deviceID
)
guard status == noErr else {
assertionFailure()
return nil
}
return deviceID
}
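// The same AudioObjectGetPropertyData pattern works for other device properties.
// For example, a small hypothetical helper (not part of the original sample) that
// reads the device's nominal sample rate as a quick sanity check:
func getNominalSampleRate(of deviceID: AudioDeviceID) -> Float64? {
    var sampleRate = Float64(0)
    var dataSize = UInt32(MemoryLayout<Float64>.size)
    var propertyAddress = AudioObjectPropertyAddress(
        mSelector: kAudioDevicePropertyNominalSampleRate,
        mScope: kAudioObjectPropertyScopeGlobal,
        mElement: kAudioObjectPropertyElementMain
    )
    let status = AudioObjectGetPropertyData(deviceID, &propertyAddress, 0, nil, &dataSize, &sampleRate)
    return status == noErr ? sampleRate : nil
}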
private final class AudioInputController {
private var auHAL: AudioComponentInstance?
private var inputBufferList: UnsafeMutableAudioBufferListPointer?
private var sampleRate: Float = 0.0
init?() {
guard let audioDeviceID = getDefaultAudioDeviceID() else {
assertionFailure()
return nil
}
var osStatus: OSStatus = noErr
// Create an AUHAL instance.
var description = AudioComponentDescription(
componentType: kAudioUnitType_Output,
componentSubType: kAudioUnitSubType_HALOutput,
componentManufacturer: kAudioUnitManufacturer_Apple,
componentFlags: 0,
componentFlagsMask: 0
)
guard let component = AudioComponentFindNext(nil, &description) else {
assertionFailure()
return
}
osStatus = AudioComponentInstanceNew(component, &auHAL)
guard osStatus == noErr, let auHAL else {
return nil
}
// Enable the input bus, and disable the output bus.
let kInputElement: UInt32 = 1
let kOutputElement: UInt32 = 0
var kInputData: UInt32 = 1
var kOutputData: UInt32 = 0
let ioDataSize: UInt32 = UInt32(MemoryLayout<UInt32>.size)
osStatus = AudioUnitSetProperty(
auHAL,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input,
kInputElement,
&kInputData,
ioDataSize
)
guard osStatus == noErr else {
assertionFailure()
return nil
}
osStatus = AudioUnitSetProperty(
auHAL,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Output,
kOutputElement,
&kOutputData,
ioDataSize
)
guard osStatus == noErr else {
assertionFailure()
return nil
}
var inputDevice: AudioDeviceID = audioDeviceID
let inputDeviceSize: UInt32 = UInt32(MemoryLayout<AudioDeviceID>.size)
osStatus = AudioUnitSetProperty(
auHAL,
AudioUnitPropertyID(kAudioOutputUnitProperty_CurrentDevice),
AudioUnitScope(kAudioUnitScope_Global),
0,
&inputDevice,
inputDeviceSize
)
guard osStatus == noErr else {
assertionFailure()
return nil
}
// Adopt the stream format.
var deviceFormat = AudioStreamBasicDescription()
var desiredFormat = AudioStreamBasicDescription()
var ioFormatSize: UInt32 = UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
osStatus = AudioUnitGetProperty(
auHAL,
AudioUnitPropertyID(kAudioUnitProperty_StreamFormat),
AudioUnitScope(kAudioUnitScope_Input),
kInputElement,
&deviceFormat,
&ioFormatSize
)
guard osStatus == noErr else {
assertionFailure()
return nil
}
osStatus = AudioUnitGetProperty(
auHAL,
AudioUnitPropertyID(kAudioUnitProperty_StreamFormat),
AudioUnitScope(kAudioUnitScope_Output),
kInputElement,
&desiredFormat,
&ioFormatSize
)
guard osStatus == noErr else {
assertionFailure()
return nil
}
// Same sample rate, same number of channels.
desiredFormat.mSampleRate = deviceFormat.mSampleRate
desiredFormat.mChannelsPerFrame = deviceFormat.mChannelsPerFrame
// Canonical audio format.
desiredFormat.mFormatID = kAudioFormatLinearPCM
desiredFormat.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved
desiredFormat.mFramesPerPacket = 1
desiredFormat.mBytesPerFrame = UInt32(MemoryLayout<Float32>.size)
desiredFormat.mBytesPerPacket = UInt32(MemoryLayout<Float32>.size)
desiredFormat.mBitsPerChannel = 8 * UInt32(MemoryLayout<Float32>.size)
osStatus = AudioUnitSetProperty(
auHAL,
AudioUnitPropertyID(kAudioUnitProperty_StreamFormat),
AudioUnitScope(kAudioUnitScope_Output),
kInputElement,
&desiredFormat,
UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
)
guard osStatus == noErr else {
assertionFailure()
return nil
}
// Store the format information.
sampleRate = Float(desiredFormat.mSampleRate)
// Get the buffer frame size.
var bufferSizeFrames: UInt32 = 0
var bufferSizeFramesSize = UInt32(MemoryLayout<UInt32>.size)
osStatus = AudioUnitGetProperty(
auHAL,
AudioUnitPropertyID(kAudioDevicePropertyBufferFrameSize),
AudioUnitScope(kAudioUnitScope_Global),
0,
&bufferSizeFrames,
&bufferSizeFramesSize
)
guard osStatus == noErr else {
assertionFailure()
return nil
}
let bufferSizeBytes: UInt32 = bufferSizeFrames * UInt32(MemoryLayout<Float32>.size)
let channels: UInt32 = deviceFormat.mChannelsPerFrame
inputBufferList = AudioBufferList.allocate(maximumBuffers: Int(channels))
for i in 0 ..< Int(channels) {
inputBufferList?[i] = AudioBuffer(
mNumberChannels: 1, // one channel per buffer for the non-interleaved format
mDataByteSize: bufferSizeBytes,
mData: malloc(Int(bufferSizeBytes))
)
}
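// Install an input render callback. The C callback cannot capture Swift state,
// so `self` is passed through inputProcRefCon as an unmanaged opaque pointer
// and recovered inside the callback.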
var callbackStruct = AURenderCallbackStruct(
inputProc: { (
inRefCon: UnsafeMutableRawPointer,
ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
inTimeStamp: UnsafePointer<AudioTimeStamp>,
inBusNumber: UInt32,
inNumberFrame: UInt32,
_: UnsafeMutablePointer<AudioBufferList>?
) -> OSStatus in
let owner = Unmanaged<AudioInputController>.fromOpaque(inRefCon).takeUnretainedValue()
owner.inputCallback(
ioActionFlags: ioActionFlags,
inTimeStamp: inTimeStamp,
inBusNumber: inBusNumber,
inNumberFrame: inNumberFrame
)
return noErr
},
inputProcRefCon: Unmanaged.passUnretained(self).toOpaque()
)
osStatus = AudioUnitSetProperty(
auHAL,
AudioUnitPropertyID(kAudioOutputUnitProperty_SetInputCallback),
AudioUnitScope(kAudioUnitScope_Global),
0,
&callbackStruct,
UInt32(MemoryLayout<AURenderCallbackStruct>.size)
)
guard osStatus == noErr else {
assertionFailure()
return nil
}
osStatus = AudioUnitInitialize(auHAL)
guard osStatus == noErr else {
assertionFailure()
return nil
}
}
deinit {
if let auHAL {
AudioOutputUnitStop(auHAL)
AudioComponentInstanceDispose(auHAL)
}
if let inputBufferList {
for buffer in inputBufferList {
free(buffer.mData)
}
// The list itself was allocated with AudioBufferList.allocate, so free it too.
free(inputBufferList.unsafeMutablePointer)
}
}
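// Called by the AUHAL on its I/O thread for every captured buffer; pulls the
// new samples into `inputBufferList` via AudioUnitRender.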
private func inputCallback(
ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
inTimeStamp: UnsafePointer<AudioTimeStamp>,
inBusNumber: UInt32,
inNumberFrame: UInt32
) {
guard let inputBufferList,
let auHAL
else {
assertionFailure()
return
}
let err = AudioUnitRender(
auHAL,
ioActionFlags,
inTimeStamp,
inBusNumber,
inNumberFrame,
inputBufferList.unsafeMutablePointer
)
guard err == noErr else {
assertionFailure()
return
}
}
func start() {
guard let auHAL else {
assertionFailure()
return
}
let status: OSStatus = AudioOutputUnitStart(auHAL)
if status != noErr {
assertionFailure()
}
}
func stop() {
guard let auHAL else {
assertionFailure()
return
}
let status: OSStatus = AudioOutputUnitStop(auHAL)
if status != noErr {
assertionFailure()
}
}
}
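For completeness, here is one way the rendered samples could be consumed from inputCallback, for example to drive a level meter. This is only a sketch; it assumes the non-interleaved Float32 format configured above, and the helper name computeRMS is made up:
import AudioToolbox

// Root-mean-square level of one render cycle, assuming one Float32 channel per buffer.
private func computeRMS(of bufferList: UnsafeMutableAudioBufferListPointer, frameCount: Int) -> Float {
    var sum: Float = 0
    var total = 0
    for buffer in bufferList {
        guard let data = buffer.mData else { continue }
        let samples = data.assumingMemoryBound(to: Float32.self)
        for i in 0 ..< frameCount {
            sum += samples[i] * samples[i]
        }
        total += frameCount
    }
    return total > 0 ? (sum / Float(total)).squareRoot() : 0
}

Inside inputCallback, after AudioUnitRender succeeds, something like print(computeRMS(of: inputBufferList, frameCount: Int(inNumberFrame))) would show live input levels.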
Note: if you want to run this, make sure to add Audio Input to the app's capabilities and to add the NSMicrophoneUsageDescription key to Info.plist.
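A quick runtime check can confirm both are in place. This is just a sketch (it assumes AVFoundation is linked for the permission query, and the function name checkMicrophoneSetup is made up):
import AVFoundation

func checkMicrophoneSetup() {
    // The usage-description string must exist, otherwise the system terminates the app on first access.
    let usage = Bundle.main.object(forInfoDictionaryKey: "NSMicrophoneUsageDescription") as? String
    print("NSMicrophoneUsageDescription:", usage ?? "<missing>")

    // Request (or report) microphone permission.
    AVCaptureDevice.requestAccess(for: .audio) { granted in
        print("Microphone access granted:", granted)
    }
}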
When I press the stem on my AirPods Pro (2nd generation), I get the following:
How can I make sure that AVAudioApplication.inputMuteStateChangeNotification is actually posted (or the handler set with AVAudioApplication.shared.setInputMuteStateChangeHandler is actually invoked) when the stem is pressed?
Your code is almost correct. It looks like you get the error "Unable to control the microphone with AirPods Pro" when the notification isn't registered properly. Register for AVAudioApplication.inputMuteStateChangeNotification in your ContentView, where you can then update the UI. Here is the code:
import SwiftUI
import AVFAudio
struct ContentView: View {
let pub = NotificationCenter.default
.publisher(for: AVAudioApplication.inputMuteStateChangeNotification)
@StateObject private var manager = Manager()
var body: some View {
VStack {
Image(systemName: "globe")
.imageScale(.large)
.foregroundStyle(.tint)
}
.padding()
.onReceive(pub, perform: { _ in
print("received")
})
}
}
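If you also want the view to reflect the current state, you should be able to read AVAudioApplication.shared.isInputMuted when the notification arrives. A sketch along those lines (assuming macOS 14+; the view name MuteAwareContentView is arbitrary):
import SwiftUI
import AVFAudio

struct MuteAwareContentView: View {
    let pub = NotificationCenter.default
        .publisher(for: AVAudioApplication.inputMuteStateChangeNotification)
    @StateObject private var manager = Manager()
    @State private var isMuted = false

    var body: some View {
        VStack {
            Image(systemName: isMuted ? "mic.slash" : "mic")
                .imageScale(.large)
                .foregroundStyle(.tint)
        }
        .padding()
        .onReceive(pub) { _ in
            // Query the application-wide input mute state when the system posts the change.
            isMuted = AVAudioApplication.shared.isInputMuted
        }
    }
}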