I am using the MediaRecorder Web API to record audio. It works fine in browsers on Windows and Mac, but when I try it in the browser on an iPad, it sometimes loads the data into the blob and creates the URL, and other times I get the error
Failed to load resource: The operation couldn't be completed. (WebKitBlobResource error 3)
I am also facing another issue where the recording sometimes stops abruptly mid-recording; I have attached a screenshot for reference.
Here is part of my Angular code:
@ViewChild('audioPlayer') audioPlayer: ElementRef;
mediaRecorder: MediaRecorder;
chunks: Blob[] = [];

startRecording() {
  navigator.mediaDevices.getUserMedia({ audio: true })
    .then(stream => {
      this.mediaRecorder = new MediaRecorder(stream);
      console.log('MediaRecorder.mimeType: ', "audio/mp3");
      this.mediaRecorder.ondataavailable = (e: any) => {
        this.chunks.push(e.data);
        console.log("chunk size: ", this.chunks.length);
        console.log("e.data: ", e.data);
        console.log("e", e);
        const blob = new Blob(this.chunks, { type: "audio/mp3" });
        const audioURL = window.URL.createObjectURL(blob);
        this.audioPlayer.nativeElement.src = audioURL;
      };
      this.mediaRecorder.onstop = () => {
        console.log('Stopped recording');
        this.chunks = [];
      };
      this.mediaRecorder.start();
    })
    .catch(error => {
      console.error('Error accessing the microphone: ', error);
    });
}

pauseRecording() {
  if (this.mediaRecorder.state === 'recording') {
    this.mediaRecorder.requestData();
    this.mediaRecorder.pause();
  }
}

resumeRecording() {
  if (this.mediaRecorder.state === 'paused') {
    this.mediaRecorder.resume();
  }
}

stopRecording() {
  if (this.mediaRecorder.state !== 'inactive') {
    this.mediaRecorder.stop();
  }
}
I have tried swapping the order of the mediaRecorder.pause() and mediaRecorder.requestData() calls, and I also tried adding a 4-second setTimeout inside the ondataavailable event handler to reduce the blob-processing pressure (I must have read that somewhere), but with no success. I also tried attaching an onerror event handler to the mediaRecorder and printing it, but no error is ever thrown from that handler.
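Roughly, the error handler I attached looked like this (logging only; it never fired on the iPad):

this.mediaRecorder.onerror = (event: any) => {
  // Depending on the browser, the error event may carry a DOMException in event.error,
  // but nothing is ever logged here on the iPad.
  console.error('MediaRecorder error: ', event.error ?? event);
};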
If you want to look at the whole code, I have added it to my GitHub. Feel free to fork it, clone it, raise issues, or send pull requests: https://github.com/Jayantkhandebharad/angular-audio-toolkit
Thank you in advance!
Note: if you feel there is anything wrong with this question, feel free to comment or edit it.
For the MediaRecorder API to work in Safari, the following steps must be taken:
1. record with the audio/mp4 MIME type
2. check in the 'ondataavailable' handler whether an empty blob event has been passed in
Based on the information you provided, that means:
1. use audio/mp4
2. make sure the event is a valid blob event, e.g. if (!e.data?.size) return (see the sketch below)
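A rough sketch of how both points could look in your existing ondataavailable handler (this assumes audio/mp4 is actually supported on the device; in real code confirm it first with MediaRecorder.isTypeSupported('audio/mp4')):

this.mediaRecorder.ondataavailable = (e: BlobEvent) => {
  // Safari may deliver an empty blob just before the final one; skip it.
  if (!e.data?.size) {
    return;
  }
  this.chunks.push(e.data);
  // Assemble the blob as audio/mp4 instead of audio/mp3.
  const blob = new Blob(this.chunks, { type: 'audio/mp4' });
  this.audioPlayer.nativeElement.src = window.URL.createObjectURL(blob);
};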
For reference, here is a complete recorder class that handles both points: https://gist.github.com/hikariNTU/19d744b8072ab056291781109bcd17ed
export class VoiceRecorder {
  recorder: MediaRecorder | undefined;
  options: {
    onStart?: () => void;
    onStop?: () => void;
  };

  constructor(options: VoiceRecorder['options'] = {}) {
    console.log('!MediaRecorder Init!');
    this.options = options;
  }

  async start(deviceId = '') {
    if (this.recorder) {
      this.stop();
    }
    const recordType = VoiceRecorder.getSupportedTypes()[0];
    if (!recordType) {
      throw Error('Browser MediaRecorder cannot support desired format!');
    }
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: {
        deviceId,
        echoCancellation: false,
        autoGainControl: false,
        noiseSuppression: false,
      },
    });
    this.recorder = new MediaRecorder(stream, {
      mimeType: recordType,
      audioBitsPerSecond: 320000,
    });
    this.recorder.start();
    const started = new Promise<void>((res, rej) => {
      // Reject if the 'start' event never fires within 5 s (e.g. on a slow device like a remote iPhone).
      const timeId = setTimeout(rej, 5000);
      this.recorder?.addEventListener('start', () => {
        clearTimeout(timeId);
        console.log('[Mic] Recording...', recordType);
        this.options.onStart?.();
        res();
      });
    });
    this.recorder.addEventListener('stop', () => {
      console.log('[Mic] Stop!');
    });
    this.recorder.addEventListener('error', (e) => {
      console.log('[Mic] Error?', e);
    });
    return started;
  }

  pauseOrResume() {
    if (this.recorder?.state === 'recording') {
      this.recorder.pause();
    } else if (this.recorder?.state === 'paused') {
      this.recorder.resume();
    }
  }
  stop() {
    this.options.onStop?.();
    if (!this.recorder) {
      return;
    }
    const data = new Promise<{
      type: string;
      blob: Blob;
    }>((res, rej) => {
      const timeId = setTimeout(() => {
        rej('Record End Responding Timeout!');
      }, 3000);
      if (this.recorder) {
        const mimeType = this.recorder.mimeType;
        const handler = (event: BlobEvent) => {
          // Safari tends to emit an empty blob just before the real one arrives here.
          if (!event.data.size) {
            return;
          }
          res({
            type: mimeType,
            blob: event.data,
          });
          clearTimeout(timeId);
          this.recorder?.removeEventListener('dataavailable', handler);
        };
        this.recorder.addEventListener('dataavailable', handler);
        this.recorder.stream.getTracks().forEach((track) => track.stop());
        this.recorder.stop();
        this.recorder = undefined;
      }
    });
    return data;
  }
  static getSupportedTypes() {
    const containers = [
      'wav',
      'aac',
      'm4a',
      'mp4',
      'webm',
      'ogg',
      'mpeg',
      'flac',
    ];
    const codecs = ['avc1', 'aac', 'mp4a', 'av1', 'opus', 'mpeg', 'pcm'];
    const supportedAudios = containers
      .map((format) => `audio/${format}`)
      .filter((mimeType) => MediaRecorder.isTypeSupported(mimeType));
    const supportedAudioCodecs = supportedAudios
      .flatMap((audio) => codecs.map((codec) => `${audio};codecs=${codec}`))
      .filter((mimeType) => MediaRecorder.isTypeSupported(mimeType));
    return supportedAudioCodecs;
  }
  static async getInputList() {
    // trigger permission first to get full list
    await navigator.mediaDevices
      .getUserMedia({
        audio: true,
      })
      .then((stream) => {
        stream.getTracks().forEach((t) => t.stop());
      });
    const list = await navigator.mediaDevices.enumerateDevices();
    // filter out hidden deviceId, when user first visited the site and didn't provide permission.
    return list.filter((v) => v.kind === 'audioinput' && v.deviceId !== '');
  }
}
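A small usage sketch for the class above (the surrounding wiring is my own assumption, not part of the gist):

const recorder = new VoiceRecorder({
  onStart: () => console.log('recording started'),
  onStop: () => console.log('recording stopped'),
});

async function recordAndPlay(audioEl: HTMLAudioElement) {
  await recorder.start();                        // resolves once the 'start' event fires
  await new Promise((r) => setTimeout(r, 3000)); // record for a few seconds
  const result = await recorder.stop();          // { type, blob } once a non-empty blob arrives
  if (result) {
    audioEl.src = URL.createObjectURL(result.blob);
  }
}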