I have an assignment to create an audio visualizer, but for whatever reason I can't figure out how to get audio to play in my code. I copied the code over from the previous assignment, which has a working audio element, but for some reason it won't play.
Here is the relevant JavaScript from my version:
let canvasElement, drawCtx;
canvasElement = document.querySelector('canvas');
drawCtx = canvasElement.getContext("2d");

let playButton, audioCtx, audioElement, sourceNode, analyserNode, gainNode;

const NUM_SAMPLES = 256;
let audioData = new Uint8Array(NUM_SAMPLES/2);

const SOUND_PATH = Object.freeze({
    sound1: "media/New Adventure Theme.mp3",
    sound2: "media/Peanuts Theme.mp3",
    sound3: "media/The Picard Song.mp3"
});

// main functions
function setup()
{
    setupWebAudio();
    setupUI();
    loop();
}

function setupUI()
{
    addShapeButton.onclick = addShape;
    document.querySelector(".fa-plus").onclick = showHide;

    playButton = document.querySelector("#playButton");
    playButton.onclick = e =>
    {
        console.log(`audioCtx.state = ${audioCtx.state}`);

        // check if context is in suspended state (autoplay policy)
        if (audioCtx.state == "suspended")
        {
            audioCtx.resume();
        }

        if (e.target.dataset.playing == "no")
        {
            audioElement.play();
            e.target.dataset.playing = "yes";
        }
        else if (e.target.dataset.playing == "yes")
        {
            audioElement.pause();
            e.target.dataset.playing = "no";
        }
    }
}

function setupWebAudio()
{
    const AudioContext = window.AudioContext || window.webkitAudioContext;
    audioCtx = new AudioContext();
    audioElement = document.querySelector("audio");
    audioElement.src = SOUND_PATH.sound1;
    sourceNode = audioCtx.createMediaElementSource(audioElement);
    analyserNode = audioCtx.createAnalyser();
    analyserNode.fftSize = NUM_SAMPLES;
    gainNode = audioCtx.createGain();
    gainNode.gain.value = 1;
    sourceNode.connect(analyserNode);
    analyserNode.connect(gainNode);
    gainNode.connect(audioCtx.destination);
    audioElement.src = "media/New Adventure Theme.mp3";
    audioElement.onended = _ => {
        playButton.dataset.playing = "no";
    };
    gainNode.gain.value = 50;
    //audioCtx.resume();
    audioElement.play();
}
Here is the code from the previous, working version:
window.onload = init;

// SCRIPT SCOPED VARIABLES

// 1 - here we are faking an enumeration - we'll look at another way to do this soon
const SOUND_PATH = Object.freeze({
    sound1: "media/New Adventure Theme.mp3",
    sound2: "media/Peanuts Theme.mp3",
    sound3: "media/The Picard Song.mp3"
});

// 2 - elements on the page
let audioElement, canvasElement;
// UI
let playButton;

// 3 - our canvas drawing context
let drawCtx;

// 4 - our WebAudio context
let audioCtx;

// 5 - nodes that are part of our WebAudio audio routing graph
let sourceNode, analyserNode, gainNode;

// 6 - a typed array to hold the audio frequency data
const NUM_SAMPLES = 256;
// create a new array of 8-bit integers (0-255)
let audioData = new Uint8Array(NUM_SAMPLES/2);

let maxRadius = 200;
let invert = false, tintRed = false, noise = false, sepia = false;

// FUNCTIONS
function init(){
    setupWebaudio();
    setupCanvas();
    setupUI();
    update();
}

function setupWebaudio(){
    // 1 - The || is because WebAudio has not been standardized across browsers yet
    const AudioContext = window.AudioContext || window.webkitAudioContext;
    audioCtx = new AudioContext();

    // 2 - get a reference to the <audio> element on the page
    audioElement = document.querySelector("audio");
    audioElement.src = SOUND_PATH.sound3;

    // 3 - create a source node that points at the <audio> element
    sourceNode = audioCtx.createMediaElementSource(audioElement);

    // 4 - create an analyser node
    analyserNode = audioCtx.createAnalyser();
    /*
    We will request NUM_SAMPLES number of samples or "bins" spaced equally
    across the sound spectrum.

    If NUM_SAMPLES (fftSize) is 256, then the first bin is 0 Hz, the second is 172 Hz,
    the third is 344 Hz. Each bin contains a number between 0-255 representing
    the amplitude of that frequency.
    */
    // fft stands for Fast Fourier Transform
    analyserNode.fftSize = NUM_SAMPLES;

    // 5 - create a gain (volume) node
    gainNode = audioCtx.createGain();
    gainNode.gain.value = 1;

    // 6 - connect the nodes - we now have an audio graph
    sourceNode.connect(analyserNode);
    analyserNode.connect(gainNode);
    gainNode.connect(audioCtx.destination);
}

function setupCanvas(){
    canvasElement = document.querySelector('canvas');
    drawCtx = canvasElement.getContext("2d");
}

function setupUI(){
    playButton = document.querySelector("#playButton");
    playButton.onclick = e => {
        console.log(`audioCtx.state = ${audioCtx.state}`);

        // check if context is in suspended state (autoplay policy)
        if (audioCtx.state == "suspended") {
            audioCtx.resume();
        }

        if (e.target.dataset.playing == "no") {
            audioElement.play();
            e.target.dataset.playing = "yes";
        }
        // if track is playing, pause it
        else if (e.target.dataset.playing == "yes") {
            audioElement.pause();
            e.target.dataset.playing = "no";
        }
    };

    let volumeSlider = document.querySelector("#volumeSlider");
    volumeSlider.oninput = e => {
        gainNode.gain.value = e.target.value;
        volumeLabel.innerHTML = Math.round((e.target.value/2 * 100));
    };
    volumeSlider.dispatchEvent(new InputEvent("input"));

    let radiusSlider = document.querySelector("#circleRadiusSlider");
    radiusSlider.oninput = e => {
        maxRadius = e.target.value;
        circleRadiusLabel.innerHTML = Math.round(e.target.value);
    };
    radiusSlider.dispatchEvent(new InputEvent("input"));

    document.querySelector("#trackSelect").onchange = e => {
        audioElement.src = e.target.value;
        // pause the current track if it is playing
        playButton.dispatchEvent(new MouseEvent("click"));
    };

    // if track ends
    audioElement.onended = _ => {
        playButton.dataset.playing = "no";
    };

    document.querySelector("#fsButton").onclick = _ => {
        requestFullscreen(canvasElement);
    };

    document.querySelector("#tintRedCheck").checked = tintRed;
    document.querySelector("#tintRedCheck").onchange = e => {
        tintRed = e.target.checked;
    };

    document.querySelector("#invertCheck").checked = invert;
    document.querySelector("#invertCheck").onchange = e => {
        invert = e.target.checked;
    };

    document.querySelector("#noiseCheck").checked = noise;
    document.querySelector("#noiseCheck").onchange = e => {
        noise = e.target.checked;
    };

    document.querySelector("#sepiaCheck").checked = sepia;
    document.querySelector("#sepiaCheck").onchange = e => {
        sepia = e.target.checked;
    };
}
I can't seem to spot the difference between the two, and I'm out of ideas for how to debug this. I apologize that this isn't a very useful question for people to learn from in the future, but any help with this would be greatly appreciated.
When the media element is passed to .createMediaElementSource(), its playback is routed through the AudioContext and becomes subject to CORS restrictions. Within setupUI, use fetch() to request the resource, use Body.blob() to get the resource as a Blob, and within a chained .then() use URL.createObjectURL() to create a Blob URL for the media, to avoid the error

MediaElementAudioSource outputs zeroes due to CORS access restrictions for https://path/to/resource
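A minimal sketch of that fetch() -> Blob -> Blob URL approach, reusing the audioElement and SOUND_PATH from the question (error handling omitted):

    fetch(SOUND_PATH.sound1)               // request the media resource
      .then(response => response.blob())   // read the response body as a Blob
      .then(blob => {
        // a Blob URL is same-origin, so createMediaElementSource()
        // no longer outputs zeroes due to CORS restrictions
        audioElement.src = URL.createObjectURL(blob);
      });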
Also, move .play() to outside of the following if statement, or set e.target.dataset.playing to "no" before the user clicks "Play", so that the first click actually enters

if (e.target.dataset.playing == "no") {}
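For example, one minimal fix (a sketch; the initial value could equally be set as a data-playing="no" attribute on the button in the HTML) is to initialize the state where the button is looked up in setupUI:

    playButton = document.querySelector("#playButton");
    // start in the "not playing" state so the first click takes the
    // dataset.playing == "no" branch and calls audioElement.play()
    playButton.dataset.playing = "no";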
The following code

shapes[myShapeIndex].rot = myRotationList.childNodes.item(1).childNodes.item(0).value;

logs the error

(index):381 Uncaught TypeError: Cannot read property 'childNodes' of null
    at valueLink ((index):381)
    at loop ((index):249)

though the media does play.
plnkr