new p5(sketch => {
let audioContext;
let analyserNode;
let analyserData;
let analyserTarget;
let gainNode;
let audio;
let isFloat = false;
let interval;
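// Lifecycle: the first click builds the audio graph and starts playback;
// a second click tears it down. While audio is running, draw() renders
// the time-domain waveform, eased toward the analyser's latest snapshot.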
sketch.setup = function() {
sketch.createCanvas(sketch.windowWidth, sketch.windowWidth / 2);
};
sketch.draw = function() {
sketch.background("#001b42");
sketch.stroke("white");
sketch.noFill();
if (analyserNode) {
for (let i = 0; i < analyserData.length; i++) {
// Byte data is 0..255 with 128 at silence; normalize it to the
// float API's -1..1 range before easing toward it
const target = isFloat ? analyserTarget[i] : (analyserTarget[i] / 256) * 2 - 1;
analyserData[i] = damp(analyserData[i], target, 0.01, sketch.deltaTime);
}
sketch.beginShape();
const margin = 0.1;
for (let i = 0; i < analyserData.length; i++) {
const x = sketch.map(i,
0, analyserData.length,
sketch.width * margin, sketch.width * (1 - margin));
// Signal coming from this frequency bin
const signal = analyserData[i];
// Exaggerate the signal (4x the canvas height) so quiet waveforms are visible
const amplitude = sketch.height * 4;
// Map signal to screen Y position
const y = sketch.map(signal,
-1, 1,
sketch.height / 2 - amplitude / 2, sketch.height / 2 + amplitude / 2);
// Place vertex
sketch.vertex(x, y);
}
// Finish the line
sketch.endShape();
} else {
// Draw a play button
const dim = sketch.min(sketch.width, sketch.height);
polygon(sketch.width / 2, sketch.height / 2, dim * 0.1, 3);
}
};
// Draw a basic regular polygon; handles triangles, squares, pentagons, etc.
function polygon(x, y, radius, sides = 3, angle = 0) {
sketch.fill('white');
sketch.beginShape();
for (let i = 0; i < sides; i++) {
const a = angle + sketch.TWO_PI * (i / sides);
let sx = x + sketch.cos(a) * radius;
let sy = y + sketch.sin(a) * radius;
sketch.vertex(sx, sy);
}
sketch.endShape(sketch.CLOSE);
sketch.noFill();
}
//---------------------------------------------
// Clicking toggles the audio on and off; note that p5 fires
// mousePressed for a press anywhere in the window, not just on the canvas
sketch.mousePressed = function () {
// Only initiate audio upon a user gesture
if (!audioContext) {
const AudioContext = window.AudioContext || window.webkitAudioContext;
audioContext = new AudioContext();
// Optional:
// If the user inserts/removes bluetooth headphones or pushes
// the play/pause media keys, we can use the following to ignore the action
// navigator.mediaSession.setActionHandler("pause", () => {});
// Make a stream source, i.e. MP3, microphone, etc
// In this case we choose an <audio> element
audio = document.createElement("audio");
// Upon loading the audio, let's play it
audio.addEventListener(
"canplay",
() => {
// First, ensure the context is in a resumed state
audioContext.resume();
// Now, play the audio
audio.play();
},
{ once: true }
);
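// { once: true } removes the listener after its first call, so later
// "canplay" events (e.g. after re-buffering) won't restart playback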
// Loop audio
audio.loop = true;
// Set the source; urlPiano is an audio file URL defined elsewhere
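// Without CORS clearance, a MediaElementSource outputs silence for
// cross-origin files, so request the file anonymously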
audio.crossOrigin = "Anonymous";
audio.src = urlPiano;
// Connect source into the WebAudio context
const source = audioContext.createMediaElementSource(audio);
source.connect(audioContext.destination);
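// A node can fan out to several destinations: the same source feeds
// the speakers here and the analyser below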
analyserNode = audioContext.createAnalyser();
// Increase the detail (a power-of-two multiplier) to capture
// more time-domain samples in each analyser snapshot
const detail = 4;
analyserNode.fftSize = 2048 * detail;
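// fftSize must be a power of two (Web Audio allows 32 to 32768);
// 2048 * 4 = 8192 samples per time-domain snapshot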
// Some older browsers lack getFloatTimeDomainData, so detect it and
// fall back to the byte API when it is missing
isFloat = Boolean(analyserNode.getFloatTimeDomainData);
// Time-domain data contains fftSize samples per snapshot
analyserData = new Float32Array(analyserNode.fftSize);
if (isFloat) {
// We can use a float array here, for higher precision
analyserTarget = new Float32Array(analyserData.length);
} else {
// We are stuck with a byte array
analyserTarget = new Uint8Array(analyserData.length);
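// 0xff / 2 is the byte API's resting value for silence (~128),
// so the waveform starts out as a flat line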
analyserTarget.fill(0xff / 2);
}
// Connect the source to the analyser as well
source.connect(analyserNode);
// Only update the data every N fps
const fps = 12;
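// A low polling rate is fine here: draw() eases the displayed
// waveform toward each snapshot, which hides the coarse updates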
interval = setInterval(() => {
if (isFloat) {
analyserNode.getFloatTimeDomainData(analyserTarget);
} else {
analyserNode.getByteTimeDomainData(analyserTarget);
}
}, (1 / fps) * 1000);
} else {
// kill audio
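// Closing the context releases the audio hardware; the next click
// rebuilds the graph (and a fresh <audio> element) from scratch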
audio.pause();
audioContext.close();
clearInterval(interval);
audioContext = null;
analyserNode = null;
}
};
//------------------------------
// Smooth linear interpolation that accounts for delta time
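// The lerp factor 1 - exp(-lambda * dt) makes the easing frame-rate
// independent: unlike a fixed lerp(a, b, 0.1) per frame, the result
// depends only on elapsed time, not on how many frames it spans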
function damp(a, b, lambda, dt) {
return sketch.lerp(a, b, 1 - Math.exp(-lambda * dt));
}
});