Public
Edited
Apr 20, 2023
16 stars
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
// Fetch the selected audio file and expose its body as a Blob.
// (Observable implicitly awaits the promise before dependent cells run,
// so a .then chain and an inline await resolve to the same value.)
fileBuffer = fetch(audioFile).then(response => response.blob())
Insert cell
// Read the media tags (ID3 etc.) from the blob. jsmediatags is
// callback-based, so adapt it to a promise the notebook can await.
tags = new Promise((resolve, reject) => {
  jsmediatags.read(fileBuffer, { onSuccess: resolve, onError: reject })
})
Insert cell
// Shared Web Audio context used for decoding the uploaded file.
// (createAudioContext falls back to webkitAudioContext for Safari — see below.)
context = createAudioContext()
Insert cell
audioBuffer = {
const buffer = await getFileArrayBuffer(fileBuffer)
return decodeAudioData(
context,
buffer
)
}
Insert cell
// PCM samples (Float32Array) of channel 0 — the analysis is mono-only.
audioArray = audioBuffer.getChannelData(0)
Insert cell
// Samples per analysis window, rounded to the nearest power of two —
// presumably because Meyda's FFT expects power-of-two buffers (TODO confirm).
chunkSize = nearestPowerOf2(audioBuffer.sampleRate / SAMPLES_PER_SECOND)
Insert cell
// Number of complete analysis windows; a trailing partial chunk is discarded.
chunkCount = Math.floor(audioArray.length / chunkSize)
Insert cell
// Total chroma bins: CHROMA_BANDS_PER_TONE bins per semitone × 12 semitones.
chromaBandCount = CHROMA_BANDS_PER_TONE * 12
Insert cell
// Map the band index of each pure tone to its note name,
// e.g. { 0: 'C', 2: 'C#', ... } when CHROMA_BANDS_PER_TONE is 2.
// Only pure-tone bands get a label; in-between bands are left unlabeled.
chromaBandLabels = Object.fromEntries(
  CHROMATIC_SCALE.map((note, i) => [i * CHROMA_BANDS_PER_TONE, note])
)
Insert cell
// Generator cell: extracts a chroma vector per chunk and yields the
// (partially filled) result array after every chunk, so Observable can
// re-render dependents incrementally while extraction is in progress.
chroma = {
  let returnVal = new Array(chunkCount)
  // NOTE(review): mutates Meyda's module-level config; assumes no other
  // cell changes chromaBands while this generator runs — confirm.
  Meyda.chromaBands = chromaBandCount
  for (const i of R.range(0, chunkCount)) {
    const features = Meyda.extract(
      'chroma',
      // i-th window of chunkSize samples
      audioArray.slice(i * chunkSize, (i + 1) * chunkSize)
    )
    returnVal[i] = {
      startTime: msForSample(i * chunkSize),  // window start, in ms
      chroma: features,
    }
    yield returnVal
  }
}
Insert cell
Insert cell
// Chart height: keep a 2:1 aspect ratio (`width` is the reactive
// notebook width provided by Observable's standard library).
height = width / 2
Insert cell
// Chart margins in px; left/bottom leave room for the pitch and time axes.
margin = ({ top: 20, right: 20, bottom: 30, left: 30 })
Insert cell
// Band scale over time: one band per analysis chunk, keyed by the
// chunk's start time in milliseconds.
xScale = d3.scaleBand()
  .range([margin.left, width - margin.right])
  .domain(R.range(0, chunkCount).map(i => msForSample(i * chunkSize)))
Insert cell
// Continuous time scale spanning the whole clip, used for positions
// that fall between band boundaries (e.g. the play head).
xScaleContinuous = d3.scaleLinear()
  .range([margin.left, width - margin.right])
  .domain([0, msForSample(audioBuffer.length)])
Insert cell
yScale = {
const bands = R.range(0, CHROMA_BANDS_PER_TONE * 12)
// Rotate the bands array to get all the
// bands close to C on the bottom of the graph
// (otherwise Pure C will be the bottom of the graph
// and some close to C bands will be on top)
const rotationCount = Math.floor(CHROMA_BANDS_PER_TONE / 2)
bands.unshift.apply(bands, bands.splice(bands.length - rotationCount, rotationCount))
return d3.scaleBand()
.domain(bands)
.range([
height - margin.bottom,
margin.top
])
}
Insert cell
// Bottom time axis. Only the first tick is labelled for now.
// TODO: figure out a good way to add a final tick — may require
// switching from scaleBand to scaleTime.
xAxis = g => g
  .attr('transform', `translate(0, ${height - margin.bottom})`)
  .call(d3.axisBottom(xScale).tickValues([0]).tickFormat(formatDuration))
Insert cell
// Left pitch axis: one tick per pure tone (the labelled chroma bands),
// formatted with the note name.
yAxis = g => g.attr('transform', `translate(${margin.left}, 0)`)
  .call(
    d3.axisLeft(yScale)
      // Object.entries/keys yield STRING keys, but yScale's domain holds
      // numbers. Since d3 v6 band scales key their domain with an
      // InternMap, so the string '0' does not match the number 0 and the
      // tick would be dropped — convert back to numbers. (Numbers also
      // work with older, string-coercing d3 versions.)
      .tickValues(Object.keys(chromaBandLabels).map(Number))
      .tickFormat(t => chromaBandLabels[t])
  )
Insert cell
// Map chroma energy in [0, 1] onto the two-color ramp; CONTRAST is the
// power-scale exponent, tuning how strongly mid values are pushed
// toward one end of the ramp.
colorScale = d3.scalePow().exponent(CONTRAST).range(COLOR_SCALE).domain([0, 1])
Insert cell
Insert cell
Insert cell
// Calls monitorFn(audio) once per animation frame while `audio` is
// playing, and once after each seek, so dependents can track the
// playback position.
//
// Fixes over the previous version:
//  - a second 'play' event (or manual startMonitoring call) no longer
//    spawns a parallel rAF loop — previously only the most recent
//    frame id was cancellable, leaking the older loop;
//  - the 'seeked' handler is kept as a reference so listeners can be
//    removed via the new, optional dispose() method.
AudioPlayerMonitor = class {
  constructor(audio, monitorFn) {
    this._audio = audio
    this._monitorFn = monitorFn
    this._running = false
    this._nextFrame = undefined
    this.startMonitoring = this.startMonitoring.bind(this)
    this.stopMonitoring = this.stopMonitoring.bind(this)
    this.tick = this.tick.bind(this)
    this._onSeeked = () => this.tick(false)
    audio.addEventListener('play', this.startMonitoring)
    audio.addEventListener('pause', this.stopMonitoring)
    audio.addEventListener('seeked', this._onSeeked)
    // If the audio is currently playing, start monitoring immediately.
    if (!audio.paused) this.startMonitoring()
  }
  startMonitoring() {
    // Guard: never run two rAF loops at once.
    if (this._running) return
    this._running = true
    this.tick()
  }
  stopMonitoring() {
    this._running = false
    if (this._nextFrame !== undefined) {
      cancelAnimationFrame(this._nextFrame)
      this._nextFrame = undefined
    }
  }
  tick(requestNextFrame = true) {
    this._monitorFn(this._audio)
    // Only chain the next frame while the monitor loop is active, so a
    // one-shot tick (e.g. from 'seeked') never restarts the loop.
    if (requestNextFrame && this._running) {
      this._nextFrame = requestAnimationFrame(this.tick)
    }
  }
  // Stop monitoring and detach all listeners from the audio element.
  dispose() {
    this.stopMonitoring()
    this._audio.removeEventListener('play', this.startMonitoring)
    this._audio.removeEventListener('pause', this.stopMonitoring)
    this._audio.removeEventListener('seeked', this._onSeeked)
  }
}
Insert cell
Insert cell
Insert cell
// Round n to the nearest power of two — nearest in log space,
// so 3 → 4 and 6 → 8.
nearestPowerOf2 = n => 2 ** Math.round(Math.log(n) / Math.log(2))
Insert cell
// Convert a sample offset to milliseconds using the decoded buffer's
// sample rate. NOTE(review): the parameter name `hz` is misleading —
// the value passed in is a sample count, not a frequency.
msForSample = hz => hz * 1000 / audioBuffer.sampleRate
Insert cell
// Format a duration in milliseconds as m:ss or, when >= 1 hour, h:mm:ss.
// Fix: trailing fields are now zero-padded — 61000 ms used to render as
// "1:1" instead of "1:01".
formatDuration = ms => {
  const secondsTotal = Math.floor(ms / 1000)
  const hours = Math.floor(secondsTotal / 3600)
  const minutes = Math.floor((secondsTotal % 3600) / 60)
  const seconds = secondsTotal % 60
  const pad = n => String(n).padStart(2, '0')
  if (hours) return [hours, pad(minutes), pad(seconds)].join(':')
  return [minutes, pad(seconds)].join(':')
}
Insert cell
Insert cell
Insert cell
// Create a Web Audio context, falling back to the webkit-prefixed
// constructor when the standard one is unavailable (older Safari).
createAudioContext = () => {
  try {
    return new AudioContext()
  } catch {
    // Safari
    return new window.webkitAudioContext()
  }
}
Insert cell
// Read a Blob into an ArrayBuffer, falling back to FileReader on
// browsers without Blob.arrayBuffer (older Safari).
// Fix: the fallback previously listened on 'loadend' — which also fires
// after a FAILED read, silently resolving with null — and never used its
// reject parameter. Listen on 'load'/'error' so failures reject.
getFileArrayBuffer = async blob => {
  try {
    return await blob.arrayBuffer()
  } catch {
    // Safari
    return new Promise((resolve, reject) => {
      const reader = new FileReader()
      reader.addEventListener('load', () => resolve(reader.result))
      reader.addEventListener('error', () => reject(reader.error))
      reader.readAsArrayBuffer(blob)
    })
  }
}
Insert cell
// Decode audio bytes via the promise-based decodeAudioData, falling back
// to the callback form for browsers that lack it (older Safari).
// Fix: the executor previously named its parameters (reject, resolve)
// AND passed them to decodeAudioData swapped again — the double inversion
// happened to behave correctly but was a trap for maintainers. The
// parameters are now named and passed conventionally; behavior unchanged.
decodeAudioData = async (context, arrayBuffer) => {
  try {
    return await context.decodeAudioData(arrayBuffer)
  } catch {
    // Safari: decodeAudioData(buffer, successCallback, errorCallback)
    return new Promise((resolve, reject) => {
      context.decodeAudioData(arrayBuffer, resolve, reject)
    })
  }
}
Insert cell
Insert cell
Insert cell
// Endpoints of the heatmap color ramp (background → foreground),
// switched on the DARK_MODE toggle.
COLOR_SCALE = DARK_MODE ? ['#050A0F', '#CEF'] : ['#FAFCFF','#101C20']
Insert cell
// Color of the playback-position line, per theme.
PLAY_HEAD_COLOR = DARK_MODE ? '#A80' : 'rgba(0, 100, 150, 1)'
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell

One platform to build and deploy the best data apps

Experiment and prototype by building visualizations in live JavaScript notebooks. Collaborate with your team and decide which concepts to build out.
Use Observable Framework to build data apps locally. Use data loaders to build in any language or library, including Python, SQL, and R.
Seamlessly deploy to Observable. Test before you ship, use automatic deploy-on-commit, and ensure your projects are always up-to-date.
Learn more