Published
Edited
Apr 16, 2018
5 stars
Insert cell
Insert cell
// Load TensorFlow.js; every tensor operation in the cells below goes through this handle.
tf = require('@tensorflow/tfjs')
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
inputData = {
const data = tf.tidy(() => {
const numPoints = 200;
const [a, b, c, d] = coeff.map(i => tf.scalar(i)); // a = tf.scalar(coeff[0]), and so on

const x = tf.randomUniform([numPoints], -1, 1);
const y = a.mul(x.pow(tf.scalar(3)))
.add(b.mul(x.square()))
.add(c.mul(x))
.add(d)
.add(tf.randomNormal([numPoints], 0, sd));

return {x, y};
});
yield data;
// This cell is re-evaluated and creates new tensors as we interact with the sliders to adjust parameters.
// We need to manually dispose them to avoid memory leak.
// See: https://beta.observablehq.com/@nsthorat/clean-up-deeplearn-js-tensor-generator-cells
try {
yield invalidation;
} finally {
data.x.dispose();
data.y.dispose();
}
}
Insert cell
Insert cell
Insert cell
cubicPredictions = {
const predictions = tf.tidy(() => {
// Model parameters:
const a = tf.variable(tf.zeros([1]));
const b = tf.variable(tf.zeros([1]));
const c = tf.variable(tf.zeros([1]));
const d = tf.variable(tf.zeros([1]));

// Model: f(x) = a * x^3 + b * x^2 + c * x + d
const f = (x) =>
a.mul(x.pow(tf.scalar(3)))
.add(b.mul(x.square()))
.add(c.mul(x))
.add(d);

// Optimizer: Stochastic Gradient Descent (SGD)
const optimizer = tf.train.sgd(0.8);
let mse = null;

// Training Loop
for (let i = 0; i < numIterations; i++) {
optimizer.minimize(() => loss(f(inputData.x), inputData.y));
}

const predTensor = f(inputData.x);
// tf.tidy() will not clean up variables
[a, b, c, d].forEach(x => {
x.dispose();
});

return predTensor;
});
yield predictions;
try {
yield invalidation;
} finally {
predictions.dispose();
}
}
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
dnnPredictions = {
const dnn = tf.sequential({
name: 'dnn',
layers: [
tf.layers.dense({units: 10, inputShape: [1], activation: 'tanh'}),
tf.layers.dense({units: 5, activation: 'tanh'}),
tf.layers.dense({units: 5, activation: 'tanh'}),
tf.layers.dense({units: 1, activation: 'tanh'})
]
});
dnn.compile({loss: 'meanSquaredError', optimizer: 'Adam'});
const history = await dnn.fit(inputData.x, inputData.y);
const predictions = tf.tidy(() =>
dnn.predict(inputData.x.expandDims(1))
);
yield predictions;
try {
yield invalidation;
} finally {
predictions.dispose();
}
}
Insert cell
Insert cell
Insert cell
WTF = {
let totalCubic = 0;
let totalDNN = 0;
const cubic = cubicPredictions.dataSync();
const dnn = dnnPredictions.dataSync();
const y = inputData.y.dataSync();
for(let i=0; i<200; i++) {
totalCubic += Math.pow(cubic[i] - y[i], 2);
totalDNN += Math.pow(dnn[i] - y[i], 2);
}
return {cubic: totalCubic / 200.0, dnn: totalDNN / 200.0}
}
Insert cell
dnnLossV1 = {
const mseTensor = tf.tidy(() => loss(tf.tensor(dnnPredictions.dataSync()), inputData.y));
const mse = mseTensor.dataSync()[0];
mseTensor.dispose();
return md`Mean Squared Error: ${mse}`;
}
Insert cell
dnnLossV2 = {
const mseTensor = tf.tidy(() => loss(dnnPredictions, inputData.y));
const mse = mseTensor.dataSync()[0];
mseTensor.dispose();
return md`Mean Squared Error: ${mse}`;
}
Insert cell
dnnLossV3 = {
const mseTensor = tf.tidy(() => loss(tf.tensor(dnnPredictions.dataSync()), tf.tensor(inputData.y.dataSync())));
const mse = mseTensor.dataSync()[0];
mseTensor.dispose();
return md`Mean Squared Error: ${mse}`;
}
Insert cell
// Number of live tensors currently held by the tf.js backend — useful for
// spotting leaks from re-evaluated generator cells.
numTensors = tf.memory().numTensors
Insert cell
Insert cell
Insert cell
Insert cell

Purpose-built for displays of data

Observable is your go-to platform for exploring data and creating expressive data visualizations. Use reactive JavaScript notebooks for prototyping and a collaborative canvas for visual data exploration and dashboard creation.
Learn more