Published
Edited
Nov 22, 2021
1 fork
Importers
3 stars
Insert cell
Insert cell
Insert cell
mutable nanlog = []
Insert cell
print(testrand)
Insert cell
// 1-D tensor of n ones (notebook scratch helper; ctNet keeps its own copy).
ones1d = (n) => tf.ones([n])
Insert cell
ones1d(16).arraySync()
Insert cell
Insert cell
Insert cell
Insert cell
nanlog
Insert cell
results.slice(-15)
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
coreloop.net.size
Insert cell
coreloop.net.weights.arraySync()[0][1]
Insert cell
Insert cell
Insert cell
// Generator cell: scatter-plots two chosen output dimensions against each
// other, re-rendering on each `slowloop` tick.
scatterplot = {
slowloop; // reactive dependency on the throttled loop. visualize two dimensions/nodes only.
var dataobjects = [];
for (var row of results) {
row = row.map((x) => x * 1000); // scale outputs up for plotting
var yp = coreloop.yprime_cpu;
dataobjects.push({
x: row[scattercol_x], // was: row[0],
y: row[scattercol_y],
// NOTE(review): assumes at least 3 nodes — row[2] is undefined for 2-node examples; confirm
z: row[2],
total: row[0] + row[1] + row[2],
yp: yp
});
// logger.log("xyz", row);
}

yield Plot.plot({
marks: [
// NOTE(review): "X-Axis"/"Y-Axis" are not fields of dataobjects and d.name is
// never set, so this text mark likely renders nothing — verify intent.
Plot.text(dataobjects, { x: "X-Axis", y: "Y-Axis", text: (d) => d.name }),
Plot.dot(dataobjects, {
x: "x", // { domain: [0 - 100] },
y: "y",
// NOTE(review): in_fp is a property of coreloop, not of dataobjects — d.in_fp
// is always undefined here, so every dot renders black. todo
stroke: (d) => (d.in_fp ? "red" : "black") // todo
})
]
});
}
Insert cell
Insert cell
import { table } from "@binarmaya/table-from-array-of-data" // proof-of-concept; better to make own, or use d3
Insert cell
table(slowloop.net.weights_cpu, {
sortable: false,
rank: false,
header: false
})
Insert cell
Insert cell
chosen_example
Insert cell
chosen_example
Insert cell
// Main simulation cell: yields one snapshot per Euler step (see runSimulation).
// The commented-out lines are alternate examples kept for quick switching.
coreloop = {
//reset_button;
//examples;
// mutable results = [];
return runSimulation(chosen_example);
//return runSimulation(osc2NodeExampleAlternate());
// return runSimulation(osc2NodeExample());
// return = runSimulation(osc3NodeExample())
// return runSimulation(osc4NodeExample());
//return runSimulation(Random16Example());
//return runSimulation(Beer1995Fig4a3a());
}
Insert cell
// Candidate nets for the example chooser.
// NOTE(review): osc4NodeExample() appears twice in this list (first and fifth
// entries) — confirm whether the duplicate is intentional (e.g. as a default).
examples = [
// RandomNetExample(),
osc4NodeExample(),
Beer1995Fig4a3a(),
osc2NodeExample(),
osc3NodeExample(),
osc4NodeExample(),
osc2NodeExampleAlternate(),
Beer1995Example7()
]
Insert cell
// Throttled view of coreloop: re-yields the live loop only on every 10th
// step, so expensive renderers downstream don't run on every tick.
slowloop = {
// sample slowly so we can do slower graphical ops like graphviz/dot.
while (true) {
if (coreloop.count % 10 == 0) {
yield coreloop;
}
await Promises.delay(1);
}
}
Insert cell
// Side-effect cell: records each step's CPU outputs into `results`, logs
// diagnostics, then — after a delay — disposes the step's GPU tensors so
// tf memory stays bounded (see numTensors log line).
monitor_1 = {
results.push(coreloop.outputs_cpu);

logger.log(
"outputs",
coreloop.outputs_cpu.map((x) => ppfloat(x))
);

logger.log("numTensors", tf.memory().numTensors);
logger.log("count", coreloop.count);
logger.log("in a fixedpoint", coreloop.in_fp);
logger.log("size", coreloop.net.size);
logger.log("changey", changey(coreloop));
logger.log("run_duration", coreloop.net.run_duration);
logger.log("chosen_example", chosen_example.name);
//return runSimulation(chosen_example);

// if i swap slowloop for coreloop on some of these, the whole block slows down. puzzling for now.

// Take out the trash after delaying a second — the delay gives downstream
// cells time to read these tensors before they are freed.
//
await Promises.delay(1000);
coreloop.outputs.dispose();
//"So the cause of the error you are seeing is that you are passing null values as inputs to the model. " - for undefined reading backend bug.
coreloop.yprime.dispose();
// todo: push things into a trash buffer, and retire from the old end?
}
Insert cell
Insert cell
// This is the strangest example, skip it unless working on alternate activation functions.
// 2-node oscillator wired like osc2NodeExample, but squashed with sin instead
// of the logistic sigmoid.
function osc2NodeExampleAlternate() {
  // `a` is currently unused by the squashfn below; the intended blend
  // F = a*tanh(x) + (1-a)*sin(x) never got working (vectorization issues).
  // Referencing a_param keeps a reactive dependency so this cell re-evaluates
  // when the slider moves — do not remove without checking that.
  let a = 0.5; // 0 is sin, 1 is tanh.
  a = a_param;
  let net = ctNet({
    size: 2,
    // squashfn: (x) => tf.sigmoid(x) // <- default so not needed.
    squashfn: (x) => tf.sin(x), // not interesting but this is where you'd do sin+tanh ...

    /* see https://edizquierdo.wordpress.com/2006/12/09/generative-music-creative-dynamical-systems-and-ctrnn-activation-functions/
       Where F=A*tanh + (1-A)*sin, thus A=1 is the simple tanh, A=0 is the sine
       wave, A=0.5 is what we just showed. As soon as we get closer to the tanh
       (A=0.75) the system begins to get more 'classical' looking... */

    init_weights: [
      [4.5, 1],
      [-1, 4.5]
    ]
  });
  net.states = tf.tensor1d([2, 3]);
  net.biases = tf.tensor1d([-2.75, -1.75]);

  // net.run_duration = 100000;
  net.step_size = 1;
  net.name = "2-Node, Alt activation function.";
  // fix: freeze via net.freeze (as osc3NodeExample does) so weights_cpu is
  // cached; plain Object.freeze left weights_cpu unset, breaking the
  // table/graphviz cells that read it.
  net.freeze(net, net.weights);
  return net;
}
Insert cell
// Simple 2-node CTRNN oscillator (mutually coupled, strong self-excitation).
function osc2NodeExample() {
  let net = ctNet({
    size: 2,
    init_weights: [
      [4.5, 1],
      [-1, 4.5]
    ]
  });
  net.states = tf.tensor1d([2, 3]);
  net.biases = tf.tensor1d([-2.75, -1.75]);

  net.run_duration = 100000;
  net.step_size = 1;
  net.name = "Simple 2-Node oscillator.";
  // fix: freeze via net.freeze (as osc3NodeExample does) so weights_cpu is
  // cached; plain Object.freeze left weights_cpu unset, breaking the
  // table/graphviz cells that read it.
  net.freeze(net, net.weights);
  return net;
}
Insert cell
// 3x3 net: nodes 1-2 form the 2-node oscillator; node 3 is weakly coupled
// and slow (tau = 5).
function osc3NodeExample() {
  const net = ctNet({
    size: 3,
    init_weights: [
      [4.5, 1, 0.1],
      [-1, 4.5, 0],
      [1.1, 0.3, 1]
    ]
  });
  net.states = tf.tensor1d([2, 3, 100]);
  net.biases = tf.tensor1d([-2.75, -1.75, 0]);
  net.taus = tf.tensor1d([1, 1.1, 5]); // node 3 integrates slowly

  net.step_size = 1;
  net.name = "Simple 3-Node.";
  // Caches weights_cpu and Object.freezes the net.
  net.freeze(net, net.weights);
  return net;
}
Insert cell
// 4x4 net: the 2-node oscillator core plus two extra weakly-coupled nodes
// with differing time constants.
function osc4NodeExample() {
  let net = ctNet({
    size: 4,
    init_weights: [
      [4.5001, 1, 0.1, 0],
      [-1, 4.5, 0, 0.2],
      [1.1, 0.3, 1, 0.6],
      [1.1, -0.5, 1, -0.1]
    ]
  });
  net.states = tf.tensor1d([2, 3, 10, 5]);
  net.biases = tf.tensor1d([-2.75, -1.75, 0, 1]);
  net.taus = tf.tensor1d([1, 1.1, 5, 2]); // nodes 3 and 4 are slower

  //net.run_duration = 10000;
  net.step_size = 1;
  net.name = "Simple 4-Node";
  // fix: freeze via net.freeze (as osc3NodeExample does) so weights_cpu is
  // cached; plain Object.freeze left weights_cpu unset, breaking the
  // table/graphviz cells that read it.
  net.freeze(net, net.weights);
  return net;
}
Insert cell
rand1d(9).arraySync()
Insert cell
// 1-D tensor of n uniform random values drawn from [0, limit).
function rand1d(n, limit = 1) {
  const values = [];
  for (let i = 0; i < n; i++) {
    values.push(Math.random() * limit);
  }
  return tf.tensor1d(values);
}
Insert cell
coreloop.net.weights.arraySync()
Insert cell
// doesn't oscillate but 11C from same figure does, and this is the parameter
// set used in 1st example above.
function Beer1995Fig4a3a() {
  let net = ctNet({
    size: 2,
    init_weights: [
      [5.25, 1],
      [-1, 5.25]
    ]
  });
  net.states = tf.tensor1d([20, 53]); // what here?
  net.biases = tf.tensor1d([-3.4, -2.5]);

  //net.run_duration = 3000;
  net.step_size = 1;
  net.name =
    "Figure 4a3a from Beer 1995. Doesn't oscillate, but similar to those that do.";
  // fix: freeze via net.freeze (as osc3NodeExample does) so weights_cpu is
  // cached; plain Object.freeze left weights_cpu unset, breaking the
  // table/graphviz cells that read it.
  net.freeze(net, net.weights);
  return net;
}
Insert cell
// Parameter set from Beer 1995, example 7.
function Beer1995Example7() {
  let net = ctNet({
    size: 2,
    init_weights: [
      [6.3, 1],
      [-1, 6.3]
    ]
  });
  net.states = tf.tensor1d([20, 53]); // what here?
  net.biases = tf.tensor1d([-3.75, -2.65]);

  //net.run_duration = 3000;
  net.step_size = 1;
  net.name = "7th example from Beer 1995. Try different inputs?";
  // fix: freeze via net.freeze (as osc3NodeExample does) so weights_cpu is
  // cached; plain Object.freeze left weights_cpu unset, breaking the
  // table/graphviz cells that read it.
  net.freeze(net, net.weights);
  return net;
}
Insert cell
Insert cell
// NOTE(review): "nodelgl" is not a standard tfjs backend name ("cpu", "webgl",
// "wasm", "tensorflow") — setBackend likely rejects/falls back; confirm the
// intended backend (next cell reports what was actually selected).
tf.setBackend("nodelgl")
Insert cell
tf.getBackend()
Insert cell
// τy' = -y + Wσ(g(y + θ)) + I
//
// Construct a continuous-time recurrent neural network (CTRNN).
// spec may be:
//   - undefined  -> default 3-node net with random weights
//   - a number   -> net of that size with random weights
//   - { size, init_weights, squashfn }
//       init_weights: 2d array [size][size], or a () => number generator
//       squashfn:     alternate activation function (default: logistic sigmoid)
// Returns a mutable `net` record; callers typically set states/biases/taus,
// set step_size, then call net.freeze(net, net.weights) to lock it.
function ctNet(spec) {
  if (spec === undefined) {
    spec = {};
  }

  // Capture the activation override before spec may be replaced below.
  let override_squashing_function = spec["squashfn"] || false;

  if (typeof spec === "number") {
    spec = { size: spec };
  }

  // One Euler integration step. Avoids `this`: takes the net plus the live
  // state/output tensors, returns fresh tensors along with CPU copies.
  function docalc(net, livestates, liveoutputs) {
    // todo: add input_weights?

    /*
      yprime = -states + total_inputs
      delta = yprime * step_size / tau
      states += delta
      outputs = tf.sigmoid(gain*(states+bias)) */

    // Vectorized activation function.
    function squash(x) {
      if (override_squashing_function === false) {
        return tf.sigmoid(x);
      } else {
        return spec["squashfn"](x);
      }
    }
    // todo: api for other activation functions, eg @ylecun 1.7159 * tanh(2x/3)
    // http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf
    // more: https://edizquierdo.wordpress.com/2006/12/09/generative-music-creative-dynamical-systems-and-ctrnn-activation-functions/
    // eg "...half way between sin and tanh"

    const calc = tf.tidy(() => {
      let total_inputs = net.weights.dot(liveoutputs).add(net.ext_inputs);
      /* note: the returned yprime/outputs tensors escape the tidy and are
         disposed downstream (see the monitor cell). */

      let yprime = total_inputs.sub(livestates); // y' = -y + g(Wo + ϑ)
      let delta = yprime.mul(tf.scalar(net.step_size)).div(net.taus); // Euler step
      let trash = livestates;
      livestates = livestates.add(delta);
      trash.dispose(); // previous state tensor is no longer referenced
      liveoutputs = squash(
        livestates.transpose().add(net.biases).mul(net.gains)
      ); // σ

      // Bail out loudly if the integration has blown up.
      if (livestates.arraySync().includes(NaN)) {
        nanlog.push(livestates.arraySync());
        throw { states: livestates };
      }

      return {
        states: livestates,
        states_cpu: livestates.arraySync(),
        outputs: liveoutputs,
        outputs_cpu: liveoutputs.arraySync(),
        yprime: yprime,
        yprime_cpu: yprime.arraySync()
      };
    });
    return calc;
  }

  /* Have the dynamics settled down? True only when every component of y' is
     exactly zero. Logic in js i.e. CPU for now.
     TODO: use tf.whereAsync to use GPU */
  function in_fixed_point(yp) {
    return tf.tidy(() => {
      const mask = yp.equal([0]).asType("bool");
      const isz = mask.arraySync().every((item) => item !== 0);
      return isz;
    });
  }

  const ones1d = (n) => {
    return tf.ones([n]);
  };

  // fix: these generators now use their `n` parameter for the output shape;
  // previously they closed over the outer `size`, which happened to work only
  // because `size` was assigned before the first call.
  const genRand2d = (n) =>
    tf.tensor2d(
      Array.from({ length: n * n }, () => Math.random() * 2),
      [n, n]
    );

  const genFixed2d = (n) =>
    tf.tensor2d(Array(n * n).fill(0.1), [n, n]);

  const zeros1d = (n) => {
    return tf.zeros([n]);
  };

  if (!spec.size) {
    spec.size = 3;
  } // todo: move to a principled Examples structure

  let size = spec.size;

  var net = { size: spec.size };

  net.ones1d = ones1d;
  net.zeros1d = zeros1d;

  if (spec.init_weights) {
    if (Array.isArray(spec.init_weights)) {
      net.weights = tf.tensor2d(spec.init_weights, [size, size]);
    } else if (typeof spec.init_weights === "function") {
      let values = Array.from({ length: size * size }, spec.init_weights);
      net.weights = tf.tensor2d(values, [size, size]);
    } else {
      // fix: was `throw ((msg, value))` — the comma operator discarded the
      // message and threw a bare value; throw a proper Error instead.
      throw new Error(
        "init_weights should be an array or a function, got: " +
          spec.init_weights
      );
    }
  } else {
    net.weights = genRand2d(size);
  }

  // Fixed parameters of the network.
  net.biases = zeros1d(size);
  net.taus = ones1d(size);
  net.gains = ones1d(size);

  // things that change (will be generators not mutables):
  net.ext_inputs = zeros1d(size);
  net.states = zeros1d(size);
  net.outputs = zeros1d(size);

  // todo - expose functions as an api here
  net.eulerStep = docalc;
  net.in_fixed_point = in_fixed_point;

  net.run_duration = 10000;

  // Freeze the net after caching a CPU copy of the weights (weights_cpu),
  // which the table/graphviz cells read.
  net.freeze = function (net, weights) {
    if (net.weights != undefined) {
      net.weights = weights;
      net.weights_cpu = net.weights.arraySync();
      Object.freeze(net);
    } else {
      // fix: was a thrown string; use an Error so stacks are preserved.
      throw new Error("Can't freeze a network with no weight matrix.");
    }
  };

  return net;
}
Insert cell
// Async generator driving the simulation: one Euler step per iteration,
// yielding a snapshot (GPU tensors plus CPU copies) for up to
// net.run_duration steps.
// Note: the yielded `outputs`/`yprime` tensors are disposed by the consumer
// (see the monitor cell) to keep tf memory bounded.
async function* runSimulation(net) {
  let loopcount = 0;
  let livestates = net.states;
  let liveoutputs = net.outputs;
  // fix: removed dead `var calc;` — it was immediately shadowed by the
  // loop-local `const calc` and never read.
  while (loopcount++ < net.run_duration) {
    const calc = await net.eulerStep(net, livestates, liveoutputs);
    livestates = calc.states;
    liveoutputs = calc.outputs;

    yield {
      net: net,
      states: calc.states,
      states_cpu: calc.states.arraySync(),
      outputs: calc.outputs,
      outputs_cpu: calc.outputs.arraySync(),
      yprime: calc.yprime,
      yprime_cpu: calc.yprime.arraySync(),
      count: loopcount,
      in_fp: net.in_fixed_point(calc.yprime)
    };
  }
}
Insert cell
// Pretty-print a tensor: CPU copy with each value rounded to 2 significant figures.
pptensor = function pptensor(t) {
  const values = t.arraySync();
  return values.map((v) => ppfloat(v));
}
Insert cell
// Round x to 2 significant figures, returned as a Number (e.g. 3.14159 -> 3.1).
ppfloat = function ppfloat(x) {
  const twoSig = Number.parseFloat(x).toPrecision(2);
  return Number.parseFloat(twoSig);
}
Insert cell
// Extract column n from a 2-D array stored as an array of rows.
function col(data, n) {
  return data.map((row) => row[n]);
}
Insert cell
// Aggregate rate of activation change: |sum of y' components|,
// rounded to 2 significant figures.
function changey(coreloop) {
  const total = coreloop.yprime.arraySync().reduce((sum, v) => sum + v, 0);
  return Math.abs(ppfloat(total));
}
Insert cell
Insert cell
// Build one Plot.lineY mark per node showing the last 400 samples of that
// node's output (reads the global `results`). Colors come from an HCL ramp,
// shuffled with a fixed seed so they are stable across re-runs.
function makePlotMarks(loop) {
  const net = loop.net;
  const shuffle = d3.shuffler(d3.randomLcg(165));
  const netPalette = shuffle(
    d3.quantize(d3.interpolateHcl("#f4e153", "#362142"), net.size)
  );
  const plotlines = [];
  for (let ix = 0; ix < net.size; ix++) {
    const recent = col(results, ix).slice(-400);
    plotlines.push(Plot.lineY(recent, { stroke: netPalette[ix] }));
  }
  return plotlines;
}
Insert cell
// Render a net's weight matrix as a graphviz digraph via the Observable `dot`
// template tag. Edge color encodes sign (blue = excitatory, red = inhibitory);
// pen width encodes |weight|.
function net2dot(net) {
  // fix: use the `net` argument — previously this read the global
  // coreloop.net, leaving the parameter dead.
  let w = net.weights_cpu;

  function edgelist_for_viz() {
    // Map |weight| to a pen-width bucket.
    function w2ink(x) {
      const mag = Math.abs(x);
      if (mag > 2) return 5;
      if (mag > 1) return 3;
      if (mag > 0.5) return 1;
      // fix: previously fell through returning undefined for small weights,
      // emitting "penwidth=undefined"; draw a hairline instead.
      return 0.5;
    }

    let dotlist = [];
    for (var fromNode in w) {
      for (var toNode in w[fromNode]) {
        const weight = w[fromNode][toNode];
        const edgecol = weight > 0 ? "blue" : "red";
        dotlist.push(
          `${fromNode} -> ${toNode} [color=${edgecol} penwidth=${w2ink(
            weight
          )}]\n`
        );
      }
    }

    return dotlist.join("\n");
  }

  let dotintro = `digraph {
graph [fontname = helvetica];
node [fontname = helvetica];
edge [fontname = helvetica];
node [style=filled]`;

  // Per-node comment lines (placeholder for future per-node styling).
  let dotnode = ``;
  for (var n in w) {
    dotnode = dotnode + `# node ${n} \n`;
  }

  let dotedges = edgelist_for_viz();

  // fix: node comments were previously appended AFTER the closing brace,
  // producing content outside the digraph body; keep everything inside it.
  return dot`${dotintro}\n${dotnode}${dotedges}}`;
}
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
Insert cell
/* HundredNodesLargeRandomWeights = ctNet({
size: 100,
init_weights: () => Math.random() * 1000
}).weights.arraySync() */
Insert cell
Insert cell
tf = require("@tensorflow/tfjs@3.9.0")
Insert cell
import { div, print } from "@visnup/hello-danfo-js" // https://blog.tensorflow.org/2020/08/introducing-danfo-js-pandas-like-library-in-javascript.html
Insert cell
Insert cell

Purpose-built for displays of data

Observable is your go-to platform for exploring data and creating expressive data visualizations. Use reactive JavaScript notebooks for prototyping and a collaborative canvas for visual data exploration and dashboard creation.
Learn more