class Network {
  layers = [];
  sigError = 1;  // squared error of the most recent training sample
  epoch = 0;
  // layers_neurons is an array of layer sizes; each entry becomes a Layer
  // (a sketch of the Layer shape this class assumes is at the bottom of this file)
  constructor(layers_neurons){
    for(let i = 0; i < layers_neurons.length; i++){
      let neuron_count = layers_neurons[i];
      this.layers.push(new Layer(i, neuron_count));
      let this_layer = this.layers[this.layers.length-1];
      let prev_layer = this.layers[this.layers.length-2];
      // the input layer has no previous layer, so it gets no incoming weights
      if(!prev_layer) continue;
      // give every neuron one random weight in [-1, 1) per neuron in the previous layer
      for(let j = 0; j < neuron_count; j++){
        for(let k = 0; k < prev_layer.neurons.length; k++){
          this_layer.neurons[j].weights[k] = Math.random() * 2 - 1;
        }
      }
    }
  }
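  // e.g. new Network([2, 3, 1]) gives you a 2-neuron input layer, a 3-neuron
  // hidden layer, and a 1-neuron output layer, randomly weighted everywhere
  // except the input layer (the shape the XOR example at the bottom of this
  // file uses; the sizes here are just an illustration)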
  input(input){
    let result = input.slice();
    // feed the input into the first layer,
    // then feed the result of each layer into the next layer
    for(let i = 0; i < this.layers.length; i++){
      result = this.layers[i].activate(result);
    }
    return result;
  }
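  // e.g. with a hypothetical new Network([2, 3, 1]), net.input([0, 1]) feeds
  // the two inputs forward and returns a one-element array holding the output
  // neuron's activation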
  // generator: yields the network once per epoch so callers can watch training
  *train(tests, ideals){
    this.epoch = 0;
    this.error_over_time = [];
    let learningRate = 0.3;
    let momentum = 0.3;
    let minError = 0.01;
    let maxEpochs = 30 * 1000;
    this.sigError = 1;
    this.consec_valid = 0;
    this.valid_over_time = [];
    this.min_over_time = [];
    this.passed = false;
    let loop = true;
    while(loop){
      if(this.epoch > maxEpochs){
        break;
      }
      // train on one random sample per epoch (as written, this loop only runs once)
      for(let i = 0; i < 1; i++){
        // start with 0 error for this pass
        this.sigError = 0;
        // pick a random test/ideal pair (probably could just loop thru them in
        // order, but picking at random is what makes it *stochastic* gradient descent)
        let sample_idx = Math.floor(Math.random() * tests.length);
        let test_input = tests[sample_idx];
        let ideal_output = ideals[sample_idx];
        // ask the network to predict output for the given test input
        this.input(test_input);
        // grab the last layer for easy reference
        let last_layer = this.layers[this.layers.length-1];
        // loop backward thru our layers, last to first, and figure the gradients
        for(let k = this.layers.length-1; k >= 0; k--){
          if(k === this.layers.length-1){
            // on the last layer we can read the error directly: the gradient is
            // the (ideal - predicted) difference scaled by the sigmoid's
            // derivative, and the squared difference accumulates into sigError
            for(let j = 0; j < last_layer.neurons.length; j++){
              let neuron = last_layer.neurons[j];
              let output = neuron.output;
              let delta = ideal_output[j] - output;
              neuron.gradient = output * (1 - output) * delta;
              this.sigError += Math.pow(delta, 2);
            }
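            // Why output * (1 - output) * delta: for squared error
            // E = ½(ideal - output)² on a sigmoid neuron, the chain rule gives
            //   -dE/dnet = (ideal - output) * output * (1 - output)
            // where output * (1 - output) is the sigmoid's derivative written
            // in terms of its own output; storing the negative gradient is why
            // the update step below can simply ADD lr * gradient * input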
          }else{
            // otherwise, for a hidden layer, we have to do a little more work
            // to get the gradient
            let hidden_layer = this.layers[k];
            // grab a reference to the layer AFTER the one we're calculating
            let following_layer = this.layers[k+1];
            // for each neuron in the given layer, reach into the FOLLOWING
            // layer and sum up an error: each following neuron's weight for
            // this neuron, eased by that neuron's OWN gradient (proportional
            // contribution)
            // (chain rule // backpropagation // local data, global results)
            for(let n = 0; n < hidden_layer.neurons.length; n++){
              let neuron = hidden_layer.neurons[n];
              let output = neuron.output;
              let error = 0;
              // for every neuron in the layer following this one, add the
              // weight that says how much input we have on that neuron,
              // scaled by that neuron's own gradient
              for(let n2 = 0; n2 < following_layer.neurons.length; n2++){
                let following_neuron = following_layer.neurons[n2];
                error += following_neuron.weights[n] * following_neuron.gradient;
              }
              neuron.gradient = output * (1 - output) * error;
            } // end for neurons
          } // end else
        } // end for layers (backward pass)
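        // In symbols, the hidden-layer rule above is the classic backprop step:
        //   gradient_j = output_j * (1 - output_j) * Σ_k (weight_kj * gradient_k)
        // where k runs over the following layer's neurons and weight_kj is
        // neuron k's weight on neuron j's output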
        // Once all the gradients are calculated, work forward through the
        // layers and update the weights: w += lr * gradient * input
        for(let i = 0; i < this.layers.length; i++) {
          let layer = this.layers[i];
          // the layer feeding into this one; for the input layer (i === 0)
          // there's no previous layer, but so long as input neurons have no
          // incoming weights (as in the sketch at the bottom of this file),
          // the fallback below is never actually read
          let prev_layer = this.layers[i-1];
          if(typeof prev_layer === 'undefined'){
            prev_layer = this.layers[0]; // input layer
          }
          // For each neuron in each layer, ...
          for(let j = 0; j < layer.neurons.length; j++) {
            let neuron = layer.neurons[j];
            // Nudge the bias along its gradient.
            neuron.bias += learningRate * neuron.gradient;
            // For each weight, ...
            for(let k = 0; k < neuron.weights.length; k++) {
              // add learning rate * this neuron's gradient * the output of
              // the corresponding preceding neuron
              neuron.deltas[k] = learningRate * neuron.gradient * prev_layer.neurons[k].output;
              neuron.weights[k] += neuron.deltas[k];
              // plus momentum: carry along a fraction of the previous step's
              // delta (skipped on the very first pass, when there is none yet)
              if(neuron.previousDeltas[k]){
                neuron.weights[k] += momentum * neuron.previousDeltas[k];
              }
            }
            // Remember this step's deltas for the momentum term next time.
            neuron.previousDeltas = neuron.deltas.slice();
          } // end for neurons
        } // end for layers (weight update)
      } // end for samples
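      // To make the update rule concrete (hypothetical numbers): with
      // learningRate 0.3, momentum 0.3, a gradient of 0.1, a preceding output
      // of 1.0, and a previous delta of 0.02, a weight moves by
      //   0.3 * 0.1 * 1.0  +  0.3 * 0.02  =  0.03 + 0.006  =  0.036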
      // validate: does the network currently get every sample right?
      this.passed = true;
      for(let i = 0; i < tests.length; i++){
        let output = this.input(tests[i]);
        // only the first output neuron is checked, which is all a
        // single-output net (like XOR) needs
        if(Math.round(output[0]) != ideals[i][0]){
          this.passed = false;
        }
      }
      if(this.passed){
        this.consec_valid++;
      }else{
        this.consec_valid = 0;
      }
      this.valid_over_time.push(this.consec_valid);
      this.epoch++;
      this.error_over_time.push(this.sigError);
      this.min_over_time.push(Math.min(...this.error_over_time));
      // keep looping until the error is small enough AND the network has
      // validated a few epochs in a row (sigError comes from a single random
      // sample, so one low reading on its own isn't proof of convergence)
      loop = this.sigError > minError || this.consec_valid < 3;
      // hand control back to the caller once per epoch
      yield this;
    } // end while
  }
}
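
// For reference, a minimal sketch of the Layer class that Network assumes:
// sigmoid neurons (implied by the output * (1 - output) factor in the
// gradient math above) whose `weights`, `deltas`, and `previousDeltas` are
// indexed by the previous layer's neurons, plus an activate() that the input
// layer uses as a pass-through. This is an assumption pieced together from
// how Network uses Layer, not the real implementation; skip it if your
// project already defines Layer.
class Layer {
  constructor(index, neuron_count){
    this.index = index;
    this.neurons = [];
    for(let i = 0; i < neuron_count; i++){
      this.neurons.push({
        weights: [],         // one weight per neuron in the previous layer
        deltas: [],          // this step's weight changes (for momentum)
        previousDeltas: [],  // the prior step's weight changes
        bias: Math.random() * 2 - 1,
        output: 0,
        gradient: 0
      });
    }
  }
  activate(inputs){
    return this.neurons.map((neuron, i) => {
      if(neuron.weights.length === 0){
        // input layer: no incoming weights, just pass the input through
        neuron.output = inputs[i];
      }else{
        // weighted sum of the previous layer's outputs, squashed by a sigmoid
        let sum = neuron.bias;
        for(let k = 0; k < neuron.weights.length; k++){
          sum += neuron.weights[k] * inputs[k];
        }
        neuron.output = 1 / (1 + Math.exp(-sum));
      }
      return neuron.output;
    });
  }
}

// Example usage (XOR, which is what the single-output validation loop above
// suggests this was written for):
//   const net = new Network([2, 3, 1]);
//   for(const state of net.train([[0,0],[0,1],[1,0],[1,1]], [[0],[1],[1],[0]])){
//     // one yield per epoch; inspect state.epoch, state.sigError, state.passed
//   }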