Skip to content

Commit

Permalink
Add method to distribute array of weight references into network's neurons
Browse files Browse the repository at this point in the history
  • Loading branch information
Eric L. Solis committed Nov 26, 2020
1 parent 46a2a36 commit 3f5e5f4
Show file tree
Hide file tree
Showing 4 changed files with 40 additions and 6 deletions.
7 changes: 7 additions & 0 deletions src/ai/ann/Layer.js
Original file line number Diff line number Diff line change
@@ -1,15 +1,22 @@
const { chunk } = require('lodash');
const { Neuron } = require('./Neuron');

/**
 * A fully-connected layer of neurons.
 *
 * @param {[number, string]} descriptor - tuple of [size, activation]:
 *   neuron count and the activation name (e.g. 'sigmoid') forwarded to
 *   each Neuron.
 * @returns {object} layer exposing `neurons`, `error`, `predict()` and
 *   `distributeWights()`.
 */
function Layer([size, activation]) {
  let layer = {
    neurons: Array.from(Array(size)).map(_ => Neuron(1, activation)),
    error: Infinity,
    predict,
    distributeWights,
  };

  // Run every neuron on the same input vector and cache the outputs.
  function predict(inputs) {
    layer.output = layer.neurons.map(neuron => neuron.predict(inputs));
    return layer.output;
  }

  // Split a flat array of weight objects across the neurons, one equal
  // chunk per neuron. NOTE: the "Wights" spelling is the public name that
  // Network.js calls, so it must stay.
  function distributeWights(weights) {
    // Math.ceil guards against a non-divisible weight count: lodash
    // truncates fractional chunk sizes, which would create more chunks
    // than neurons and silently drop the trailing weights.
    const perNeuron = Math.ceil(weights.length / layer.neurons.length);
    chunk(weights, perNeuron).forEach((weightChunk, i) => {
      layer.neurons[i].weights = weightChunk;
    });
  }

  return layer;
}

Expand Down
23 changes: 19 additions & 4 deletions src/ai/ann/Network.js
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
const { Layer } = require('./Layer');
const { delta, mse } = require('./math');
const { sum } = require('lodash');
const { delta, mse, weightsForArchitecture } = require('./math');
const { sum, times } = require('lodash');

function Network(layerDescriptors, learningRate = 0.5) {
let layers = [];
let trainingData = [];
layers = layerDescriptors.map(Layer);
layers = layerDescriptors.slice(1).map(Layer);
const architecture = layerDescriptors.map(([n]) => n);

const network = {
layers,
Expand All @@ -14,7 +15,16 @@ function Network(layerDescriptors, learningRate = 0.5) {
trainingData,
train,
converges,
distributeWights,
};
// Hand each layer its contiguous slice of a flat weight-object array.
// Layer k (fed by architecture[k] inputs, holding architecture[k+1]
// neurons) needs architecture[k+1] * (architecture[k] + 1) weights — the
// +1 covers each neuron's bias weight.
function distributeWights(weightObjects) {
  let cursor = 0;
  architecture.slice(1).forEach((neuronCount, idx) => {
    const count = neuronCount * (architecture[idx] + 1);
    layers[idx].distributeWights(weightObjects.slice(cursor, cursor + count));
    cursor += count;
  });
}

function forward(inputs) {
const layerOutputs = [layers[0].predict(inputs)];
Expand Down Expand Up @@ -63,11 +73,16 @@ function Network(layerDescriptors, learningRate = 0.5) {

const network = Network(
[
[2],
[3, 'sigmoid'],
[1, 'sigmoid'],
],
0.9,
);
const problemSize = weightsForArchitecture([2, 3, 1]);
const weights = times(problemSize, () => ({ value: Math.random() }));
network.distributeWights(weights);

const inputs = [
[0, 0],
[0, 1],
Expand All @@ -90,7 +105,7 @@ while (!network.converges(0.1)) {
.join(','),
)
.join(';');
if (i == 1000) {
if (i == 10000) {
console.log(actualOutputs);
console.log(network.error);
i = 0;
Expand Down
6 changes: 5 additions & 1 deletion src/ai/ann/Neuron.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ function Neuron(inputQuantity = 1, type = 'linear') {
}

function predict(inputs) {
fill(inputs.length);
inputs = [...inputs, -1];
while (inputs.length > neuron.weights.length) neuron.weights.push({ value: Math.random() });
neuron.inputs = inputs;
neuron.output = activations[type].function(_predict(inputs));

Expand All @@ -34,6 +34,10 @@ function Neuron(inputQuantity = 1, type = 'linear') {
}
}

// Grow the neuron's weight list (with random initial values) until it
// covers every input plus one extra bias weight; never shrinks it.
function fill(inputsLength) {
  const required = inputsLength + 1;
  while (neuron.weights.length < required) {
    neuron.weights.push({ value: Math.random() });
  }
}

return neuron;
}

Expand Down
10 changes: 9 additions & 1 deletion src/ai/ann/math.js
Original file line number Diff line number Diff line change
Expand Up @@ -66,4 +66,12 @@ const activations = {
},
};

module.exports = { dot, activations, mse };
/**
 * Total weight count for a feed-forward architecture: each adjacent layer
 * pair contributes (fan-in + 1 bias) weights per downstream neuron.
 *
 * @param {number[]} arch - neuron counts per layer, input layer first.
 * @returns {number} total number of weights (0 for fewer than two layers).
 */
function weightsForArchitecture(arch) {
  return arch
    .slice(1)
    .reduce((total, neurons, i) => total + neurons * (arch[i] + 1), 0);
}

module.exports = { dot, activations, mse, weightsForArchitecture };

0 comments on commit 3f5e5f4

Please sign in to comment.