Do-it-yourself neural network from scratch. Part 2. Implementation

As I mentioned in the introduction to the first part, I am a frontend developer and my native language is JavaScript, so that is what we will use to implement our neural network in this article. First, a few words about the structure. Apart from various computed properties and methods, the network object will contain an array of layers, each layer will contain an array of neurons, and each neuron, in turn, will contain an array of "inputs" - its connections to the neurons of the previous layer. Also, since some things are shared by the entire network, such as the activation function, its derivative and the learning rate, and we need to reach them from every neuron, let's agree that each neuron keeps a _layer link to the layer it belongs to, and each layer keeps a _network link to the network itself.


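To make the structure easier to picture, here is a rough sketch of the object graph we are about to build. It only illustrates the nesting and the back-links; the real classes are defined below.

const networkShape = {
  learningRate: 0.5,
  activationFunction: (x) => x, // placeholder; the real network uses a sigmoid
  layers: [
    {
      _network: null, // back-link to the network itself
      neurons: [
        {
          _layer: null, // back-link to the layer the neuron belongs to
          inputs: [],   // connections to the neurons of the previous layer, each with a weight
        },
      ],
    },
  ],
};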



Let's move from the particular to the general and start by describing the class of a single neuron input.





class Input {
  constructor(neuron, weight) {
    this.neuron = neuron;
    this.weight = weight;
  }
}



Everything is pretty simple here: each input has a numeric weight and a reference to a neuron of the previous layer. Let's go further and describe the class of the neuron itself.





class Neuron {
  constructor(layer, previousLayer) {
    this._layer = layer;
    // For every neuron of the previous layer create a connection with a random
    // weight in [-0.5, 0.5). Input-layer neurons have no previous layer, so their
    // inputs array holds a single number instead - the raw input value.
    this.inputs = previousLayer
      ? previousLayer.neurons.map((neuron) => new Input(neuron, Math.random() - 0.5))
      : [0];
  }

  // True for input-layer neurons: their inputs array holds a number, not Input instances
  get $isFirstLayerNeuron() {
    return !(this.inputs[0] instanceof Input);
  }

  // Weighted sum of the values of the connected neurons
  get inputSum() {
    return this.inputs.reduce((sum, input) => {
      return sum + input.neuron.value * input.weight;
    }, 0);
  }

  // Output of the neuron: the raw input value for the first layer,
  // the activation function applied to inputSum for every other layer
  get value() {
    return this.$isFirstLayerNeuron
      ? this.inputs[0]
      : this._layer._network.activationFunction(this.inputSum);
  }

  // Only input-layer neurons accept a value directly
  set input(val) {
    if (!this.$isFirstLayerNeuron) {
      return;
    }

    this.inputs[0] = val;
  }

  // Backpropagation step: adjust the weights of the inputs and pass the error
  // further to the neurons of the previous layer
  set error(error) {
    if (this.$isFirstLayerNeuron) {
      return;
    }

    const wDelta = error * this._layer._network.derivativeFunction(this.inputSum);

    this.inputs.forEach((input) => {
      input.weight -= input.neuron.value * wDelta * this._layer._network.learningRate;
      input.neuron.error = input.weight * wDelta;
    });
  }
}



Let's see what is going on here. The neuron's constructor takes two parameters: the layer this neuron is located on and, if this is not the input layer of the network, a reference to the previous layer.





In the constructor, for each neuron of the previous layer we create an input that connects the two neurons and gets a random weight, and we store all of these inputs in the inputs array. If this is the input layer of the network, the inputs array instead holds a single numeric value - the one we feed to the network.





$isFirstLayerNeuron is a computed flag that tells us whether the neuron belongs to the input layer. As you can see from the constructor, input-layer neurons keep a plain number in their inputs array rather than Input instances, and that is exactly what this getter checks.





inputSum is a read-only computed property that returns the weighted sum of the neuron's inputs: the value of each connected neuron of the previous layer multiplied by the weight of the corresponding connection.





value is the output of the neuron. For an input-layer neuron it is simply the number we fed in; for every other neuron it is the activation function applied to inputSum.


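To make inputSum and value a bit more tangible, here is the same arithmetic done by hand with made-up numbers: two inputs with values 1 and 0 and weights 0.3 and -0.2 (the real weights are random, of course).

// Made-up numbers, only to illustrate what inputSum and value compute
const inputs = [
  { value: 1, weight: 0.3 },
  { value: 0, weight: -0.2 },
];

const inputSum = inputs.reduce((sum, input) => sum + input.value * input.weight, 0); // 0.3
const value = 1 / (1 + Math.exp(-inputSum)); // sigmoid(0.3) ≈ 0.574

console.log(inputSum, value);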



In addition to the computed properties, the neuron has two setters:





input sets the value of an input-layer neuron; for neurons of any other layer it does nothing.





The second setter is error. For input-layer neurons it does nothing. For every other neuron it computes the weight delta from the error and the derivative of the activation function, shifts the weight of each input accordingly (scaled by the learning rate and the value of the connected neuron), and then passes the error further back by setting error on each neuron of the previous layer, multiplied by the corresponding weight. This is the backpropagation step.


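Spelled out for a single input with made-up numbers, the update looks like this (just an illustration of the formula the setter uses, not code from the network):

const error = 0.2;                 // how far this neuron's output is from what we wanted
const derivative = 0.23;           // derivativeFunction(inputSum) for this neuron
const wDelta = error * derivative; // 0.046
const learningRate = 0.5;
const inputValue = 0.7;            // output of the connected neuron on the previous layer

let weight = 0.3;
weight -= inputValue * wDelta * learningRate; // 0.3 - 0.7 * 0.046 * 0.5 ≈ 0.284
const previousNeuronError = weight * wDelta;  // this is what gets passed one layer back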



That's all for the neuron. Now let's describe the layer.





class Layer {
  constructor(neuronsCount, previousLayer, network) {
    this._network = network;
    this.neurons = [];
    for (let i = 0; i < neuronsCount; i++) {
      this.neurons.push(new Neuron(this, previousLayer));
    }
  }

  // The layer is the input layer if its neurons are input-layer neurons
  get $isFirstLayer() {
    return this.neurons[0].$isFirstLayerNeuron;
  }

  // Feed an array of input values to the input layer, one value per neuron
  set input(val) {
    if (!this.$isFirstLayer) {
      return;
    }

    if (!Array.isArray(val)) {
      return;
    }

    if (val.length !== this.neurons.length) {
      return;
    }

    val.forEach((v, i) => this.neurons[i].input = v);
  }
}



The constructor of the layer takes the number of neurons in it, a reference to the previous layer and a reference to the network, creates the neurons and stores them in the neurons array.





$isFirstLayer is a computed flag that tells us whether this is the input layer of the network, and the input setter writes the incoming values into the corresponding neurons - but only if this really is the input layer and the data is an array of the right length. Nothing tricky here.


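As a quick sanity check, the layer can be tried out on its own with a stubbed-out network object. This snippet is only for experimenting in the console and assumes the Input and Neuron classes above are already defined; it is not part of the article's code.

// A fake "network" with an identity activation, just enough for Layer and Neuron to work
const fakeNetwork = {
  activationFunction: (x) => x,
  derivativeFunction: () => 1,
  learningRate: 0.5,
};

const firstLayer = new Layer(2, null, fakeNetwork);
const secondLayer = new Layer(3, firstLayer, fakeNetwork);

firstLayer.input = [1, 0];
console.log(secondLayer.neurons.map((neuron) => neuron.value)); // three numbers built from the random weights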



And, finally, the class of the network itself:





class Network {
  // Sigmoid activation and its derivative
  static sigmoid(x) {
    return 1 / (1 + Math.exp(-x));
  }

  static sigmoidDerivative(x) {
    return Network.sigmoid(x) * (1 - Network.sigmoid(x));
  }

  constructor(inputSize, outputSize, hiddenLayersCount = 1, learningRate = 0.5) {
    this.activationFunction = Network.sigmoid;
    this.derivativeFunction = Network.sigmoidDerivative;
    this.learningRate = learningRate;

    // Input layer
    this.layers = [new Layer(inputSize, null, this)];

    // Hidden layers: the size is a heuristic based on the number of inputs and outputs
    for (let i = 0; i < hiddenLayersCount; i++) {
      const layerSize = Math.min(inputSize * 2 - 1, Math.ceil((inputSize * 2 / 3) + outputSize));
      this.layers.push(new Layer(layerSize, this.layers[this.layers.length - 1], this));
    }

    // Output layer
    this.layers.push(new Layer(outputSize, this.layers[this.layers.length - 1], this));
  }

  set input(val) {
    this.layers[0].input = val;
  }

  // Values of the output-layer neurons
  get prediction() {
    return this.layers[this.layers.length - 1].neurons.map((neuron) => neuron.value);
  }

  // One pass over the dataset: feed each case and backpropagate the error
  trainOnce(dataSet) {
    if (!Array.isArray(dataSet)) {
      return;
    }

    dataSet.forEach((dataCase) => {
      const [input, expected] = dataCase;

      this.input = input;
      this.prediction.forEach((r, i) => {
        this.layers[this.layers.length - 1].neurons[i].error = r - expected[i];
      });
    });
  }

  // Repeat trainOnce for the given number of epochs
  train(dataSet, epochs = 100000) {
    return new Promise(resolve => {
      for (let i = 0; i < epochs; i++) {
        this.trainOnce(dataSet);
      }
      resolve();
    });
  }
}



In the constructor we set the activation function, its derivative and the learning rate, and then build the layers: the input layer, the hidden layers (their size is computed from the number of inputs and outputs with a simple heuristic) and the output layer.


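For example, for the XOR network we will build below (two inputs, one output) the heuristic works out like this:

const inputSize = 2;
const outputSize = 1;
// min(2 * 2 - 1, ceil(2 * 2 / 3 + 1)) = min(3, 3), so the single hidden layer gets 3 neurons
const layerSize = Math.min(inputSize * 2 - 1, Math.ceil((inputSize * 2 / 3) + outputSize));
console.log(layerSize); // 3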



input is a setter that simply forwards the input data to the first layer of the network.





prediction is a computed property that collects the values of the neurons of the last layer, i.e. the result of the network's work.





trainOnce takes a dataset: an array of pairs whose first element is the input data and whose second element is the expected result. For each pair we feed the input to the network, read the prediction and assign the difference between the actual and the expected value as the error of the corresponding output neuron; from there the error setter propagates it back through the network and adjusts the weights. For example, if an output neuron produced 0.6 when we expected 1, it receives an error of -0.4 and its weights shift so that its output moves toward the expected value.





train is the method that runs trainOnce on the dataset for the given number of epochs. It returns a promise, so the caller can simply hang a .then on it and continue once training is finished. Keep in mind that the loop itself is synchronous, so the main thread is still busy while the network trains; the promise just gives us a convenient place to pick up afterwards.


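If you actually need the page to stay responsive while the network trains, the loop has to yield to the event loop once in a while. One possible variant, just a sketch and not part of the code above, is to make the training asynchronous and pause every few hundred epochs:

// A sketch of a non-blocking training loop; the chunk size is arbitrary
async function trainAsync(network, dataSet, epochs = 100000, chunkSize = 500) {
  for (let i = 0; i < epochs; i++) {
    network.trainOnce(dataSet);
    if (i % chunkSize === 0) {
      // let the browser handle rendering and user input before the next chunk
      await new Promise((resolve) => setTimeout(resolve, 0));
    }
  }
}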



And that's it, the network is ready. Let's check that it actually works on the classic toy problem for neural networks: the XOR function.





First, create a network with two inputs and one output:





const network = new Network(2, 1);



Then describe the training set - every possible pair of inputs together with the expected answer:





const data = [
  [[0, 0], [0]],
  [[0, 1], [1]],
  [[1, 0], [1]],
  [[1, 1], [0]],
];



Finally, train the network and check the result:





network.train(data).then(() => {
  const testData = [
    [0, 0],
    [0, 1],
    [1, 0],
    [1, 1],
  ];

  testData.forEach((input) => {
    network.input = input;
    console.log(`${input[0]} XOR ${input[1]} = ${network.prediction}`);
  });
});



If everything is done correctly, the printed predictions will be close to the expected answers: numbers near 0 for equal inputs and near 1 for different ones. The network works.


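If you would rather see a clean 0 or 1 instead of a number close to it, you can simply round the prediction; this is a small extra, not something the code above does:

network.input = [0, 1];
console.log(Math.round(network.prediction[0])); // should print 1 once the network is trained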





