-
Notifications
You must be signed in to change notification settings - Fork 0
/
Network.js
109 lines (88 loc) · 3.34 KB
/
Network.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
'use strict';
{
var NNLayer = require(__dirname + '/layers/NNLayer')
, ReLULayer = require(__dirname + '/layers/ReLULayer')
, SoftmaxLayer = require(__dirname + '/layers/SoftmaxLayer')
, CrossEntropyLoss = require(__dirname + '/loss/CrossEntropyLoss')
, _ = require('lodash');
/**
 * A simple feed-forward neural network trained with mini-batch gradient descent.
 * Layers are appended via addLayer/addReLU/addSoftmax and the loss is chosen
 * via useCrossEntropyLoss before calling train.
 *
 * @param {number} inputSize - Dimensionality of each input sample.
 * @param {number} batchSize - Number of samples per training batch.
 */
function Network(inputSize, batchSize) {
    this.currentSize = inputSize;
    this.batchSize = batchSize;
    // Own per-instance layer list. Without this, every instance would
    // mutate the single shared `layers` array on Network.prototype.
    this.layers = [];
}
module.exports = Network;
Network.prototype = {
    /**
     * Append a fully-connected layer sized to follow the previous layer's output.
     * @param {number} outputSize - Neuron count of the new layer (must be >= 1).
     * @param {number} stepSize - Learning rate handed to the layer.
     * @throws {Error} If outputSize is less than 1.
     */
    addLayer: function (outputSize, stepSize) {
        if (outputSize < 1) {
            throw new Error('[Network.addLayer]: Output size less than 1');
        }
        var layer = new NNLayer(this.currentSize, outputSize, stepSize, this.batchSize);
        this.currentSize = outputSize;
        this.layers.push({
            calcRegLoss: true, // fully-connected layers contribute regularization loss
            layer: layer
        });
    },

    /** Append a ReLU activation layer (no regularization contribution). */
    addReLU: function () {
        this.layers.push({
            calcRegLoss: false,
            layer: new ReLULayer(this.batchSize)
        });
    },

    /** Append a softmax layer (no regularization contribution). */
    addSoftmax: function () {
        this.layers.push({
            calcRegLoss: false,
            layer: new SoftmaxLayer(this.batchSize)
        });
    },

    /** Select cross-entropy as the network's loss function. */
    useCrossEntropyLoss: function () {
        this.lossFunction = new CrossEntropyLoss(this.batchSize);
    },

    /**
     * Train with mini-batch gradient descent, logging loss and accuracy
     * after every batch.
     * @param {number} iterations - Total number of samples to train on.
     * @param {function(number): {X: Object, Y: Object}} inputFunction -
     *        Called with the batch size; returns a batch whose X is the
     *        input matrix and Y a 1-by-batch row of integer class labels
     *        (presumably numeric — verify against the data loader).
     */
    train: function (iterations, inputFunction) {
        for (var i = 0; i < iterations; i += this.batchSize) {
            var input = inputFunction(this.batchSize);

            // Forward pass through every layer; X now holds class scores.
            input.X = this.forwardPass(input);

            // --------------------------------
            // Calculate accuracy and loss
            // --------------------------------
            var scores = input.X.transpose().toArray();
            // Hoisted: the original re-ran input.Y.toArray() for every
            // sample of every batch.
            var labels = input.Y.toArray()[0];
            var total = 0;
            for (var j = 0; j < this.batchSize; ++j) {
                // Predicted class = argmax of the sample's scores.
                // Generalized from the original hard-coded 2-class
                // comparison; for 2 classes ties still resolve to the
                // later index, matching the original.
                var prediction = 0;
                for (var k = 1; k < scores[j].length; ++k) {
                    if (scores[j][k] >= scores[j][prediction]) {
                        prediction = k;
                    }
                }
                if (labels[j] === prediction) {
                    ++total;
                }
            }
            input.X = this.lossFunction.forwardPass(input.X, input.Y);
            var loss = _.sum(input.X) / this.batchSize;

            console.log('Iterations: ' + (i + this.batchSize));
            console.log('% Accuracy: ' + (total / this.batchSize) * 100);
            console.log('Loss: ' + loss);
            console.log('----------------------------------');

            this.backwardPass(input);
        }
    },

    /**
     * Run the batch through every layer in order.
     * @param {{X: Object}} input - Batch whose X field is the input matrix.
     * @returns {Object} The final layer's output.
     */
    forwardPass: function (input) {
        var x = input.X;
        // Index loop instead of the original for...in: for...in yields
        // string keys and would also visit any enumerable properties
        // added to the array.
        for (var i = 0; i < this.layers.length; ++i) {
            x = this.layers[i].layer.forwardPass(x);
        }
        return x;
    },

    /**
     * Back-propagate the loss gradient through the layers in reverse order.
     * @param {{X: Object, Y: Object}} input - Final scores (X) and labels (Y).
     * @returns {Object} The gradient with respect to the network input.
     */
    backwardPass: function (input) {
        var back = this.lossFunction.backwardPass(input.X, input.Y);
        for (var i = this.layers.length - 1; i >= 0; --i) {
            back = this.layers[i].layer.backwardPass(back);
        }
        return back;
    },

    // Prototype-level defaults.
    // NOTE(review): `lossFunction` and `layers` are mutable objects shared
    // by every instance that does not assign its own — the constructor
    // should set instance-level copies to avoid cross-instance mutation.
    lossFunction: {},
    currentSize: 0,
    batchSize: 0,
    layers: []
};
}