
Commit bd82923

cleanup cruft
1 parent 367c664 commit bd82923

5 files changed: +10 -27 lines


README.md

+2 -2

@@ -23,8 +23,8 @@
 </p>
 
 ## Get Started
-[Documentation](http://dev-coop.github.io/anny/docs/dist/0.1.0/index.html)
-[Demo](http://dev-coop.github.io/anny)
+Read the [documentation](http://dev-coop.github.io/anny/docs/dist/0.1.0)
+or try the [demo](http://dev-coop.github.io/anny).
 
 ## Hacking
 
conf.json

+1 -17

@@ -38,22 +38,6 @@
           "./docs/src/static"
         ]
       }
-    },
-    "applicationName": "Anny",
-    "disqus": "",
-    "googleAnalytics": "",
-    "openGraph": {
-      "title": "",
-      "type": "website",
-      "image": "",
-      "site_name": "",
-      "url": ""
-    },
-    "meta": {
-      "title": "",
-      "description": "",
-      "keyword": ""
-    },
-    "linenums": true
+    }
   }
 }

package.json

-1

@@ -60,7 +60,6 @@
     "jsdoc": "^3.3.2",
     "lodash": "^3.10.0",
     "mathjs": "^2.1.1",
-    "minami": "^1.1.1",
     "mocha": "^2.2.5",
     "node-libs-browser": "^0.5.2",
     "pre-commit": "^1.1.1",
src/Layer.js

+5 -5

@@ -3,9 +3,9 @@ var INITIALIZE = require('./Initialize');
 var Neuron = require('./Neuron');
 
 /**
- * Creates a single dimension Layer of Neurons.
+ * Creates a single dimension Layer of Neurons.
  * @param {string} numNeurons - The number of Neurons this Layer should have.
- * @param {boolean} [addBias=false] - Add a bias Neuron to this Layer.
+ * @param {boolean} [addBias=false] - Add a bias Neuron to this Layer.
  * @constructor
  * @see Neuron
  */
@@ -27,7 +27,7 @@ function Layer(numNeurons, addBias) {
 }
 
 /**
- * Connects every Neuron in this Layer to each Neuron in the `target` Layer.
+ * Connects every Neuron in this Layer to each Neuron in the `target` Layer.
  * @param {Layer} targetLayer - The Layer to connect to.
  */
 Layer.prototype.connect = function(targetLayer) {
@@ -46,8 +46,8 @@ Layer.prototype.connect = function(targetLayer) {
 
 /**
  * Activates all the Neurons in this Layer with the given array of values.
- * @param {number[]} [values] - Map of input values for each Neuron.
- * @returns {number[]} - Array of Neuron output values.
+ * @param {number[]} [values] - Map of input values for each Neuron.
+ * @returns {number[]} - Array of Neuron output values.
  */
 Layer.prototype.activate = function(values) {
   return _.map(this.neurons, function(neuron, i) {
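
For orientation, the JSDoc touched above documents the Layer API (constructor, connect, activate). Below is a minimal usage sketch based only on those signatures; the require path, layer sizes, and the no-argument activate() calls on downstream Layers are illustrative assumptions, not part of this commit.

```js
// Sketch only: assumes Layer is exported from src/Layer.js as shown in the diff.
var Layer = require('./src/Layer');

// A tiny 2-3-1 topology; the second argument adds a bias Neuron to the Layer.
var input = new Layer(2, true);
var hidden = new Layer(3, true);
var output = new Layer(1);

// connect() wires every Neuron in one Layer to each Neuron in the target Layer.
input.connect(hidden);
hidden.connect(output);

// activate() feeds values into the input Layer; downstream Layers are assumed
// to compute from their incoming connections when called without arguments.
input.activate([0, 1]);
hidden.activate();
var outputs = output.activate();  // array of Neuron output values
```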

src/Neuron.js

+2 -2

@@ -76,15 +76,15 @@ Neuron.Connection = function(source, target, weight) {
    */
   this.target = target;
 
-  // We add one to initialize the weight value as if this connection were
-  // already part of the fan.
   /**
    * The weight is used as a multiplier for two purposes. First, for
    * activation, when transferring the output of the `source` Neuron to
    * the input of the `target` Neuron. Second, during training, calculating the
    * total error delta.
    * @type {number}
    */
+  // We add one to initialize the weight value as if this connection were
+  // already part of the fan.
   this.weight = weight || INITIALIZE.weight(target.incoming.length + 1);
 };
 
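The relocated comment explains the `+ 1` at the call site: the weight is initialized as if this new connection were already counted in the target's fan (`target.incoming.length + 1`). anny's actual Initialize.js is not shown in this commit; the sketch below only illustrates a common fan-in-scaled initializer to make the comment concrete, and its distribution and range are assumptions.

```js
// Illustrative only -- NOT anny's Initialize.js, whose contents are not in this diff.
var INITIALIZE = {
  // numInputs counts every incoming connection, including the one being created.
  weight: function(numInputs) {
    var range = 1 / Math.sqrt(numInputs);      // shrink initial weights as fan-in grows
    return Math.random() * 2 * range - range;  // uniform draw in [-range, +range]
  }
};

// Mirrors the call site in the diff: "+ 1" counts the connection being constructed.
var target = { incoming: [{}, {}] };  // hypothetical target Neuron with 2 existing inputs
var weight = INITIALIZE.weight(target.incoming.length + 1);
```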