diff --git a/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNExperiment.cs b/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNExperiment.cs
new file mode 100644
index 000000000..3c4d6c18c
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNExperiment.cs
@@ -0,0 +1,120 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using SharpNeatLib.Experiments;
+using SharpNeatLib.Evolution;
+using SharpNeatLib.NeuralNetwork;
+
+namespace OscillatorQuadruped
+{
+ class CTRNNExperiment : IExperiment
+ {
+ private uint inputs;
+ private uint outputs;
+ private uint hidden;
+ private int cppnInputs;
+ private int cppnOutputs;
+ private IPopulationEvaluator populationEvaluator = null;
+ private NeatParameters neatParams = null;
+
+ public CTRNNExperiment(uint inputs, uint outputs, uint hidden, int cppnInputs, int cppnOutputs)
+ {
+ this.inputs = inputs;
+ this.outputs = outputs;
+ this.hidden = hidden;
+ this.cppnInputs = cppnInputs;
+ this.cppnOutputs = cppnOutputs;
+ }
+
+ #region IExperiment Members
+
+ public void LoadExperimentParameters(System.Collections.Hashtable parameterTable)
+ {
+ //throw new Exception("The method or operation is not implemented.");
+ }
+
+ public IPopulationEvaluator PopulationEvaluator
+ {
+ get
+ {
+ if (populationEvaluator == null)
+ ResetEvaluator(HyperNEATParameters.substrateActivationFunction);
+
+ return populationEvaluator;
+ }
+ }
+
+ public void ResetEvaluator(IActivationFunction activationFn)
+ {
+ populationEvaluator = new CTRNNPopulationEvaluator(new CTRNNNetworkEvaluator(inputs, outputs, hidden));
+ }
+
+ public int InputNeuronCount
+ {
+ get { return cppnInputs; }
+ }
+
+ public int OutputNeuronCount
+ {
+ get { return cppnOutputs; }
+ }
+
+ public NeatParameters DefaultNeatParameters
+ {
+ get
+ {
+ if (neatParams == null)
+ {
+ NeatParameters np = new NeatParameters();
+ np.activationProbabilities = new double[4];
+ np.activationProbabilities[0] = .25;
+ np.activationProbabilities[1] = .25;
+ np.activationProbabilities[2] = .25;
+ np.activationProbabilities[3] = .25;
+ np.compatibilityDisjointCoeff = 1;
+ np.compatibilityExcessCoeff = 1;
+ np.compatibilityThreshold = 100;
+ np.compatibilityWeightDeltaCoeff = 3;
+ np.connectionWeightRange = 3;
+ np.elitismProportion = .1;
+ np.pInitialPopulationInterconnections = 1;
+ np.pInterspeciesMating = 0.01;
+ np.pMutateAddConnection = .06;
+ np.pMutateAddNode = .01;
+ np.pMutateConnectionWeights = .96;
+ np.pMutateDeleteConnection = 0;
+ np.pMutateDeleteSimpleNeuron = 0;
+ np.populationSize = 300;
+ np.pruningPhaseBeginComplexityThreshold = float.MaxValue;
+ np.pruningPhaseBeginFitnessStagnationThreshold = int.MaxValue;
+ np.pruningPhaseEndComplexityStagnationThreshold = int.MinValue;
+ np.selectionProportion = .8;
+ np.speciesDropoffAge = 1500;
+ np.targetSpeciesCountMax = np.populationSize / 10;
+ np.targetSpeciesCountMin = np.populationSize / 10 - 2;
+
+ neatParams = np;
+ }
+ return neatParams;
+ }
+ }
+
+ public IActivationFunction SuggestedActivationFunction
+ {
+ get { return HyperNEATParameters.substrateActivationFunction; }
+ }
+
+ public AbstractExperimentView CreateExperimentView()
+ {
+ return null;
+ }
+
+ public string ExplanatoryText
+ {
+            get { return "A HyperNEAT experiment for quadruped locomotion"; }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNNetworkEvaluator.cs b/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNNetworkEvaluator.cs
new file mode 100644
index 000000000..b6a23bac4
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNNetworkEvaluator.cs
@@ -0,0 +1,48 @@
+using SharpNeatLib.Experiments;
+using SharpNeatLib.NeuralNetwork;
+
+namespace OscillatorQuadruped
+{
+ internal class CTRNNNetworkEvaluator : INetworkEvaluator
+ {
+ public static CTRNNSubstrate substrate;
+ private NoveltyArchive noveltyArchive;
+
+ public CTRNNNetworkEvaluator(uint inputs, uint outputs, uint hidden)
+ {
+ substrate = new CTRNNSubstrate(inputs, outputs, hidden, HyperNEATParameters.substrateActivationFunction);
+ noveltyArchive = new NoveltyArchive();
+ }
+
+ #region INetworkEvaluator Members
+
+ public double[] threadSafeEvaluateNetwork(INetwork network)
+ {
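+            // Pipeline: the evolved CPPN is queried by the CTRNN substrate to build a genome,
+            // the genome is decoded into a runnable network, and that network drives a
+            // Controller inside the physics Domain, which returns { fitness, objectiveFitness }.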
+ var tempGenome = substrate.generateGenome(network);
+ var tempNet = tempGenome.Decode(null);
+
+ using (var quadDomain = new Domain(noveltyArchive, MainProgram.novelty))
+ {
+ var fitness = quadDomain.EvaluateController(new Controller(tempNet));
+ return fitness;
+ }
+ }
+
+ public void endOfGeneration()
+ {
+ noveltyArchive.endOfGeneration();
+ }
+
+ public double EvaluateNetwork(INetwork network)
+ {
+ return 1;
+ }
+
+ public string EvaluatorStateMessage
+ {
+ get { return ""; }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNPopulationEvaluator.cs b/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNPopulationEvaluator.cs
new file mode 100644
index 000000000..1777666dc
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNPopulationEvaluator.cs
@@ -0,0 +1,18 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using SharpNeatLib.Experiments;
+
+namespace OscillatorQuadruped
+{
+ class CTRNNPopulationEvaluator : MultiThreadedPopulationEvaluator
+ {
+
+ public CTRNNPopulationEvaluator(INetworkEvaluator eval)
+ : base(eval, null)
+ {
+
+ }
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNSubstrate.cs b/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNSubstrate.cs
new file mode 100644
index 000000000..b9bb94789
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/CTRNN/CTRNNSubstrate.cs
@@ -0,0 +1,605 @@
+//#define OUTPUT
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using SharpNeatLib.CPPNs;
+using SharpNeatLib.NeuralNetwork;
+using SharpNeatLib.NeatGenome;
+
+namespace OscillatorQuadruped
+{
+ class CTRNNSubstrate : Substrate
+ {
+ private const float shiftScale = 0.2f;
+
+ public CTRNNSubstrate(uint inputs, uint outputs, uint hidden, IActivationFunction function)
+ : base(inputs, outputs, hidden, function)
+ {
+
+ }
+
+ public override NeatGenome generateGenome(INetwork network)
+ {
+ var coordinates = new double[8];
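+            // Inferred layout of the 8 CPPN inputs (see the queries below): [0..1] and [2..3]
+            // are the source and target (x, y) within a module, while [4..5] and [6..7] are the
+            // source and target module positions on the body. 'iterations' gives the CPPN enough
+            // activation steps for signals to pass through all of its hidden nodes.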
+ int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
+
+ // copy the neuron list to a new list and update the biases for hidden and output nodes
+ NeuronGeneList newNeurons = new NeuronGeneList(neurons);
+
+ foreach(NeuronGene gene in newNeurons)
+ {
+ if (gene.NeuronType == NeuronType.Output)
+ {
+ gene.NeuronBias = 3; // GWM - Bias hardcoded to 3 for output neurons in Sebastian's CTRNN architecture
+ coordinates[2] = 0;
+ coordinates[3] = 0;
+ coordinates[6] = 0;
+ coordinates[7] = 0;
+ switch (gene.InnovationId)
+ {
+ case 4:
+ coordinates[0] = -1;
+ coordinates[1] = 1;
+ coordinates[4] = -1;
+ coordinates[5] = 1;
+ break;
+
+ case 5:
+ coordinates[0] = 0;
+ coordinates[1] = 1;
+ coordinates[4] = -1;
+ coordinates[5] = 1;
+ break;
+
+ case 6:
+ coordinates[0] = 1;
+ coordinates[1] = 1;
+ coordinates[4] = -1;
+ coordinates[5] = 1;
+ break;
+
+ case 7:
+ coordinates[0] = -1;
+ coordinates[1] = 1;
+ coordinates[4] = 1;
+ coordinates[5] = 1;
+ break;
+
+ case 8:
+ coordinates[0] = 0;
+ coordinates[1] = 1;
+ coordinates[4] = 1;
+ coordinates[5] = 1;
+ break;
+
+ case 9:
+ coordinates[0] = 1;
+ coordinates[1] = 1;
+ coordinates[4] = 1;
+ coordinates[5] = 1;
+ break;
+
+ case 10:
+ coordinates[0] = -1;
+ coordinates[1] = 1;
+ coordinates[4] = -1;
+ coordinates[5] = -1;
+ break;
+
+ case 11:
+ coordinates[0] = 0;
+ coordinates[1] = 1;
+ coordinates[4] = -1;
+ coordinates[5] = -1;
+ break;
+
+ case 12:
+ coordinates[0] = 1;
+ coordinates[1] = 1;
+ coordinates[4] = -1;
+ coordinates[5] = -1;
+ break;
+
+ case 13:
+ coordinates[0] = -1;
+ coordinates[1] = 1;
+ coordinates[4] = 1;
+ coordinates[5] = -1;
+ break;
+
+ case 14:
+ coordinates[0] = 0;
+ coordinates[1] = 1;
+ coordinates[4] = 1;
+ coordinates[5] = -1;
+ break;
+
+ case 15:
+ coordinates[0] = 1;
+ coordinates[1] = 1;
+ coordinates[4] = 1;
+ coordinates[5] = -1;
+ break;
+ }
+
+ float output;
+ network.ClearSignals();
+ network.SetInputSignals(coordinates);
+ network.MultipleSteps(iterations);
+ output = network.GetOutputSignal(3);
+ gene.TimeConstant = (output + 1) * 30 + 1; // normalize output to [1,61] for the time constant
+ }
+ if (gene.NeuronType == NeuronType.Hidden)
+ {
+ coordinates[2] = 0;
+ coordinates[3] = 0;
+ coordinates[6] = 0;
+ coordinates[7] = 0;
+ switch (gene.InnovationId)
+ {
+ case 16:
+ coordinates[0] = -1;
+ coordinates[1] = 0;
+ coordinates[4] = -1;
+ coordinates[5] = 1;
+ break;
+
+ case 17:
+ coordinates[0] = 1;
+ coordinates[1] = 0;
+ coordinates[4] = -1;
+ coordinates[5] = 1;
+ break;
+
+ case 18:
+ coordinates[0] = -1;
+ coordinates[1] = 0;
+ coordinates[4] = 1;
+ coordinates[5] = 1;
+ break;
+
+ case 19:
+ coordinates[0] = 1;
+ coordinates[1] = 0;
+ coordinates[4] = 1;
+ coordinates[5] = 1;
+ break;
+
+ case 20:
+ coordinates[0] = -1;
+ coordinates[1] = 0;
+ coordinates[4] = -1;
+ coordinates[5] = -1;
+ break;
+
+ case 21:
+ coordinates[0] = 1;
+ coordinates[1] = 0;
+ coordinates[4] = -1;
+ coordinates[5] = -1;
+ break;
+
+ case 22:
+ coordinates[0] = -1;
+ coordinates[1] = 0;
+ coordinates[4] = 1;
+ coordinates[5] = -1;
+ break;
+
+ case 23:
+ coordinates[0] = 1;
+ coordinates[1] = 0;
+ coordinates[4] = 1;
+ coordinates[5] = -1;
+ break;
+ }
+
+ float output;
+ network.ClearSignals();
+ network.SetInputSignals(coordinates);
+ network.MultipleSteps(iterations);
+ output = network.GetOutputSignal(2);
+ gene.NeuronBias = output;
+ output = network.GetOutputSignal(3);
+ gene.TimeConstant = (output + 1) * 30 + 1; // normalize output to [1,61] for the time constant
+ }
+ }
+
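+            // The substrate topology is laid out by hand below: intra-module connections are
+            // queried for each of the four leg subunits, followed by inter-module vertical and
+            // horizontal connections between neighboring subunits.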
+ ConnectionGeneList connections = new ConnectionGeneList(88);
+ uint connectionCounter = 0;
+
+ // intramodule connections for first subunit
+ coordinates[4] = -1;
+ coordinates[5] = 1;
+ coordinates[6] = -1;
+ coordinates[7] = 1;
+ addModule(network, iterations, coordinates, 0, connections, connectionCounter);
+
+ // intramodule connections for second subunit
+ coordinates[4] = 1;
+ coordinates[5] = 1;
+ coordinates[6] = 1;
+ coordinates[7] = 1;
+ addModule(network, iterations, coordinates, 1, connections, connectionCounter);
+
+ // intramodule connections for third subunit
+ coordinates[4] = -1;
+ coordinates[5] = -1;
+ coordinates[6] = -1;
+ coordinates[7] = -1;
+ addModule(network, iterations, coordinates, 2, connections, connectionCounter);
+
+ // intramodule connections for fourth subunit
+ coordinates[4] = 1;
+ coordinates[5] = -1;
+ coordinates[6] = 1;
+ coordinates[7] = -1;
+ addModule(network, iterations, coordinates, 3, connections, connectionCounter);
+
+ // intermodule connections
+ // vertical connections
+ coordinates[4] = -1;
+ coordinates[5] = -1;
+ coordinates[6] = -1;
+ coordinates[7] = 1;
+
+ coordinates[0] = -1;
+ coordinates[1] = 1;
+ coordinates[2] = -1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 10, 16, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 0;
+ coordinates[1] = 1;
+ coordinates[2] = 0;
+ coordinates[3] = -1;
+ addConnection(network, iterations, 11, 0, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 1;
+ coordinates[1] = 1;
+ coordinates[2] = 1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 12, 17, coordinates, true, connections, connectionCounter);
+
+ coordinates[4] = -1;
+ coordinates[5] = 1;
+ coordinates[6] = -1;
+ coordinates[7] = -1;
+
+ coordinates[0] = -1;
+ coordinates[1] = 0;
+ coordinates[2] = -1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 16, 10, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 0;
+ coordinates[1] = -1;
+ coordinates[2] = 0;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 0, 11, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 1;
+ coordinates[1] = 0;
+ coordinates[2] = 1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 17, 12, coordinates, true, connections, connectionCounter);
+
+ coordinates[4] = 1;
+ coordinates[5] = -1;
+ coordinates[6] = 1;
+ coordinates[7] = 1;
+
+ coordinates[0] = -1;
+ coordinates[1] = 1;
+ coordinates[2] = -1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 13, 18, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 0;
+ coordinates[1] = 1;
+ coordinates[2] = 0;
+ coordinates[3] = -1;
+ addConnection(network, iterations, 14, 1, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 1;
+ coordinates[1] = 1;
+ coordinates[2] = 1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 15, 19, coordinates, true, connections, connectionCounter);
+
+ coordinates[4] = 1;
+ coordinates[5] = 1;
+ coordinates[6] = 1;
+ coordinates[7] = -1;
+
+ coordinates[0] = -1;
+ coordinates[1] = 0;
+ coordinates[2] = -1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 18, 13, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 0;
+ coordinates[1] = -1;
+ coordinates[2] = 0;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 1, 14, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 1;
+ coordinates[1] = 0;
+ coordinates[2] = 1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 19, 15, coordinates, true, connections, connectionCounter);
+
+            // horizontal connections
+ coordinates[4] = -1;
+ coordinates[5] = 1;
+ coordinates[6] = 1;
+ coordinates[7] = 1;
+
+ coordinates[0] = 1;
+ coordinates[1] = 0;
+ coordinates[2] = -1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 17, 18, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 1;
+ coordinates[1] = 1;
+ coordinates[2] = -1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 6, 7, coordinates, true, connections, connectionCounter);
+
+ coordinates[4] = 1;
+ coordinates[5] = 1;
+ coordinates[6] = -1;
+ coordinates[7] = 1;
+
+ coordinates[0] = -1;
+ coordinates[1] = 0;
+ coordinates[2] = 1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 18, 17, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = -1;
+ coordinates[1] = 1;
+ coordinates[2] = 1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 7, 6, coordinates, true, connections, connectionCounter);
+
+ coordinates[4] = -1;
+ coordinates[5] = -1;
+ coordinates[6] = 1;
+ coordinates[7] = -1;
+
+ coordinates[0] = 1;
+ coordinates[1] = 0;
+ coordinates[2] = -1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 21, 22, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = 1;
+ coordinates[1] = 1;
+ coordinates[2] = -1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 12, 13, coordinates, true, connections, connectionCounter);
+
+ coordinates[4] = 1;
+ coordinates[5] = -1;
+ coordinates[6] = -1;
+ coordinates[7] = -1;
+
+ coordinates[0] = -1;
+ coordinates[1] = 0;
+ coordinates[2] = 1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 22, 21, coordinates, true, connections, connectionCounter);
+
+ coordinates[0] = -1;
+ coordinates[1] = 1;
+ coordinates[2] = 1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 13, 12, coordinates, true, connections, connectionCounter);
+
+ return new SharpNeatLib.NeatGenome.NeatGenome(0, newNeurons, connections, (int)inputCount, (int)outputCount);
+ }
+
+ private void addModule(INetwork network, int iterations, double[] coordinates, uint moduleOffset, ConnectionGeneList connections, uint connectionCounter)
+ {
+ // from input
+ coordinates[0] = 0;
+ coordinates[1] = -1;
+ coordinates[2] = -1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 0 + moduleOffset, 16 + moduleOffset*2, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = 1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 0 + moduleOffset, 17 + moduleOffset * 2, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = -1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 0 + moduleOffset, 4 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = 0;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 0 + moduleOffset, 5 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = 1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 0 + moduleOffset, 6 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ // from first hidden
+ coordinates[0] = -1;
+ coordinates[1] = 0;
+ coordinates[2] = -1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 16 + moduleOffset * 2, 4 + moduleOffset*3, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = 0;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 16 + moduleOffset * 2, 5 + moduleOffset*3, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = 1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 16 + moduleOffset * 2, 6 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = 1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 16 + moduleOffset * 2, 17 + moduleOffset * 2, coordinates, false, connections, connectionCounter);
+
+ // from second hidden
+ coordinates[0] = 1;
+ coordinates[1] = 0;
+ coordinates[2] = -1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 17 + moduleOffset * 2, 4 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = 0;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 17 + moduleOffset * 2, 5 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = 1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 17 + moduleOffset * 2, 6 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ coordinates[2] = -1;
+ coordinates[3] = 0;
+ addConnection(network, iterations, 17 + moduleOffset * 2, 16 + moduleOffset * 2, coordinates, false, connections, connectionCounter);
+
+ // output to output connections
+ coordinates[0] = -1;
+ coordinates[1] = 1;
+ coordinates[2] = 0;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 4 + moduleOffset * 3, 5 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ coordinates[0] = 0;
+ coordinates[1] = 1;
+ coordinates[2] = -1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 5 + moduleOffset * 3, 4 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ coordinates[0] = 1;
+ coordinates[1] = 1;
+ coordinates[2] = 0;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 6 + moduleOffset * 3, 5 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+
+ coordinates[0] = 0;
+ coordinates[1] = 1;
+ coordinates[2] = 1;
+ coordinates[3] = 1;
+ addConnection(network, iterations, 5 + moduleOffset * 3, 6 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
+ }
+
+ private void addConnection(INetwork network, int iterations, uint source, uint target, double[] coordinates, bool isInter, ConnectionGeneList connections, uint connectionCounter)
+ {
+ float output;
+ network.ClearSignals();
+ network.SetInputSignals(coordinates);
+ network.MultipleSteps(iterations);
+ if (isInter)
+ output = network.GetOutputSignal(1);
+ else
+ output = network.GetOutputSignal(0);
+
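+            // HyperNEAT expression threshold: only CPPN outputs whose magnitude exceeds
+            // 'threshold' become connections; the excess magnitude is rescaled to [0, weightRange]
+            // and the output's sign is kept as the weight's sign.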
+ if (Math.Abs(output) > threshold)
+ {
+ float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
+ connections.Add(new ConnectionGene(connectionCounter++, source, target, weight));
+ }
+ }
+
+ // for predator prey, ignore
+ public INetwork generateMultiNetwork(INetwork network, uint numberOfAgents)
+ {
+ return generateMultiGenomeModulus(network, numberOfAgents).Decode(activationFunction);
+ }
+
+ // for predator prey, ignore
+ public NeatGenome generateMultiGenomeModulus(INetwork network, uint numberOfAgents)
+ {
+#if OUTPUT
+ System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
+#endif
+ var coordinates = new double[4];
+ float output;
+ uint connectionCounter = 0;
+
+ uint inputsPerAgent = inputCount / numberOfAgents;
+ uint hiddenPerAgent = hiddenCount / numberOfAgents;
+ uint outputsPerAgent = outputCount / numberOfAgents;
+
+ ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));
+
+ int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
+
+ coordinates[0] = -1 + inputDelta / 2.0f; //x1
+ coordinates[1] = -1; //y1
+ coordinates[2] = -1 + hiddenDelta / 2.0f; //x2
+ coordinates[3] = 0; //y2
+
+ for (uint agent = 0; agent < numberOfAgents; agent++)
+ {
+ coordinates[0] = -1 + (agent * inputsPerAgent * inputDelta) + inputDelta / 2.0f;
+ for (uint source = 0; source < inputsPerAgent; source++, coordinates[0] += inputDelta)
+ {
+ coordinates[2] = -1 + (agent * hiddenPerAgent * hiddenDelta) + hiddenDelta / 2.0f;
+ for (uint target = 0; target < hiddenPerAgent; target++, coordinates[2] += hiddenDelta)
+ {
+
+                        //Since there are an equal number of input and hidden nodes, we check these every time
+ network.ClearSignals();
+ network.SetInputSignals(coordinates);
+ ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
+ output = network.GetOutputSignal(0);
+#if OUTPUT
+                        foreach (double d in coordinates)
+ sw.Write(d + " ");
+ sw.Write(output);
+ sw.WriteLine();
+#endif
+ if (Math.Abs(output) > threshold)
+ {
+ float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
+ connections.Add(new ConnectionGene(connectionCounter++, (agent * inputsPerAgent) + source, (agent * hiddenPerAgent) + target + inputCount + outputCount, weight));
+ }
+
+ //Since every other hidden node has a corresponding output node, we check every other time
+ if (target % 2 == 0)
+ {
+ network.ClearSignals();
+ coordinates[1] = 0;
+ coordinates[3] = 1;
+ network.SetInputSignals(coordinates);
+ ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
+ output = network.GetOutputSignal(0);
+#if OUTPUT
+                            foreach (double d in coordinates)
+ sw.Write(d + " ");
+ sw.Write(output);
+ sw.WriteLine();
+#endif
+ if (Math.Abs(output) > threshold)
+ {
+ float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
+ connections.Add(new ConnectionGene(connectionCounter++, (agent * hiddenPerAgent) + source + inputCount + outputCount, ((outputsPerAgent * agent) + ((target) / 2)) + inputCount, weight));
+ }
+ coordinates[1] = -1;
+ coordinates[3] = 0;
+
+ }
+ }
+ }
+ }
+#if OUTPUT
+ sw.Flush();
+#endif
+ //Console.WriteLine(count);
+ //Console.ReadLine();
+ return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
+ }
+
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/Clune/CluneExperiment.cs b/SharpNeatWalker/OscillatorQuadruped/Clune/CluneExperiment.cs
new file mode 100644
index 000000000..b55dbc4be
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/Clune/CluneExperiment.cs
@@ -0,0 +1,120 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using SharpNeatLib.Experiments;
+using SharpNeatLib.Evolution;
+using SharpNeatLib.NeuralNetwork;
+
+namespace OscillatorQuadruped
+{
+ class CluneExperiment : IExperiment
+ {
+ private uint inputs;
+ private uint outputs;
+ private uint hidden;
+ private int cppnInputs;
+ private int cppnOutputs;
+ private IPopulationEvaluator populationEvaluator = null;
+ private NeatParameters neatParams = null;
+
+ public CluneExperiment(uint inputs, uint outputs, uint hidden, int cppnInputs, int cppnOutputs)
+ {
+ this.inputs = inputs;
+ this.outputs = outputs;
+ this.hidden = hidden;
+ this.cppnInputs = cppnInputs;
+ this.cppnOutputs = cppnOutputs;
+ }
+
+ #region IExperiment Members
+
+ public void LoadExperimentParameters(System.Collections.Hashtable parameterTable)
+ {
+ //throw new Exception("The method or operation is not implemented.");
+ }
+
+ public IPopulationEvaluator PopulationEvaluator
+ {
+ get
+ {
+ if (populationEvaluator == null)
+ ResetEvaluator(HyperNEATParameters.substrateActivationFunction);
+
+ return populationEvaluator;
+ }
+ }
+
+ public void ResetEvaluator(IActivationFunction activationFn)
+ {
+ populationEvaluator = new ClunePopulationEvaluator(new CluneNetworkEvaluator(inputs, outputs, hidden));
+ }
+
+ public int InputNeuronCount
+ {
+ get { return cppnInputs; }
+ }
+
+ public int OutputNeuronCount
+ {
+ get { return cppnOutputs; }
+ }
+
+ public NeatParameters DefaultNeatParameters
+ {
+ get
+ {
+ if (neatParams == null)
+ {
+ NeatParameters np = new NeatParameters();
+ np.activationProbabilities = new double[4];
+ np.activationProbabilities[0] = .25;
+ np.activationProbabilities[1] = .25;
+ np.activationProbabilities[2] = .25;
+ np.activationProbabilities[3] = .25;
+ np.compatibilityDisjointCoeff = 1;
+ np.compatibilityExcessCoeff = 1;
+ np.compatibilityThreshold = 100;
+ np.compatibilityWeightDeltaCoeff = 3;
+ np.connectionWeightRange = 3;
+ np.elitismProportion = .1;
+ np.pInitialPopulationInterconnections = 1;
+ np.pInterspeciesMating = 0.01;
+ np.pMutateAddConnection = .06;
+ np.pMutateAddNode = .01;
+ np.pMutateConnectionWeights = .96;
+ np.pMutateDeleteConnection = 0;
+ np.pMutateDeleteSimpleNeuron = 0;
+ np.populationSize = 300;
+ np.pruningPhaseBeginComplexityThreshold = float.MaxValue;
+ np.pruningPhaseBeginFitnessStagnationThreshold = int.MaxValue;
+ np.pruningPhaseEndComplexityStagnationThreshold = int.MinValue;
+ np.selectionProportion = .8;
+ np.speciesDropoffAge = 1500;
+ np.targetSpeciesCountMax = np.populationSize / 10;
+ np.targetSpeciesCountMin = np.populationSize / 10 - 2;
+
+ neatParams = np;
+ }
+ return neatParams;
+ }
+ }
+
+ public IActivationFunction SuggestedActivationFunction
+ {
+ get { return HyperNEATParameters.substrateActivationFunction; }
+ }
+
+ public AbstractExperimentView CreateExperimentView()
+ {
+ return null;
+ }
+
+ public string ExplanatoryText
+ {
+            get { return "A HyperNEAT experiment for quadruped locomotion"; }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/Clune/CluneNetworkEvaluator.cs b/SharpNeatWalker/OscillatorQuadruped/Clune/CluneNetworkEvaluator.cs
new file mode 100644
index 000000000..2120eeb0b
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/Clune/CluneNetworkEvaluator.cs
@@ -0,0 +1,55 @@
+using SharpNeatLib.Experiments;
+using SharpNeatLib.NeuralNetwork;
+
+namespace OscillatorQuadruped
+{
+ internal class CluneNetworkEvaluator : INetworkEvaluator
+ {
+ public static CluneSubstrate substrate;
+ private NoveltyArchive noveltyArchive;
+
+ public CluneNetworkEvaluator(uint inputs, uint outputs, uint hidden)
+ {
+ substrate = new CluneSubstrate(inputs, outputs, hidden, HyperNEATParameters.substrateActivationFunction);
+ noveltyArchive = new NoveltyArchive();
+ }
+
+ #region INetworkEvaluator Members
+
+ public double[] threadSafeEvaluateNetwork(INetwork network)
+ {
+ var tempGenome = substrate.generateGenome(network);
+ var tempNet = tempGenome.Decode(null);
+
+ using (var quadDomain = new Domain(noveltyArchive, MainProgram.novelty))
+ {
+ var fitness = quadDomain.EvaluateController(new Controller(tempNet));
+ return fitness;
+ }
+ }
+
+ public double EvaluateNetwork(INetwork network)
+ {
+ var tempGenome = substrate.generateGenome(network);
+ var tempNet = tempGenome.Decode(null);
+
+ using (var quadDomain = new Domain())
+ {
+ var fitness = quadDomain.EvaluateController(new Controller(tempNet));
+ return fitness[0];
+ }
+ }
+
+ public string EvaluatorStateMessage
+ {
+ get { return ""; }
+ }
+
+ public void endOfGeneration()
+ {
+ noveltyArchive.endOfGeneration();
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/Clune/ClunePopulationEvaluator.cs b/SharpNeatWalker/OscillatorQuadruped/Clune/ClunePopulationEvaluator.cs
new file mode 100644
index 000000000..ce3eea5d9
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/Clune/ClunePopulationEvaluator.cs
@@ -0,0 +1,18 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using SharpNeatLib.Experiments;
+
+namespace OscillatorQuadruped
+{
+ class ClunePopulationEvaluator : MultiThreadedPopulationEvaluator
+ {
+
+ public ClunePopulationEvaluator(INetworkEvaluator eval)
+ : base(eval, null)
+ {
+
+ }
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/Clune/CluneSubstrate.cs b/SharpNeatWalker/OscillatorQuadruped/Clune/CluneSubstrate.cs
new file mode 100644
index 000000000..847808195
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/Clune/CluneSubstrate.cs
@@ -0,0 +1,85 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using SharpNeatLib.CPPNs;
+using SharpNeatLib.NeuralNetwork;
+using SharpNeatLib.NeatGenome;
+
+namespace OscillatorQuadruped
+{
+ class CluneSubstrate : Substrate
+ {
+ private const float shiftScale = 0.2f;
+
+ public CluneSubstrate(uint inputs, uint outputs, uint hidden, IActivationFunction function)
+ : base(inputs, outputs, hidden, function)
+ {
+
+ }
+
+ public override NeatGenome generateGenome(INetwork network)
+ {
+ var coordinates = new double[6];
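+            // CPPN inputs: source (layer, row, col) and target (layer + 1, row, col);
+            // apparently layer -1 = input sheet, 0 = hidden sheet, 1 = output sheet.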
+ int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
+ uint connectionCounter = 0;
+ ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));
+
+
+ for (int layer = -1; layer < 1; layer++)
+ {
+ coordinates[0] = layer;
+ coordinates[3] = layer + 1;
+ uint srcRow = 0;
+ for (float row1 = -1; row1 <= 1; row1 += 0.5f, srcRow++)
+ {
+ coordinates[1] = row1;
+ uint srcCol = 0;
+ for (float col1 = -1; col1 <= 1; col1 += 0.5f, srcCol++)
+ {
+ coordinates[2] = col1;
+ uint tarRow = 0;
+ for (float row2 = -1; row2 <= 1; row2 += 0.5f, tarRow++)
+ {
+ coordinates[4] = row2;
+ uint tarCol = 0;
+ for (float col2 = -1; col2 <= 1; col2 += 0.5f, tarCol++)
+ {
+ coordinates[5] = col2;
+
+ network.ClearSignals();
+ network.SetInputSignals(coordinates);
+ network.MultipleSteps(iterations);
+ float output = network.GetOutputSignal(0);
+ network.ClearSignals();
+
+ if (Math.Abs(output) > threshold)
+ {
+ uint source = srcRow * 5 + srcCol;
+ if (layer == 0)
+ source += inputCount + outputCount;
+ uint target = tarRow * 5 + tarCol;
+ if (layer == -1)
+ target += inputCount + outputCount;
+ else
+ target += inputCount;
+
+ float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
+ connections.Add(new ConnectionGene(connectionCounter++, source, target, weight));
+ }
+ }
+
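+                            // Skipping the 0 row here (and for row1 below) leaves a 4x5 grid of
+                            // 20 positions per sheet, matching the 20/20/20 substrate dimensions.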
+ if (row2 == -0.5f)
+ row2 += 0.5f;
+ }
+ }
+
+ if (row1 == -0.5f)
+ row1 += 0.5f;
+ }
+ }
+
+ return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
+ }
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/Controller.cs b/SharpNeatWalker/OscillatorQuadruped/Controller.cs
new file mode 100644
index 000000000..c62fed598
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/Controller.cs
@@ -0,0 +1,279 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.IO;
+using SharpNeatLib.NeuralNetwork;
+using SharpNeatLib.NeatGenome;
+
+namespace OscillatorQuadruped
+{
+ class Controller
+ {
+ private bool useFile = false;
+ //private static bool scale = true;
+ private StreamReader reader;
+ private float[] outputs;
+ private int outputindex = 0;
+ private INetwork network;
+ private Boolean scale = true;
+ private Boolean useSUPG;
+ private NeatGenome genome;
+ private INetwork cppn;
+ private int[] triggerMap;
+
+ private static int wavelength = 100; // SUPG wavelength
+ private static int compression = 50;
+
+ private string folder = Directory.GetCurrentDirectory() + "\\logfiles\\";
+ private StreamWriter SW;
+
+ // arrays added to cache CPPN outputs for SUPG activation
+ private float[,] supgOutputs;
+
+ private bool kickstart = true;
+
+ // This constructor should only be used when returning output values from a data file instead of from a network
+ public Controller(bool useFile = false)
+ {
+ this.useFile = useFile;
+ if (useFile)
+ {
+ reader = new StreamReader("C:\\Users\\Greg\\Desktop\\various\\school\\OscillatorQuadruped\\output files\\outs-10.104702-6.868093.txt");
+ outputs = new float[18000];
+ string sLine = "1";
+ int i = 0;
+ while (sLine != null && i < 18000)
+ {
+ sLine = reader.ReadLine();
+ if (sLine != null)
+ outputs[i] = float.Parse(sLine);
+ i++;
+ }
+ reader.Close();
+ }
+ }
+
+ // all of the optional parameters only need to be entered when using SUPG architecture
+ public Controller(INetwork network, bool useSUPG = false, NeatGenome genome = null, INetwork cppn = null, int[] triggerMap = null)
+ {
+ //SW = File.CreateText(folder + "triggers.txt");
+
+ if (useSUPG)
+ {
+ supgOutputs = new float[network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount), wavelength]; // need at least as many rows as the number of hidden neurons
+ // set all supgOutputs to min value to signal they have not been cached yet
+ for (int i = 0; i < network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount); i++)
+ for (int j = 0; j < wavelength; j++)
+ supgOutputs[i, j] = float.MinValue;
+ }
+
+ this.network = network;
+ this.useSUPG = useSUPG;
+ this.genome = genome;
+ this.cppn = cppn;
+ this.triggerMap = triggerMap;
+ if (useSUPG)
+ ((FloatFastConcurrentNetwork)network).UseSUPG = true;
+ }
+
+ public void update(double[] sensors, float[] triggers)
+ {
+ if (SW != null && false)
+ {
+ SW.WriteLine(triggers[0] + "," + triggers[1] + "," + triggers[2] + "," + triggers[3]);
+ SW.Flush();
+ }
+
+ if (network != null)
+ {
+ int iterations = 17;
+
+ network.ClearSignals();
+ if (useSUPG)
+ {
+ int cppnIterations = 2 * (cppn.TotalNeuronCount - (cppn.InputNeuronCount + cppn.OutputNeuronCount)) + 1;
+                    // kickstart: initialize each leg's SUPG time counter with a phase offset queried from the CPPN's 2nd output
+ if (kickstart)
+ {
+ kickstart = false;
+ // set triggers to 0
+ triggers = new float[triggers.Length];
+
+ // set time counters to the kickstart values
+ foreach (NeuronGene neuron in genome.NeuronGeneList)
+ {
+ // get offset value from 2nd cppn output
+ if (neuron.InnovationId >= 16 && neuron.InnovationId <= 18)
+ neuron.TimeCounter = getOffset(1, cppnIterations, neuron);
+ if (neuron.InnovationId >= 19 && neuron.InnovationId <= 21)
+ neuron.TimeCounter = getOffset(2, cppnIterations, neuron);
+ if (neuron.InnovationId >= 22 && neuron.InnovationId <= 24)
+ neuron.TimeCounter = getOffset(3, cppnIterations, neuron);
+ if (neuron.InnovationId >= 25 && neuron.InnovationId <= 27)
+ neuron.TimeCounter = getOffset(4, cppnIterations, neuron);
+ }
+ }
+
+ // set up the override array
+ float[] overrideSignals = new float[network.TotalNeuronCount];
+ for (int i = 0; i < overrideSignals.Length; i++)
+ overrideSignals[i] = float.MinValue;
+
+ // update the SUPGs
+ foreach (NeuronGene neuron in genome.NeuronGeneList)
+ {
+ /* code for triggers */
+ // increment the time counter of any SUPG that is currently running
+
+ if (neuron.TimeCounter > 0)
+ {
+ neuron.TimeCounter = (neuron.TimeCounter + 1) % wavelength;
+ // if the time counter finished and went back to zero, the first step is complete
+ if (neuron.TimeCounter == 0)
+ neuron.FirstStepComplete = true;
+ }
+
+ // check if the neuron is a triggered neuron
+ if (triggerMap[neuron.InnovationId] != int.MinValue)
+ {
+ // check the trigger
+ if (triggers[triggerMap[neuron.InnovationId]] == 1)
+ {
+ // if the time counter was non zero, then the first step has been completed
+ if (neuron.TimeCounter > 0)
+ neuron.FirstStepComplete = true;
+
+ // set the neuron's time to 1
+ neuron.TimeCounter = 1;
+ }
+ }
+ }
+
+ // determine the proper outputs of the SUPGs and send the override array to the network
+
+ foreach (NeuronGene neuron in genome.NeuronGeneList)
+ {
+ if (neuron.TimeCounter > 0) // only need to check neurons whose time counter is non zero
+ {
+ overrideSignals[neuron.InnovationId] = getSUPGActivation(neuron, cppnIterations);
+ }
+ }
+ ((FloatFastConcurrentNetwork)network).OverrideSignals = overrideSignals;
+ }
+ else
+ {
+ network.SetInputSignals(sensors);
+ }
+ network.MultipleSteps(iterations);
+
+ }
+ }
+
+ private float getSUPGActivation(NeuronGene neuron, int cppnIterations)
+ {
+ float activation = 0;
+ int offset = network.InputNeuronCount + network.OutputNeuronCount; // assume that SUPGs are placed at front of hidden neurons
+ // if the element is float.min, then we have not yet cached the SUPG output
+ if (supgOutputs[neuron.InnovationId - offset, neuron.TimeCounter] == float.MinValue)
+ {
+ var coordinates = new double[3];
+
+
+ coordinates[0] = neuron.XValue;
+ coordinates[1] = neuron.YValue;
+
+
+ coordinates[0] = coordinates[0] / compression;
+
+ coordinates[2] = (float)neuron.TimeCounter / wavelength;
+
+ cppn.ClearSignals();
+ cppn.SetInputSignals(coordinates);
+ cppn.MultipleSteps(cppnIterations);
+
+ if (neuron.FirstStepComplete)
+ {
+ activation = cppn.GetOutputSignal(0);
+ supgOutputs[neuron.InnovationId - offset, neuron.TimeCounter] = activation; // only cache the output if the first step is complete
+ }
+ else
+ activation = cppn.GetOutputSignal(0);
+
+ }
+ else
+ {
+ // get the cached value
+ activation = supgOutputs[neuron.InnovationId - offset, neuron.TimeCounter];
+ }
+
+ return activation;
+ }
+
+ private int getOffset(int leg, int cppnIterations, NeuronGene neuron)
+ {
+ int offset = 0;
+ var coordinates = new double[3];
+ coordinates[0] = neuron.XValue / compression;
+ cppn.ClearSignals();
+ cppn.SetInputSignals(coordinates);
+ cppn.MultipleSteps(cppnIterations);
+ float activation = cppn.GetOutputSignal(1);
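+            // Map the CPPN's second output from [-1, 1] to an initial phase offset, clamped to [1, wavelength - 1].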
+ offset = (int)Math.Ceiling((activation + 1) * wavelength / 2);
+ if (offset <= 0)
+ offset = 1;
+ if (offset >= wavelength)
+ offset = wavelength - 1;
+
+ return offset;
+ }
+
+ public float[] getOutputs()
+ {
+ float[] outs = new float[12];
+ if (useFile)
+ {
+ for (int i = 0; i < 12; i++)
+ {
+ outs[i] = outputs[outputindex];
+ outputindex++;
+ if (outputindex > 17999)
+ outputindex = 17999;
+ }
+ }
+ else
+ {
+ if (MainProgram.doClune)
+ for (int i = 0; i < 12; i++)
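+                    // The Clune output sheet is a 4x5 grid; each block of 3 joint signals presumably
+                    // occupies the first 3 cells of one row of 5, hence the i / 3 * 5 + (i % 3) index.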
+ outs[i] = network.GetOutputSignal(i / 3 * 5 + (i % 3));
+ else
+ for (int i = 0; i < 12; i++)
+ {
+ outs[i] = network.GetOutputSignal(i);
+ if (useSUPG)
+ {
+ // with the SUPG architecture, we need the outputs to be normalized between 0 and 1
+                        // because the CPPN uses a bipolar sigmoid for all outputs, which produces values in the range [-1, 1]
+ // when SUPGs start becoming true hidden nodes, we can remove this modification
+ outs[i] = (outs[i] + 1) / 2;
+ }
+ }
+ }
+ return outs;
+ }
+
+ public void cleanup()
+ {
+ //if (SW != null)
+ //SW.Close();
+ }
+
+ public bool Scale
+ {
+ get
+ {
+ return scale;
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/Domain.cs b/SharpNeatWalker/OscillatorQuadruped/Domain.cs
new file mode 100644
index 000000000..354456a6e
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/Domain.cs
@@ -0,0 +1,153 @@
+using System;
+using System.Numerics;
+using BepuPhysics;
+using BepuUtilities;
+using DemoContentLoader;
+using DemoRenderer;
+using DemoRenderer.UI;
+using Demos;
+using DemoUtilities;
+
+namespace OscillatorQuadruped
+{
+ internal class Domain : Demo
+ {
+#if TODO // TODO: OscillatorQuadruped
+ private Controller _controller;
+ private Quadruped _walker;
+
+ private float[] _behavior;
+ private int _behaviorCounter;
+ private int _timeCounter;
+ private readonly bool _novelty;
+ private readonly NoveltyArchive _noveltyArchive;
+ private const int SampleRate = 2;
+
+ private bool _cameraFollowCreature;
+ private Vector3 _lastxyz;
+ private Camera _camera;
+
+ public Domain(NoveltyArchive noveltyArchive = null, bool novelty = false)
+ {
+ _novelty = novelty;
+ _noveltyArchive = noveltyArchive;
+ }
+
+ public void Initialize(Controller controller)
+ {
+ _controller = controller;
+ Initialize(null, new Camera(4 / 3f, (float)Math.PI / 3, 0.01f, 100000));
+ }
+
+ public override void Initialize(ContentArchive content, Camera camera)
+ {
+ _camera = camera;
+ if (_controller == null)
+ _controller = new Controller(true);
+
+ _lastxyz = new Vector3(-2f, -2f, 2f);
+ _camera.Position = _lastxyz;
+ _camera.Yaw = MathHelper.Pi * 3f / 4;
+ _camera.Pitch = MathHelper.Pi * 0.1f;
+ _cameraFollowCreature = true;
+
+ Simulation = Simulation.Create(BufferPool, new TestCallbacks());
+ Simulation.PoseIntegrator.Gravity = new Vector3(0, 0, 9.8f);
+
+ _walker = new Quadruped(_controller, Simulation);
+ _walker.Initialize();
+ }
+
+ public override void Update(Input input, float dt)
+ {
+ base.Update(input, dt); // Calls Simulation.Timestep()
+
+ _walker.Update(dt);
+
+ if (_novelty)
+ {
+ if (_timeCounter == 0)
+ {
+ // update the behavior vector
+ var com = _walker.CurrentCom;
+ // location based novelty
+ UpdateBehavior(com);
+ }
+ _timeCounter++;
+ _timeCounter = _timeCounter % SampleRate;
+ }
+ }
+
+ public override void Render(Renderer renderer, TextBuilder text, Font font)
+ {
+ // if we're watching the movie, move the camera automatically
+ if (_cameraFollowCreature)
+ {
+ var pos = _walker.CurrentCom;
+ var desiredxyz = new Vector3(pos.X - 2, pos.Y - 2, 2f);
+ var xyz = _lastxyz + (desiredxyz - _lastxyz) * .01f;
+ _camera.Position = _lastxyz = xyz;
+ }
+ base.Render(renderer, text, font);
+ }
+
+ protected override bool OnCollisionWithGround(Geom geom)
+ {
+ foreach (var o in Objects)
+ for (var i = 0; i < o.Bodies.Count; i++)
+ if (geom.Body == o.Bodies[i])
+ o.BodiesOnGround.Add(o.Bodies[i]);
+ return base.OnCollisionWithGround(geom);
+ }
+
+
+ public double[] EvaluateController(Controller controller)
+ {
+ const int simTime = 1500; // 15 seconds // 100 * 3600 * 100; // 100 hrs
+ _behavior = new float[simTime * 2 / SampleRate];
+ _behaviorCounter = 0;
+
+ Initialize(controller);
+ Run(simTime);
+
+ var com = _walker.CurrentCom;
+ var fitness = _walker.CalcFitness();
+
+ controller.cleanup();
+
+ var objectiveFitness = fitness;
+
+ if (_novelty)
+ {
+ // update the behavior vector in case the simulation was aborted
+ while (_behaviorCounter < _behavior.Length)
+ UpdateBehavior(com);
+
+ // calculate the fitness based on the novelty metric
+ fitness = _noveltyArchive.calcFitness(_behavior);
+ }
+
+ return new double[] { fitness, objectiveFitness };
+ }
+
+ private void UpdateBehavior(Vector3 com)
+ {
+ _behavior[_behaviorCounter++] = com.X;
+ _behavior[_behaviorCounter++] = com.Y;
+ }
+#else
+ public Domain(NoveltyArchive noveltyArchive = null, bool novelty = false)
+ {
+ }
+
+ public override void Initialize(ContentArchive content, Camera camera)
+ {
+ }
+
+ public double[] EvaluateController(Controller controller)
+ {
+ return null;
+ }
+#endif
+ }
+}
\ No newline at end of file
diff --git a/SharpNeatWalker/OscillatorQuadruped/Form1.cs b/SharpNeatWalker/OscillatorQuadruped/Form1.cs
new file mode 100644
index 000000000..b7e92c658
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/Form1.cs
@@ -0,0 +1,87 @@
+#if TODO // TODO: OscillatorQuadruped
+using System;
+
+namespace OscillatorQuadruped
+{
+ public enum RunState
+ {
+ Stopped,
+ Stopping,
+ Running
+ }
+
+ public class Form1
+ {
+ private RunState _state;
+ private readonly System.Threading.CancellationTokenSource _cancel = new System.Threading.CancellationTokenSource();
+
+ public Form1()
+ {
+ InitializeComponent();
+ radioButtonCTRNN.Checked = true;
+ radioButtonObjective.Checked = true;
+ }
+
+ private void buttonRun_Click(object sender, EventArgs e)
+ {
+ if (_state == RunState.Stopped)
+ {
+ System.Threading.Tasks.Task.Run(() => {
+ var mp = new MainProgram();
+ var evaluation = radioButtonObjective.Checked ? 0 : 1;
+ if (radioButtonCTRNN.Checked)
+ mp.run(0, evaluation, _cancel.Token);
+ else if (radioButtonSUPG.Checked)
+ mp.run(1, evaluation, _cancel.Token);
+ else
+ mp.run(2, evaluation, _cancel.Token);
+ UpdateState(RunState.Stopped);
+ }, _cancel.Token);
+ UpdateState(RunState.Running);
+ }
+ else if (_state == RunState.Running)
+ {
+ _cancel.Cancel();
+ UpdateState(RunState.Stopping);
+ }
+ }
+
+ private void UpdateState(RunState state)
+ {
+ _state = state;
+ buttonRun.Invoke((Action)(() => { buttonRun.Text = "HyperNEAT " + _state + "..."; }));
+ }
+
+ private void buttonOpenGenome_Click(object sender, EventArgs e)
+ {
+ DialogResult result = openFileDialog1.ShowDialog();
+ if (result == DialogResult.OK)
+ {
+ textBoxGenome.Text = openFileDialog1.FileName;
+ }
+ }
+
+ private void buttonPlay_Click(object sender, EventArgs e)
+ {
+ MainProgram mp = new MainProgram();
+ if (radioButtonCTRNN.Checked)
+ mp.showMovie(textBoxGenome.Text, 0);
+ else if (radioButtonSUPG.Checked)
+ mp.showMovie(textBoxGenome.Text, 1);
+ else
+ mp.showMovie(textBoxGenome.Text, 2);
+ }
+
+ private void buttonCalcFitness_Click(object sender, EventArgs e)
+ {
+ MainProgram mp = new MainProgram();
+ if (radioButtonCTRNN.Checked)
+ mp.calcFitness(textBoxGenome.Text, 0);
+ else if (radioButtonSUPG.Checked)
+ mp.calcFitness(textBoxGenome.Text, 1);
+ else
+ mp.calcFitness(textBoxGenome.Text, 2);
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/SharpNeatWalker/OscillatorQuadruped/MainProgram.cs b/SharpNeatWalker/OscillatorQuadruped/MainProgram.cs
new file mode 100644
index 000000000..beb086e78
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/MainProgram.cs
@@ -0,0 +1,182 @@
+using System;
+using System.IO;
+using System.Xml;
+using SharpNeatLib.CPPNs;
+using SharpNeatLib.Evolution;
+using SharpNeatLib.Experiments;
+using SharpNeatLib.NeatGenome;
+using SharpNeatLib.NeatGenome.Xml;
+using SharpNeatLib.NeuralNetwork;
+
+namespace OscillatorQuadruped
+{
+ class MainProgram
+ {
+ private string folder = Directory.GetCurrentDirectory() + "\\logfiles\\";
+
+ public static bool doClune = false;
+ public static bool novelty = false;
+
+ public MainProgram()
+ {
+
+ }
+
+ public void run(int type, int evaluationMethod, System.Threading.CancellationToken token)
+ {
+ double maxFitness = 0;
+ int maxGenerations = 800;
+ int populationSize = 300;
+
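+            // type: 0 = CTRNN substrate, 1 = SUPG substrate, 2 = Clune substrate;
+            // evaluationMethod: 1 enables novelty search, otherwise objective fitness is used.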
+ IExperiment exp;
+
+ if (evaluationMethod == 1)
+ novelty = true;
+
+ if (type == 0)
+ exp = new CTRNNExperiment(4, 12, 8, 8, 4);
+ else if (type == 1)
+ exp = new SUPGExperiment(4, 12, 12, 3, 2);
+ else
+ {
+ doClune = true;
+ exp = new CluneExperiment(20, 20, 20, 6, 1);
+ }
+
+ XmlDocument doc;
+ FileInfo oFileInfo;
+ IdGenerator idgen;
+ EvolutionAlgorithm ea;
+ NeatGenome seedGenome = null;
+
+ if (seedGenome == null)
+ {
+ idgen = new IdGenerator();
+ ea = new EvolutionAlgorithm(new Population(idgen, GenomeFactory.CreateGenomeList(exp.DefaultNeatParameters, idgen, exp.InputNeuronCount, exp.OutputNeuronCount, exp.DefaultNeatParameters.pInitialPopulationInterconnections, populationSize)), exp.PopulationEvaluator, exp.DefaultNeatParameters);
+ }
+ else
+ {
+ idgen = new IdGeneratorFactory().CreateIdGenerator(seedGenome);
+ ea = new EvolutionAlgorithm(new Population(idgen, GenomeFactory.CreateGenomeList(seedGenome, populationSize, exp.DefaultNeatParameters, idgen)), exp.PopulationEvaluator, exp.DefaultNeatParameters);
+ }
+ Directory.CreateDirectory(folder);
+ using (var logWriter = File.CreateText(folder + "Log " + DateTime.Now.ToString("u").Replace(':', '.') + ".txt"))
+ for (int j = 0; j < maxGenerations; j++)
+ {
+ if (token.IsCancellationRequested)
+ {
+ logWriter.WriteLine("Cancelled");
+ break;
+ }
+
+ DateTime dt = DateTime.Now;
+ ea.PerformOneGeneration();
+
+ if (ea.BestGenome.ObjectiveFitness > maxFitness)
+ {
+ maxFitness = ea.BestGenome.ObjectiveFitness;
+ doc = new XmlDocument();
+ XmlGenomeWriterStatic.Write(doc, (NeatGenome)ea.BestGenome);
+ oFileInfo = new FileInfo(folder + "bestGenome" + j + ".xml");
+ doc.Save(oFileInfo.FullName);
+
+ /*/ This will output the substrate
+ doc = new XmlDocument();
+ XmlGenomeWriterStatic.Write(doc, SUPGNetworkEvaluator.substrate.generateGenome(ea.BestGenome.Decode(null)));
+ oFileInfo = new FileInfo(folder + "bestNetwork" + j + ".xml");
+ doc.Save(oFileInfo.FullName);*/
+ }
+ var msg = DateTime.Now.ToLongTimeString()
+ + "; Duration=" + DateTime.Now.Subtract(dt).ToString("mm\\:ss")
+ + "; Gen=" + ea.Generation.ToString("000") + "; Neurons=" + (ea.Population.TotalNeuronCount / (float)ea.Population.GenomeList.Count).ToString("00.00") + "; Connections=" + (ea.Population.TotalConnectionCount / (float)ea.Population.GenomeList.Count).ToString("00.00")
+ + "; BestFit=" + ea.BestGenome.ObjectiveFitness.ToString("0.000") + "; MaxFit=" + maxFitness.ToString("0.000");
+ Console.WriteLine(msg);
+ logWriter.WriteLine(msg);
+ logWriter.Flush();
+ //Do any post-hoc stuff here
+ }
+
+ doc = new XmlDocument();
+ XmlGenomeWriterStatic.Write(doc, (NeatGenome)ea.BestGenome, ActivationFunctionFactory.GetActivationFunction("NullFn"));
+ oFileInfo = new FileInfo(folder + "bestGenome.xml");
+ doc.Save(oFileInfo.FullName);
+ }
+
+#if TODO // TODO: OscillatorQuadruped
+ public void showMovie(string genomeFile, int type)
+ {
+ if (true) // set to false to use hardcoded output values from a file
+ {
+ XmlDocument doc = new XmlDocument();
+ doc.Load(genomeFile);
+ NeatGenome genome = XmlNeatGenomeReaderStatic.Read(doc);
+
+ INetwork tempNet = null;
+ INetwork cppn = null;
+ NeatGenome tempGenome = null;
+
+ Substrate substrate;
+
+ if (type == 0)
+ substrate = new CTRNNSubstrate(4, 12, 8, HyperNEATParameters.substrateActivationFunction);
+ else if (type == 1)
+ substrate = new SUPGSubstrate(4, 12, 12, HyperNEATParameters.substrateActivationFunction);
+ else
+ {
+ doClune = true;
+ substrate = new CluneSubstrate(20, 20, 20, HyperNEATParameters.substrateActivationFunction);
+ }
+
+ cppn = genome.Decode(null);
+ tempGenome = substrate.generateGenome(cppn);
+
+ tempNet = tempGenome.Decode(null);
+
+ Controller controller;
+ if (type == 0)
+ controller = new Controller(tempNet);
+ else if (type == 1)
+ controller = new Controller(tempNet, true, tempGenome, cppn, ((SUPGSubstrate)substrate).getSUPGMap());
+ else
+ controller = new Controller(tempNet);
+
+
+ using (var domain = new Domain())
+ {
+ domain.Initialize(controller);
+ domain.RunDraw();
+ }
+ }
+ else
+ {
+ using (var domain = new Domain())
+ {
+ domain.Initialize();
+ domain.RunDraw();
+ }
+ }
+ }
+
+ public void calcFitness(string genomeFile, int type)
+ {
+ XmlDocument doc = new XmlDocument();
+ doc.Load(genomeFile);
+ NeatGenome genome = XmlNeatGenomeReaderStatic.Read(doc);
+
+ INetworkEvaluator eval;
+ if (type == 0)
+ eval = new CTRNNNetworkEvaluator(4, 12, 12);
+ else if (type == 1)
+ eval = new SUPGNetworkEvaluator(4, 12, 12);
+ else
+ {
+ doClune = true;
+ eval = new CluneNetworkEvaluator(20, 20, 20);
+ }
+
+ var tempNet = genome.Decode(null);
+ MessageBox.Show(eval.threadSafeEvaluateNetwork(tempNet)[0].ToString());
+ }
+#endif
+ }
+}
\ No newline at end of file
diff --git a/SharpNeatWalker/OscillatorQuadruped/NoveltyArchive.cs b/SharpNeatWalker/OscillatorQuadruped/NoveltyArchive.cs
new file mode 100644
index 000000000..c14caf365
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/NoveltyArchive.cs
@@ -0,0 +1,113 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Collections;
+using System.Threading;
+
+namespace OscillatorQuadruped
+{
+ class NoveltyArchive
+ {
+ private double novelThreshold = 1000;
+ private int k = 15;
+ private ArrayList noveltyBank;
+ private static Semaphore sem = new Semaphore(1, 1);
+
+ // threshold adjustment variables
+ private int itemsAdded = 0;
+ private int gensSinceLastAddition = 0;
+
+ public NoveltyArchive()
+ {
+ noveltyBank = new ArrayList();
+ }
+
+ public float calcFitness(float[] behavior)
+ {
+
+ float fitness = 0;
+
+
+ float[] neighbors = new float[k];
+ for (int i = 0; i < k; i++)
+ {
+ neighbors[i] = float.MaxValue;
+ }
+
+ // make a local copy of novelty bank
+ sem.WaitOne();
+ ArrayList tempBank = new ArrayList(noveltyBank);
+ sem.Release();
+
+ // find the distance to the k nearest neighbors
+ foreach(float[] candidate in tempBank)
+ {
+ float distance = calcDistance(behavior, candidate);
+ int j = 0;
+ while (j < k)
+ {
+ if (distance < neighbors[j])
+ {
+ float temp = neighbors[j];
+ neighbors[j] = distance;
+ distance = temp;
+ j = -1;
+ }
+ j++;
+ }
+ }
+
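+            // Novelty score = sum of (squared) distances to the k nearest neighbors;
+            // unfilled neighbor slots (still float.MaxValue) contribute nothing.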
+ for (int i = 0; i < k; i++)
+ {
+ if (neighbors[i] == float.MaxValue)
+ neighbors[i] = 0;
+
+ fitness += neighbors[i];
+ }
+
+ // if the fitness is above the threshold, or we don't yet have k behaviors in the bank, add this individual to the novelty bank
+ if (fitness > novelThreshold || noveltyBank.Count < k)
+ {
+ sem.WaitOne();
+ noveltyBank.Add(behavior);
+ sem.Release();
+
+ if (fitness > novelThreshold)
+ itemsAdded++;
+ }
+
+ return fitness;
+ }
+
+ private float calcDistance(float[] a, float[] b)
+ {
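+            // Squared Euclidean distance (no square root), summed over the shared length of the two behavior vectors.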
+ float distance = 0;
+ for (int i = 0; i < a.Length && i < b.Length; i++)
+ {
+ distance += (float) Math.Pow(a[i] - b[i], 2);
+ }
+ return distance;
+ }
+
+ public void endOfGeneration()
+ {
+ // if more than 4 items have been added, raise the bar
+ if (itemsAdded > 4)
+ novelThreshold *= 1.2;
+
+ // if no items have been added in 4 generations or longer, lower the bar
+ if (itemsAdded == 0)
+ {
+ gensSinceLastAddition++;
+ if (gensSinceLastAddition > 3)
+ novelThreshold *= .8;
+ }
+ else
+ gensSinceLastAddition = 0;
+
+ // reset items added
+ itemsAdded = 0;
+ }
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/Quadruped.cs b/SharpNeatWalker/OscillatorQuadruped/Quadruped.cs
new file mode 100644
index 000000000..3f35dedd8
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/Quadruped.cs
@@ -0,0 +1,560 @@
+#if TODO // TODO: OscillatorQuadruped
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using BepuPhysics;
+using BepuPhysics.Collidables;
+
+namespace OscillatorQuadruped
+{
+ internal class Quadruped
+ {
+ private const int NUMBER_TESTING_SIZES = 1;
+
+        private const float FIXED_LEG_SCALE = 1 / 0.35037037037037033f; // was 3.25; leg length for fixed training, also used as the length to create the controller when in "display" mode
+
+ private static readonly float[] LEG_SIZES = { 0.30f };
+
+ //In "display" mode this is the scale of the quadruped
+ private const float LEG_SCALE = 0.35037037037037033f;
+
+ //5.4, 2,3, 7.8
+ private const float SCALE_FACTOR = 3.0f; //was 3 3, 3.5, 4.0
+ private const float FOOTX_SZ = 0.55f / SCALE_FACTOR; //was 0.65
+ private const float FOOTY_SZ = 0.15f / SCALE_FACTOR;
+ private const float FOOTZ_SZ = 0.55f / SCALE_FACTOR;
+ private const float LLEG_LEN = 1.0f; //was 1.0
+ private const float LLEG_RAD = 0.2f / SCALE_FACTOR; //was 0.2
+ private const float ULEG_LEN = 1.0f;//was 1.0
+ private const float ULEG_RAD = 0.2f / SCALE_FACTOR; //was 0.2
+ private const float TORSO_LEN = 1.3f / SCALE_FACTOR; //was 1.3
+ private const float TORSO_RAD = 0.3f / SCALE_FACTOR; //was 0.3
+ private const float LEG_POS = 0.2f / SCALE_FACTOR;
+ private const float TORSO_HEIGHT = 1.2f / SCALE_FACTOR;
+ private const float TORSO_LEN2 = 3.5f / SCALE_FACTOR; //was 3.0
+ private const float ORIG_HEIGHT = (TORSO_RAD / 2.0f + (ULEG_LEN / 4.0f) + (LLEG_LEN / 4.0f) + FOOTZ_SZ); //SET TO SMALLEST POSSIBLE SIZE
+ private const float LEG_MASS = 0.5f; //was 0.5
+ private const float TORSO_MASS = 0.0f; //was 0.3 and before 1.0 and before 0.5
+ private const float FOOT_MASS = 0.1f;
+ private const float MAXTORQUE_FOOT = 10.0f; //was 10.0
+ private const float MAXTORQUE_KNEE = 5.0f; //was 5.0
+ private const float MAXTORQUE_HIPMINOR = 5.0f; // 5.0
+ private const float MAXTORQUE_HIPMAJOR = 5.0f; //5.0
+ private const float P_CONSTANT = 9.0f; //was 9.0
+ private const float D_CONSTANT = 0.0f;
+ private const float FOOTFACTOR = 5.0f;
+
+        public readonly HashSet BodiesOnGround = new HashSet();
+ private readonly float[] current_angles;
+ private readonly float[] lo_limit;
+ private readonly float[] hi_limit;
+ private double[] sensors;
+ private readonly float[] desired_angvel;
+ private readonly float[] delta_angles;
+ private readonly float[] p_terms;
+ private readonly float[] d_terms;
+
+ private int step;
+ private readonly float[] orig_quat = new float[15];
+ private bool resetting;
+
+ private int reset_counter;
+
+ private Vector3 orig_com;
+ private readonly float[] orig_left = new float[4];
+ private readonly float[] orig_right = new float[4];
+ public Vector3 CurrentCom { get; private set; }
+ private readonly float[] last_com = new float[4];
+ private double last_distance;
+ private bool log;
+
+ private readonly List<int> lft;
+ private readonly List<float> lfx;
+ private readonly List<float> lfy;
+ private readonly List<int> rft;
+ private readonly List<float> rfx;
+ private readonly List<float> rfy;
+
+ private bool leftdown;
+ private bool rightdown;
+ private bool leftdownback;
+ private bool rightdownback;
+ private bool leftrigid;
+ private bool rightrigid;
+ private int lastdown;
+ private double old_distance;
+ private readonly float[] footdown;
+
+ private int timeCounter; // for clune architecture
+ private const int cluneWavelength = 100;
+
+ private readonly Controller _controller;
+ private readonly Simulation _simulation;
+
+ private Box _torso;
+ private Sphere _leftFoot;
+ private Geom _leftLowerLeg;
+ private Geom _leftUpperLeg;
+ private Fixed _leftFootLegJoint;
+ private Hinge _leftKneeLowerLegJoint;
+ private Universal _leftHipUpperLegJoint;
+ private Sphere _rightFoot;
+ private Geom _rightLowerLeg;
+ private Geom _rightUpperLeg;
+ private Fixed _rightFootLegJoint;
+ private Hinge _rightKneeLowerLegJoint;
+ private Universal _rightHipUpperLegJoint;
+ private Sphere _leftBackFoot;
+ private Geom _leftBackLowerLeg;
+ private Geom _leftBackUpperLeg;
+ private Fixed _leftBackFootLegJoint;
+ private Hinge _leftBackKneeLowerLegJoint;
+ private Universal _leftBackHipUpperLegJoint;
+ private Sphere _rightBackFoot;
+ private Geom _rightBackLowerLeg;
+ private Geom _rightBackUpperLeg;
+ private Fixed _rightBackFootLegJoint;
+ private Hinge _rightBackKneeLowerLegJoint;
+ private Universal _rightBackHipUpperLegJoint;
+
+ public Quadruped(Controller controller, Simulation simulation)
+ {
+ _controller = controller;
+ _simulation = simulation;
+
+ const int numJoints = 12;
+ lo_limit = new float[numJoints];
+ hi_limit = new float[numJoints];
+
+ resetting = false;
+ leftdown = false;
+ rightdown = false;
+ reset_counter = 0;
+ old_distance = 0;
+ last_distance = -1;
+ leftdownback = false;
+ rightdownback = false;
+ leftrigid = false;
+ rightrigid = false;
+ lastdown = 0;
+
+ if (log) { } // GWM - No log file for now
+
+ // GWM - lines moved out of for loop
+ current_angles = new float[numJoints];
+ delta_angles = new float[numJoints];
+ desired_angvel = new float[numJoints];
+
+ p_terms = new float[numJoints];
+ d_terms = new float[numJoints];
+
+ for (int x = 0; x < numJoints; x++)
+ {
+ p_terms[x] = P_CONSTANT;
+ d_terms[x] = D_CONSTANT;
+ lo_limit[x] = 0.0f;
+ hi_limit[x] = 0.0f;
+ }
+
+ sensors = new double[4];
+ footdown = new float[4];
+
+ lft = new List<int>();
+ lfx = new List<float>();
+ lfy = new List<float>();
+ rft = new List<int>();
+ rfx = new List<float>();
+ rfy = new List<float>();
+ }
+
+ public void Initialize()
+ {
+ var xAxis = new Vector3 { X = 1 };
+ var nxAxis = new Vector3 { X = -1 };
+ var yAxis = new Vector3 { Y = 1 };
+ //var zAxis = new Vector3 { Z = 1 };
+
+ const float fr = 0.4f; //was 0.2
+ const float mi = 0.0f;
+
+ var torsoPos = new Vector3(0.5f, (TORSO_LEN + ULEG_RAD) / 2.0f, (LEG_SCALE) + (LEG_SCALE) + FOOTZ_SZ);
+
+ float[] leftLegPos = { LEG_POS, -ULEG_RAD / 2, 0.0f };
+ float[] rightLegPos = { LEG_POS, TORSO_LEN + ULEG_RAD + ULEG_RAD / 2, 0.0f };
+ float[] leftLegBackPos = { 1.0f - LEG_POS, -ULEG_RAD / 2, torsoPos.Z - ((LEG_SCALE) + (LEG_SCALE) + FOOTZ_SZ) }; //[1] was 0
+ float[] rightLegBackPos = { 1.0f - LEG_POS, TORSO_LEN + ULEG_RAD + ULEG_RAD / 2, torsoPos.Z - ((LEG_SCALE) + (LEG_SCALE) + FOOTZ_SZ) };
+
+ var leftHip = new Vector3(leftLegPos[0], leftLegPos[1] + ULEG_RAD, torsoPos.Z);
+ var rightHip = new Vector3(rightLegPos[0], rightLegPos[1] - ULEG_RAD, torsoPos.Z);
+ var leftHipBack = new Vector3(leftLegBackPos[0], leftLegBackPos[1] + ULEG_RAD, torsoPos.Z);
+ var rightHipBack = new Vector3(rightLegBackPos[0], rightLegBackPos[1] - ULEG_RAD, torsoPos.Z);
+
+ _torso = AddBoxGeom(TORSO_LEN2, TORSO_LEN, TORSO_HEIGHT, TORSO_MASS, torsoPos);
+
+ CreateLeg(leftLegPos, false, out _leftFoot, out _leftLowerLeg, out _leftUpperLeg, out _leftFootLegJoint, out _leftKneeLowerLegJoint);
+ CreateLeg(rightLegPos, false, out _rightFoot, out _rightLowerLeg, out _rightUpperLeg, out _rightFootLegJoint, out _rightKneeLowerLegJoint);
+
+ //was -1.3, 1.6
+ _leftHipUpperLegJoint = AddUniversalJoint(_torso.Body, _leftUpperLeg.Body, leftHip, xAxis, yAxis, mi, fr, -0.8f, 0.8f, MAXTORQUE_HIPMINOR, MAXTORQUE_HIPMAJOR);
+ _rightHipUpperLegJoint = AddUniversalJoint(_torso.Body, _rightUpperLeg.Body, rightHip, nxAxis, yAxis, mi, fr, -0.8f, 0.8f, MAXTORQUE_HIPMINOR, MAXTORQUE_HIPMAJOR);
+
+ CreateLeg(leftLegBackPos, true, out _leftBackFoot, out _leftBackLowerLeg, out _leftBackUpperLeg, out _leftBackFootLegJoint, out _leftBackKneeLowerLegJoint);
+ CreateLeg(rightLegBackPos, true, out _rightBackFoot, out _rightBackLowerLeg, out _rightBackUpperLeg, out _rightBackFootLegJoint, out _rightBackKneeLowerLegJoint);
+
+ //was -0.2, 0.2
+ _leftBackHipUpperLegJoint = AddUniversalJoint(_torso.Body, _leftBackUpperLeg.Body, leftHipBack, xAxis, yAxis, mi, fr, -0.8f, 0.8f, MAXTORQUE_HIPMINOR, MAXTORQUE_HIPMAJOR);
+ _rightBackHipUpperLegJoint = AddUniversalJoint(_torso.Body, _rightBackUpperLeg.Body, rightHipBack, nxAxis, yAxis, mi, fr, -0.8f, 0.8f, MAXTORQUE_HIPMINOR, MAXTORQUE_HIPMAJOR);
+
+ lo_limit[0] = _leftKneeLowerLegJoint.LimitMotor.LowStop;
+ lo_limit[1] = _leftHipUpperLegJoint.LimitMotor1.LowStop;
+ lo_limit[2] = _leftHipUpperLegJoint.LimitMotor2.LowStop;
+ lo_limit[3] = _rightHipUpperLegJoint.LimitMotor2.LowStop;
+ lo_limit[4] = _rightHipUpperLegJoint.LimitMotor1.LowStop;
+ lo_limit[5] = _rightKneeLowerLegJoint.LimitMotor.LowStop;
+
+ hi_limit[0] = _leftKneeLowerLegJoint.LimitMotor.HighStop;
+ hi_limit[1] = _leftHipUpperLegJoint.LimitMotor1.HighStop;
+ hi_limit[2] = _leftHipUpperLegJoint.LimitMotor2.HighStop;
+ hi_limit[3] = _rightHipUpperLegJoint.LimitMotor2.HighStop;
+ hi_limit[4] = _rightHipUpperLegJoint.LimitMotor1.HighStop;
+ hi_limit[5] = _rightKneeLowerLegJoint.LimitMotor.HighStop;
+
+ lo_limit[6] = _leftBackKneeLowerLegJoint.LimitMotor.LowStop;
+ lo_limit[7] = _leftBackHipUpperLegJoint.LimitMotor1.LowStop;
+ lo_limit[8] = _leftBackHipUpperLegJoint.LimitMotor2.LowStop;
+ lo_limit[9] = _rightBackHipUpperLegJoint.LimitMotor2.LowStop;
+ lo_limit[10] = _rightBackHipUpperLegJoint.LimitMotor1.LowStop;
+ lo_limit[11] = _rightBackKneeLowerLegJoint.LimitMotor.LowStop;
+
+ hi_limit[6] = _leftBackKneeLowerLegJoint.LimitMotor.HighStop; //back knee
+ hi_limit[7] = _leftBackHipUpperLegJoint.LimitMotor1.HighStop;
+ hi_limit[8] = _leftBackHipUpperLegJoint.LimitMotor2.HighStop;
+ hi_limit[9] = _rightBackHipUpperLegJoint.LimitMotor2.HighStop;
+ hi_limit[10] = _rightBackHipUpperLegJoint.LimitMotor1.HighStop;
+ hi_limit[11] = _rightBackKneeLowerLegJoint.LimitMotor.HighStop; //other back knee
+
+ orig_com = GetCenterOfMass();
+ CurrentCom = GetCenterOfMass();
+ orig_left[0] = _leftFoot.Position.X;
+ orig_right[0] = _leftFoot.Position.Y;
+ orig_left[1] = _rightFoot.Position.X;
+ orig_right[1] = _rightFoot.Position.Y;
+ orig_left[2] = 0.0f;
+ orig_right[2] = 0.0f;
+
+ for (int i = 0; i < Bodies.Count; i++)
+ orig_quat[i] = Bodies[i].Position.X;
+ }
+
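+ // Builds one leg: a spherical foot rigidly fixed to a box-shaped lower leg, joined to a box-shaped
+ // upper leg by a hinge at the knee; the knee's angular range is mirrored (0..0.8 vs -0.8..0) for the
+ // back legs via the 'flipped' flag. The hip joint attaching the upper leg to the torso is added
+ // separately by the caller.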
+ private void CreateLeg(float[] offset, bool flipped, out Sphere foot, out Geom lowerLeg, out Geom upperLeg, out Fixed footLegJoint, out Hinge kneeLowerLegJoint)
+ {
+ //var xAxis = new Vector3(1.0f, 0.0f, 0.0f);
+ var yAxis = new Vector3(0.0f, -1.0f, 0.0f);
+ //var zAxis = new Vector3(0.0f, 0.0f, 1.0f);
+
+ float[] p = { offset[0], offset[1], offset[2] };
+
+ var foot_pos = new Vector3(p[0], p[1], p[2] + (FOOTZ_SZ / 2.0f));
+ foot = AddSphereGeom(FOOTZ_SZ / 2.0f, FOOT_MASS, foot_pos);
+
+ //float sc = LEG_SCALE;//1/9 for different sized legs. default: 2.5;
+ var lower_pos = new Vector3(p[0], p[1], p[2] + FOOTZ_SZ + (LEG_SCALE) / 2.0f);
+ lowerLeg = AddBoxGeom(LLEG_RAD * 1.5f, LLEG_RAD * 1.5f, LEG_SCALE, LEG_MASS, lower_pos, Vector3.UnitZ, PI * 0.5f);
+ //lowerLeg = AddCylinderGeom(LLEG_RAD, LEG_SCALE, LEG_MASS, lower_pos, DirectionAxis.Z, Vector3.UnitZ, PI * 0.5f); //was 3
+ var upper_pos = new Vector3(p[0], p[1], p[2] + FOOTZ_SZ + (LEG_SCALE) + (LEG_SCALE) / 2.0f);
+ upperLeg = AddBoxGeom(ULEG_RAD * 1.5f, ULEG_RAD * 1.5f, LEG_SCALE, LEG_MASS, upper_pos, Vector3.UnitZ, PI * 0.5f);
+ //upperLeg = AddCylinderGeom(ULEG_RAD, LEG_SCALE, LEG_MASS, upper_pos, DirectionAxis.Z, Vector3.UnitZ, PI * 0.5f);
+
+ var knee_joint_a = new Vector3(p[0], p[1], p[2] + FOOTZ_SZ + (LEG_SCALE));
+
+ footLegJoint = AddFixedJoint(foot.Body, lowerLeg.Body);
+
+ if (flipped)
+ kneeLowerLegJoint = AddHingeJoint(lowerLeg.Body, upperLeg.Body, knee_joint_a, yAxis, 0.0f, 0.8f, MAXTORQUE_KNEE); //was -1.4, 0.8
+ else
+ kneeLowerLegJoint = AddHingeJoint(lowerLeg.Body, upperLeg.Body, knee_joint_a, yAxis, -0.8f, 0.0f, MAXTORQUE_KNEE); //was -1.4
+ }
+
+
+ public bool Update(float dt)
+ {
+ if (step == 0)
+ {
+ last_com[0] = CurrentCom.X;
+ last_com[1] = CurrentCom.Y;
+ }
+
+ step++;
+ var oldAngles = current_angles.ToArray(); // Copy
+
+ //read current angles
+ current_angles[0] = _leftKneeLowerLegJoint.Angle; //left knee
+ current_angles[1] = _leftHipUpperLegJoint.Angle1; //left outhip
+ current_angles[2] = _leftHipUpperLegJoint.Angle2; //left mainhip
+
+ current_angles[3] = _rightHipUpperLegJoint.Angle2; //right mainhip
+ current_angles[4] = _rightHipUpperLegJoint.Angle1; //right outhip
+ current_angles[5] = _rightKneeLowerLegJoint.Angle; //right knee
+ //----BACK LEGS
+ current_angles[6] = _leftBackKneeLowerLegJoint.Angle; //left knee
+ current_angles[7] = _leftBackHipUpperLegJoint.Angle1; //left outhip
+ current_angles[8] = _leftBackHipUpperLegJoint.Angle2; //left mainhip
+
+ current_angles[9] = _rightBackHipUpperLegJoint.Angle2; //right mainhip
+ current_angles[10] = _rightBackHipUpperLegJoint.Angle1; //right outhip
+ current_angles[11] = _rightBackKneeLowerLegJoint.Angle; //right knee
+
+ for (var x = 0; x < current_angles.Length; x++)
+ delta_angles[x] = (current_angles[x] - oldAngles[x]) / dt;
+
+ // record behavior
+ bool newleftdown = BodiesOnGround.Contains(_leftFoot.Body);
+ bool newrightdown = BodiesOnGround.Contains(_rightFoot.Body);
+ bool newleftdownback = BodiesOnGround.Contains(_leftBackFoot.Body);
+ bool newrightdownback = BodiesOnGround.Contains(_rightBackFoot.Body);
+
+ var quat = _torso.Quaternion;
+ var q = new[] { quat.W, quat.X, quat.Y, quat.Z };
+
+ float tanyaw = 2.0f * (q[0] * q[1] + q[3] * q[2]) / (q[3] * q[3] + q[0] * q[0] - q[1] * q[1] - q[2] * q[2]);
+ float sinpitch = -2.0f * (q[0] * q[2] - q[3] * q[1]);
+ float tanroll = 2.0f * (q[3] * q[0] + q[1] * q[2]) / (q[3] * q[3] - q[0] * q[0] - q[1] * q[1] + q[2] * q[2]);
+ float yaw = (float)Math.Atan(tanyaw);
+ float pitch = (float)Math.Asin(sinpitch);
+ float roll = (float)Math.Atan(tanroll);
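+ // Approximate Tait-Bryan angles (yaw/pitch/roll) extracted from the torso's orientation quaternion;
+ // Math.Atan (rather than Atan2) is used, so yaw and roll are confined to (-pi/2, pi/2).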
+
+ var triggers = new float[4];
+
+ if (newleftdown && footdown[0] == 0)
+ {
+ triggers[0] = 1;
+ footdown[0] = 1;
+ }
+
+ if (newrightdown && footdown[1] == 0)
+ {
+ triggers[1] = 1;
+ footdown[1] = 1;
+ }
+
+ if (newleftdownback && footdown[2] == 0)
+ {
+ triggers[2] = 1;
+ footdown[2] = 1;
+ }
+
+ if (newrightdownback && footdown[3] == 0)
+ {
+ triggers[3] = 1;
+ footdown[3] = 1;
+ }
+
+
+ // foot sensors
+ if (newleftdown)
+ {
+ footdown[0] = 1;
+ sensors[0] = 1;
+ }
+ else
+ {
+ footdown[0] = 0;
+ sensors[0] = 0;
+ }
+
+ if (newrightdown)
+ {
+ footdown[1] = 1;
+ sensors[1] = 1;
+ }
+ else
+ {
+ sensors[1] = 0;
+ footdown[1] = 0;
+ }
+
+ if (newleftdownback)
+ {
+ sensors[2] = 1;
+ footdown[2] = 1;
+ }
+ else
+ {
+ sensors[2] = 0;
+ footdown[2] = 0;
+ }
+
+ if (newrightdownback)
+ {
+ footdown[3] = 1;
+ sensors[3] = 1;
+ }
+ else
+ {
+ sensors[3] = 0;
+ footdown[3] = 0;
+ }
+
+ /*
+ //Hip sensors
+ sensors[0] = current_angles[2]; //left hip
+ sensors[1] = current_angles[3]; //right hip
+ sensors[2] = current_angles[8]; //left hip back
+ sensors[3] = current_angles[9]; //right hip back
+ */
+
+ // CRS
+ if (MainProgram.doClune)
+ {
+ sensors = new double[20];
+
+ sensors[0] = current_angles[2];
+ sensors[1] = current_angles[1];
+ sensors[2] = current_angles[0];
+ sensors[3] = footdown[0];
+ sensors[4] = pitch;
+
+ sensors[5] = current_angles[3];
+ sensors[6] = current_angles[4];
+ sensors[7] = current_angles[5];
+ sensors[8] = footdown[1];
+ sensors[9] = roll;
+
+ sensors[10] = current_angles[8];
+ sensors[11] = current_angles[7];
+ sensors[12] = current_angles[6];
+ sensors[13] = footdown[2];
+ sensors[14] = yaw;
+
+ sensors[15] = current_angles[9];
+ sensors[16] = current_angles[10];
+ sensors[17] = current_angles[11];
+ sensors[18] = footdown[3];
+ sensors[19] = (float)Math.Sin(2 * Math.PI * timeCounter / cluneWavelength);
+ }
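+ // In the Clune configuration the 20 inputs are grouped per leg: three joint angles and the
+ // foot-contact flag, with the fifth slot of each group carrying pitch, roll, yaw, or a sinusoidal
+ // clock of period cluneWavelength (100) steps, respectively.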
+
+ _controller.update(sensors, triggers);
+ var outs = _controller.getOutputs();
+ //Console.WriteLine(outs[9] + " - " + sensors[3] + "," + sensors[8] + "," + sensors[13] + "," + sensors[18]);
+ //Console.WriteLine(sensors[19]);
+ if (log) { } // no log implemented at this time
+
+ var desired_angles = new float[current_angles.Length];
+
+ for (int x = 0; x < current_angles.Length; x++)
+ {
+ desired_angles[x] = outs[x];
+
+ if (desired_angles[x] < -1.0)
+ desired_angles[x] = -1.0f;
+
+ if (desired_angles[x] > 1)
+ desired_angles[x] = 1.0f;
+
+ if (float.IsNaN(desired_angles[x])) // guard against NaN outputs from the controller
+ {
+ Console.WriteLine("NOT A NUMBER " + desired_angles[x] + "\n");
+ desired_angles[x] = 0;
+ }
+
+ if (_controller.Scale)
+ {
+ if (desired_angles[x] > 1.0) desired_angles[x] = 1.0f;
+ if (desired_angles[x] < 0.0) desired_angles[x] = 0.0f;
+ desired_angles[x] = lo_limit[x] + (hi_limit[x] - lo_limit[x]) * desired_angles[x];
+
+ }
+
+ }
+
+ for (int x = 0; x < current_angles.Length; x++)
+ {
+ float delta = desired_angles[x] - current_angles[x];
+ float p_term = p_terms[x] * delta;
+ float d_term = (-d_terms[x] * delta_angles[x]);
+ desired_angvel[x] = p_term + d_term;
+ if (log) { } // no log implemented
+ }
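+ // Proportional-derivative control: the commanded joint velocity is P_CONSTANT times the angle error
+ // minus D_CONSTANT times the measured angular velocity; with D_CONSTANT = 0 this reduces to a pure
+ // proportional controller driving each joint's limit motor.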
+
+
+
+ _leftKneeLowerLegJoint.LimitMotor.Velocity = desired_angvel[0]; //left knee
+ _leftHipUpperLegJoint.LimitMotor1.Velocity = desired_angvel[1]; //left hipout
+ _leftHipUpperLegJoint.LimitMotor2.Velocity = desired_angvel[2]; //left hipmain
+
+ _rightHipUpperLegJoint.LimitMotor2.Velocity = desired_angvel[3]; //right hipmain
+ _rightHipUpperLegJoint.LimitMotor1.Velocity = desired_angvel[4]; //right hipout
+ _rightKneeLowerLegJoint.LimitMotor.Velocity = desired_angvel[5]; //right knee
+
+ //BACK LEGS
+ _leftBackKneeLowerLegJoint.LimitMotor.Velocity = desired_angvel[6]; //left knee
+ _leftBackHipUpperLegJoint.LimitMotor1.Velocity = desired_angvel[7]; //left hipout
+ _leftBackHipUpperLegJoint.LimitMotor2.Velocity = desired_angvel[8]; //left hipmain
+
+ _rightBackHipUpperLegJoint.LimitMotor2.Velocity = desired_angvel[9]; //right hipmain
+ _rightBackHipUpperLegJoint.LimitMotor1.Velocity = desired_angvel[10]; //right hipout
+ _rightBackKneeLowerLegJoint.LimitMotor.Velocity = desired_angvel[11]; //right knee
+
+ CurrentCom = GetCenterOfMass();
+
+ if (!leftdown && newleftdown)
+ {
+ if (lft.Count == 0 || (step - lft[lft.Count - 1] > 100 && lastdown != 1))
+ {
+ lft.Add(step);
+ lfx.Add(CurrentCom.X);
+ lfy.Add(CurrentCom.Y);
+
+ if (0 % 2 == 1) // GWM - removed novelty_function variable
+ lastdown = 1;
+ else
+ lastdown = 0; //if this is set to 0, we don't care if feet sequence alternates
+ }
+ }
+ if (!rightdown && newrightdown)
+ {
+ if (rft.Count == 0 || (step - rft[rft.Count - 1] > 100 && lastdown != -1))
+ {
+ rft.Add(step);
+ rfx.Add(CurrentCom.X);
+ rfy.Add(CurrentCom.Y);
+ if (0 % 2 == 1) // GWM - removed novelty_function variable
+ lastdown = (-1);
+ else
+ lastdown = 0; //if this is set to 0, we don't care if feet sequence alternates
+ }
+ }
+
+ //don't let first recorded instance of both feet down set the lastdown criteria
+ if (step == 1)
+ lastdown = 0;
+
+ leftdown = newleftdown;
+ rightdown = newrightdown;
+ rightdownback = newrightdownback;
+ leftdownback = newleftdownback;
+ //reset ground sensors for feetz
+ BodiesOnGround.Clear();
+
+ timeCounter++; // for clune architecture, counter used for sin wave
+ return base.PostStep(dt) && Continue();
+ }
+
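+ // The evaluation continues only while the torso stays above roughly half of its initial standing height.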
+ private bool Continue()
+ {
+ var torsoPos = _torso.Position;
+ float orig_height = (TORSO_RAD / 2.0f + LEG_SCALE + LEG_SCALE + FOOTZ_SZ);
+ return torsoPos.Z >= 0.5 * orig_height;
+ }
+
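+ // Fitness is the planar Euclidean distance traveled by the center of mass from its starting position
+ // (only the X and Y displacement components contribute).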
+ public float CalcFitness()
+ {
+ var newCom = GetCenterOfMass();
+ var dist = newCom - orig_com;
+ dist = dist * dist;
+ var fitness = dist.X + dist.Y;
+ return (float)Math.Sqrt(fitness);
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGExperiment.cs b/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGExperiment.cs
new file mode 100644
index 000000000..e24614a24
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGExperiment.cs
@@ -0,0 +1,120 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using SharpNeatLib.Experiments;
+using SharpNeatLib.Evolution;
+using SharpNeatLib.NeuralNetwork;
+
+namespace OscillatorQuadruped
+{
+ class SUPGExperiment : IExperiment
+ {
+ private uint inputs;
+ private uint outputs;
+ private uint hidden;
+ private int cppnInputs;
+ private int cppnOutputs;
+ private IPopulationEvaluator populationEvaluator = null;
+ private NeatParameters neatParams = null;
+
+ public SUPGExperiment(uint inputs, uint outputs, uint hidden, int cppnInputs, int cppnOutputs)
+ {
+ this.inputs = inputs;
+ this.outputs = outputs;
+ this.hidden = hidden;
+ this.cppnInputs = cppnInputs;
+ this.cppnOutputs = cppnOutputs;
+ }
+
+ #region IExperiment Members
+
+ public void LoadExperimentParameters(System.Collections.Hashtable parameterTable)
+ {
+ //throw new Exception("The method or operation is not implemented.");
+ }
+
+ public IPopulationEvaluator PopulationEvaluator
+ {
+ get
+ {
+ if (populationEvaluator == null)
+ ResetEvaluator(HyperNEATParameters.substrateActivationFunction);
+
+ return populationEvaluator;
+ }
+ }
+
+ public void ResetEvaluator(IActivationFunction activationFn)
+ {
+ populationEvaluator = new SUPGPopulationEvaluator(new SUPGNetworkEvaluator(inputs, outputs, hidden));
+ }
+
+ public int InputNeuronCount
+ {
+ get { return cppnInputs; }
+ }
+
+ public int OutputNeuronCount
+ {
+ get { return cppnOutputs; }
+ }
+
+ public NeatParameters DefaultNeatParameters
+ {
+ get
+ {
+ if (neatParams == null)
+ {
+ NeatParameters np = new NeatParameters();
+ np.activationProbabilities = new double[4];
+ np.activationProbabilities[0] = .25;
+ np.activationProbabilities[1] = .25;
+ np.activationProbabilities[2] = .25;
+ np.activationProbabilities[3] = .25;
+ np.compatibilityDisjointCoeff = 1;
+ np.compatibilityExcessCoeff = 1;
+ np.compatibilityThreshold = 100;
+ np.compatibilityWeightDeltaCoeff = 3;
+ np.connectionWeightRange = 3;
+ np.elitismProportion = .1;
+ np.pInitialPopulationInterconnections = 1;
+ np.pInterspeciesMating = 0.01;
+ np.pMutateAddConnection = .06;
+ np.pMutateAddNode = .01;
+ np.pMutateConnectionWeights = .96;
+ np.pMutateDeleteConnection = 0;
+ np.pMutateDeleteSimpleNeuron = 0;
+ np.populationSize = 300;
+ np.pruningPhaseBeginComplexityThreshold = float.MaxValue;
+ np.pruningPhaseBeginFitnessStagnationThreshold = int.MaxValue;
+ np.pruningPhaseEndComplexityStagnationThreshold = int.MinValue;
+ np.selectionProportion = .8;
+ np.speciesDropoffAge = 1500;
+ np.targetSpeciesCountMax = np.populationSize / 10;
+ np.targetSpeciesCountMin = np.populationSize / 10 - 2;
+
+ neatParams = np;
+ }
+ return neatParams;
+ }
+ }
+
+ public IActivationFunction SuggestedActivationFunction
+ {
+ get { return HyperNEATParameters.substrateActivationFunction; }
+ }
+
+ public AbstractExperimentView CreateExperimentView()
+ {
+ return null;
+ }
+
+ public string ExplanatoryText
+ {
+ get { return "A HyperNEAT experiment for quadruped locomotion"; }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGNetworkEvaluator.cs b/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGNetworkEvaluator.cs
new file mode 100644
index 000000000..11a20cd5a
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGNetworkEvaluator.cs
@@ -0,0 +1,55 @@
+using SharpNeatLib.Experiments;
+using SharpNeatLib.NeuralNetwork;
+
+namespace OscillatorQuadruped
+{
+ internal class SUPGNetworkEvaluator : INetworkEvaluator
+ {
+ public static SUPGSubstrate substrate;
+ private NoveltyArchive noveltyArchive;
+
+ public SUPGNetworkEvaluator(uint inputs, uint outputs, uint hidden)
+ {
+ substrate = new SUPGSubstrate(inputs, outputs, hidden, HyperNEATParameters.substrateActivationFunction);
+ noveltyArchive = new NoveltyArchive();
+ }
+
+ #region INetworkEvaluator Members
+
+ public double[] threadSafeEvaluateNetwork(INetwork network)
+ {
+ var tempGenome = substrate.generateGenome(network);
+ var tempNet = tempGenome.Decode(null);
+
+ using (var quadDomain = new Domain(noveltyArchive, MainProgram.novelty))
+ {
+ var fitness = quadDomain.EvaluateController(new Controller(tempNet, true, tempGenome, network, substrate.getSUPGMap()));
+ return fitness;
+ }
+ }
+
+ public double EvaluateNetwork(INetwork network)
+ {
+ var tempGenome = substrate.generateGenome(network);
+ var tempNet = tempGenome.Decode(null);
+
+ using (var quadDomain = new Domain())
+ {
+ var fitness = quadDomain.EvaluateController(new Controller(tempNet, true, tempGenome, network, substrate.getSUPGMap()));
+ return fitness[0];
+ }
+ }
+
+ public string EvaluatorStateMessage
+ {
+ get { return ""; }
+ }
+
+ public void endOfGeneration()
+ {
+ noveltyArchive.endOfGeneration();
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGPopulationEvaluator.cs b/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGPopulationEvaluator.cs
new file mode 100644
index 000000000..864a8a30a
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGPopulationEvaluator.cs
@@ -0,0 +1,18 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using SharpNeatLib.Experiments;
+
+namespace OscillatorQuadruped
+{
+ class SUPGPopulationEvaluator : MultiThreadedPopulationEvaluator
+ {
+
+ public SUPGPopulationEvaluator(INetworkEvaluator eval)
+ : base(eval, null)
+ {
+
+ }
+ }
+}
diff --git a/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGSubstrate.cs b/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGSubstrate.cs
new file mode 100644
index 000000000..fb55a3e8d
--- /dev/null
+++ b/SharpNeatWalker/OscillatorQuadruped/SUPG/SUPGSubstrate.cs
@@ -0,0 +1,216 @@
+//#define OUTPUT
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using SharpNeatLib.CPPNs;
+using SharpNeatLib.NeuralNetwork;
+using SharpNeatLib.NeatGenome;
+
+namespace OscillatorQuadruped
+{
+ class SUPGSubstrate : Substrate
+ {
+ private const float shiftScale = 0.2f;
+
+ public SUPGSubstrate(uint inputs, uint outputs, uint hidden, IActivationFunction function)
+ : base(inputs, outputs, hidden, function)
+ {
+
+ }
+
+ public override NeatGenome generateGenome(INetwork network)
+ {
+ // copy the neuron list to a new list and update the x/y values
+ NeuronGeneList newNeurons = new NeuronGeneList(neurons);
+
+ // set the x and y value of the SUPGs
+ foreach (NeuronGene neuron in newNeurons)
+ {
+ if (neuron.NeuronType == NeuronType.Hidden)
+ {
+ // switch to grid substrate configuration
+ neuron.XValue = getXPos2(neuron.InnovationId - 16);
+ neuron.YValue = getYPos2(neuron.InnovationId - 16);
+ }
+ }
+
+ ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));
+ float[] coordinates = new float[5];
+ //float output;
+ uint connectionCounter = 0;
+ int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
+
+ // connect hidden layer to outputs
+ for (uint source = 0; source < hiddenCount; source++)
+ {
+ coordinates[0] = getXPos(source, false);
+ coordinates[1] = getYPos(source, false);
+
+ for (uint target = 0; target < outputCount; target++)
+ {
+ // only connect hidden nodes to their single nearest output
+ if (source == target)
+ {
+ coordinates[2] = getXPos(target, true);
+ coordinates[3] = getYPos(target, true);
+
+ // GWM - fixing weight to 1 for SUPG producing motor outputs
+ connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, target + inputCount, 1));
+ }
+ }
+ }
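+ // Unlike the base Substrate, connection weights here are not queried from the CPPN: each SUPG hidden
+ // neuron is wired only to the output neuron with the same index, with its weight fixed at 1, so each
+ // motor output simply passes through its SUPG's activation.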
+
+ return new SharpNeatLib.NeatGenome.NeatGenome(0, newNeurons, connections, (int)inputCount, (int)outputCount);
+ }
+
+ private float getXPos(uint index, bool isOutput)
+ {
+ float pos = 0;
+ float shift = shiftScale;
+ if (isOutput)
+ shift *= 2;
+
+ switch (index)
+ {
+ case 0:
+ case 6:
+ pos = -1;
+ break;
+ case 1:
+ case 2:
+ case 7:
+ case 8:
+ pos = -1 + shift;
+ break;
+ case 3:
+ case 4:
+ case 9:
+ case 10:
+ pos = 1 - shift;
+ break;
+ case 5:
+ case 11:
+ pos = 1;
+ break;
+ }
+ return pos;
+ }
+
+ private float getYPos(uint index, bool isOutput)
+ {
+ float pos = 0;
+ float shift = shiftScale;
+ if (isOutput)
+ shift *= 2;
+
+ switch (index)
+ {
+ case 2:
+ case 3:
+ pos = 1;
+ break;
+ case 6:
+ case 7:
+ case 10:
+ case 11:
+ pos = -1 + shift;
+ break;
+ case 0:
+ case 1:
+ case 4:
+ case 5:
+ pos = 1 - shift;
+ break;
+ case 8:
+ case 9:
+ pos = -1;
+ break;
+ }
+ return pos;
+ }
+
+ private float getXPos2(uint index)
+ {
+ float pos = 0;
+ switch (index)
+ {
+ case 0:
+ case 1:
+ case 2:
+ pos = -1;
+ break;
+ case 3:
+ case 4:
+ case 5:
+ pos = -.33f;
+ break;
+ case 6:
+ case 7:
+ case 8:
+ pos = .33f;
+ break;
+ case 9:
+ case 10:
+ case 11:
+ pos = 1;
+ break;
+
+ }
+ return pos;
+ }
+
+ private float getYPos2(uint index)
+ {
+ float pos = 0;
+ switch (index)
+ {
+ case 0:
+ case 5:
+ case 6:
+ case 11:
+ pos = -1;
+ break;
+ case 2:
+ case 3:
+ case 8:
+ case 9:
+ pos = 0;
+ break;
+ case 1:
+ case 4:
+ case 7:
+ case 10:
+ pos = 1;
+ break;
+
+ }
+ return pos;
+ }
+
+ // Returns a map that signifies which trigger maps to which hidden neurons. A value of int.MinValue means that neuron has no trigger;
+ // any other value indicates the foot which triggers the given neuron. Example: map[16] = 0 means foot 0 triggers neuron 16.
+ public int[] getSUPGMap()
+ {
+ int[] map = new int[28];
+ for (int i = 0; i < 16; i++)
+ map[i] = int.MinValue;
+ map[16] = 0;
+ map[17] = 0;
+ map[18] = 0;
+ map[19] = 1;
+ map[20] = 1;
+ map[21] = 1;
+ map[22] = 2;
+ map[23] = 2;
+ map[24] = 2;
+ map[25] = 3;
+ map[26] = 3;
+ map[27] = 3;
+ return map;
+ }
+
+
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/CPPNs/Substrate.cs b/SharpNeatWalker/SharpNeatLib/CPPNs/Substrate.cs
new file mode 100644
index 000000000..39aa581ed
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/CPPNs/Substrate.cs
@@ -0,0 +1,152 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using SharpNeatLib.NeuralNetwork;
+using SharpNeatLib.NeatGenome;
+using SharpNeatLib.Experiments;
+
+namespace SharpNeatLib.CPPNs
+{
+ public class Substrate
+ {
+ public uint inputCount;
+ public uint outputCount;
+ public uint hiddenCount;
+
+ public float inputDelta;
+ public float hiddenDelta;
+ public float outputDelta;
+
+ public double threshold;
+ public double weightRange;
+ public IActivationFunction activationFunction;
+ public NeuronGeneList neurons;
+
+ public Substrate()
+ {
+ }
+ public Substrate(uint input, uint output, uint hidden, IActivationFunction function)
+ {
+ weightRange = HyperNEATParameters.weightRange;
+ threshold = HyperNEATParameters.threshold;
+
+ inputCount = input;
+ outputCount = output;
+ hiddenCount = hidden;
+ activationFunction = function;
+
+ inputDelta = 2.0f / (inputCount);
+ if (hiddenCount != 0)
+ hiddenDelta = 2.0f / (hiddenCount);
+ else
+ hiddenDelta = 0;
+ outputDelta = 2.0f / (outputCount);
+
+ //SharpNEAT requires that the neuronlist be input|bias|output|hidden
+ neurons=new NeuronGeneList((int)(inputCount + outputCount+ hiddenCount));
+ //setup the inputs
+ for (uint a = 0; a < inputCount; a++)
+ {
+ neurons.Add(new NeuronGene(a, NeuronType.Input, activationFunction));
+ }
+
+ //setup the outputs
+ for (uint a = 0; a < outputCount; a++)
+ {
+ neurons.Add(new NeuronGene(a + inputCount, NeuronType.Output, activationFunction));
+ }
+ for (uint a = 0; a < hiddenCount; a++)
+ {
+ neurons.Add(new NeuronGene(a + inputCount+outputCount, NeuronType.Hidden, activationFunction));
+ }
+
+
+
+ }
+
+ public INetwork generateNetwork(INetwork CPPN)
+ {
+ return generateGenome(CPPN).Decode(null);
+ }
+
+ public virtual NeatGenome.NeatGenome generateGenome(INetwork network)
+ {
+ var coordinates = new double[4];
+ float output;
+ uint connectionCounter = 0;
+ int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
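+ // Each candidate connection is queried by feeding the coordinates of its source and target substrate
+ // nodes into the CPPN; if the magnitude of the CPPN's first output exceeds 'threshold', a connection
+ // is created whose weight is that output rescaled into [-weightRange, weightRange].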
+ ConnectionGeneList connections=new ConnectionGeneList();
+ if (hiddenCount > 0)
+ {
+ coordinates[0] = -1 + inputDelta / 2.0f;
+ coordinates[1] = -1;
+ coordinates[2] = -1 + hiddenDelta / 2.0f;
+ coordinates[3] = 0;
+ for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
+ {
+ coordinates[2] = -1 + hiddenDelta / 2.0f;
+ for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[2] += hiddenDelta)
+ {
+ network.ClearSignals();
+ network.SetInputSignals(coordinates);
+ network.MultipleSteps(iterations);
+ output = network.GetOutputSignal(0);
+
+ if (Math.Abs(output) > threshold)
+ {
+ float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
+ connections.Add(new ConnectionGene(connectionCounter++, input, hidden + inputCount + outputCount, weight));
+ }
+ }
+ }
+ coordinates[0] = -1 + hiddenDelta / 2.0f;
+ coordinates[1] = 0;
+ coordinates[2] = -1 + outputDelta / 2.0f;
+ coordinates[3] = 1;
+ for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[0] += hiddenDelta)
+ {
+ coordinates[2] = -1 + outputDelta / 2.0f;
+ for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
+ {
+ network.ClearSignals();
+ network.SetInputSignals(coordinates);
+ network.MultipleSteps(iterations);
+ output = network.GetOutputSignal(0);
+
+ if (Math.Abs(output) > threshold)
+ {
+ float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
+ connections.Add(new ConnectionGene(connectionCounter++, hidden + inputCount + outputCount, outputs + inputCount, weight));
+ }
+ }
+ }
+ }
+ else
+ {
+ coordinates[0] = -1 + inputDelta / 2.0f;
+ coordinates[1] = -1;
+ coordinates[2] = -1 + outputDelta / 2.0f;
+ coordinates[3] = 1;
+ for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
+ {
+ coordinates[2] = -1 + outputDelta / 2.0f;
+ for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
+ {
+ network.ClearSignals();
+ network.SetInputSignals(coordinates);
+ network.MultipleSteps(iterations);
+ output = network.GetOutputSignal(0);
+
+ if (Math.Abs(output) > threshold)
+ {
+ float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
+ connections.Add(new ConnectionGene(connectionCounter++, input, outputs + inputCount, weight));
+ }
+ }
+ }
+ }
+ return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
+ }
+
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/AbstractGenome.cs b/SharpNeatWalker/SharpNeatLib/Evolution/AbstractGenome.cs
new file mode 100644
index 000000000..7aacc9804
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/AbstractGenome.cs
@@ -0,0 +1,289 @@
+using System;
+using System.Xml;
+using System.Diagnostics;
+
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Evolution
+{
+ abstract public class AbstractGenome : IGenome
+ {
+ // See comments on individual properties for more information on these fields.
+ protected uint genomeId;
+ long genomeAge=0;
+ double fitness = 0;
+ long evaluationCount = 0;
+ double totalFitness = 0;
+ int speciesId = -1;
+ int parentSpeciesId1 = -1;
+ int parentSpeciesId2 = -1;
+ Population owningPopulation;
+ double objectiveFitness = 0;
+
+ // Stores the decoded network. Storing this prevents the need to re-decode genomes during
+ // experiments where the same genome may be evaluated multiple times, e.g. re-evaluation
+ // per generation because of a non-deterministic evaluation function, or a deterministic
+ // function that is changing as the search progresses.
+ // If it can be cast to AbstractNetwork then this can also form the basis of constructing a
+ // NetworkModel for network visualization.
+ protected INetwork network=null;
+
+ /// <summary>
+ /// A tag object that can be used by evaluators to store evaluation state information. This isn't
+ /// normally used. An example usage is the ParetoCoEv Tic-Tac-Toe evaluator which uses this to store
+ /// an integer which gives the index of the last entry in the pareto chain to have been evaluated against.
+ /// Thus we only have to evaluate against later entries, which eliminates a large number of redundant evaluations.
+ /// </summary>
+ object tag;
+
+ #region Public Methods [Implemented]
+
+ /// <summary>
+ /// Implemented in contravention of the .net documentation. ArrayList.Sort() will sort into descending order.
+ /// </summary>
+ /// <param name="obj"></param>
+ /// <returns></returns>
+ public int CompareTo(Object obj)
+ {
+ if(((IGenome)obj).Fitness > fitness)
+ return 1;
+
+ if(((IGenome)obj).Fitness < fitness)
+ return -1;
+
+ return 0;
+ }
+
+ #endregion
+
+ #region Public Methods [Abstract]
+
+ /// <summary>
+ /// Some (most) types of network have fixed numbers of input and output nodes and will not work correctly or
+ /// throw an exception if we try and use inputs/outputs that do not exist. This method allows us to check
+ /// compatibility before we begin.
+ /// </summary>
+ /// <param name="inputCount"></param>
+ /// <param name="outputCount"></param>
+ /// <returns></returns>
+ abstract public bool IsCompatibleWithNetwork(int inputCount, int outputCount);
+
+ /// <summary>
+ /// Asexual reproduction with built in mutation.
+ /// </summary>
+ /// <returns></returns>
+ abstract public IGenome CreateOffspring_Asexual(EvolutionAlgorithm ea);
+
+ /// <summary>
+ /// Sexual reproduction. No mutation performed.
+ /// </summary>
+ /// <param name="parent"></param>
+ /// <returns></returns>
+ abstract public IGenome CreateOffspring_Sexual(EvolutionAlgorithm ea, IGenome parent);
+
+ /// <summary>
+ /// Decode the genome's 'DNA' into a working network.
+ /// </summary>
+ /// <returns></returns>
+ abstract public INetwork Decode(IActivationFunction activationFn);
+
+ /// <summary>
+ /// Clone this genome.
+ /// </summary>
+ /// <returns></returns>
+ abstract public IGenome Clone(EvolutionAlgorithm ea);
+
+ /// <summary>
+ /// Compare this IGenome with the provided one. They are compatible if their calculated difference
+ /// is below the current threshold specified by NeatParameters.compatibilityThreshold
+ /// </summary>
+ /// <param name="comparisonGenome"></param>
+ /// <param name="neatParameters"></param>
+ /// <returns></returns>
+ abstract public bool IsCompatibleWithGenome(IGenome comparisonGenome, NeatParameters neatParameters);
+
+ /// <summary>
+ /// Persist to XML.
+ /// </summary>
+ /// <param name="parentNode"></param>
+ abstract public void Write(XmlNode parentNode);
+
+ /// <summary>
+ /// For debug purposes only.
+ /// </summary>
+ /// <returns>Returns true if genome integrity checks out OK.</returns>
+ abstract public bool PerformIntegrityCheck();
+
+ #endregion
+
+ #region Public Properties [Implemented]
+
+ public object Tag
+ {
+ get
+ {
+ return tag;
+ }
+ set
+ {
+ tag = value;
+ }
+ }
+
+ public double ObjectiveFitness
+ {
+ get
+ {
+ return objectiveFitness;
+ }
+ set
+ {
+ objectiveFitness = value;
+ }
+ }
+
+ public uint GenomeId
+ {
+ get
+ {
+ return genomeId;
+ }
+ set
+ {
+ genomeId = value;
+ }
+ }
+
+ public long GenomeAge
+ {
+ get
+ {
+ return genomeAge;
+ }
+ set
+ {
+ genomeAge = value;
+ }
+ }
+
+ /// <summary>
+ /// This genome's fitness as calculated by the evaluation environment.
+ /// </summary>
+ public double Fitness
+ {
+ get
+ {
+ return fitness;
+ }
+ set
+ {
+ Debug.Assert(value>=EvolutionAlgorithm.MIN_GENOME_FITNESS, "Genome fitness must be non-zero. Use EvolutionAlgorithm.MIN_GENOME_FITNESS");
+ fitness = value;
+ }
+ }
+
+ /// <summary>
+ /// The number of times this genome has been evaluated.
+ /// </summary>
+ public long EvaluationCount
+ {
+ get
+ {
+ return evaluationCount;
+ }
+ set
+ {
+ evaluationCount = value;
+ }
+ }
+
+ /// <summary>
+ /// Returns the total of all fitness scores if this genome has been evaluated more than once.
+ /// Average fitness is therefore this figure divided by GenomeAge.
+ /// </summary>
+ public double TotalFitness
+ {
+ get
+ {
+ return totalFitness;
+ }
+ set
+ {
+ totalFitness = value;
+ }
+ }
+
+ /// <summary>
+ /// The species this genome is within.
+ /// </summary>
+ public int SpeciesId
+ {
+ get
+ {
+ return speciesId;
+ }
+
+ set
+ {
+ speciesId = value;
+ }
+ }
+
+ /// <summary>
+ /// The ID of this genome's first parent.
+ /// </summary>
+ public int ParentSpeciesId1
+ {
+ get
+ {
+ return parentSpeciesId1;
+ }
+
+ set
+ {
+ parentSpeciesId1 = value;
+ }
+ }
+
+ /// <summary>
+ /// The ID of this genome's second parent. -1 if no second parent.
+ /// </summary>
+ public int ParentSpeciesId2
+ {
+ get
+ {
+ return parentSpeciesId2;
+ }
+
+ set
+ {
+ parentSpeciesId2 = value;
+ }
+ }
+
+ public AbstractNetwork AbstractNetwork
+ {
+ get
+ { // The INetwork may not be a AbstractNetwork, return null if that is the case.
+ return network as AbstractNetwork;
+ }
+ }
+
+ /// <summary>
+ /// Used primarily to give this IGenome a hook onto the Population it is within.
+ /// </summary>
+ public Population OwningPopulation
+ {
+ get
+ {
+ return owningPopulation;
+ }
+ set
+ {
+ owningPopulation = value;
+ }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/ConnectionEndpointsStruct.cs b/SharpNeatWalker/SharpNeatLib/Evolution/ConnectionEndpointsStruct.cs
new file mode 100644
index 000000000..af6c743f8
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/ConnectionEndpointsStruct.cs
@@ -0,0 +1,48 @@
+using System;
+
+namespace SharpNeatLib.Evolution
+{
+ /// <summary>
+ /// Used primarily as a key into a hashtable that uniquely identifies connections
+ /// by their end points.
+ /// </summary>
+ struct ConnectionEndpointsStruct
+ {
+ public uint sourceNeuronId;
+ public uint targetNeuronId;
+
+ #region Constructor
+
+ public ConnectionEndpointsStruct(uint sourceNeuronId, uint targetNeuronId)
+ {
+ this.sourceNeuronId = sourceNeuronId;
+ this.targetNeuronId = targetNeuronId;
+ }
+
+ #endregion
+
+ #region Public Overrides
+
+ public override int GetHashCode()
+ {
+ // Point uses x^y for a hash, but this is actually an extremely poor hash function
+ // for a pair of coordinates. Here we swap the low and high 16 bits of one of the
+ // IDs to generate a much better hash for our (and most other likely) circumstances.
+ return (int)(sourceNeuronId ^ ((targetNeuronId>>16) + (targetNeuronId<<16)));
+ }
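+ // For example, endpoints (1, 2) hash to 1 ^ (2 << 16) = 0x20001, whereas a plain XOR of the two IDs
+ // would give 3 and collide with (2, 1), (0, 3), etc.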
+
+ public override bool Equals(object obj)
+ {
+ if(obj==null)
+ return false;
+
+ if(obj.GetType() != typeof(ConnectionEndpointsStruct))
+ return false;
+
+ ConnectionEndpointsStruct ces = (ConnectionEndpointsStruct)obj;
+ return (sourceNeuronId==ces.sourceNeuronId) && (targetNeuronId==ces.targetNeuronId);
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/EvolutionAlgorithm.cs b/SharpNeatWalker/SharpNeatLib/Evolution/EvolutionAlgorithm.cs
new file mode 100644
index 000000000..d4ec8038c
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/EvolutionAlgorithm.cs
@@ -0,0 +1,1155 @@
+using System;
+using System.Collections;
+using System.Diagnostics;
+
+//TODO: decouple from NeatGenome.
+using SharpNeatLib.NeatGenome;
+
+namespace SharpNeatLib.Evolution
+{
+ public class EvolutionAlgorithm
+ {
+ #region Constants
+
+ /// <summary>
+ /// Genomes cannot have zero fitness because the fitness sharing logic requires there to be
+ /// a non-zero total fitness in the population. Therefore this figure should be substituted
+ /// in where zero fitness occurs.
+ /// </summary>
+ public const double MIN_GENOME_FITNESS = 0.0000001;
+
+ #endregion
+
+ #region Class Variables
+
+ Population pop;
+ IPopulationEvaluator populationEvaluator;
+ NeatParameters neatParameters;
+ NeatParameters neatParameters_Normal;
+ NeatParameters neatParameters_PrunePhase;
+
+ bool pruningModeEnabled=false;
+ bool connectionWeightFixingEnabled=false;
+ bool pruningMode=false;
+
+ /// <summary>
+ /// The last generation at which Population.AvgComplexity was reduced. We track this so that we can
+ /// determine when simplifications have completed and that therefore the prune phase should end.
+ /// </summary>
+ long prunePhase_generationAtLastSimplification;
+ float prunePhase_MinimumStructuresPerGenome;
+
+ /// <summary>
+ /// Population.AvgComplexity when AdjustSpeciationThreshold() was last called. If mean complexity
+ /// moves away from this value by a certain amount then it's time to re-apply the speciation threshold
+ /// to the whole population by calling pop.RedetermineSpeciation().
+ /// </summary>
+ double meanComplexityAtLastAdjustSpeciationThreshold;
+
+ // All offspring are temporarily held here before being added to the population proper.
+ GenomeList offspringList = new GenomeList();
+
+ // Tables of new connections and neurons created during additive mutations. These tables
+ // are available during the mutations and can be used to check for matching mutations so
+ // that two mutations that create the same structure will be allocated the same ID.
+ // Currently this matching is only performed within the context of a generation, which
+ // is how the original C++ NEAT code operated also.
+ Hashtable newConnectionGeneTable = new Hashtable();
+ Hashtable newNeuronGeneStructTable = new Hashtable();
+
+ // Statistics
+ uint generation=0;
+ IGenome bestGenome;
+
+ #endregion
+
+ #region Constructors
+
+ /// <summary>
+ /// Default Constructor.
+ /// </summary>
+ public EvolutionAlgorithm(Population pop, IPopulationEvaluator populationEvaluator) : this(pop, populationEvaluator, new NeatParameters())
+ {}
+
+ /// <summary>
+ /// Default Constructor.
+ /// </summary>
+ public EvolutionAlgorithm(Population pop, IPopulationEvaluator populationEvaluator, NeatParameters neatParameters)
+ {
+ this.pop = pop;
+ this.populationEvaluator = populationEvaluator;
+ this.neatParameters = neatParameters;
+ neatParameters_Normal = neatParameters;
+
+ neatParameters_PrunePhase = new NeatParameters(neatParameters);
+ neatParameters_PrunePhase.pMutateAddConnection = 0.0;
+ neatParameters_PrunePhase.pMutateAddNode = 0.0;
+ neatParameters_PrunePhase.pMutateConnectionWeights = 0.33;
+ neatParameters_PrunePhase.pMutateDeleteConnection = 0.33;
+ neatParameters_PrunePhase.pMutateDeleteSimpleNeuron = 0.33;
+
+ // Disable all crossover as this has a tendency to increase complexity, which is precisely what
+ // we don't want during a pruning phase.
+ neatParameters_PrunePhase.pOffspringAsexual = 1.0;
+ neatParameters_PrunePhase.pOffspringSexual = 0.0;
+
+ InitialisePopulation();
+ }
+
+ #endregion
+
+ #region Properties
+
+ public Population Population
+ {
+ get
+ {
+ return pop;
+ }
+ }
+
+ public uint NextGenomeId
+ {
+ get
+ {
+ return pop.IdGenerator.NextGenomeId;
+ }
+ }
+
+ public uint NextInnovationId
+ {
+ get
+ {
+ return pop.IdGenerator.NextInnovationId;
+ }
+ }
+
+ public NeatParameters NeatParameters
+ {
+ get
+ {
+ return neatParameters;
+ }
+ }
+
+ public IPopulationEvaluator PopulationEvaluator
+ {
+ get
+ {
+ return populationEvaluator;
+ }
+ }
+
+ public uint Generation
+ {
+ get
+ {
+ return generation;
+ }
+ }
+
+ public IGenome BestGenome
+ {
+ get
+ {
+ return bestGenome;
+ }
+ }
+
+ public Hashtable NewConnectionGeneTable
+ {
+ get
+ {
+ return newConnectionGeneTable;
+ }
+ }
+
+ public Hashtable NewNeuronGeneStructTable
+ {
+ get
+ {
+ return newNeuronGeneStructTable;
+ }
+ }
+
+ public bool IsInPruningMode
+ {
+ get
+ {
+ return pruningMode;
+ }
+ }
+
+ /// <summary>
+ /// Gets/sets a boolean indicating if the search should use pruning mode.
+ /// </summary>
+ public bool IsPruningModeEnabled
+ {
+ get
+ {
+ return pruningModeEnabled;
+ }
+ set
+ {
+ pruningModeEnabled = value;
+ if(value==false)
+ { // Weight fixing cannot (currently) occur with pruning mode disabled.
+ connectionWeightFixingEnabled = false;
+ }
+ }
+ }
+
+ /// <summary>
+ /// Gets/sets a boolean indicating if connection weight fixing is enabled. Note that this technique
+ /// is currently tied to pruning mode, therefore if pruning mode is disabled then weight fixing
+ /// will automatically be disabled.
+ /// </summary>
+ public bool IsConnectionWeightFixingEnabled
+ {
+ get
+ {
+ return connectionWeightFixingEnabled;
+ }
+ set
+ { // Ensure disabled if pruningMode is disabled.
+ connectionWeightFixingEnabled = pruningModeEnabled && value;
+ }
+ }
+
+ #endregion
+
+ #region Public Methods
+
+ /// <summary>
+ /// Evaluate all genomes in the population, speciate them and then calculate adjusted fitness
+ /// and related stats.
+ /// </summary>
+ ///
+ private void InitialisePopulation()
+ {
+ // The GenomeFactories normally won't bother to ensure that like connections have the same ID
+ // throughout the population (because it's not very easy to do in most cases). Therefore just
+ // run this routine to search for like connections and ensure they have the same ID.
+ // Note. This could also be done periodically as part of the search, remember though that like
+ // connections occurring within a generation are already fixed - using a more efficient scheme.
+ MatchConnectionIds();
+
+ // Evaluate the whole population.
+ populationEvaluator.EvaluatePopulation(pop, this);
+
+ // Speciate the population.
+ pop.BuildSpeciesTable(this);
+
+ // Now we have fitness scores and a speciated population we can calculate fitness stats for the
+ // population as a whole and per species.
+ UpdateFitnessStats();
+
+ // Set new threshold 110% of current level or 10 more if current complexity is very low.
+ pop.PrunePhaseAvgComplexityThreshold = pop.AvgComplexity + neatParameters.pruningPhaseBeginComplexityThreshold;
+
+ // Obtain an initial value for this variable that tracks when we should call pop.RedetermineSpeciation().
+ meanComplexityAtLastAdjustSpeciationThreshold = pop.AvgComplexity;
+
+ // Now we have stats we can determine the target size of each species as determined by the
+ // fitness sharing logic.
+ DetermineSpeciesTargetSize();
+
+ // Check integrity.
+ Debug.Assert(pop.PerformIntegrityCheck(), "Population integrity check failed.");
+ }
+
+
+ public void PerformOneGeneration()
+ {
+ //----- Eliminate any poor species before we do anything else. These are species with a zero target
+ // size for this generation and will therefore not generate any offspring. Here we have to
+ // explicitly eliminate these species, otherwise the species would persist because of elitism.
+ // Also, the species object would persist without any genomes within it, so we have to clean it up.
+ // This code could be executed at the end of this method instead of the start, it doesn't really
+ // matter. Except that if we do it here then the population size will be relatively constant
+ // between generations.
+ if(pop.EliminateSpeciesWithZeroTargetSize())
+ { // If species were removed then we should recalculate population stats.
+ UpdateFitnessStats();
+ DetermineSpeciesTargetSize();
+ }
+
+ //----- Stage 1. Create offspring / cull old genomes / add offspring to population.
+ CreateOffSpring();
+ pop.TrimAllSpeciesBackToElite();
+
+ // Add offspring to the population.
+ int genomeBound = offspringList.Count;
+ for(int genomeIdx=0; genomeIdx<genomeBound; genomeIdx++)
+
+ /// <summary>
+ /// Indicates that the # of species is outside of the desired bounds and that AdjustSpeciationThreshold()
+ /// is attempting to adjust the speciation threshold at each generation to remedy the situation.
+ /// </summary>
+ private bool speciationThresholdAdjustInProgress=false;
+
+ /// <summary>
+ /// If speciationThresholdAdjustInProgress is true then the amount by which we are adjusting the speciation
+ /// threshold per generation. This value is modified in order to try and find the correct threshold as quickly
+ /// as possible.
+ /// </summary>
+ private double compatibilityThresholdDelta;
+
+ private const double compatibilityThresholdDeltaAcceleration = 1.05;
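+
+ // While the species count stays on the wrong side of its target range the delta grows by 5% per
+ // generation; overshooting the target flips the delta's sign and halves it, giving a damped,
+ // self-correcting search for a suitable compatibility threshold.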
+
+
+
+ private void AdjustSpeciationThreshold()
+ {
+ bool redetermineSpeciationFlag = false;
+ int speciesCount = pop.SpeciesTable.Count;
+
+ if(speciesCount < neatParameters.targetSpeciesCountMin)
+ {
+ // Too few species. Reduce the speciation threshold.
+ if(speciationThresholdAdjustInProgress)
+ { // Adjustment is already in progress.
+ if(compatibilityThresholdDelta<0.0)
+ { // Negative delta. Correct direction, so just increase the delta to try and find the correct value as quickly as possible.
+ compatibilityThresholdDelta*=compatibilityThresholdDeltaAcceleration;
+ }
+ else
+ { // Positive delta. Incorrect direction. This means we have overshot the correct value.
+ // Reduce the delta and flip its sign.
+ compatibilityThresholdDelta*=-0.5;
+ }
+ }
+ else
+ { // Start new adjustment 'phase'.
+ speciationThresholdAdjustInProgress = true;
+ compatibilityThresholdDelta = -Math.Max(0.1, neatParameters.compatibilityThreshold * 0.01);
+ }
+
+ // Adjust speciation threshold by compatibilityThresholdDelta.
+ neatParameters.compatibilityThreshold += compatibilityThresholdDelta;
+ neatParameters.compatibilityThreshold = Math.Max(0.01, neatParameters.compatibilityThreshold);
+
+ redetermineSpeciationFlag = true;
+ }
+ else if(speciesCount > neatParameters.targetSpeciesCountMax)
+ {
+ // Too many species. Increase the species threshold.
+ if(speciationThresholdAdjustInProgress)
+ { // Adjustment is already in progress.
+ if(compatibilityThresholdDelta<0.0)
+ { // Negative delta. Incorrect direction. This means we have overshot the correct value.
+ // Reduce the delta and flip its sign.
+ compatibilityThresholdDelta*=-0.5;
+ }
+ else
+ { // Positive delta. Correct direction, so just increase the delta to try and find the correct value as quickly as possible.
+ compatibilityThresholdDelta*=compatibilityThresholdDeltaAcceleration;
+ }
+ }
+ else
+ { // Start new adjustment 'phase'.
+ speciationThresholdAdjustInProgress = true;
+ compatibilityThresholdDelta = Math.Max(0.1, neatParameters.compatibilityThreshold * 0.5); // 0.01); // GWM - compatibility adjustment seems way too slow
+ }
+
+ // Adjust speciation threshold by compatibilityThresholdDelta.
+ neatParameters.compatibilityThreshold += compatibilityThresholdDelta;
+
+ redetermineSpeciationFlag = true;
+ }
+ else
+ { // Correct # of species. Ensure flag is reset.
+ speciationThresholdAdjustInProgress=false;
+ }
+
+ if(!redetermineSpeciationFlag)
+ {
+ double complexityDeltaProportion = Math.Abs(pop.AvgComplexity-meanComplexityAtLastAdjustSpeciationThreshold)/meanComplexityAtLastAdjustSpeciationThreshold;
+
+ if(complexityDeltaProportion>0.05)
+ { // If the population's complexity has changed by more than some proportion then force a
+ // call to RedetermineSpeciation().
+ redetermineSpeciationFlag = true;
+
+ // Update the tracking variable.
+ meanComplexityAtLastAdjustSpeciationThreshold = pop.AvgComplexity;
+ }
+ }
+
+ if(redetermineSpeciationFlag)
+ {
+ // If the speciation threshold was adjusted then we must disregard all previous speciation
+ // and rebuild the species table.
+ pop.RedetermineSpeciation(this);
+
+ // If we are in a pruning phase then we should reset the pruning phase tracking variables.
+ // We are effectively re-starting the pruning phase.
+ prunePhase_generationAtLastSimplification = generation;
+ prunePhase_MinimumStructuresPerGenome = pop.AvgComplexity;
+
+ //Debug.WriteLine("ad hoc RedetermineSpeciation()");
+ }
+ }
+
+// ///
+// /// Returns true if the speciation threshold was adjusted.
+// ///
+// ///
+// private bool AdjustSpeciationThreshold()
+// {
+// int speciesCount = pop.SpeciesTable.Count;
+//
+// if(speciesCount < neatParameters.targetSpeciesCountMin)
+// {
+// // Too few species. Reduce the speciation threshold.
+// if(speciationThresholdAdjustInProgress)
+// { // Adjustment is already in progress.
+// if(compatibilityThresholdDelta<0.0)
+// { // Negative delta. Correct direction, so just increase the delta to try and find the correct value as quickly as possible.
+// compatibilityThresholdDelta*=compatibilityThresholdDeltaAcceleration;
+// }
+// else
+// { // Positive delta. Incorrect direction. This means we have overshot the correct value.
+// // Reduce the delta and flip its sign.
+// compatibilityThresholdDelta*=-0.5;
+// }
+// }
+// else
+// { // Start new adjustment 'phase'.
+// speciationThresholdAdjustInProgress = true;
+// compatibilityThresholdDelta = -Math.Max(0.1, neatParameters.compatibilityThreshold * 0.01);
+// }
+//
+// // Adjust speciation threshold by compatibilityThresholdDelta.
+// neatParameters.compatibilityThreshold += compatibilityThresholdDelta;
+// neatParameters.compatibilityThreshold = Math.Max(0.01, neatParameters.compatibilityThreshold);
+//
+// Debug.WriteLine("delta=" + compatibilityThresholdDelta);
+//
+// return true;
+// }
+// else if(speciesCount > neatParameters.targetSpeciesCountMax)
+// {
+// // Too many species. Increase the species threshold.
+// if(speciationThresholdAdjustInProgress)
+// { // Adjustment is already in progress.
+// if(compatibilityThresholdDelta<0.0)
+// { // Negative delta. Incorrect direction. This means we have overshot the correct value.
+// // Reduce the delta and flip its sign.
+// compatibilityThresholdDelta*=-0.5;
+// }
+// else
+// { // Positive delta. Correct direction, so just increase the delta to try and find the correct value as quickly as possible.
+// compatibilityThresholdDelta*=compatibilityThresholdDeltaAcceleration;
+// }
+// }
+// else
+// { // Start new adjustment 'phase'.
+// speciationThresholdAdjustInProgress = true;
+// compatibilityThresholdDelta = Math.Max(0.1, neatParameters.compatibilityThreshold * 0.01);
+// }
+//
+// // Adjust speciation threshold by compatibilityThresholdDelta.
+// neatParameters.compatibilityThreshold += compatibilityThresholdDelta;
+//
+// Debug.WriteLine("delta=" + compatibilityThresholdDelta);
+//
+// return true;
+// }
+// else
+// { // Correct # of species. Ensure flag is reset.
+// speciationThresholdAdjustInProgress=false;
+// return false;
+// }
+// }
+
+// private const double compatibilityThresholdDeltaBaseline = 0.1;
+// private const double compatibilityThresholdDeltaAcceleration = 1.5;
+//
+// private double compatibilityThresholdDelta = compatibilityThresholdDeltaBaseline;
+// private bool compatibilityThresholdDeltaDirection=true;
+//
+// ///
+// /// This routine adjusts the speciation threshold so that the number of species remains between the specified upper
+// /// and lower limits. This routine implements a momentum approach so that the rate of change in the threshold increases
+// /// if the number of species remains incorrect for consecutive invocations.
+// ///
+// private void AdjustSpeciationThreshold()
+// {
+// double newThreshold;
+//
+// if(pop.SpeciesTable.Count < neatParameters.targetSpeciesCountMin)
+// {
+// newThreshold = Math.Max(compatibilityThresholdDeltaBaseline, neatParameters.compatibilityThreshold - compatibilityThresholdDelta);
+//
+// // Delta acceleration.
+// if(compatibilityThresholdDeltaDirection)
+// { // Wrong direction - Direction change. Also reset compatibilityThresholdDelta.
+// compatibilityThresholdDelta = compatibilityThresholdDeltaBaseline;
+// compatibilityThresholdDeltaDirection=false;
+// }
+// else
+// { // Already going in the right direction.
+// compatibilityThresholdDelta *= compatibilityThresholdDeltaAcceleration;
+// }
+// }
+// else if(pop.SpeciesTable.Count > neatParameters.targetSpeciesCountMax)
+// {
+// newThreshold = neatParameters.compatibilityThreshold + compatibilityThresholdDelta;
+//
+// // Delta acceleration.
+// if(compatibilityThresholdDeltaDirection)
+// { // Already going in the right direction.
+// compatibilityThresholdDelta *= compatibilityThresholdDeltaAcceleration;
+// }
+// else
+// { // Wrong direction - Direction change. Also reset compatibilityThresholdDelta.
+// compatibilityThresholdDelta = compatibilityThresholdDeltaBaseline;
+// compatibilityThresholdDeltaDirection=true;
+// }
+// }
+// else
+// { // Current threshold is OK. Reset compatibilityThresholdDelta in case it has 'accelerated' to a large value.
+// // This would be a bad value to start with when the threshold next needs adjustment.
+// compatibilityThresholdDelta = compatibilityThresholdDeltaBaseline;
+// return;
+// }
+//
+// neatParameters.compatibilityThreshold = newThreshold;
+//
+// // If the speciation threshold was adjusted then we must disregard all previous speciation
+// // and rebuild the species table.
+// pop.RedetermineSpeciation(this);
+// }
+
+ #endregion
+
+ #region Private Methods
+
+ private void CreateOffSpring()
+ {
+ offspringList.Clear();
+ CreateOffSpring_Asexual();
+ CreateOffSpring_Sexual();
+ }
+
+ private void CreateOffSpring_Asexual()
+ {
+ // Create a new lists so that we can track which connections/neurons have been added during this routine.
+ newConnectionGeneTable.Clear();
+ newNeuronGeneStructTable.Clear();
+
+ //----- Repeat the reproduction per species to give each species a fair chance at reproduction.
+ // Note that for this to work for small numbers of genomes in a species we need a reproduction
+ // rate of 100% or more. This is analogous to the strategy used in NEAT.
+ foreach(Species species in pop.SpeciesTable.Values)
+ {
+ // Determine how many asexual offspring to create.
+ // Minimum of 1. Any species with TargetSize of 0 are eliminated at the top of PerformOneGeneration(). This copes with the
+ // special case where every species may calculate offspringCount to be zero and therefore we lose the entire population!
+ // This can happen e.g. when each genome is allocated its own species with TargetSize of 1.
+ int offspringCount = Math.Max(1,(int)Math.Round((species.TargetSize - species.ElitistSize) * neatParameters.pOffspringAsexual));
+ for(int i=0; i
+// /// <summary>
+// /// Mutations can sometimes create the same innovation more than once within a population.
+// /// If this occurs then we ensure like innovations are allocated the same innovation ID.
+// /// This is for this generation only - if the innovation occurs in a later generation we
+// /// leave it as it is.
+// /// </summary>
+// private void AmalgamateInnovations()
+// {
+// // TODO: Inefficient routine. Revise.
+// // Indicates that at least one list's order has been invalidated.
+// bool bOrderInvalidated=false;
+//
+// // Check through the new NeuronGenes - and their associated connections.
+// int neuronListBound = newNeuronGeneStructList.Count;
+// for(int i=0; i
+ /// <summary>
+ /// Biased select.
+ /// </summary>
+ /// <param name="species">Species to select from.</param>
+ /// <returns></returns>
+ private IGenome RouletteWheelSelect(Species species)
+ {
+ double selectValue = (Utilities.NextDouble() * species.SelectionCountTotalFitness);
+ double accumulator=0.0;
+
+ int genomeBound = species.Members.Count;
+ for(int genomeIdx=0; genomeIdx
+ /// <summary>
+ /// Biased select.
+ /// </summary>
+ /// <param name="p">Population to select from.</param>
+ /// <returns></returns>
+ private IGenome RouletteWheelSelect(Population p)
+ {
+ double selectValue = (Utilities.NextDouble() * p.SelectionTotalFitness);
+ double accumulator=0.0;
+
+ int genomeBound = p.GenomeList.Count;
+ for(int genomeIdx=0; genomeIdx bestFitness) GWM - changed for novelty
+ if (genome.ObjectiveFitness > bestFitness)
+ {
+ bestGenome = genome;
+ bestFitness = bestGenome.ObjectiveFitness;
+ }
+
+ // Track the generation number when the species improves.
+ if(genome.Fitness > species.MaxFitnessEver)
+ {
+ species.MaxFitnessEver = genome.Fitness;
+ species.AgeAtLastImprovement = species.SpeciesAge;
+ }
+ else if(!pruningMode && (species.SpeciesAge-species.AgeAtLastImprovement > neatParameters.speciesDropoffAge))
+ { // The species is a candidate for culling. It may be given a pardon (later) if it is a champion species.
+ species.CullCandidateFlag=true;
+ bCandidateCullFlag=true;
+ }
+
+ //----- Update species totals in this first loop.
+ // Calculate and store the number of genomes that will be selected from.
+ species.SelectionCount = (int)Math.Max(1.0, Math.Round((double)species.Members.Count * neatParameters.selectionProportion));
+ species.SelectionCountTotalFitness = 0.0;
+
+ int genomeBound = species.Members.Count;
+ for(int genomeIdx=0; genomeIdx<genomeBound; genomeIdx++)
+ {
+ IGenome genome = species.Members[genomeIdx];
+ Debug.Assert(genome.Fitness>=EvolutionAlgorithm.MIN_GENOME_FITNESS, "Genome fitness must be non-zero. Use EvolutionAlgorithm.MIN_GENOME_FITNESS");
+ species.TotalFitness += genome.Fitness;
+
+ if(genomeIdx < species.SelectionCount)
+ species.SelectionCountTotalFitness += genome.Fitness;
+
+ species.TotalNeuronCount += genome.NeuronGeneList.Count;
+ species.TotalConnectionCount += genome.ConnectionGeneList.Count;
+ }
+
+ species.TotalStructureCount = species.TotalNeuronCount + species.TotalConnectionCount;
+ }
+
+ // If any species have had their CullCandidateFlag set then we need to execute some extra logic
+ // to ensure we don't cull a champion species if it is the only champion species.
+ // If there is more than one champion species and all of them have the CullCandidateFlag set then
+ // we unset the flag on one of them. Therefore we always retain at least one champion species in the
+ // population.
+ if(bCandidateCullFlag)
+ {
+ ArrayList championSpecies = new ArrayList();
+
+ //----- 2nd loop through species. Build list of champion species.
+ foreach(Species species in pop.SpeciesTable.Values)
+ {
+ if(species.Members[0].ObjectiveFitness == bestFitness)
+ championSpecies.Add(species);
+ }
+ Debug.Assert(championSpecies.Count>0, "No champion species! There should be at least one.");
+
+ if(championSpecies.Count==1)
+ {
+ Species species = (Species)championSpecies[0];
+ if(species.CullCandidateFlag==true)
+ {
+ species.CullCandidateFlag = false;
+
+ // Also reset the species AgeAtLastImprovement so that it doesn't become
+ // a cull candidate every generation, which would inefficiently invoke this
+ // extra logic on every generation.
+ species.AgeAtLastImprovement=species.SpeciesAge;
+ }
+ }
+ else
+ { // There are multiple champion species. Check for special case where all champions
+ // are cull candidates.
+ bool bAllChampionsAreCullCandidates = true; // default to true.
+ foreach(Species species in championSpecies)
+ {
+ if(species.CullCandidateFlag)
+ continue;
+
+ bAllChampionsAreCullCandidates=false;
+ break;
+ }
+
+ if(bAllChampionsAreCullCandidates)
+ { // Unset the flag on one of the champions at random.
+ Species champ = (Species)championSpecies[(int)Math.Floor(Utilities.NextDouble()*championSpecies.Count)];
+ champ.CullCandidateFlag = false;
+
+ // Also reset the species AgeAtLastImprovement so that it doesn't become
+ // a cull candidate every generation, which would inefficiently invoke this
+ // extra logic on every generation.
+ champ.AgeAtLastImprovement=champ.SpeciesAge;
+ }
+ }
+ }
+
+ //----- 3rd loop through species. Update remaining stats.
+ foreach(Species species in pop.SpeciesTable.Values)
+ {
+ const double MEAN_FITNESS_ADJUSTMENT_FACTOR = 0.01;
+
+ if(species.CullCandidateFlag)
+ species.MeanFitness = (species.TotalFitness / species.Members.Count) * MEAN_FITNESS_ADJUSTMENT_FACTOR;
+ else
+ species.MeanFitness = species.TotalFitness / species.Members.Count;
+
+ //----- Update population totals.
+ pop.TotalFitness += species.TotalFitness;
+ pop.TotalSpeciesMeanFitness += species.MeanFitness;
+ pop.SelectionTotalFitness += species.SelectionCountTotalFitness;
+ pop.TotalNeuronCount += species.TotalNeuronCount;
+ pop.TotalConnectionCount += species.TotalConnectionCount;
+ }
+
+ //----- Update some population stats /averages.
+ if(bestFitness > pop.MaxFitnessEver)
+ {
+ Debug.WriteLine("UpdateStats() - bestFitness=" + bestGenome.ObjectiveFitness.ToString() + ", " + bestFitness.ToString());
+ pop.MaxFitnessEver = bestGenome.ObjectiveFitness;
+ pop.GenerationAtLastImprovement = this.generation;
+ }
+
+ pop.MeanFitness = pop.TotalFitness / pop.GenomeList.Count;
+ pop.TotalStructureCount = pop.TotalNeuronCount + pop.TotalConnectionCount;
+ pop.AvgComplexity = (float)pop.TotalStructureCount / (float)pop.GenomeList.Count;
+ }
+
+ ///
+ /// Determine the target size of each species based upon the current fitness stats. The target size
+ /// is stored against each Species object.
+ ///
+ ///
+ private void DetermineSpeciesTargetSize()
+ {
+ foreach(Species species in pop.SpeciesTable.Values)
+ {
+ species.TargetSize = (int)Math.Round((species.MeanFitness / pop.TotalSpeciesMeanFitness) * pop.PopulationSize);
+
+ // Calculate how many elite genomes to keep in the next round. If this is a large number then we can only
+ // keep as many genomes as we have!
+ species.ElitistSize = Math.Min(species.Members.Count, (int)Math.Floor(species.TargetSize * neatParameters.elitismProportion));
+ if(species.ElitistSize==0 && species.TargetSize > 1)
+ { // If ElitistSize is calculated to be zero but the TargetSize non-zero then keep just one genome.
+ // If the TargetSize is 1 then we can't really do this since it would mean that no offspring would be generated.
+ // So we throw away the one member and hope that the one offspring generated will be OK.
+ species.ElitistSize = 1;
+ }
+ }
+ }
+
+ ///
+ /// Search for connections with the same end-points throughout the whole population and
+ /// ensure that like connections have the same innovation ID.
+ ///
+ private void MatchConnectionIds()
+ {
+ Hashtable connectionIdTable = new Hashtable();
+
+ int genomeBound=pop.GenomeList.Count;
+ for(int genomeIdx=0; genomeIdx pop.PrunePhaseAvgComplexityThreshold) &&
+ ((generation-pop.GenerationAtLastImprovement) >= neatParameters.pruningPhaseBeginFitnessStagnationThreshold);
+ }
+
+ private bool TestForPruningPhaseEnd()
+ {
+ // Don't expect simplification on every generation. But if nothing has happened for
+ // 'pruningPhaseEndComplexityStagnationThreshold' gens then end the prune phase.
+ if(generation-prunePhase_generationAtLastSimplification > neatParameters.pruningPhaseEndComplexityStagnationThreshold)
+ return true;
+
+ return false;
+ }
+
+
+ private void BeginPruningPhase()
+ {
+ // Enter pruning phase.
+ pruningMode = true;
+ prunePhase_generationAtLastSimplification = generation;
+ prunePhase_MinimumStructuresPerGenome = pop.AvgComplexity;
+ neatParameters = neatParameters_PrunePhase;
+
+ // Copy the speciation threshold as this is dynamically altered during a search and we wish to maintain
+ // the tracking during pruning.
+ neatParameters.compatibilityThreshold = neatParameters_Normal.compatibilityThreshold;
+
+ System.Diagnostics.Debug.WriteLine(">>Prune Phase<< Complexity=" + pop.AvgComplexity.ToString("0.00"));
+ }
+
+ private void EndPruningPhase()
+ {
+ // Leave pruning phase.
+ pruningMode = false;
+
+ // Set the new prune-phase complexity threshold to the current average complexity plus pruningPhaseBeginComplexityThreshold.
+ pop.PrunePhaseAvgComplexityThreshold = pop.AvgComplexity + neatParameters.pruningPhaseBeginComplexityThreshold;
+ System.Diagnostics.Debug.WriteLine("complexity=" + pop.AvgComplexity.ToString() + ", threshold=" + pop.PrunePhaseAvgComplexityThreshold.ToString());
+
+ neatParameters = neatParameters_Normal;
+ neatParameters.compatibilityThreshold = neatParameters_PrunePhase.compatibilityThreshold;
+
+ // Update species.AgeAtLastImprovement. Originally we reset this age to give all of the species
+ // a 'clean slate' following the pruning phase. This though has the effect of giving all of the
+ // species the same AgeAtLastImprovement - which in turn often results in all of the species
+ // reaching the dropoff age simultaneously, which results in the species being culled and therefore
+ // causes a radical fall in population diversity.
+ // Therefore we give the species a new AgeAtLastImprovement which reflects their relative
+ // AgeAtLastImprovement, this gives the species a new chance following pruning but does not allocate
+ // them all the same AgeAtLastImprovement.
+ NormalizeSpeciesAges();
+
+ if(connectionWeightFixingEnabled)
+ {
+ // Fix all of the connection weights that remain after pruning (proven to be good values).
+ foreach(NeatGenome.NeatGenome genome in pop.GenomeList)
+ genome.FixConnectionWeights();
+ }
+ }
+
+ private void NormalizeSpeciesAges()
+ {
+ float quarter_of_dropoffage = (float)neatParameters.speciesDropoffAge / 4.0F;
+
+ // Calculate the spread of AgeAtLastImprovement - first find the min and max values.
+ long minAgeAtLastImprovement;
+ long maxAgeAtLastImprovement;
+
+ minAgeAtLastImprovement = long.MaxValue;
+ maxAgeAtLastImprovement = 0;
+
+ foreach(Species species in pop.SpeciesTable.Values)
+ {
+ minAgeAtLastImprovement = Math.Min(minAgeAtLastImprovement, species.AgeAtLastImprovement);
+ maxAgeAtLastImprovement = Math.Max(maxAgeAtLastImprovement, species.AgeAtLastImprovement);
+ }
+
+ long spread = maxAgeAtLastImprovement-minAgeAtLastImprovement;
+
+ // Allocate each species a new AgeAtLastImprovement. Scale the ages so that the oldest is
+ // only 25% towards the cutoff age.
+ foreach(Species species in pop.SpeciesTable.Values)
+ {
+ long dropOffAge = species.AgeAtLastImprovement-minAgeAtLastImprovement;
+ // Guard against division by zero when every species shares the same AgeAtLastImprovement.
+ long newDropOffAge = (spread==0) ? 0 : (long)(((float)dropOffAge / (float)spread) * quarter_of_dropoffage);
+ species.AgeAtLastImprovement = species.SpeciesAge - newDropOffAge;
+ }
+ }
+
+ #endregion
+
+ #region Some routines useful for profiling.
+// System.Text.StringBuilder sb = new System.Text.StringBuilder();
+// int tickCountStart;
+// int tickDuration;
+//
+// private void StartMonitor()
+// {
+// tickCountStart = System.Environment.TickCount;
+// }
+//
+// private void EndMonitor(string msg)
+// {
+// tickDuration = System.Environment.TickCount - tickCountStart;
+// sb.Append(msg + " : " + tickDuration + " ms\n");
+// }
+//
+// private void DumpMessage()
+// {
+// System.Windows.Forms.MessageBox.Show(sb.ToString());
+// sb = new System.Text.StringBuilder();
+// }
+ #endregion
+ }
+}
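The bodies of the two RouletteWheelSelect overloads in this file are truncated in the hunk above. As a reference for the fitness-proportionate ("roulette wheel") selection they perform, here is a minimal standalone sketch; the raw list of fitness values, the helper name and the use of System.Random are illustrative stand-ins rather than the library's IGenome members or Utilities.NextDouble().

    using System;
    using System.Collections.Generic;

    static class RouletteWheelSketch
    {
        // Select an index with probability proportional to its fitness.
        // 'selectionTotalFitness' plays the role of Species.SelectionCountTotalFitness /
        // Population.SelectionTotalFitness: the summed fitness of the eligible genomes.
        public static int Select(IList<double> fitnesses, double selectionTotalFitness, Random rng)
        {
            double selectValue = rng.NextDouble() * selectionTotalFitness;
            double accumulator = 0.0;

            for (int i = 0; i < fitnesses.Count; i++)
            {
                accumulator += fitnesses[i];
                if (selectValue <= accumulator)
                    return i;
            }

            // Floating-point rounding can leave selectValue fractionally above the final
            // accumulator; fall back to the last eligible candidate.
            return fitnesses.Count - 1;
        }
    }

Passing only the fitness values of the first Species.SelectionCount members reproduces the truncated selection pool that the surrounding code sets up via selectionProportion.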
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/GenomeAgeComparer.cs b/SharpNeatWalker/SharpNeatLib/Evolution/GenomeAgeComparer.cs
new file mode 100644
index 000000000..d5390c9b0
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/GenomeAgeComparer.cs
@@ -0,0 +1,25 @@
+using System;
+using System.Collections;
+using SharpNeatLib.Evolution;
+
+namespace SharpNeatLib.Evolution
+{
+ ///
+ /// Summary description for GenomeAgeComparer.
+ ///
+ public class GenomeAgeComparer : IComparer
+ {
+ public int Compare(object x, object y)
+ {
+ long diff = (((IGenome)x).GenomeAge - ((IGenome)y).GenomeAge);
+
+ // Convert result to an int.
+ if(diff <0)
+ return -1;
+ else if(diff==0)
+ return 0;
+ else
+ return 1;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/GenomeComparer.cs b/SharpNeatWalker/SharpNeatLib/Evolution/GenomeComparer.cs
new file mode 100644
index 000000000..5a4448f25
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/GenomeComparer.cs
@@ -0,0 +1,37 @@
+using System;
+using System.Collections.Generic;
+using SharpNeatLib.Evolution;
+
+namespace SharpNeatLib.Evolution
+{
+ ///
+ /// Sort by Fitness (descending). Genomes with like fitness are then sorted by genome age (ascending).
+ /// This means the selection routines are more likely to select the fit AND, among equally fit genomes, the youngest first.
+ ///
+ public class GenomeComparer : IComparer<IGenome>
+ {
+
+ #region IComparer Members
+
+ public int Compare(IGenome x, IGenome y)
+ {
+ double fitnessDelta = y.ObjectiveFitness - x.ObjectiveFitness; // GWM - must sort by objective fitness to ensure elitism works properly
+ if (fitnessDelta < 0.0D)
+ return -1;
+ else if (fitnessDelta > 0.0D)
+ return 1;
+
+ long ageDelta = x.GenomeAge - y.GenomeAge;
+
+ // Convert result to an int.
+ if (ageDelta < 0)
+ return -1;
+ else if (ageDelta > 0)
+ return 1;
+
+ return 0;
+ }
+
+ #endregion
+ }
+}
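A quick way to see the effect of this two-key ordering without building full NeatGenome instances is to mirror the comparison on a plain throwaway type; the Item class and its values below are purely illustrative.

    using System;
    using System.Collections.Generic;

    static class ComparerOrderingSketch
    {
        private class Item
        {
            public double ObjectiveFitness;
            public long Age;
        }

        public static void Main()
        {
            List<Item> items = new List<Item>
            {
                new Item { ObjectiveFitness = 1.0, Age = 5 },
                new Item { ObjectiveFitness = 2.0, Age = 9 },
                new Item { ObjectiveFitness = 2.0, Age = 3 }
            };

            // Same two-key ordering as GenomeComparer: objective fitness descending, then age ascending.
            items.Sort(delegate(Item x, Item y)
            {
                int byFitness = y.ObjectiveFitness.CompareTo(x.ObjectiveFitness);
                return byFitness != 0 ? byFitness : x.Age.CompareTo(y.Age);
            });

            // Prints: 2/3, 2/9, 1/5 - the fittest first, and the youngest among equally fit items.
            foreach (Item item in items)
                Console.WriteLine(item.ObjectiveFitness + "/" + item.Age);
        }
    }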
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/GenomeList.cs b/SharpNeatWalker/SharpNeatLib/Evolution/GenomeList.cs
new file mode 100644
index 000000000..7ac1c527c
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/GenomeList.cs
@@ -0,0 +1,27 @@
+using System;
+using System.Collections.Generic;
+
+namespace SharpNeatLib.Evolution
+{
+
+ public class GenomeList : List<IGenome>
+ {
+ static GenomeComparer genomeComparer = new GenomeComparer();
+ static PruningModeGenomeComparer pruningModeGenomeComparer = new PruningModeGenomeComparer();
+
+ new public void Sort()
+ {
+ Sort(genomeComparer);
+ }
+
+ ///
+ /// This performs a secondary sort on genome size (ascending order), so that small genomes
+ /// are more likely to be selected thus aiding a pruning phase.
+ ///
+ public void Sort_PruningMode()
+ {
+ Sort(pruningModeGenomeComparer);
+ }
+
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/IGenome.cs b/SharpNeatWalker/SharpNeatLib/Evolution/IGenome.cs
new file mode 100644
index 000000000..da653be01
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/IGenome.cs
@@ -0,0 +1,182 @@
+using System;
+using System.Xml;
+
+using SharpNeatLib.Evolution;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Evolution
+{
+ ///
+ /// An interface for describing a generic genome.
+ /// IComparable must be implemented in contravention of the docs so that ArrayList.Sort() will sort into descending order.
+ /// This interface may be discarded since the development of SharpNEAT has seen the EvolutionAlgorithm become more
+ /// closely coupled with the NeatGenome, thus making this interface's abstraction unmaintainable.
+ ///
+ public interface IGenome : IComparable
+ {
+ ///
+ /// Some(most) types of network have fixed numbers of input and output nodes and will not work correctly or
+ /// throw an exception if we try and use inputs/outputs that do not exist. This method allows us to check
+ /// compatibility before we begin.
+ ///
+ ///
+ ///
+ ///
+ bool IsCompatibleWithNetwork(int inputCount, int outputCount);
+
+ ///
+ /// Asexual reproduction with built in mutation.
+ ///
+ ///
+ IGenome CreateOffspring_Asexual(EvolutionAlgorithm ea);
+
+ ///
+ /// Sexual reproduction. No mutation performed.
+ ///
+ ///
+ ///
+ IGenome CreateOffspring_Sexual(EvolutionAlgorithm ea, IGenome parent);
+
+ ///
+ /// The globally unique ID for this genome (within the context of a search).
+ ///
+ uint GenomeId
+ {
+ get;
+ }
+
+ ///
+ /// The number of generations that this genome has existed. Note that to
+ /// survive a generation a genome must be one of the elite that are preserved
+ /// between generations.
+ ///
+ long GenomeAge
+ {
+ get;
+ set;
+ }
+
+ ///
+ /// This genome's fitness as calculated by the evaluation environment.
+ ///
+ double Fitness
+ {
+ get;
+ set;
+ }
+
+ ///
+ /// The number of times this genome has been evaluated.
+ ///
+ long EvaluationCount
+ {
+ get;
+ set;
+ }
+
+ ///
+ /// Returns the total of all fitness scores if this genome has been evaluated more than once.
+ /// Average fitness is therefore this figure divided by EvaluationCount.
+ ///
+ double TotalFitness
+ {
+ get;
+ set;
+ }
+
+ double ObjectiveFitness
+ {
+ get;
+ set;
+ }
+
+ ///
+ /// The species this genome is within.
+ ///
+ int SpeciesId
+ {
+ get;
+ set;
+ }
+
+ ///
+ /// The ID of this genome's first parent.
+ ///
+ int ParentSpeciesId1
+ {
+ get;
+ set;
+ }
+
+ ///
+ /// The species ID of this genome's second parent, or -1 if there was no second parent.
+ ///
+ int ParentSpeciesId2
+ {
+ get;
+ set;
+ }
+
+ AbstractNetwork AbstractNetwork
+ {
+ get;
+ }
+
+ ///
+ /// An object reference that can be used by IPopulationEvaluator objects to
+ /// store evaluation state information against a genome. E.g. If we have a growing
+ /// list of test cases as evolution progresses then we could store the index of the
+ /// last test case to be evaluated against. We can then skip over these test cases
+ /// in subsequent evaluations of this genome.
+ ///
+ object Tag
+ {
+ get;
+ set;
+ }
+
+ ///
+ /// Decode the genome's 'DNA' into a working network.
+ ///
+ ///
+ INetwork Decode(IActivationFunction activationFn);
+
+ ///
+ /// Clone this genome.
+ ///
+ ///
+ IGenome Clone(EvolutionAlgorithm ea);
+
+ ///
+ /// Compare this IGenome with the provided one. They are compatible (determined to be in
+ /// the same species) if their calculated difference is below the current threshold specified
+ /// by NeatParameters.compatibilityThreshold
+ ///
+ ///
+ ///
+ ///
+ bool IsCompatibleWithGenome(IGenome comparisonGenome, NeatParameters neatParameters);
+
+ ///
+ /// Used primarily to give this IGenome a hook onto the Population it is within.
+ ///
+ Population OwningPopulation
+ {
+ get;
+ set;
+ }
+
+ ///
+ /// Persist to XML.
+ ///
+ ///
+ void Write(XmlNode parentNode);
+
+
+ ///
+ /// For debug purposes only.
+ ///
+ /// Returns true if genome integrity checks out OK.
+ bool PerformIntegrityCheck();
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/IIdGeneratorFactory.cs b/SharpNeatWalker/SharpNeatLib/Evolution/IIdGeneratorFactory.cs
new file mode 100644
index 000000000..163ac5d40
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/IIdGeneratorFactory.cs
@@ -0,0 +1,14 @@
+using System;
+
+namespace SharpNeatLib.Evolution
+{
+ public interface IIdGeneratorFactory
+ {
+ ///
+ /// Create an IdGenerator based upon the IDs within the provided population.
+ ///
+ ///
+ ///
+ IdGenerator CreateIdGenerator(GenomeList genomeList);
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/IPopulationEvaluator.cs b/SharpNeatWalker/SharpNeatLib/Evolution/IPopulationEvaluator.cs
new file mode 100644
index 000000000..9f4899434
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/IPopulationEvaluator.cs
@@ -0,0 +1,51 @@
+using System;
+
+namespace SharpNeatLib.Evolution
+{
+ public interface IPopulationEvaluator
+ {
+ ///
+ /// Evaluate the genomes within the Population argument. Implementors can choose how to evaluate
+ /// the genomes and which ones to evaluate, e.g. only evaluate new genomes (EvaluationCount==0).
+ ///
+ ///
+ /// Some evaluators may wish to interrogate the current EvolutionAlgorithm to
+ /// obtain statistical information. Most experiments though do not require this parameter.
+ void EvaluatePopulation(Population pop, EvolutionAlgorithm ea);
+
+ ///
+ /// The total number of evaluations performed.
+ ///
+ ulong EvaluationCount
+ {
+ get;
+ }
+
+ ///
+ /// A human readable message that describes the state of the evaluator. This is useful if the
+ /// evaluator has several modes (e.g. difficulty levels in incremental evolution) and we want
+ /// to let the user know what mode the evaluator is in.
+ ///
+ string EvaluatorStateMessage
+ {
+ get;
+ }
+
+ ///
+ /// Indicates that the current best genome is a champion at the current level of difficulty.
+ /// If there is only one difficulty level then the 'SearchCompleted' flag should also be set.
+ ///
+ bool BestIsIntermediateChampion
+ {
+ get;
+ }
+
+ ///
+ /// Indicates that the best solution meets the evaluator's end criteria.
+ ///
+ bool SearchCompleted
+ {
+ get;
+ }
+ }
+}
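For orientation, a skeletal implementor of this interface might look like the sketch below. The class name SkeletonPopulationEvaluator and the EvaluateGenome scoring call are hypothetical placeholders; the concrete CTRNN/network evaluators for this changeset live in other files. Following the remark on EvaluatePopulation, only genomes that have not yet been scored are touched.

    using System;
    using SharpNeatLib.Evolution;

    namespace SharpNeatLib.Experiments
    {
        // Illustrative skeleton only; not part of the library.
        public class SkeletonPopulationEvaluator : IPopulationEvaluator
        {
            ulong evaluationCount;

            public void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
            {
                foreach (IGenome genome in pop.GenomeList)
                {
                    if (genome.EvaluationCount > 0)
                        continue; // Only evaluate genomes that have not been scored yet.

                    double fitness = EvaluateGenome(genome); // Hypothetical scoring call.
                    genome.Fitness = fitness;
                    genome.ObjectiveFitness = fitness;
                    genome.TotalFitness += fitness;
                    genome.EvaluationCount++;
                    evaluationCount++;
                }
            }

            static double EvaluateGenome(IGenome genome)
            {
                // Placeholder; a real evaluator would decode the genome and run the resulting network.
                return 1.0;
            }

            public ulong EvaluationCount
            {
                get { return evaluationCount; }
            }

            public string EvaluatorStateMessage
            {
                get { return string.Empty; }
            }

            public bool BestIsIntermediateChampion
            {
                get { return false; }
            }

            public bool SearchCompleted
            {
                get { return false; }
            }
        }
    }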
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/IdGenerator.cs b/SharpNeatWalker/SharpNeatLib/Evolution/IdGenerator.cs
new file mode 100644
index 000000000..8ad4f8acc
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/IdGenerator.cs
@@ -0,0 +1,63 @@
+using System;
+
+namespace SharpNeatLib.Evolution
+{
+ public class IdGenerator
+ {
+ uint nextGenomeId;
+ uint nextInnovationId;
+
+ #region Constructors
+
+ public IdGenerator()
+ {
+ this.nextGenomeId = 0;
+ this.nextInnovationId = 0;
+ }
+
+ public IdGenerator(uint nextGenomeId, uint nextInnovationId)
+ {
+ this.nextGenomeId = nextGenomeId;
+ this.nextInnovationId = nextInnovationId;
+ }
+
+ #endregion
+
+ #region Properties
+
+ public uint NextGenomeId
+ {
+ get
+ {
+ if(nextGenomeId==uint.MaxValue)
+ nextGenomeId=0;
+ return nextGenomeId++;
+ }
+ }
+
+ public uint NextInnovationId
+ {
+ get
+ {
+ if(nextInnovationId==uint.MaxValue)
+ nextInnovationId=0;
+ return nextInnovationId++;
+ }
+ }
+
+ #endregion
+
+ #region Public Methods
+
+ ///
+ /// Used primarily by the GenomeFactory so that the same innovation IDs are used for input & output nodes
+ /// for all of the initial population.
+ ///
+ public void ResetNextInnovationNumber()
+ {
+ nextInnovationId=0;
+ }
+
+ #endregion
+ }
+}
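One detail worth noting from the property bodies above: uint.MaxValue itself is never handed out, because the counter is reset to zero on the call that would have returned it. A small usage sketch:

    using System;
    using SharpNeatLib.Evolution;

    static class IdGeneratorSketch
    {
        public static void Main()
        {
            // Start one short of the wrap-around point for the innovation counter.
            IdGenerator idGen = new IdGenerator(0, uint.MaxValue - 1);

            Console.WriteLine(idGen.NextInnovationId); // 4294967294 (uint.MaxValue - 1)
            Console.WriteLine(idGen.NextInnovationId); // 0 - the counter wrapped instead of returning uint.MaxValue
            Console.WriteLine(idGen.NextGenomeId);     // 0, then 1, 2, ... on subsequent calls
        }
    }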
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters.cs b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters.cs
new file mode 100644
index 000000000..26d14b20c
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters.cs
@@ -0,0 +1,250 @@
+using System;
+using System.Collections;
+
+namespace SharpNeatLib.Evolution
+{
+ public class NeatParameters
+ {
+ #region Constants
+
+ public const int DEFAULT_POPULATION_SIZE = 150;
+ public const float DEFAULT_P_INITIAL_POPULATION_INTERCONNECTIONS = 1.00F;//DAVID 0.05F;
+
+ public const double DEFAULT_P_OFFSPRING_ASEXUAL = 0.5;
+ public const double DEFAULT_P_OFFSPRING_SEXUAL = 0.5;
+ public const double DEFAULT_P_INTERSPECIES_MATING = 0.01;
+
+ public const double DEFAULT_P_DISJOINGEXCESSGENES_RECOMBINED = 0.1;
+
+ //----- High level mutation proportions
+ public const double DEFAULT_P_MUTATE_CONNECTION_WEIGHTS = 0.988;
+ public const double DEFAULT_P_MUTATE_ADD_NODE = 0.001;
+ public const double DEFAULT_P_MUTATE_ADD_CONNECTION = 0.01;
+ public const double DEFAULT_P_MUTATE_DELETE_CONNECTION = 0.001;
+ public const double DEFAULT_P_MUTATE_DELETE_SIMPLENEURON = 0.00;
+
+// //----- Secondary mutation proportions (Connection weight mutation).
+// public const double DEFAULT_P_MUTATE_CONNECTIONWEIGHT_JIGGLE_LARGEPROPORTION = 0.2;
+// public const double DEFAULT_P_MUTATE_CONNECTIONWEIGHT_JIGGLE_SMALLPROPORTION = 0.2;
+// public const double DEFAULT_P_MUTATE_CONNECTIONWEIGHT_JIGGLE_SINGLEWEIGHT = 0.2;
+// public const double DEFAULT_P_MUTATE_CONNECTIONWEIGHT_RESET_SMALLPROPORTION = 0.2;
+// public const double DEFAULT_P_MUTATE_CONNECTIONWEIGHT_RESET_SINGLEWEIGHT = 0.2;
+//
+// //----- Tertiary mutation weight parameters.
+// public const double DEFAULT_P_CONNECTION_JIGGLE_LARGEPROPORTION = 0.5;
+// public const double DEFAULT_P_CONNECTION_JIGGLE_SMALLPROPORTION = 0.1;
+// public const double DEFAULT_P_CONNECTION_RESET_SMALLPROPORTION = 0.1;
+
+ //-----
+ public const double DEFAULT_COMPATIBILITY_THRESHOLD = 8 ;
+ public const double DEFAULT_COMPATIBILITY_DISJOINT_COEFF = 1.0;
+ public const double DEFAULT_COMPATIBILITY_EXCESS_COEFF = 1.0;
+ public const double DEFAULT_COMPATIBILITY_WEIGHTDELTA_COEFF = 0.1;
+
+ public const double DEFAULT_ELITISM_PROPORTION = 0.2;
+ public const double DEFAULT_SELECTION_PROPORTION = 0.2;
+
+ public const int DEFAULT_TARGET_SPECIES_COUNT_MIN = 6;
+ public const int DEFAULT_TARGET_SPECIES_COUNT_MAX = 10;
+
+ public const int DEFAULT_SPECIES_DROPOFF_AGE = 200;
+
+ public const int DEFAULT_PRUNINGPHASE_BEGIN_COMPLEXITY_THRESHOLD = 50;
+ public const int DEFAULT_PRUNINGPHASE_BEGIN_FITNESS_STAGNATION_THRESHOLD = 10;
+ public const int DEFAULT_PRUNINGPHASE_END_COMPLEXITY_STAGNATION_THRESHOLD = 15;
+
+ public const double DEFAULT_CONNECTION_WEIGHT_RANGE = 10.0;
+// public const double DEFAULT_CONNECTION_MUTATION_SIGMA = 0.015;
+
+ public const double DEFAULT_ACTIVATION_PROBABILITY = 1.0;
+
+ #endregion
+
+ #region Fields
+
+ public int populationSize;
+ public float pInitialPopulationInterconnections;
+
+ public double pOffspringAsexual;
+ public double pOffspringSexual;
+ public double pInterspeciesMating;
+
+ ///
+ /// The proportion of excess and disjoint genes used from the least fit parent during crossover.
+ ///
+ public double pDisjointExcessGenesRecombined;
+
+ //----- High level mutation proportions
+ public double pMutateConnectionWeights;
+ public double pMutateAddNode;
+ public double pMutateAddConnection;
+ public double pMutateDeleteConnection;
+ public double pMutateDeleteSimpleNeuron;
+
+ ///
+ /// A list of ConnectionMutationParameterGroup objects to drive the types of connection mutation
+ /// that occur.
+ ///
+ public ConnectionMutationParameterGroupList ConnectionMutationParameterGroupList;
+
+ //-----
+ public double compatibilityThreshold;
+ public double compatibilityDisjointCoeff;
+ public double compatibilityExcessCoeff;
+ public double compatibilityWeightDeltaCoeff;
+
+ ///
+ /// The proportion of best genomes from the parent generation to keep in the following generation.
+ ///
+ public double elitismProportion;
+
+ ///
+ /// Similar to the elitist proportion. This is the proportion of genomes from a species that we select
+ /// from when creating offspring. The top n% genomes are selected from.
+ ///
+ public double selectionProportion;
+
+ public int targetSpeciesCountMin;
+ public int targetSpeciesCountMax;
+
+ public int speciesDropoffAge;
+
+ ///
+ /// The complexity at which the pruning phase should begin. The actual threshold is calculated by adding this
+ /// number to the average complexity of the population at the end of the previous prune phase.
+ ///
+ public float pruningPhaseBeginComplexityThreshold;
+
+ ///
+ /// The minimum amount of fitness stagnation (measured in generations) that must have occurred before the pruning
+ /// phase can begin. E.g. consider that pruningPhaseBeginComplexityThreshold has been passed. We do not
+ /// enter the prune phase until this threshold has also been met; that way we wait for the population to stop improving
+ /// before we start pruning.
+ ///
+ public int pruningPhaseBeginFitnessStagnationThreshold;
+
+ ///
+ /// When in pruning mode the avg population complexity will drop. We wait for 'pruningPhaseEndComplexityStagnationThreshold'
+ /// generations of no complexity drop before ending a pruning phase.
+ ///
+ public int pruningPhaseEndComplexityStagnationThreshold;
+
+ public double connectionWeightRange;
+
+ //DAVID
+ public double[] activationProbabilities;
+
+ #endregion
+
+ #region Constructor
+
+ ///
+ /// Default Constructor.
+ ///
+ public NeatParameters()
+ {
+ populationSize = DEFAULT_POPULATION_SIZE;
+ pInitialPopulationInterconnections = DEFAULT_P_INITIAL_POPULATION_INTERCONNECTIONS;
+
+ pOffspringAsexual = DEFAULT_P_OFFSPRING_ASEXUAL;
+ pOffspringSexual = DEFAULT_P_OFFSPRING_SEXUAL;
+ pInterspeciesMating = DEFAULT_P_INTERSPECIES_MATING;
+
+ pDisjointExcessGenesRecombined = DEFAULT_P_DISJOINGEXCESSGENES_RECOMBINED;
+
+// pMutateConnectionWeights = DEFAULT_P_MUTATE_CONNECTION_WEIGHTS;
+// pMutateAddNode = DEFAULT_P_MUTATE_ADD_NODE;
+// pMutateAddConnection = DEFAULT_P_MUTATE_ADD_CONNECTION;
+// pMutateDeleteConnection = DEFAULT_P_MUTATE_DELETE_CONNECTION;
+// pMutateDeleteSimpleNeuron = DEFAULT_P_MUTATE_DELETE_SIMPLENEURON;
+
+ //----- High level mutation proportions
+ pMutateConnectionWeights = DEFAULT_P_MUTATE_CONNECTION_WEIGHTS;
+ pMutateAddNode = DEFAULT_P_MUTATE_ADD_NODE;
+ pMutateAddConnection = DEFAULT_P_MUTATE_ADD_CONNECTION;
+ pMutateDeleteConnection = DEFAULT_P_MUTATE_DELETE_CONNECTION;
+ pMutateDeleteSimpleNeuron = DEFAULT_P_MUTATE_DELETE_SIMPLENEURON;
+
+ //----- Build a default ConnectionMutationParameterGroupList.
+ ConnectionMutationParameterGroupList = new ConnectionMutationParameterGroupList();
+ ConnectionMutationParameterGroupList.Add(new ConnectionMutationParameterGroup(0.125, ConnectionPerturbationType.JiggleEven, ConnectionSelectionType.Proportional, 0.5, 0, 0.05, 0.0));
+ ConnectionMutationParameterGroupList.Add(new ConnectionMutationParameterGroup(0.125, ConnectionPerturbationType.JiggleEven, ConnectionSelectionType.Proportional, 0.1, 0, 0.05, 0.0));
+ ConnectionMutationParameterGroupList.Add(new ConnectionMutationParameterGroup(0.125, ConnectionPerturbationType.JiggleEven, ConnectionSelectionType.FixedQuantity, 0.0, 1, 0.05, 0.0));
+ ConnectionMutationParameterGroupList.Add(new ConnectionMutationParameterGroup(0.5, ConnectionPerturbationType.Reset, ConnectionSelectionType.Proportional, 0.1, 0, 0.0, 0.0));
+ ConnectionMutationParameterGroupList.Add(new ConnectionMutationParameterGroup(0.125, ConnectionPerturbationType.Reset, ConnectionSelectionType.FixedQuantity, 0.0, 1, 0.0, 0.0));
+
+ //-----
+ compatibilityThreshold = DEFAULT_COMPATIBILITY_THRESHOLD;
+ compatibilityDisjointCoeff = DEFAULT_COMPATIBILITY_DISJOINT_COEFF;
+ compatibilityExcessCoeff = DEFAULT_COMPATIBILITY_EXCESS_COEFF;
+ compatibilityWeightDeltaCoeff = DEFAULT_COMPATIBILITY_WEIGHTDELTA_COEFF;
+
+ elitismProportion = DEFAULT_ELITISM_PROPORTION;
+ selectionProportion = DEFAULT_SELECTION_PROPORTION;
+
+ targetSpeciesCountMin = DEFAULT_TARGET_SPECIES_COUNT_MIN;
+ targetSpeciesCountMax = DEFAULT_TARGET_SPECIES_COUNT_MAX;
+
+ pruningPhaseBeginComplexityThreshold = DEFAULT_PRUNINGPHASE_BEGIN_COMPLEXITY_THRESHOLD;
+ pruningPhaseBeginFitnessStagnationThreshold = DEFAULT_PRUNINGPHASE_BEGIN_FITNESS_STAGNATION_THRESHOLD;
+ pruningPhaseEndComplexityStagnationThreshold = DEFAULT_PRUNINGPHASE_END_COMPLEXITY_STAGNATION_THRESHOLD;
+
+ speciesDropoffAge = DEFAULT_SPECIES_DROPOFF_AGE;
+
+ connectionWeightRange = DEFAULT_CONNECTION_WEIGHT_RANGE;
+
+ //DAVID
+ activationProbabilities = new double[4];
+ activationProbabilities[0] = DEFAULT_ACTIVATION_PROBABILITY;
+ activationProbabilities[1] = 0;
+ activationProbabilities[2] = 0;
+ activationProbabilities[3] = 0;
+
+ }
+
+ ///
+ /// Copy constructor.
+ ///
+ ///
+ public NeatParameters(NeatParameters copyFrom)
+ {
+ populationSize = copyFrom.populationSize;
+
+ pOffspringAsexual = copyFrom.pOffspringAsexual;
+ pOffspringSexual = copyFrom.pOffspringSexual;
+ pInterspeciesMating = copyFrom.pInterspeciesMating;
+
+ pDisjointExcessGenesRecombined = copyFrom.pDisjointExcessGenesRecombined;
+
+ pMutateConnectionWeights = copyFrom.pMutateConnectionWeights;
+ pMutateAddNode = copyFrom.pMutateAddNode;
+ pMutateAddConnection = copyFrom.pMutateAddConnection;
+ pMutateDeleteConnection = copyFrom.pMutateDeleteConnection;
+ pMutateDeleteSimpleNeuron = copyFrom.pMutateDeleteSimpleNeuron;
+
+ // Copy the list.
+ ConnectionMutationParameterGroupList = new ConnectionMutationParameterGroupList(copyFrom.ConnectionMutationParameterGroupList);
+
+ compatibilityThreshold = copyFrom.compatibilityThreshold;
+ compatibilityDisjointCoeff = copyFrom.compatibilityDisjointCoeff;
+ compatibilityExcessCoeff = copyFrom.compatibilityExcessCoeff;
+ compatibilityWeightDeltaCoeff = copyFrom.compatibilityWeightDeltaCoeff;
+
+ elitismProportion = copyFrom.elitismProportion;
+ selectionProportion = copyFrom.selectionProportion;
+
+ targetSpeciesCountMin = copyFrom.targetSpeciesCountMin;
+ targetSpeciesCountMax = copyFrom.targetSpeciesCountMax;
+
+ pruningPhaseBeginComplexityThreshold = copyFrom.pruningPhaseBeginComplexityThreshold;
+ pruningPhaseBeginFitnessStagnationThreshold = copyFrom.pruningPhaseBeginFitnessStagnationThreshold;
+ pruningPhaseEndComplexityStagnationThreshold = copyFrom.pruningPhaseEndComplexityStagnationThreshold;
+
+ speciesDropoffAge = copyFrom.speciesDropoffAge;
+
+ connectionWeightRange = copyFrom.connectionWeightRange;
+
+ pInitialPopulationInterconnections = copyFrom.pInitialPopulationInterconnections;
+
+ //DAVID
+ activationProbabilities = (double[])copyFrom.activationProbabilities.Clone();
+ }
+
+ #endregion
+ }
+}
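EvolutionAlgorithm (earlier in this changeset) keeps a separate parameter set for the pruning phase and copies the live compatibility threshold between the two when phases switch. A hedged sketch of how such a prune-phase parameter set might be derived with the copy constructor follows; the specific override values are illustrative only, not values taken from this changeset.

    using SharpNeatLib.Evolution;

    static class PruneParametersSketch
    {
        public static NeatParameters BuildPrunePhaseParameters(NeatParameters normal)
        {
            // Start from a copy of the normal-search parameters...
            NeatParameters pruneParams = new NeatParameters(normal);

            // ...then bias the search towards simplification for the prune phase.
            // These particular overrides are illustrative only.
            pruneParams.pMutateAddNode = 0.0;
            pruneParams.pMutateAddConnection = 0.0;
            pruneParams.pMutateDeleteConnection = 0.1;
            pruneParams.pMutateDeleteSimpleNeuron = 0.05;

            return pruneParams;
        }
    }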
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionMutationParameterGroup.cs b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionMutationParameterGroup.cs
new file mode 100644
index 000000000..8f37d80ac
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionMutationParameterGroup.cs
@@ -0,0 +1,143 @@
+using System;
+using SharpNeatLib.Maths;
+
+namespace SharpNeatLib.Evolution
+{
+ public class ConnectionMutationParameterGroup
+ {
+ #region Public Fields
+
+ ///
+ /// This group's activation proportion - relative to the totalled
+ /// ActivationProportion for all groups.
+ ///
+ public double ActivationProportion;
+
+ ///
+ /// The type of mutation that this group represents.
+ ///
+ public ConnectionPerturbationType PerturbationType;
+
+ ///
+ /// The type of connection selection that this group represents.
+ ///
+ public ConnectionSelectionType SelectionType;
+
+ ///
+ /// Specifies the proportion for SelectionType.Proportional
+ ///
+ public double Proportion;
+
+ ///
+ /// Specifies the quantity for SelectionType.FixedQuantity
+ ///
+ public int Quantity;
+
+ ///
+ /// The perturbation factor for ConnectionPerturbationType.JiggleEven.
+ ///
+ public double PerturbationFactor;
+
+ ///
+ /// Sigma for for ConnectionPerturbationType.JiggleND.
+ ///
+ public double Sigma;
+
+ #endregion
+
+ #region Constructors
+
+ public ConnectionMutationParameterGroup( double activationProportion,
+ ConnectionPerturbationType perturbationType,
+ ConnectionSelectionType selectionType,
+ double proportion,
+ int quantity,
+ double perturbationFactor,
+ double sigma)
+ {
+ ActivationProportion = activationProportion;
+ PerturbationType = perturbationType;
+ SelectionType = selectionType;
+ Proportion = proportion;
+ Quantity = quantity;
+ PerturbationFactor = perturbationFactor;
+ Sigma = sigma;
+ }
+
+ ///
+ /// Copy constructor.
+ ///
+ ///
+ public ConnectionMutationParameterGroup(ConnectionMutationParameterGroup copyFrom)
+ {
+ ActivationProportion = copyFrom.ActivationProportion;
+ PerturbationType = copyFrom.PerturbationType;
+ SelectionType = copyFrom.SelectionType;
+ Proportion = copyFrom.Proportion;
+ Quantity = copyFrom.Quantity;
+ PerturbationFactor = copyFrom.PerturbationFactor;
+ Sigma = copyFrom.Sigma;
+ }
+
+ #endregion
+
+ #region Public Methods
+
+// public void Mutate(double pValueJiggle)
+// {
+// // Determine which parameter to mutate.
+// int possibleOutcomes=2;
+// if(PerturbationType!=ConnectionPerturbationType.Reset)
+// possibleOutcomes++;
+//
+// int outcome = RouletteWheel.SingleThrowEven(possibleOutcomes);
+// bool resetOnly=(Utilities.NextDouble() < pValueJiggle);
+//
+// switch(outcome)
+// {
+// case 0: // ActivationProportion.
+// {
+// if(resetOnly)
+// {
+// ActivationProportion = Utilities.NextDouble();
+// }
+// else
+// {
+//
+// }
+// }
+// case 1: // In scope SelectionType parameter.
+// {
+// if(resetOnly)
+// {
+// switch(SelectionType)
+// {
+// case ConnectionSelectionType.FixedQuantity:
+// Quantity
+// case ConnectionSelectionType.Proportional:
+// Proportion
+//
+// }
+// }
+// else
+// {
+//
+// }
+// }
+// case 2: // In scope PerturbationType parameter.
+// {
+// if(resetOnly)
+// {
+//
+// }
+// else
+// {
+//
+// }
+// }
+// }
+// }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionMutationParameterGroupList.cs b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionMutationParameterGroupList.cs
new file mode 100644
index 000000000..7b713d40a
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionMutationParameterGroupList.cs
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+
+namespace SharpNeatLib.Evolution
+{
+ public class ConnectionMutationParameterGroupList : List<ConnectionMutationParameterGroup>
+ {
+ #region Constructors
+
+ public ConnectionMutationParameterGroupList()
+ {}
+
+ ///
+ /// Copy constructor.
+ ///
+ public ConnectionMutationParameterGroupList(ConnectionMutationParameterGroupList copyFrom)
+ {
+ foreach(ConnectionMutationParameterGroup paramGroup in copyFrom)
+ Add(new ConnectionMutationParameterGroup(paramGroup));
+ }
+
+ #endregion
+ }
+}
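The default groups constructed in NeatParameters carry ActivationProportion values that sum to 1.0, which suggests the group applied for a given weight-mutation event is chosen in proportion to those values. The actual dispatch lives in the genome mutation code, which is not part of this hunk, so the following is only a hedged sketch of that selection.

    using System;
    using SharpNeatLib.Evolution;

    static class MutationGroupSelectionSketch
    {
        // Pick a group with probability proportional to its ActivationProportion.
        public static ConnectionMutationParameterGroup Pick(ConnectionMutationParameterGroupList groups, Random rng)
        {
            double total = 0.0;
            foreach (ConnectionMutationParameterGroup group in groups)
                total += group.ActivationProportion;

            double selectValue = rng.NextDouble() * total;
            double accumulator = 0.0;

            foreach (ConnectionMutationParameterGroup group in groups)
            {
                accumulator += group.ActivationProportion;
                if (selectValue <= accumulator)
                    return group;
            }
            return groups[groups.Count - 1]; // Rounding-error fallback.
        }
    }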
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionPerturbationType.cs b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionPerturbationType.cs
new file mode 100644
index 000000000..11d0168fa
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionPerturbationType.cs
@@ -0,0 +1,22 @@
+using System;
+
+namespace SharpNeatLib.Evolution
+{
+ public enum ConnectionPerturbationType
+ {
+ ///
+ /// Reset weights.
+ ///
+ Reset,
+
+ ///
+ /// Jiggle - even distribution
+ ///
+ JiggleEven,
+
+ ///
+ /// Jiggle - normal distribution
+ ///
+ JiggleND
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionSelectionType.cs b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionSelectionType.cs
new file mode 100644
index 000000000..0f951af65
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/NeatParameters/ConnectionSelectionType.cs
@@ -0,0 +1,21 @@
+using System;
+
+namespace SharpNeatLib.Evolution
+{
+ ///
+ /// Different systems of determining which connection weights will be selected
+ /// for mutation.
+ ///
+ public enum ConnectionSelectionType
+ {
+ ///
+ /// Select a proportion of the weights in a genome.
+ ///
+ Proportional,
+
+ ///
+ /// Select a fixed number of weights in a genome.
+ ///
+ FixedQuantity
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/NewConnectionGeneStruct.cs b/SharpNeatWalker/SharpNeatLib/Evolution/NewConnectionGeneStruct.cs
new file mode 100644
index 000000000..e5da1edde
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/NewConnectionGeneStruct.cs
@@ -0,0 +1,21 @@
+using System;
+using SharpNeatLib.NeatGenome;
+
+namespace SharpNeatLib.Evolution
+{
+ ///
+ /// When mutation creates a new ConnectionGene we wish to store the new gene in a list so that we
+ /// can amalgamate innovations for a generation.
+ ///
+ public struct NewConnectionGeneStruct
+ {
+ public NeatGenome.NeatGenome OwningGenome;
+ public ConnectionGene NewConnectionGene;
+
+ public NewConnectionGeneStruct(NeatGenome.NeatGenome owningGenome, ConnectionGene newConnectionGene)
+ {
+ this.OwningGenome = owningGenome;
+ this.NewConnectionGene = newConnectionGene;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/NewNeuronGeneStruct.cs b/SharpNeatWalker/SharpNeatLib/Evolution/NewNeuronGeneStruct.cs
new file mode 100644
index 000000000..248a45de3
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/NewNeuronGeneStruct.cs
@@ -0,0 +1,37 @@
+using System;
+using SharpNeatLib.NeatGenome;
+
+namespace SharpNeatLib.Evolution
+{
+ ///
+ /// When mutation creates a new NeuronGene we wish to store the new gene in a list so that we
+ /// can amalgamate innovations for a generation. We also need to know the neuron's connections
+ /// and so we use this structure to store the new neuron along with it's two connections.
+ /// Remember that new neurons are always connected to the network by replacing (splitting)
+ /// an existing connection.
+ ///
+ public struct NewNeuronGeneStruct
+ {
+ public NeuronGene NewNeuronGene;
+
+ ///
+ /// The incoming connection.
+ ///
+ public ConnectionGene NewConnectionGene_Input;
+
+ ///
+ /// The outgoing connection.
+ ///
+ public ConnectionGene NewConnectionGene_Output;
+
+
+ public NewNeuronGeneStruct( NeuronGene newNeuronGene,
+ ConnectionGene newConnectionGene_Input,
+ ConnectionGene newConnectionGene_Output)
+ {
+ this.NewNeuronGene = newNeuronGene;
+ this.NewConnectionGene_Input = newConnectionGene_Input;
+ this.NewConnectionGene_Output = newConnectionGene_Output;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/Population.cs b/SharpNeatWalker/SharpNeatLib/Evolution/Population.cs
new file mode 100644
index 000000000..c9cbb040f
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/Population.cs
@@ -0,0 +1,606 @@
+using System;
+using System.Collections;
+
+namespace SharpNeatLib.Evolution
+{
+ public class Population
+ {
+ IdGenerator idGenerator;
+ GenomeList genomeList; // The master list of genomes in the population.
+ Hashtable speciesTable; // A secondary structure containing all of the genomes partitioned into their respective species. A Hashtable of Species objects keyed by species ID.
+
+ int populationSize; // The base-line number for the population size. The actual size may vary slightly from this figure as offspring are generated and culled.
+ double totalFitness; // totalled fitness values of all genomes in the population.
+ double meanFitness;
+ double totalSpeciesMeanFitness;
+
+ // The totalled fitness of the genomes that will be selected from.
+ double selectionTotalFitness;
+
+ int totalNeuronCount;
+ int totalConnectionCount;
+ int totalStructureCount;
+ float avgComplexity;
+
+ int nextSpeciesId=0;
+
+ // Some statistics.
+ long generationAtLastImprovement=0;
+ double maxFitnessEver = 0.0;
+// double fitnessAtLastPrunePhaseEnd=0.0;
+
+ float prunePhaseAvgComplexityThreshold=-1;
+
+ #region Constructor
+
+ public Population(IdGenerator idGenerator, GenomeList genomeList)
+ {
+ this.idGenerator = idGenerator;
+ this.genomeList = genomeList;
+ this.populationSize = genomeList.Count;
+ }
+
+ #endregion
+
+ #region Properties
+
+ public IdGenerator IdGenerator
+ {
+ get
+ {
+ return idGenerator;
+ }
+ }
+
+ ///
+ /// The base-line number for the population size. The actual size may vary slightly from this figure as offspring are generated and culled.
+ ///
+ public int PopulationSize
+ {
+ get
+ {
+ return populationSize;
+ }
+ }
+
+ public GenomeList GenomeList
+ {
+ get
+ {
+ return genomeList;
+ }
+ }
+
+ public Hashtable SpeciesTable
+ {
+ get
+ {
+ return speciesTable;
+ }
+ }
+
+ public double TotalFitness
+ {
+ get
+ {
+ return totalFitness;
+ }
+ set
+ {
+ totalFitness = value;
+ }
+ }
+
+ public double MeanFitness
+ {
+ get
+ {
+ return meanFitness;
+ }
+ set
+ {
+ meanFitness = value;
+ }
+ }
+
+ ///
+ /// The total of all of the Species.MeanFitness
+ ///
+ public double TotalSpeciesMeanFitness
+ {
+ get
+ {
+ return totalSpeciesMeanFitness;
+ }
+ set
+ {
+ totalSpeciesMeanFitness = value;
+ }
+ }
+
+ ///
+ /// The totalled fitness of the genomes that will be selected from.
+ ///
+ public double SelectionTotalFitness
+ {
+ get
+ {
+ return selectionTotalFitness;
+ }
+ set
+ {
+ selectionTotalFitness = value;
+ }
+ }
+
+ public int TotalNeuronCount
+ {
+ get
+ {
+ return totalNeuronCount;
+ }
+ set
+ {
+ totalNeuronCount = value;
+ }
+ }
+
+ public int TotalConnectionCount
+ {
+ get
+ {
+ return totalConnectionCount;
+ }
+ set
+ {
+ totalConnectionCount = value;
+ }
+ }
+
+ ///
+ /// TotalNeuronCount + TotalConnectionCount
+ ///
+ public int TotalStructureCount
+ {
+ get
+ {
+ return totalStructureCount;
+ }
+ set
+ {
+ totalStructureCount = value;
+ }
+ }
+
+ ///
+ /// Avg Structures Per Genome.
+ ///
+ public float AvgComplexity
+ {
+ get
+ {
+ return avgComplexity;
+ }
+ set
+ {
+ avgComplexity = value;
+ }
+ }
+
+ public long GenerationAtLastImprovement
+ {
+ get
+ {
+ return generationAtLastImprovement;
+ }
+ set
+ {
+ generationAtLastImprovement = value;
+ }
+ }
+
+// public long GenerationAtLastPrunePhaseEnd
+// {
+// get
+// {
+// return generationAtLastPrunePhaseEnd;
+// }
+// set
+// {
+// generationAtLastPrunePhaseEnd = value;
+// }
+// }
+
+ public double MaxFitnessEver
+ {
+ get
+ {
+ return maxFitnessEver;
+ }
+ set
+ {
+ maxFitnessEver = value;
+ }
+ }
+
+// public double FitnessAtLastPrunePhaseEnd
+// {
+// get
+// {
+// return fitnessAtLastPrunePhaseEnd;
+// }
+// set
+// {
+// fitnessAtLastPrunePhaseEnd = value;
+// }
+// }
+
+ public float PrunePhaseAvgComplexityThreshold
+ {
+ get
+ {
+ return prunePhaseAvgComplexityThreshold;
+ }
+ set
+ {
+ prunePhaseAvgComplexityThreshold = value;
+ }
+ }
+
+ #endregion
+
+ #region Public Methods
+
+ public void ResetFitnessValues()
+ {
+ totalFitness = 0.0;
+ meanFitness = 0.0;
+ totalSpeciesMeanFitness = 0.0;
+ selectionTotalFitness = 0.0;
+ }
+
+ public void AddGenomeToPopulation(EvolutionAlgorithm ea, IGenome genome)
+ {
+ //----- Add genome to the master list of genomes.
+ genomeList.Add(genome);
+
+ //----- Determine it's species and insert into the speciestable.
+ AddGenomeToSpeciesTable(ea, genome);
+ }
+
+ ///
+ /// Determine the species of each genome in genomeList and build the 'species' Hashtable.
+ ///
+ public void BuildSpeciesTable(EvolutionAlgorithm ea)
+ {
+ //----- Build the table.
+ speciesTable = new Hashtable();
+
+ // First pass. Genomes that already have an assigned species.
+
+ //foreach(IGenome genome in genomeList)
+ int genomeIdx;
+ int genomeBound = genomeList.Count;
+ for(genomeIdx=0; genomeIdx0)
+ {
+ bSpeciesRemoved = true;
+ RebuildGenomeList();
+ }
+ else
+ {
+ bSpeciesRemoved = false;
+ }
+
+ if(bSpeciesRemoved)
+ speciesToRemove.Clear();
+
+ return bSpeciesRemoved;
+ }
+
+ public void TrimAllSpeciesBackToElite()
+ {
+ speciesToRemove.Clear();
+ foreach(Species species in speciesTable.Values)
+ {
+ if(species.ElitistSize==0)
+ { // Remove the entire species.
+ speciesToRemove.Add(species.SpeciesId);
+ }
+ else
+ { // Remove genomes from the species.
+ int delta = species.Members.Count - species.ElitistSize;
+ species.Members.RemoveRange(species.ElitistSize, delta);
+ }
+ }
+ //foreach(int speciesId in speciesToRemove)
+ int speciesBound=speciesToRemove.Count;
+ for(int speciesIdx=0; speciesIdx
+ /// Rebuild GenomeList from the genomes held in the speciesTable.
+ /// Quite useful to keep the list up-to-date after a species has been deleted.
+ ///
+ public void RebuildGenomeList()
+ {
+ genomeList.Clear();
+ foreach(Species species in speciesTable.Values)
+ {
+ //foreach(IGenome genome in species.Members)
+ int genomeBound = species.Members.Count;
+ for(int genomeIdx=0; genomeIdx
+ /// Some(most) types of network have fixed numbers of input and output nodes and will not work correctly or
+ /// throw an exception if we try and use inputs/outputs that do not exist. This method allows us to check
+ /// compatibility of the current population's genomes before we try to use them.
+ ///
+ ///
+ ///
+ ///
+ public bool IsCompatibleWithNetwork(int inputCount, int outputCount)
+ {
+ foreach(IGenome genome in genomeList)
+ {
+ if(!genome.IsCompatibleWithNetwork(inputCount, outputCount))
+ return false;
+ }
+ return true;
+ }
+
+ public void IncrementGenomeAges()
+ {
+ int genomeBound = genomeList.Count;
+ for(int genomeIdx=0; genomeIdx
+ /// For debug purposes only.
+ ///
+ /// Returns true if population integrity checks out OK.
+ public bool PerformIntegrityCheck()
+ {
+ foreach(IGenome genome in genomeList)
+ {
+ if(!genome.PerformIntegrityCheck())
+ return false;
+ }
+ return true;
+ }
+
+ #endregion
+
+ #region Private Methods
+
+ private void AddGenomeToSpeciesTable(EvolutionAlgorithm ea, IGenome genome)
+ {
+ Species species = DetermineSpecies(ea, genome);
+ if(species==null)
+ {
+ species = new Species();
+
+ // Completely new species. Generate a speciesID.
+ species.SpeciesId = nextSpeciesId++;
+ speciesTable.Add(species.SpeciesId, species);
+ }
+
+ //----- The genome is a member of an existing species.
+ genome.SpeciesId = species.SpeciesId;
+ species.Members.Add(genome);
+ }
+
+ ///
+ /// This version of AddGenomeToSpeciesTable is used by RedetermineSpeciation(). It allows us to
+ /// pass in the genome's original species object, which we can then re-use if the genome does not
+ /// match any of our existing species and needs to be placed into a new species of its own.
+ /// The old species object can be used directly because it should already have had all of
+ /// its genomes removed by RedetermineSpeciation() before being passed in here.
+ ///
+ ///
+ ///
+ ///
+ private void AddGenomeToSpeciesTable(EvolutionAlgorithm ea, IGenome genome, Species originalSpecies)
+ {
+ Species species = DetermineSpecies(ea, genome);
+ if(species==null)
+ {
+ // The genome is not in one of the existing (new) species. Is this genome's old
+ // species already in the new table?
+ species = (Species)speciesTable[genome.SpeciesId];
+ if(species!=null)
+ {
+ // The genome's old species is already in the table but the genome no longer fits into that
+ // species. Therefore we need to create an entirely new species.
+ species = new Species();
+ species.SpeciesId = nextSpeciesId++;
+ }
+ else
+ {
+ // We can re-use the original species object.
+ species = originalSpecies;
+ }
+ speciesTable.Add(species.SpeciesId, species);
+ }
+
+ //----- The genome is a member of an existing species.
+ genome.SpeciesId = species.SpeciesId;
+ species.Members.Add(genome);
+ }
+
+
+
+ ///
+ /// Determine the given genome's species and return that species. If the genome does not
+ /// match one of the existing species then we return null to indicate a new species.
+ ///
+ ///
+ ///
+ private Species DetermineSpecies(EvolutionAlgorithm ea, IGenome genome)
+ {
+ //----- Performance optimization. Check against parent species IDs first.
+ Species parentSpecies1 = null;
+ Species parentSpecies2 = null;
+
+ // Parent1. Not set in initial population.
+ if(genome.ParentSpeciesId1!=-1)
+ {
+ parentSpecies1 = (Species)speciesTable[genome.ParentSpeciesId1];
+ if(parentSpecies1!=null)
+ {
+ if(IsGenomeInSpecies(genome, parentSpecies1, ea))
+ return parentSpecies1;
+ }
+ }
+
+ // Parent2. Not set if result of asexual reproduction.
+ if(genome.ParentSpeciesId2!=-1)
+ {
+ parentSpecies2 = (Species)speciesTable[genome.ParentSpeciesId2];
+ if(parentSpecies2!=null)
+ {
+ if(IsGenomeInSpecies(genome, parentSpecies2, ea))
+ return parentSpecies2;
+ }
+ }
+
+ //----- Not in parent species. Systematically compare against all species.
+ foreach(Species compareWithSpecies in speciesTable.Values)
+ {
+ // Don't compare against the parent species again.
+ if(compareWithSpecies==parentSpecies1 || compareWithSpecies == parentSpecies2)
+ continue;
+
+ if(IsGenomeInSpecies(genome, compareWithSpecies, ea))
+ { // We have found matching species.
+ return compareWithSpecies;
+ }
+ }
+
+ //----- The genome is not a member of any existing species.
+ return null;
+ }
+
+ private bool IsGenomeInSpecies(IGenome genome, Species compareWithSpecies, EvolutionAlgorithm ea)
+ {
+// // Pick a member of the species at random.
+// IGenome compareWithGenome = compareWithSpecies.Members[(int)Math.Floor(compareWithSpecies.Members.Count * Utilities.NextDouble())];
+// return (genome.CalculateCompatibility(compareWithGenome, ea.NeatParameters) < ea.NeatParameters.compatibilityThreshold);
+
+ // Compare against the species champ. The species champ is the exemplar that represents the species.
+ IGenome compareWithGenome = compareWithSpecies.Members[0];
+ //IGenome compareWithGenome = compareWithSpecies.Members[(int)Math.Floor(compareWithSpecies.Members.Count * Utilities.NextDouble())];
+ return genome.IsCompatibleWithGenome(compareWithGenome, ea.NeatParameters);
+ }
+
+ #endregion
+ }
+}
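DetermineSpeciesTargetSize (in EvolutionAlgorithm above) apportions Population.PopulationSize among species in proportion to Species.MeanFitness over Population.TotalSpeciesMeanFitness, i.e. standard fitness sharing. A worked example with made-up numbers:

    using System;

    static class FitnessSharingSketch
    {
        public static void Main()
        {
            int populationSize = 300;
            double[] speciesMeanFitness = { 2.0, 3.0, 5.0 }; // Illustrative values only.

            double totalSpeciesMeanFitness = 0.0;
            foreach (double meanFitness in speciesMeanFitness)
                totalSpeciesMeanFitness += meanFitness;

            // Same proportioning as DetermineSpeciesTargetSize():
            // round((meanFitness / totalSpeciesMeanFitness) * populationSize).
            foreach (double meanFitness in speciesMeanFitness)
            {
                int targetSize = (int)Math.Round((meanFitness / totalSpeciesMeanFitness) * populationSize);
                Console.WriteLine(targetSize); // 60, 90, 150
            }
        }
    }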
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/PruningModeGenomeComparer.cs b/SharpNeatWalker/SharpNeatLib/Evolution/PruningModeGenomeComparer.cs
new file mode 100644
index 000000000..4a2ef6a9a
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/PruningModeGenomeComparer.cs
@@ -0,0 +1,40 @@
+using System;
+using System.Collections.Generic;
+using SharpNeatLib.Evolution;
+using SharpNeatLib.NeatGenome;
+
+namespace SharpNeatLib.Evolution
+{
+ ///
+ /// Sort by Fitness (descending). Genomes with like fitness are then sorted by genome size (ascending).
+ /// This means the selection routines are more likely to select the fit AND the smallest first.
+ ///
+ public class PruningModeGenomeComparer : IComparer<IGenome>
+ {
+ #region IComparer Members
+
+ public int Compare(IGenome x, IGenome y)
+ {
+ NeatGenome.NeatGenome X = (NeatGenome.NeatGenome)x;
+ NeatGenome.NeatGenome Y = (NeatGenome.NeatGenome)y;
+
+ double fitnessDelta = Y.Fitness - X.Fitness;
+ if (fitnessDelta < 0.0D)
+ return -1;
+ else if (fitnessDelta > 0.0D)
+ return 1;
+
+ long sizeDelta = (X.NeuronGeneList.Count + X.ConnectionGeneList.Count) - (Y.NeuronGeneList.Count + Y.ConnectionGeneList.Count);
+
+ // Convert result to an int.
+ if (sizeDelta < 0)
+ return -1;
+ else if (sizeDelta == 0)
+ return 0;
+ else
+ return 1;
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/Species.cs b/SharpNeatWalker/SharpNeatLib/Evolution/Species.cs
new file mode 100644
index 000000000..f40b06985
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/Species.cs
@@ -0,0 +1,56 @@
+using System;
+
+namespace SharpNeatLib.Evolution
+{
+ public class Species
+ {
+ public long SpeciesAge=0;
+ public long AgeAtLastImprovement=0;
+ public double MaxFitnessEver=0.0;
+
+ public int SpeciesId=-1;
+ public GenomeList Members = new GenomeList();
+ public double TotalFitness;
+ public double MeanFitness;
+
+ ///
+ /// The target size for this species, as determined by the fitness sharing technique.
+ ///
+ public int TargetSize;
+
+ ///
+ /// The number of organisms that are elite and should not be culled.
+ ///
+ public int ElitistSize;
+
+ ///
+ /// The number of top scoring genomes we should select from.
+ ///
+ public int SelectionCount;
+
+ ///
+ /// The total fitness of all of the genomes that can be selected from.
+ ///
+ public double SelectionCountTotalFitness;
+
+ public int TotalNeuronCount;
+ public int TotalConnectionCount;
+
+ ///
+ /// TotalNeuronCount + TotalConnectionCount.
+ ///
+ public int TotalStructureCount;
+
+ ///
+ /// Indicates that this species is a candidate for species culling. This will normally occur when the
+ /// species has not improved for a number of generations.
+ ///
+ public bool CullCandidateFlag=false;
+
+ public void ResetFitnessValues()
+ {
+ TotalFitness = 0;
+ MeanFitness = 0;
+ }
+ }
+}
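The dropoff logic in EvolutionAlgorithm flags a species as a cull candidate once it has gone more than speciesDropoffAge generations without improving on MaxFitnessEver (outside of pruning mode). Restated on the Species fields alone (a sketch, not library code):

    using SharpNeatLib.Evolution;

    static class DropoffSketch
    {
        // True when the species has not improved for more than 'speciesDropoffAge' generations.
        public static bool IsCullCandidate(Species species, int speciesDropoffAge)
        {
            return (species.SpeciesAge - species.AgeAtLastImprovement) > speciesDropoffAge;
        }
    }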
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/Xml/IGenomeReader.cs b/SharpNeatWalker/SharpNeatLib/Evolution/Xml/IGenomeReader.cs
new file mode 100644
index 000000000..5ffb8dfa3
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/Xml/IGenomeReader.cs
@@ -0,0 +1,11 @@
+using System;
+using System.Xml;
+using SharpNeatLib.Evolution;
+
+namespace SharpNeatLib.Evolution.Xml
+{
+ public interface IGenomeReader
+ {
+ IGenome Read(XmlElement xmlGenome);
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/Xml/XmlPopulationReaderStatic.cs b/SharpNeatWalker/SharpNeatLib/Evolution/Xml/XmlPopulationReaderStatic.cs
new file mode 100644
index 000000000..bb417a5bd
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/Xml/XmlPopulationReaderStatic.cs
@@ -0,0 +1,27 @@
+using System;
+using System.Xml;
+
+using SharpNeatLib.Xml;
+
+namespace SharpNeatLib.Evolution.Xml
+{
+ public class XmlPopulationReader
+ {
+ public static Population Read(XmlDocument doc, IGenomeReader genomeReader, IIdGeneratorFactory idGeneratorFactory)
+ {
+ XmlElement xmlPopulation = (XmlElement)doc.SelectSingleNode("population");
+ return Read(xmlPopulation, genomeReader, idGeneratorFactory);
+ }
+
+ public static Population Read(XmlElement xmlPopulation, IGenomeReader genomeReader, IIdGeneratorFactory idGeneratorFactory)
+ {
+ GenomeList genomeList = new GenomeList();
+ XmlNodeList listGenomes = xmlPopulation.SelectNodes("genome");
+ foreach(XmlElement xmlGenome in listGenomes)
+ genomeList.Add(genomeReader.Read(xmlGenome));
+
+ IdGenerator idGenerator = idGeneratorFactory.CreateIdGenerator(genomeList);
+ return new Population(idGenerator, genomeList);
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Evolution/Xml/XmlPopulationWriterStatic.cs b/SharpNeatWalker/SharpNeatLib/Evolution/Xml/XmlPopulationWriterStatic.cs
new file mode 100644
index 000000000..df0e9f3f4
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Evolution/Xml/XmlPopulationWriterStatic.cs
@@ -0,0 +1,22 @@
+using System;
+using System.Xml;
+
+using SharpNeatLib.NeuralNetwork;
+using SharpNeatLib.Xml;
+
+namespace SharpNeatLib.Evolution.Xml
+{
+ public class XmlPopulationWriter
+ {
+ public static void Write(XmlNode parentNode, Population p, IActivationFunction activationFn)
+ {
+ XmlElement xmlPopulation = XmlUtilities.AddElement(parentNode, "population");
+ XmlUtilities.AddAttribute(xmlPopulation, "activation-fn-id", activationFn.FunctionId);
+
+ foreach(IGenome genome in p.GenomeList)
+ {
+ genome.Write(xmlPopulation);
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/ExperimentUtils/Functions/IFunction.cs b/SharpNeatWalker/SharpNeatLib/ExperimentUtils/Functions/IFunction.cs
new file mode 100644
index 000000000..1f6c09f0b
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/ExperimentUtils/Functions/IFunction.cs
@@ -0,0 +1,17 @@
+using System;
+
+namespace SharpNeatLib.Experiments
+{
+ ///
+ /// Describes a function for the function regression experiments.
+ ///
+ public interface IFunction
+ {
+ ///
+ /// Gets an array of values sampled over a continuous range of some function.
+ ///
+ ///
+ ///
+ double[] GetFunctionValueArray(int length);
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/ExperimentUtils/Functions/LogisticMapFunction.cs b/SharpNeatWalker/SharpNeatLib/ExperimentUtils/Functions/LogisticMapFunction.cs
new file mode 100644
index 000000000..c2194242f
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/ExperimentUtils/Functions/LogisticMapFunction.cs
@@ -0,0 +1,61 @@
+using System;
+
+using SharpNeatLib;
+
+namespace SharpNeatLib.Experiments
+{
+ public class LogisticMapFunction : IFunction
+ {
+ #region Class Variables
+
+ // Logistic map parameters.
+ const double x_init = 0.8;
+ const double r = 4.0;
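+
+ // For reference: GetNextValue() below iterates the standard logistic map x(n+1) = r * x(n) * (1 - x(n));
+ // with r = 4.0 the sequence is chaotic over (0,1), which is what makes it a useful regression target.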
+
+// // Reading_interval defines the rate at which we take a reading from the samples.
+// int reading_interval = 10;
+
+ //--- Working variables.
+ double x_current;
+
+ #endregion
+
+ #region Private Methods
+
+ private void InitialiseFunction()
+ {
+ x_current = x_init;
+ }
+
+ private double GetNextValue()
+ {
+ x_current = r * x_current*(1-x_current);
+ return x_current;
+ }
+
+ #endregion
+
+ #region IFunction Members
+
+ public double[] GetFunctionValueArray(int length)
+ {
+ InitialiseFunction();
+
+ double[] valueArray = new double[length];
+
+ for(int i=0; i<length; i++)
+ valueArray[i] = GetNextValue();
+
+ return valueArray;
+ }
+
+ #endregion
+ /// The Mackey_Glass equation.
+ /// Returns the value of x at time t+1 given x at time t, and x at time t-tau.
+ ///
+ /// Current value of x.
+ ///
+ ///
+ private double mg_equation(double x, double x_tau)
+ {
+ return ((a*x_tau)/(1.0+Math.Pow(x_tau,10.0))) - b*x;
+ }
+
+ ///
+ /// Runge_Kutta approximation of the next value of the Mackey_Glass equation.
+ /// x - current value of x, at time t.
+ /// t - the current time.
+ ///
+ ///
+ ///
+ ///
+ ///
+ private double rk4(double x, double t)
+ {
+ double x1, x2, x3, x4;
+
+ // We only need to get the value of x at t-tau once because the sample point at half
+ // t_delta still points to the same historical x value.
+ double x_tau = x_history(tau);
+
+ x1 = t_delta * mg_equation(x, x_history(tau));
+ x2 = t_delta * mg_equation(x+0.5*x1, x_tau);
+ x3 = t_delta * mg_equation(x+0.5*x2, x_tau);
+ x4 = t_delta * mg_equation(x+0.5*x3, x_tau);
+
+ return x + (x1+x4)/6.0 + (x2+x3)/3.0;
+ }
+
+ ///
+ /// Get the already-calculated value of x at time t-t_ago [from the history buffer].
+ ///
+ ///
+ ///
+ private double x_history(double t_ago)
+ {
+ // How many sample points ago? Cast directly to an int. Don't bother rounding.
+ int points = (int)(t_ago/t_delta);
+
+ int buffer_length = x_history_buffer.Length;
+ if(buffer_length>=points)
+ { // The history goes far back enough. Return the point's x value.
+ return x_history_buffer[buffer_length-points];
+ }
+
+ // The history doesn't go that far back. The point must be before t=0, therefore
+ // the value of x is predefined as being x_init.
+ return x_init;
+ }
+
+ private void InitialiseFunction()
+ {
+ x_history_length = (int)Math.Ceiling(tau/t_delta);
+ x_history_buffer = new DoubleCircularBuffer(x_history_length);
+
+ x_current = x_init;
+ t_current = 0.0;
+
+ while(t_current
+ /// Refresh the view using the provided network. The intention here is that the current best network
+ /// will be passed in. We can then update the view showing how well that network performs.
+ ///
+ ///
+ abstract public void RefreshView(INetwork network);
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Experiments/AbstractExperimentView.resources b/SharpNeatWalker/SharpNeatLib/Experiments/AbstractExperimentView.resources
new file mode 100644
index 000000000..45275d930
Binary files /dev/null and b/SharpNeatWalker/SharpNeatLib/Experiments/AbstractExperimentView.resources differ
diff --git a/SharpNeatWalker/SharpNeatLib/Experiments/AbstractExperimentView.resx b/SharpNeatWalker/SharpNeatLib/Experiments/AbstractExperimentView.resx
new file mode 100644
index 000000000..3f337e081
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Experiments/AbstractExperimentView.resx
@@ -0,0 +1,42 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 1.0.0.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=1.0.5000.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=1.0.5000.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
diff --git a/SharpNeatWalker/SharpNeatLib/Experiments/HyperNEATParameters.cs b/SharpNeatWalker/SharpNeatLib/Experiments/HyperNEATParameters.cs
new file mode 100644
index 000000000..25087ae20
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Experiments/HyperNEATParameters.cs
@@ -0,0 +1,97 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Experiments
+{
+ public class HyperNEATParameters
+ {
+ public static double threshold = 0;
+ public static double weightRange = 0;
+ public static int numThreads = 0;
+ public static IActivationFunction substrateActivationFunction = null;
+ public static System.Collections.Generic.Dictionary<string, double> activationFunctions = new Dictionary<string, double>();
+ public static System.Collections.Generic.Dictionary<string, string> parameters = new Dictionary<string, string>();
+ static HyperNEATParameters()
+ {
+ loadParameterFile();
+ }
+
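+ // Illustrative sketch of the params.txt layout this loader expects (key names are taken from the
+ // lookups below; the values and activation function names shown here are hypothetical examples):
+ //
+ //   threshold 0.2
+ //   weightrange 3.0
+ //   numberofthreads 4
+ //   substrateactivationfunction BipolarSigmoid
+ //   StartActivationFunctions
+ //   BipolarSigmoid 0.25
+ //   Sine 0.25
+ //   Gaussian 0.25
+ //   Linear 0.25
+ //   EndActivationFunctions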
+ public static void loadParameterFile()
+ {
+ try
+ {
+ System.IO.StreamReader input = new System.IO.StreamReader(@"params.txt");
+ string[] line;
+ double probability;
+ bool readingActivation = false;
+ while (!input.EndOfStream)
+ {
+ line = input.ReadLine().Split(' ');
+ if (line[0].Equals("StartActivationFunctions"))
+ {
+ readingActivation = true;
+ }
+ else if (line[0].Equals("EndActivationFunctions"))
+ {
+ readingActivation = false;
+ }
+ else
+ {
+ if (readingActivation)
+ {
+ double.TryParse(line[1], out probability);
+ activationFunctions.Add(line[0], probability);
+ }
+ else
+ {
+ parameters.Add(line[0].ToLower(), line[1]);
+ }
+ }
+ }
+ }
+ catch (Exception e)
+ {
+ System.Console.WriteLine(e.Message);
+ System.Console.WriteLine("Error reading config file, check file location and formation");
+ //close program
+ }
+ ActivationFunctionFactory.setProbabilities(activationFunctions);
+
+ setParameterDouble("threshold", ref threshold);
+ setParameterDouble("weightrange", ref weightRange);
+ setParameterInt("numberofthreads", ref numThreads);
+ setSubstrateActivationFunction();
+ }
+
+ private static void setSubstrateActivationFunction()
+ {
+ string parameter=getParameter("substrateactivationfunction");
+ if(parameter!=null)
+ substrateActivationFunction=ActivationFunctionFactory.GetActivationFunction(parameter);
+ }
+
+ public static string getParameter(string parameter)
+ {
+ if (parameters.ContainsKey(parameter))
+ return parameters[parameter];
+ else
+ return null;
+ }
+
+ public static void setParameterDouble(string parameter, ref double target)
+ {
+ parameter = getParameter(parameter.ToLower());
+ if (parameter != null)
+ double.TryParse(parameter, out target);
+ }
+
+ public static void setParameterInt(string parameter, ref int target)
+ {
+ parameter = getParameter(parameter.ToLower());
+ if (parameter != null)
+ int.TryParse(parameter, out target);
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Experiments/IExperiment.cs b/SharpNeatWalker/SharpNeatLib/Experiments/IExperiment.cs
new file mode 100644
index 000000000..3448389f7
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Experiments/IExperiment.cs
@@ -0,0 +1,88 @@
+using System;
+using System.Collections;
+
+using SharpNeatLib.Evolution;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Experiments
+{
+ public interface IExperiment
+ {
+ ///
+ /// This method is called immediately following instantiation of an experiment. It is used
+ /// to pass in a hashtable of string key-value pairs from the 'experimentParameters'
+ /// block of the experiment configuration block within the application config file.
+ ///
+ /// If no parameters were specified then an empty Hashtable is used.
+ ///
+ ///
+ void LoadExperimentParameters(Hashtable parameterTable);
+
+ ///
+ /// The IPopulationEvaluator to use for the experiment. This is passed to the
+ /// constructor of EvolutionAlgorithm.
+ ///
+ IPopulationEvaluator PopulationEvaluator
+ {
+ get;
+ }
+
+ ///
+ /// This is called prior to constructing a new EvolutionAlgorithm to ensure we have a
+ /// fresh evaluator - some evaluators have state.
+ ///
+ ///
+ void ResetEvaluator(IActivationFunction activationFn);
+
+ ///
+ /// The number of input neurons required for an experiment. This figure is used
+ /// to generate a population of genomes with the correct number of inputs.
+ ///
+ int InputNeuronCount
+ {
+ get;
+ }
+
+ ///
+ /// The number of output neurons required for an experiment. This figure is used
+ /// to generate a population of genomes with the correct number of outputs.
+ ///
+ int OutputNeuronCount
+ {
+ get;
+ }
+
+ ///
+ /// The default NeatParameters object to use for the experiment.
+ ///
+ NeatParameters DefaultNeatParameters
+ {
+ get;
+ }
+
+ ///
+ /// This is the suggested network activation function for an experiment. The default
+ /// activation function is shown within SharpNEAT's GUI and can be overridden by
+ /// selecting an alternative function in the drop-down combobox.
+ ///
+ IActivationFunction SuggestedActivationFunction
+ {
+ get;
+ }
+
+ ///
+ /// Returns a Form based view of the experiment. It is acceptable to return null to
+ /// indicate that no view is available.
+ ///
+ ///
+ AbstractExperimentView CreateExperimentView();
+
+ ///
+ /// A description of the evaluator and domain to aid new users.
+ ///
+ string ExplanatoryText
+ {
+ get;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Experiments/INetworkEvaluator.cs b/SharpNeatWalker/SharpNeatLib/Experiments/INetworkEvaluator.cs
new file mode 100644
index 000000000..bf3ae797f
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Experiments/INetworkEvaluator.cs
@@ -0,0 +1,43 @@
+using System;
+using System.Threading;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Experiments
+{
+ ///
+ /// A simple interface that describes a class that can evaluate a single INetwork.
+ /// Typically this interface can be passed to the constructor of
+ /// SingleFilePopulationEvaluator to provide an IPopulationEvaluator to the
+ /// EvolutionAlgorithm. See comments on SingleFilePopulationEvaluator for more information.
+ ///
+ public interface INetworkEvaluator
+ {
+ ///
+ /// Evaluates the argument INetwork.
+ ///
+ ///
+ /// Fitness of the network.
+ double EvaluateNetwork(INetwork network);
+
+ ///
+ /// Evaluates the argument INetwork which is a CPPN, and decodes the CPPN in a thread safe manner.
+ ///
+ ///
+ ///
+ /// Fitness of the network.
+ double[] threadSafeEvaluateNetwork(INetwork network);
+
+ ///
+ /// A human readable message that describes the state of the evaluator. This is useful if the
+ /// evaluator has several modes (e.g. difficulty levels in incremental evolution) and we want
+ /// to let the user know what mode the evaluator is in.
+ ///
+ string EvaluatorStateMessage
+ {
+ get;
+ }
+
+ // GWM - method called at the end of a generation for updating novelty threshold
+ void endOfGeneration();
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Experiments/INetworkPairEvaluator.cs b/SharpNeatWalker/SharpNeatLib/Experiments/INetworkPairEvaluator.cs
new file mode 100644
index 000000000..546402c85
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Experiments/INetworkPairEvaluator.cs
@@ -0,0 +1,27 @@
+using System;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Experiments
+{
+ ///
+ /// This structure is used by INetworkPairEvaluator. Using such a structure
+ /// may be slightly more efficient than a 2 element array, and is also less
+ /// prone to index-out-of-bounds errors.
+ ///
+ public struct FitnessPair
+ {
+ public double fitness1;
+ public double fitness2;
+ }
+
+ ///
+ /// An interface that defines a method for evaluating a pair of networks.
+ /// This interface is a useful abstraction for certain types of
+ /// co-evolution experiment where networks are evaluated by comparing
+ /// against a set of other networks, one at a time - in pairs.
+ ///
+ public interface INetworkPairEvaluator
+ {
+ FitnessPair EvaluateNetworkPair(INetwork net1, INetwork net2);
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Experiments/ISimulator.cs b/SharpNeatWalker/SharpNeatLib/Experiments/ISimulator.cs
new file mode 100644
index 000000000..ed47c186d
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Experiments/ISimulator.cs
@@ -0,0 +1,31 @@
+using System;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Experiments
+{
+ public interface ISimulator
+ {
+ void Initialise_Random();
+ bool PerformSingleStep(INetwork network);
+
+ #region Properties
+
+ ///
+ /// The number of input signals used in the simulator.
+ ///
+ int InputNeuronCount
+ {
+ get;
+ }
+
+ ///
+ /// The number of output signals used in the simulator.
+ ///
+ int OutputNeuronCount
+ {
+ get;
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Experiments/MultiThreadedPopulationEvaluator.cs b/SharpNeatWalker/SharpNeatLib/Experiments/MultiThreadedPopulationEvaluator.cs
new file mode 100644
index 000000000..71998cea7
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Experiments/MultiThreadedPopulationEvaluator.cs
@@ -0,0 +1,137 @@
+using System;
+using System.Threading;
+using SharpNeatLib.Evolution;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Experiments
+{
+ ///
+ /// An implementation of IPopulationEvaluator that evaluates all new genomes(EvaluationCount==0)
+ /// within the population using multiple threads, using an INetworkEvaluator provided at construction time.
+ ///
+ /// This class provides an IPopulationEvaluator for use within the EvolutionAlgorithm by simply
+ /// providing an INetworkEvaluator to its constructor. This usage is intended for experiments
+ /// where the genomes are evaluated independently of each other (e.g. not simultaneously in
+ /// a simulated world) using a fixed evaluation function that can be described by an INetworkEvaluator.
+ ///
+ public class MultiThreadedPopulationEvaluator : IPopulationEvaluator
+ {
+ private readonly INetworkEvaluator _networkEvaluator;
+ private readonly IActivationFunction _activationFn;
+ private static readonly Semaphore Sem = new Semaphore(HyperNEATParameters.numThreads, HyperNEATParameters.numThreads);
+ private ulong _evaluationCount;
+
+ #region Constructor
+
+ public MultiThreadedPopulationEvaluator(INetworkEvaluator networkEvaluator, IActivationFunction activationFn)
+ {
+ _networkEvaluator = networkEvaluator;
+ _activationFn = activationFn;
+ }
+
+ #endregion
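+
+ // Typical wiring (illustrative sketch; 'MyNetworkEvaluator' is a hypothetical INetworkEvaluator,
+ // and the EvolutionAlgorithm constructor shown is assumed from the rest of this library):
+ //   INetworkEvaluator eval = new MyNetworkEvaluator();
+ //   IPopulationEvaluator popEval = new MultiThreadedPopulationEvaluator(eval, HyperNEATParameters.substrateActivationFunction);
+ //   EvolutionAlgorithm ea = new EvolutionAlgorithm(pop, popEval, experiment.DefaultNeatParameters);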
+
+ #region IPopulationEvaluator Members
+
+ public void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
+ {
+ int count = pop.GenomeList.Count;
+
+ for (var i = 0; i < count; i++)
+ {
+ Sem.WaitOne();
+ var g = pop.GenomeList[i];
+ var e = new EvalPack(_networkEvaluator, _activationFn, g);
+
+ ThreadPool.QueueUserWorkItem(EvalNet, e);
+
+ // Update master evaluation counter.
+ _evaluationCount++;
+ }
+
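+ // Drain the semaphore: acquiring all of the slots only succeeds once every queued evaluation has
+ // called Release(), i.e. the whole generation has finished; then restore the count for the next generation.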
+ for (int j = 0; j < HyperNEATParameters.numThreads; j++)
+ Sem.WaitOne();
+ for (int j = 0; j < HyperNEATParameters.numThreads; j++)
+ Sem.Release();
+
+ _networkEvaluator.endOfGeneration(); // GWM - Update novelty stuff
+ }
+
+
+ public ulong EvaluationCount
+ {
+ get
+ {
+ return _evaluationCount;
+ }
+ }
+
+ public string EvaluatorStateMessage
+ {
+ get
+ { // Pass on the network evaluator's message.
+ return _networkEvaluator.EvaluatorStateMessage;
+ }
+ }
+
+ public bool BestIsIntermediateChampion
+ {
+ get
+ { // Only relevant to incremental evolution experiments.
+ return false;
+ }
+ }
+
+ public bool SearchCompleted
+ {
+ get
+ { // This flag is not yet supported in the main search algorithm.
+ return false;
+ }
+ }
+
+ internal static void EvalNet(object input)
+ {
+ try
+ {
+ var e = (EvalPack)input;
+ var g = e.Genome;
+ if (g == null)//|| g.EvaluationCount != 0)
+ return;
+ INetwork network = g.Decode(e.ActivationFn);
+ if (network == null)
+ { // Future genomes may not decode - handle the possibility.
+ g.Fitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
+ }
+ else
+ {
+ double[] fitnesses = e.NetworkEvaluator.threadSafeEvaluateNetwork(network);
+ g.Fitness = Math.Max(fitnesses[0], EvolutionAlgorithm.MIN_GENOME_FITNESS);
+ g.ObjectiveFitness = fitnesses[1];
+ }
+
+ // Update these genome level statistics.
+ g.TotalFitness += g.Fitness;
+ g.EvaluationCount += 1;
+ }
+ //catch (Exception ex) { System.Diagnostics.Debug.WriteLine("EvalNet failed: " + ex); } // Catch? Stop?
+ finally { Sem.Release(); }
+ }
+
+ #endregion
+ }
+
+ internal class EvalPack
+ {
+ public readonly INetworkEvaluator NetworkEvaluator;
+ public readonly IActivationFunction ActivationFn;
+ public readonly IGenome Genome;
+
+ public EvalPack(INetworkEvaluator n, IActivationFunction a, IGenome g)
+ {
+ NetworkEvaluator = n;
+ ActivationFn = a;
+ Genome = g;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Experiments/SingleFilePopulationEvaluator.cs b/SharpNeatWalker/SharpNeatLib/Experiments/SingleFilePopulationEvaluator.cs
new file mode 100644
index 000000000..41c7c05b0
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Experiments/SingleFilePopulationEvaluator.cs
@@ -0,0 +1,101 @@
+using System;
+using SharpNeatLib.Evolution;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Experiments
+{
+ ///
+ /// An implementation of IPopulationEvaluator that evaluates all new genomes(EvaluationCount==0)
+ /// within the population in single-file, using an INetworkEvaluator provided at construction time.
+ ///
+ /// This class provides an IPopulationEvaluator for use within the EvolutionAlgorithm by simply
+ /// providing an INetworkEvaluator to its constructor. This usage is intended for experiments
+ /// where the genomes are evaluated independently of each other (e.g. not simultaneously in
+ /// a simulated world) using a fixed evaluation function that can be described by an INetworkEvaluator.
+ ///
+ public class SingleFilePopulationEvaluator : IPopulationEvaluator
+ {
+ public INetworkEvaluator networkEvaluator;
+ public IActivationFunction activationFn;
+ public ulong evaluationCount=0;
+
+ #region Constructor
+ public SingleFilePopulationEvaluator()
+ {
+ }
+ public SingleFilePopulationEvaluator(INetworkEvaluator networkEvaluator, IActivationFunction activationFn)
+ {
+ this.networkEvaluator = networkEvaluator;
+ this.activationFn = activationFn;
+ }
+
+ #endregion
+
+ #region IPopulationEvaluator Members
+
+ public virtual void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
+ {
+ // Evaluate in single-file each genome within the population.
+ // Only evaluate new genomes (those with EvaluationCount==0).
+ int count = pop.GenomeList.Count;
+ for(int i=0; i
+ /// By placing decode routines in a separate class we decouple the Genome and Network classes.
+ /// Ideally this would be achieved by using intermediate generic data structures, however that
+ /// approach can cause a performance hit. This is a nice balance that allows decoupling without
+ /// performance loss. The downside is that we need knowledge of the Network code's 'guts' in order
+ /// to construct them.
+ ///
+ public class GenomeDecoder
+ {
+ #region Decode To ConcurrentNetwork
+
+ static public INetwork DecodeToConcurrentNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
+ {
+ //----- Loop the neuronGenes. Create Neuron for each one.
+ // Store a table of neurons keyed by their id.
+ Hashtable neuronTable = new Hashtable(g.NeuronGeneList.Count);
+ NeuronList neuronList = new NeuronList();
+
+ foreach(NeuronGene neuronGene in g.NeuronGeneList)
+ {
+ Neuron newNeuron = new Neuron(activationFn, neuronGene.NeuronType, neuronGene.InnovationId, neuronGene.NeuronBias, neuronGene.TimeConstant);
+ neuronTable.Add(newNeuron.Id, newNeuron);
+ neuronList.Add(newNeuron);
+ }
+
+ //----- Loop the connection genes. Create a Connection for each one and bind them to the relevant Neurons.
+ foreach(ConnectionGene connectionGene in g.ConnectionGeneList)
+ {
+ Connection newConnection = new Connection(connectionGene.SourceNeuronId, connectionGene.TargetNeuronId, connectionGene.Weight);
+
+ // Bind the connection to its source neuron.
+ newConnection.SetSourceNeuron((Neuron)neuronTable[connectionGene.SourceNeuronId]);
+
+ // Store the new connection against its target neuron.
+ ((Neuron)(neuronTable[connectionGene.TargetNeuronId])).ConnectionList.Add(newConnection);
+ }
+
+ return new ConcurrentNetwork(neuronList);
+ }
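+
+ // Illustrative usage sketch ('genome' and 'actFn' are assumed to be a NeatGenome and an
+ // IActivationFunction supplied by the caller):
+ //   INetwork net = GenomeDecoder.DecodeToConcurrentNetwork(genome, actFn);
+ // The decoded INetwork can then be driven by an INetworkEvaluator during fitness evaluation.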
+
+ #endregion
+
+ #region Decode To FastConcurrentNetwork
+
+ ///
+ /// Create a single comparer to limit the need to reconstruct for each network.
+ /// Not multithread safe!
+ ///
+ //static FastConnectionComparer fastConnectionComparer = new FastConnectionComparer();
+ static FloatFastConnection[] fastConnectionArray;
+ static IActivationFunction[] activationFunctionArray;
+
+ static public FloatFastConcurrentNetwork DecodeToFloatFastConcurrentNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
+ {
+ int outputNeuronCount = g.OutputNeuronCount;
+ int neuronGeneCount = g.NeuronGeneList.Count;
+
+ // gwm - arrays added for leaky integrator support
+ double[] biasArray = new double[neuronGeneCount];
+ double[] timeConstantArray = new double[neuronGeneCount];
+
+ // Slightly inefficient - determine the number of bias nodes. Fortunately there is not actually
+ // any reason to ever have more than one bias node - although there may be 0.
+
+ activationFunctionArray = new IActivationFunction[neuronGeneCount];
+
+ int neuronGeneIdx=0;
+ for(; neuronGeneIdx=0 && fastConnectionArray[connectionIdx].targetNeuronIdx>=0, "invalid idx");
+
+ fastConnectionArray[connectionIdx].weight = (float)connectionGene.Weight;
+ connectionIdx++;
+ }
+ }
+ else
+ {
+ // Build a table of indexes (ints) keyed on neuron ID. This approach is faster when dealing with large numbers
+ // of lookups.
+ Hashtable neuronIndexTable = new Hashtable(neuronGeneCount);
+ for(int i=0; i1)
+ // QuickSortFastConnections(0, fastConnectionArray.Length-1);
+
+ return new FloatFastConcurrentNetwork( biasNodeCount, inputNeuronCount,
+ outputNeuronCount, neuronGeneCount,
+ fastConnectionArray, activationFunctionArray, biasArray, timeConstantArray);
+ }
+
+ static public FastConcurrentMultiplicativeNetwork DecodeToFastConcurrentMultiplicativeNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
+ {
+
+ int outputNeuronCount = g.OutputNeuronCount;
+ int neuronGeneCount = g.NeuronGeneList.Count;
+
+ // Slightly inefficient - determine the number of bias nodes. Fortunately there is not actually
+ // any reason to ever have more than one bias node - although there may be 0.
+ int neuronGeneIdx=0;
+ for(; neuronGeneIdx
+ /// Create a single comparer to limit the need to reconstruct for each network.
+ /// Not multithread safe!
+ ///
+ //static FastConnectionComparer fastConnectionComparer = new FastConnectionComparer();
+ static IntegerFastConnection[] intFastConnectionArray;
+
+ static public IntegerFastConcurrentNetwork DecodeToIntegerFastConcurrentNetwork(NeatGenome.NeatGenome g)
+ {
+ int outputNeuronCount = g.OutputNeuronCount;
+ int neuronGeneCount = g.NeuronGeneList.Count;
+
+ // Slightly inefficient - determine the number of bias nodes. Fortunately there is not actually
+ // any reason to ever have more than one bias node - although there may be 0.
+ int neuronGeneIdx=0;
+ for(; neuronGeneIdx=0 && intFastConnectionArray[connectionIdx].targetNeuronIdx>=0, "invalid idx");
+
+ // Scale weight to range expected by the integer network class.
+ // +-5 -> +-0x1000
+ intFastConnectionArray[connectionIdx].weight = (int)(connectionGene.Weight * 0x333D);
+ connectionIdx++;
+ }
+ }
+ else
+ {
+ // Build a table of indexes (ints) keyed on neuron ID. This approach is faster when dealing with large numbers
+ // of lookups.
+ Hashtable neuronIndexTable = new Hashtable(neuronGeneCount);
+ for(int i=0; i +-0x1000
+ intFastConnectionArray[connectionIdx].weight = (int)(connectionGene.Weight * 0x333D);
+ connectionIdx++;
+ }
+ }
+
+ // Now sort the connection array on sourceNeuronIdx, secondary sort on targetNeuronIdx.
+ // Using Array.Sort is 10 times slower than the hand-coded sorting routine. See notes on that routine for more
+ // information. Also note that in tests this sorting did not actually improve the speed of the network!
+ // However, it may have a benefit for CPUs with small caches or when networks are very large, and since the new
+ // sort takes up hardly any time for even large networks, it seems reasonable to leave in the sort.
+ //Array.Sort(fastConnectionArray, fastConnectionComparer);
+ if(intFastConnectionArray.Length>1)
+ QuickSortIntFastConnections(0, intFastConnectionArray.Length-1);
+
+ return new IntegerFastConcurrentNetwork(biasNodeCount, inputNeuronCount,
+ outputNeuronCount, neuronGeneCount,
+ intFastConnectionArray);
+ }
+
+ #endregion
+
+ #region Built-In FastConnection Sorting
+
+ // This is a quick sort algorithm that manipulates FastConnection structures. Although this
+ // is the same sorting technique used internally by Array.Sort this is approximately 10 times
+ // faster because it eliminates the need for boxing and unboxing of the structs.
+ // So although this code could be replaced by a single Array.Sort statement, the pay-off
+ // was thought to be worth it.
+
+ private static int CompareKeys(ref FloatFastConnection a, ref FloatFastConnection b)
+ {
+ int diff = a.sourceNeuronIdx - b.sourceNeuronIdx;
+ if(diff==0)
+ {
+ // Secondary sort on targetNeuronIdx.
+ return a.targetNeuronIdx - b.targetNeuronIdx;
+ }
+ else
+ {
+ return diff;
+ }
+ }
+
+ ///
+ /// Standard quicksort algorithm.
+ ///
+ ///
+ ///
+ private static void QuickSortFastConnections(int left, int right)
+ {
+ do
+ {
+ int i = left;
+ int j = right;
+ FloatFastConnection x = fastConnectionArray[(i + j) >> 1];
+ do
+ {
+ while (CompareKeys(ref fastConnectionArray[i], ref x) < 0) i++;
+ while (CompareKeys(ref x, ref fastConnectionArray[j]) < 0) j--;
+
+ System.Diagnostics.Debug.Assert(i>=left && j<=right, "(i>=left && j<=right) Sort failed - Is your IComparer bogus?");
+ if (i > j) break;
+ if (i < j)
+ {
+ FloatFastConnection key = fastConnectionArray[i];
+ fastConnectionArray[i] = fastConnectionArray[j];
+ fastConnectionArray[j] = key;
+ }
+ i++;
+ j--;
+ } while (i <= j);
+
+ if (j-left <= right-i)
+ {
+ if (left < j) QuickSortFastConnections(left, j);
+ left = i;
+ }
+ else
+ {
+ if (i < right) QuickSortFastConnections(i, right);
+ right = j;
+ }
+ } while (left < right);
+ }
+
+ #endregion
+
+ #region Built-In IntegerFastConnection Sorting
+
+ // This is a quick sort algorithm that manipulates FastConnection structures. Although this
+ // is the same sorting technique used internally by Array.Sort this is approximately 10 times
+ // faster because it eliminates the need for boxing and unboxing of the structs.
+ // So although this code could be replaced by a single Array.Sort statement, the pay-off
+ // was thought to be worth it.
+
+ private static int CompareKeys(ref IntegerFastConnection a, ref IntegerFastConnection b)
+ {
+ int diff = a.sourceNeuronIdx - b.sourceNeuronIdx;
+ if(diff==0)
+ {
+ // Secondary sort on targetNeuronIdx.
+ return a.targetNeuronIdx - b.targetNeuronIdx;
+ }
+ else
+ {
+ return diff;
+ }
+ }
+
+ ///
+ /// Standard quicksort algorithm.
+ ///
+ ///
+ ///
+ private static void QuickSortIntFastConnections(int left, int right)
+ {
+ do
+ {
+ int i = left;
+ int j = right;
+ IntegerFastConnection x = intFastConnectionArray[(i + j) >> 1];
+ do
+ {
+ while (CompareKeys(ref intFastConnectionArray[i], ref x) < 0) i++;
+ while (CompareKeys(ref x, ref intFastConnectionArray[j]) < 0) j--;
+
+ System.Diagnostics.Debug.Assert(i>=left && j<=right, "(i>=left && j<=right) Sort failed - Is your IComparer bogus?");
+ if (i > j) break;
+ if (i < j)
+ {
+ IntegerFastConnection key = intFastConnectionArray[i];
+ intFastConnectionArray[i] = intFastConnectionArray[j];
+ intFastConnectionArray[j] = key;
+ }
+ i++;
+ j--;
+ } while (i <= j);
+
+ if (j-left <= right-i)
+ {
+ if (left < j) QuickSortIntFastConnections(left, j);
+ left = i;
+ }
+ else
+ {
+ if (i < right) QuickSortIntFastConnections(i, right);
+ right = j;
+ }
+ } while (left < right);
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Maths/FastRandom.cs b/SharpNeatWalker/SharpNeatLib/Maths/FastRandom.cs
new file mode 100644
index 000000000..ea06f49a9
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Maths/FastRandom.cs
@@ -0,0 +1,295 @@
+using System;
+
+namespace SharpNeatLib.Maths
+{
+ ///
+ /// A fast random number generator for .NET
+ /// Colin Green, January 2005
+ ///
+ /// September 4th 2005
+ /// Added NextBytesUnsafe() - commented out by default.
+ /// Fixed bug in Reinitialise() - y,z and w variables were not being reset.
+ ///
+ /// Key points:
+ /// 1) Based on a simple and fast xor-shift pseudo random number generator (RNG) specified in:
+ /// Marsaglia, George. (2003). Xorshift RNGs.
+ /// http://www.jstatsoft.org/v08/i14/xorshift.pdf
+ ///
+ /// This particular implementation of xorshift has a period of 2^128-1. See the above paper to see
+ /// how this can be easily extended if you need a longer period. At the time of writing I could find no
+ /// information on the period of System.Random for comparison.
+ ///
+ /// 2) Faster than System.Random. Up to 15x faster, depending on which methods are called.
+ ///
+ /// 3) Direct replacement for System.Random. This class implements all of the methods that System.Random
+ /// does plus some additional methods. The like named methods are functionally equivalent.
+ ///
+ /// 4) Allows fast re-initialisation with a seed, unlike System.Random which accepts a seed at construction
+ /// time which then executes a relatively expensive initialisation routine. This provides a vast speed improvement
+ /// if you need to reset the pseudo-random number sequence many times, e.g. if you want to re-generate the same
+ /// sequence many times. An alternative might be to cache random numbers in an array, but that approach is limited
+ /// by memory capacity and the fact that you may also want a large number of different sequences cached. Each sequence
+ /// can be represented by a single seed value (int) when using FastRandom.
+ ///
+ /// Notes.
+ /// A further performance improvement can be obtained by declaring local variables as static, thus avoiding
+ /// re-allocation of variables on each call. However care should be taken if multiple instances of
+ /// FastRandom are in use or if being used in a multi-threaded environment.
+ ///
+ ///
+ public class FastRandom
+ {
+ // The +1 ensures NextDouble doesn't generate 1.0
+ const double REAL_UNIT_INT = 1.0/((double)int.MaxValue+1.0);
+ const double REAL_UNIT_UINT = 1.0/((double)uint.MaxValue+1.0);
+ const uint Y=842502087, Z=3579807591, W=273326509;
+
+ uint x, y, z, w;
+
+ #region Constructors
+
+ ///
+ /// Initialises a new instance using time dependent seed.
+ ///
+ public FastRandom()
+ {
+ // Initialise using the system tick count.
+ Reinitialise((int)Environment.TickCount);
+ }
+
+ ///
+ /// Initialises a new instance using an int value as seed.
+ /// This constructor signature is provided to maintain compatibility with
+ /// System.Random
+ ///
+ public FastRandom(int seed)
+ {
+ Reinitialise(seed);
+ }
+
+ #endregion
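+
+ // Example usage (illustrative sketch, not taken from the original documentation):
+ //   FastRandom rng = new FastRandom(12345); // a fixed seed gives a repeatable sequence
+ //   int roll = rng.Next(1, 7);              // integer in 1..6
+ //   double unit = rng.NextDouble();         // double in [0.0, 1.0)
+ //   bool coin = rng.NextBool();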
+
+ #region Public Methods [Reinitialisation]
+
+ ///
+ /// Reinitialises using an int value as a seed.
+ ///
+ ///
+ public void Reinitialise(int seed)
+ {
+ // The only stipulation stated for the xorshift RNG is that at least one of
+ // the seeds x,y,z,w is non-zero. We fulfill that requirement by only allowing
+ // resetting of the x seed
+ x = (uint)seed;
+ y = Y;
+ z = Z;
+ w = W;
+ }
+
+ #endregion
+
+ #region Public Methods [Next* methods]
+
+ ///
+ /// Generates a uint. Values returned are over the full range of a uint,
+ /// uint.MinValue to uint.MaxValue, including the min and max values.
+ ///
+ ///
+ public uint NextUInt()
+ {
+ uint t=(x^(x<<11));
+ x=y; y=z; z=w;
+ return (w=(w^(w>>19))^(t^(t>>8)));
+ }
+
+ ///
+ /// Generates a random int. Values returned are over the range 0 to int.MaxValue-1.
+ /// MaxValue is not generated to remain functionally equivalent to System.Random.Next().
+ /// If you require an int from the full range, including negative values then call
+ /// NextUint() and cast the value to an int.
+ ///
+ ///
+ public int Next()
+ {
+ uint t=(x^(x<<11));
+ x=y; y=z; z=w;
+ return (int)(0x7FFFFFFF&(w=(w^(w>>19))^(t^(t>>8))));
+ }
+
+ ///
+ /// Generates a random int over the range 0 to upperBound-1, and not including upperBound.
+ ///
+ ///
+ ///
+ public int Next(int upperBound)
+ {
+ if(upperBound<0)
+ throw new ArgumentOutOfRangeException("upperBound", upperBound, "upperBound must be >=0");
+
+ uint t=(x^(x<<11));
+ x=y; y=z; z=w;
+
+ // The explicit int cast before the first multiplication gives better performance.
+ // See comments in NextDouble.
+ return (int)((REAL_UNIT_INT*(int)(0x7FFFFFFF&(w=(w^(w>>19))^(t^(t>>8)))))*upperBound);
+ }
+
+ ///
+ /// Generates a random int over the range lowerBound to upperBound-1, and not including upperBound.
+ /// upperBound must be >= lowerBound. lowerBound may be negative.
+ ///
+ ///
+ ///
+ ///
+ public int Next(int lowerBound, int upperBound)
+ {
+ if(lowerBound>upperBound)
+ throw new ArgumentOutOfRangeException("upperBound", upperBound, "upperBound must be >=lowerBound");
+
+ uint t=(x^(x<<11));
+ x=y; y=z; z=w;
+
+ // The explicit int cast before the first multiplication gives better performance.
+ // See comments in NextDouble.
+ int range = upperBound-lowerBound;
+ if(range<0)
+ { // If range is <0 then an overflow has occurred and we must resort to using long integer arithmetic instead (slower).
+ // We also must use all 32 bits of precision, instead of the normal 31, which again is slower.
+ return lowerBound+(int)((REAL_UNIT_UINT*(double)(w=(w^(w>>19))^(t^(t>>8))))*(double)((long)upperBound-(long)lowerBound));
+ }
+
+ // 31 bits of precision will suffice if range<=int.MaxValue. This allows us to cast to an int and gain
+ // a little more performance.
+ return lowerBound+(int)((REAL_UNIT_INT*(double)(int)(0x7FFFFFFF&(w=(w^(w>>19))^(t^(t>>8)))))*(double)range);
+ }
+
+ ///
+ /// Generates a random double. Values returned are from 0.0 up to but not including 1.0.
+ ///
+ ///
+ public double NextDouble()
+ {
+ uint t=(x^(x<<11));
+ x=y; y=z; z=w;
+
+ // Here we can gain a 2x speed improvement by generating a value that can be cast to
+ // an int instead of the more easily available uint. If we then explicitly cast to an
+ // int the compiler will then cast the int to a double to perform the multiplication,
+ // this final cast is a lot faster than casting from a uint to a double. The extra cast
+ // to an int is very fast (the allocated bits remain the same) and so the overall effect
+ // of the extra cast is a significant performance improvement.
+ return (REAL_UNIT_INT*(int)(0x7FFFFFFF&(w=(w^(w>>19))^(t^(t>>8)))));
+ }
+
+ ///
+ /// Fills the provided byte array with random bytes.
+ /// Increased performance is achieved by dividing and packaging bits directly from the
+ /// random number generator and storing them in 4 byte 'chunks'.
+ ///
+ ///
+ public void NextBytes(byte[] buffer)
+ {
+ // Fill up the bulk of the buffer in chunks of 4 bytes at a time.
+ uint x=this.x, y=this.y, z=this.z, w=this.w;
+ int i=0;
+ uint t;
+ for(; i<buffer.Length-3;)
+ {
+ // Generate 4 bytes.
+ t=(x^(x<<11));
+ x=y; y=z; z=w;
+ w=(w^(w>>19))^(t^(t>>8));
+
+ buffer[i++] = (byte)( w&0x000000FF);
+ buffer[i++] = (byte)((w&0x0000FF00) >> 8);
+ buffer[i++] = (byte)((w&0x00FF0000) >> 16);
+ buffer[i++] = (byte)((w&0xFF000000) >> 24);
+ }
+
+ // Fill up any remaining bytes in the buffer.
+ if(i<buffer.Length)
+ {
+ // Generate 4 more bytes.
+ t=(x^(x<<11));
+ x=y; y=z; z=w;
+ w=(w^(w>>19))^(t^(t>>8));
+
+ buffer[i++] = (byte)(w&0x000000FF);
+ if(i<buffer.Length)
+ {
+ buffer[i++] = (byte)((w&0x0000FF00) >> 8);
+ if(i<buffer.Length)
+ {
+ buffer[i++] = (byte)((w&0x00FF0000) >> 16);
+ if(i<buffer.Length)
+ {
+ buffer[i++] = (byte)((w&0xFF000000) >> 24);
+ }
+ }
+ }
+ }
+ this.x=x; this.y=y; this.z=z; this.w=w;
+ }
+
+
+// ///
+// /// A version of NextBytes that uses a pointer to set 4 bytes of the byte buffer in one operation
+// /// thus providing a nice speedup. Note that this requires the unsafe compilation flag to be specified
+// /// and so is commented out by default.
+// ///
+// ///
+// public unsafe void NextBytesUnsafe(byte[] buffer)
+// {
+// if(buffer.Length % 4 != 0)
+// throw new ArgumentException("Buffer length must be divisible by 4", "buffer");
+//
+// uint x=this.x, y=this.y, z=this.z, w=this.w;
+// uint t;
+//
+// fixed(byte* pByte0 = buffer)
+// {
+// uint* pDWord = (uint*)pByte0;
+// for(int i = 0, len = buffer.Length>>2; i < len; i++)
+// {
+// t=(x^(x<<11));
+// x=y; y=z; z=w;
+// *pDWord++ = w = (w^(w>>19))^(t^(t>>8));
+// }
+// }
+//
+// this.x=x; this.y=y; this.z=z; this.w=w;
+// }
+
+ // Buffer 32 bits in bitBuffer, return 1 at a time, keep track of how many have been returned
+ // with bitBufferIdx.
+ uint bitBuffer;
+ int bitBufferIdx=32;
+
+ ///
+ /// Generates random bool.
+ /// Increased performance is achieved by buffering 32 random bits for
+ /// future calls. Thus the random number generator is only invoked once
+ /// in every 32 calls.
+ ///
+ ///
+ public bool NextBool()
+ {
+ if(bitBufferIdx==32)
+ {
+ // Generate 32 more bits.
+ uint t=(x^(x<<11));
+ x=y; y=z; z=w;
+ bitBuffer=w=(w^(w>>19))^(t^(t>>8));
+
+ // Reset the idx that tells us which bit to read next.
+ bitBufferIdx = 1;
+ return (bitBuffer & 0x1)==1;
+ }
+
+ bitBufferIdx++;
+ return ((bitBuffer>>=1) & 0x1)==1;
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Maths/MathsException.cs b/SharpNeatWalker/SharpNeatLib/Maths/MathsException.cs
new file mode 100644
index 000000000..8b4991042
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Maths/MathsException.cs
@@ -0,0 +1,19 @@
+using System;
+
+namespace SharpNeatLib.Maths
+{
+ public class MathsException : System.Exception
+ {
+ public MathsException()
+ {
+ }
+
+ public MathsException(string message) : base(message)
+ {
+ }
+
+ public MathsException(string message, System.Exception innerException) : base(message, innerException)
+ {
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Maths/RandLib.cs b/SharpNeatWalker/SharpNeatLib/Maths/RandLib.cs
new file mode 100644
index 000000000..4ecc887bc
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Maths/RandLib.cs
@@ -0,0 +1,210 @@
+using System;
+
+namespace SharpNeatLib.Maths
+{
+ ///
+ /// Selected pieces of RandLib 1.3 translated into C#.
+ /// See http://hpux.asknet.de/hppd/hpux/Maths/Misc/randlib-1.3/ for more info.
+ ///
+ public class RandLib
+ {
+ static Random random = new Random();
+
+
+ ///
+ /// Details from randlib comments...
+ /// GENerate random deviate from a NORmal distribution
+ ///
+ /// Function
+ /// Generates a single random deviate from a normal distribution
+ /// with mean, AV, and standard deviation, SD.
+ ///
+ /// Arguments
+ /// av --> Mean of the normal distribution.
+ /// sd --> Standard deviation of the normal distribution.
+ /// JJV (sd >= 0)
+ ///
+ /// Method
+ /// Renames SNORM from TOMS as slightly modified by BWB to use RANF
+ /// instead of SUNIF.
+ ///
+ /// For details see:
+ /// Ahrens, J.H. and Dieter, U.
+ /// Extensions of Forsythe's Method for Random
+ /// Sampling from the Normal Distribution.
+ /// Math. Comput., 27,124 (Oct. 1973), 927 - 937.
+ ///
+ ///
+ ///
+ ///
+ static public double gennor(double av, double sd)
+ {
+ if(sd < 0.0)
+ throw new MathsException("gennor() - invalid sd");
+
+ return sd*snorm()+av;
+ }
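+
+ // Illustrative example: gennor(0.0, 0.1) draws from a normal distribution with mean 0 and standard
+ // deviation 0.1, which is how ValueMutation (below) perturbs values during mutation.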
+
+
+ static double[] a = new double[32]
+ {
+ 0.0,3.917609E-2,7.841241E-2,0.11777,0.1573107,0.1970991,0.2372021,0.2776904,
+ 0.3186394,0.36013,0.4022501,0.4450965,0.4887764,0.5334097,0.5791322,
+ 0.626099,0.6744898,0.7245144,0.7764218,0.8305109,0.8871466,0.9467818,
+ 1.00999,1.077516,1.150349,1.229859,1.318011,1.417797,1.534121,1.67594,
+ 1.862732,2.153875
+ };
+
+ static double[] d = new double[31]
+ {
+ 0.0,0.0,0.0,0.0,0.0,0.2636843,0.2425085,0.2255674,0.2116342,0.1999243,
+ 0.1899108,0.1812252,0.1736014,0.1668419,0.1607967,0.1553497,0.1504094,
+ 0.1459026,0.14177,0.1379632,0.1344418,0.1311722,0.128126,0.1252791,
+ 0.1226109,0.1201036,0.1177417,0.1155119,0.1134023,0.1114027,0.1095039
+ };
+
+
+ static double[] t = new double[31]
+ {
+ 7.673828E-4,2.30687E-3,3.860618E-3,5.438454E-3,7.0507E-3,8.708396E-3,
+ 1.042357E-2,1.220953E-2,1.408125E-2,1.605579E-2,1.81529E-2,2.039573E-2,
+ 2.281177E-2,2.543407E-2,2.830296E-2,3.146822E-2,3.499233E-2,3.895483E-2,
+ 4.345878E-2,4.864035E-2,5.468334E-2,6.184222E-2,7.047983E-2,8.113195E-2,
+ 9.462444E-2,0.1123001,0.136498,0.1716886,0.2276241,0.330498,0.5847031
+ };
+
+ static double[] h = new double[31]
+ {
+ 3.920617E-2,3.932705E-2,3.951E-2,3.975703E-2,4.007093E-2,4.045533E-2,
+ 4.091481E-2,4.145507E-2,4.208311E-2,4.280748E-2,4.363863E-2,4.458932E-2,
+ 4.567523E-2,4.691571E-2,4.833487E-2,4.996298E-2,5.183859E-2,5.401138E-2,
+ 5.654656E-2,5.95313E-2,6.308489E-2,6.737503E-2,7.264544E-2,7.926471E-2,
+ 8.781922E-2,9.930398E-2,0.11556,0.1404344,0.1836142,0.2790016,0.7010474
+ };
+
+
+
+ ///
+ /// Details from randlib comments...
+ ///
+ /// **********************************************************************
+ ///
+ ///
+ /// (STANDARD-) N O R M A L DISTRIBUTION
+ ///
+ ///
+ /// **********************************************************************
+ /// **********************************************************************
+ ///
+ /// FOR DETAILS SEE:
+ ///
+ /// AHRENS, J.H. AND DIETER, U.
+ /// EXTENSIONS OF FORSYTHE'S METHOD FOR RANDOM
+ /// SAMPLING FROM THE NORMAL DISTRIBUTION.
+ /// MATH. COMPUT., 27,124 (OCT. 1973), 927 - 937.
+ ///
+ /// ALL STATEMENT NUMBERS CORRESPOND TO THE STEPS OF ALGORITHM 'FL'
+ /// (M=5) IN THE ABOVE PAPER (SLIGHTLY MODIFIED IMPLEMENTATION)
+ ///
+ /// Modified by Barry W. Brown, Feb 3, 1988 to use RANF instead of
+ /// SUNIF. The argument IR thus goes away.
+ ///
+ /// **********************************************************************
+ /// THE DEFINITIONS OF THE CONSTANTS A(K), D(K), T(K) AND
+ /// H(K) ARE ACCORDING TO THE ABOVEMENTIONED ARTICLE
+ ///
+ ///
+ ///
+ static public double snorm()
+ {
+ int i;
+ double snorm, u,s,ustar,aa,w,y,tt;
+
+ u = ranf();
+ s = 0.0;
+ if(u > 0.5) s = 1.0;
+ u += (u-s);
+ u = 32.0*u;
+ i = (int) (u);
+ if(i == 32) i = 31;
+ if(i == 0) goto S100;
+ /*
+ START CENTER
+ */
+ ustar = u-(double)i;
+ aa = a[i-1];
+ S40:
+ if(ustar <= t[i-1]) goto S60;
+ w = (ustar-t[i-1]) * h[i-1];
+ S50:
+ /*
+ EXIT (BOTH CASES)
+ */
+ y = aa+w;
+ snorm = y;
+ if(s == 1.0) snorm = -y;
+ return snorm;
+ S60:
+ /*
+ CENTER CONTINUED
+ */
+ u = ranf();
+ w = u*(a[i]-aa);
+ tt = (0.5*w+aa)*w;
+ goto S80;
+ S70:
+ tt = u;
+ ustar = ranf();
+ S80:
+ if(ustar > tt) goto S50;
+ u = ranf();
+ if(ustar >= u) goto S70;
+ ustar = ranf();
+ goto S40;
+ S100:
+ /*
+ START TAIL
+ */
+ i = 6;
+ aa = a[31];
+ goto S120;
+ S110:
+ aa += d[i-1];
+ i += 1;
+ S120:
+ u += u;
+ if(u < 1.0) goto S110;
+ u -= 1.0;
+ S140:
+ w = u * d[i-1];
+ tt = (0.5*w+aa)*w;
+ goto S160;
+ S150:
+ tt = u;
+ S160:
+ ustar = ranf();
+ if(ustar > tt) goto S50;
+ u = ranf();
+ if(ustar >= u) goto S150;
+ u = ranf();
+ goto S140;
+ }
+
+ ///
+ /// A version of random.NextDouble() that avoids 0.0
+ ///
+ ///
+ static private double ranf()
+ {
+ double ranf;
+
+ do
+ {
+ ranf = random.NextDouble();
+ }
+ while(ranf==0.0);
+
+ return ranf;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Maths/RouletteWheel.cs b/SharpNeatWalker/SharpNeatLib/Maths/RouletteWheel.cs
new file mode 100644
index 000000000..4c3ef6bd8
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Maths/RouletteWheel.cs
@@ -0,0 +1,119 @@
+using System;
+
+namespace SharpNeatLib.Maths
+{
+ public class RouletteWheel
+ {
+ static private Random random = new Random();
+
+ ///
+ /// A simple single throw routine.
+ ///
+ /// A probability between 0..1 that the throw will result in a true result.
+ ///
+ static public bool SingleThrow(double probability)
+ {
+ if(random.NextDouble() <=probability)
+ return true;
+ else
+ return false;
+ }
+
+ ///
+ /// Performs a single throw for a given number of outcomes with equal probabilities.
+ ///
+ ///
+ /// An integer between 0..numberOfOutcomes-1. In effect this routine selects one of the possible outcomes.
+ static public int SingleThrowEven(int numberOfOutcomes)
+ {
+ double probability= 1.0 / (double)numberOfOutcomes;
+ double accumulator=0;
+ double throwValue = random.NextDouble();
+
+ for(int i=0; i<numberOfOutcomes; i++)
+ {
+ accumulator+=probability;
+ if(throwValue <= accumulator)
+ return i;
+ }
+
+ return numberOfOutcomes-1; // Rounding guard; should not normally be reached.
+ }
+
+ ///
+ /// Performs a single throw onto a roulette wheel where the wheel's space is unevenly divided.
+ /// The probability that a segment will be selected is given by that segment's value in the 'probabilities'
+ /// array. The probabilities are normalised before tossing the ball so that their total is always equal to 1.0.
+ ///
+ ///
+ ///
+ static public int SingleThrow(double[] probabilities)
+ {
+ double pTotal=0; // Total probability
+
+ //-----
+ for(int i=0; i<probabilities.Length; i++)
+ pTotal+=probabilities[i];
+
+ //----- Throw the ball and return an integer indicating the outcome.
+ double throwValue = random.NextDouble() * pTotal;
+ double accumulator=0;
+
+ for(int i=0; i<probabilities.Length; i++)
+ {
+ accumulator+=probabilities[i];
+ if(throwValue <= accumulator)
+ return i;
+ }
+
+ return probabilities.Length-1; // Rounding guard; should not normally be reached.
+ }
+
+ ///
+ /// Similar in functionality to SingleThrow(double[] probabilities). However the 'probabilities' array is
+ /// not normalised. Therefore if the total goes beyond 1 then we allow extra throws, thus if the total is 10
+ /// then we perform 10 throws.
+ ///
+ ///
+ ///
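+ /// Illustrative example (hypothetical numbers): probabilities {0.5, 1.5, 1.0} sum to 3.0, so three
+ /// throws are made and each throw selects index 1 three times as often as index 0.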
+ static public int[] MultipleThrows(double[] probabilities)
+ {
+ double pTotal=0; // Total probability
+ int numberOfThrows;
+
+ //----- Determine how many throws of the ball onto the wheel.
+ for(int i=0; i<probabilities.Length; i++)
+ pTotal+=probabilities[i];
+
+ // If pTotal is > 1 then we take this as meaning more than one throw of the ball.
+ double pTotalInteger = Math.Floor(pTotal);
+ double pTotalRemainder = pTotal - pTotalInteger;
+ numberOfThrows = (int)pTotalInteger;
+
+ if(random.NextDouble() <= pTotalRemainder)
+ numberOfThrows++;
+
+ //----- Now throw the ball the determined number of times. For each throw store an integer indicating the outcome.
+ int[] outcomes = new int[numberOfThrows];
+
+ for(int i=0; i<numberOfThrows; i++)
+ outcomes[i] = SingleThrow(probabilities);
+
+ return outcomes;
+ }
+ /// Summary description for ValueMutation.
+ ///
+ public class ValueMutation
+ {
+ static FastRandom random = new FastRandom();
+
+
+ ///
+ /// Boundless mutation.
+ ///
+ ///
+ ///
+ static public double Mutate(double v, double sigma)
+ {
+ // Sigma=0.1 gives numbers in the range -0.5 to 0.5.
+ // Multiply by delta to adjust the mutation's scale in line with magnitude of the value.
+ v+= RandLib.gennor(0, 0.015); //;0.025);
+ return v;
+ }
+
+//
+// ///
+// /// Boundless mutation.
+// ///
+// ///
+// ///
+// static public double Mutate(double v, double baseValue)
+// {
+// double delta = Math.Abs(v-baseValue);
+//
+// // Sigma=0.1 gives numbers in the range -0.5 to 0.5.
+// // Multiply by delta to adjust the mutation's scale in line with magnitude of the value.
+// v+= delta*RandLib.gennor(0, 0.1);
+// return v;
+// }
+
+
+ static public double Mutate(double v, double baseValue, double lowerLimit, double highLimit)
+ {
+ double delta = Math.Abs(v-baseValue);
+
+ v+= delta*RandLib.gennor(0, 0.1);
+
+ if(v<lowerLimit || v>highLimit)
+ {
+ if(v<lowerLimit)
+ v=lowerLimit+(lowerLimit-v);
+ else if(v>highLimit)
+ v=highLimit-(v-highLimit);
+ }
+ return v;
+ }
+
+ static public int Mutate(int v, int baseValue)
+ {
+
+ int delta = Math.Abs(v-baseValue);
+
+ if(delta <= 10)
+ v+= (int)Math.Round(RandLib.gennor(baseValue, 5));
+ else if(delta>10 && delta<=100)
+ v+= (int)Math.Round((double)delta*RandLib.gennor(0, 1));
+ else// if(delta>100)
+ v+= (int)Math.Round((double)delta*RandLib.gennor(0, 0.1));
+
+ return v;
+ }
+
+ static public int Mutate(int v, int baseValue, int lowerLimit, int highLimit)
+ {
+ v=Mutate(v,baseValue);
+
+ if(v<lowerLimit)
+ v=lowerLimit;
+ else if(v>highLimit)
+ v=highLimit;
+
+ return v;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeatGenome/ConnectionGene.cs b/SharpNeatWalker/SharpNeatLib/NeatGenome/ConnectionGene.cs
new file mode 100644
index 000000000..459aa71b2
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeatGenome/ConnectionGene.cs
@@ -0,0 +1,135 @@
+using System;
+
+namespace SharpNeatLib.NeatGenome
+{
+ public class ConnectionGene
+ {
+ uint innovationId;
+ uint sourceNeuronId;
+ uint targetNeuronId;
+// bool enabled;
+ double weight;
+ bool fixedWeight=false;
+
+ ///
+ /// Used by the connection mutation routine to flag mutated connections so that they aren't
+ /// mutated more than once.
+ ///
+ bool isMutated=false;
+
+ #region Constructor
+
+ ///
+ /// Copy constructor.
+ ///
+ ///
+ public ConnectionGene(ConnectionGene copyFrom)
+ {
+ this.innovationId = copyFrom.innovationId;
+ this.sourceNeuronId = copyFrom.sourceNeuronId;
+ this.targetNeuronId = copyFrom.targetNeuronId;
+// this.enabled = copyFrom.enabled;
+ this.weight = copyFrom.weight;
+ this.fixedWeight = copyFrom.fixedWeight;
+ }
+
+ public ConnectionGene(uint innovationId, uint sourceNeuronId, uint targetNeuronId, double weight)
+ {
+ this.innovationId = innovationId;
+ this.sourceNeuronId = sourceNeuronId;
+ this.targetNeuronId = targetNeuronId;
+// this.enabled = enabled;
+ this.weight = weight;
+ }
+
+ #endregion
+
+ #region Properties
+
+ public uint InnovationId
+ {
+ get
+ {
+ return innovationId;
+ }
+ set
+ {
+ innovationId = value;
+ }
+ }
+
+ public uint SourceNeuronId
+ {
+ get
+ {
+ return sourceNeuronId;
+ }
+ set
+ {
+ sourceNeuronId = value;
+ }
+ }
+
+ public uint TargetNeuronId
+ {
+ get
+ {
+ return targetNeuronId;
+ }
+ set
+ {
+ targetNeuronId = value;
+ }
+ }
+
+// public bool Enabled
+// {
+// get
+// {
+// return enabled;
+// }
+// set
+// {
+// enabled = value;
+// }
+// }
+
+ public double Weight
+ {
+ get
+ {
+ return weight;
+ }
+ set
+ {
+ weight = value;
+ }
+ }
+
+ public bool FixedWeight
+ {
+ get
+ {
+ return fixedWeight;
+ }
+ set
+ {
+ fixedWeight = value;
+ }
+ }
+
+ public bool IsMutated
+ {
+ get
+ {
+ return isMutated;
+ }
+ set
+ {
+ isMutated = value;
+ }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeatGenome/ConnectionGeneComparer.cs b/SharpNeatWalker/SharpNeatLib/NeatGenome/ConnectionGeneComparer.cs
new file mode 100644
index 000000000..5f13b32ba
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeatGenome/ConnectionGeneComparer.cs
@@ -0,0 +1,27 @@
+using System;
+using System.Collections.Generic;
+
+namespace SharpNeatLib.NeatGenome
+{
+ ///
+ /// Compares the innovation ID of ConnectionGenes.
+ ///
+ public class ConnectionGeneComparer : IComparer<ConnectionGene>
+ {
+
+ #region IComparer Members
+
+ public int Compare(ConnectionGene x, ConnectionGene y)
+ {
+ // Test the most likely cases first.
+ if ((x).InnovationId < (y).InnovationId)
+ return -1;
+ else if ((x).InnovationId > (y).InnovationId)
+ return 1;
+ else
+ return 0;
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeatGenome/ConnectionGeneList.cs b/SharpNeatWalker/SharpNeatLib/NeatGenome/ConnectionGeneList.cs
new file mode 100644
index 000000000..a6bdb2f9f
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeatGenome/ConnectionGeneList.cs
@@ -0,0 +1,128 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+
+
+namespace SharpNeatLib.NeatGenome
+{
+ public class ConnectionGeneList : List<ConnectionGene>
+ {
+ static ConnectionGeneComparer connectionGeneComparer = new ConnectionGeneComparer();
+ //public bool OrderInvalidated=false;
+
+ #region Constructors
+
+ ///
+ /// Default constructor.
+ ///
+ public ConnectionGeneList()
+ {}
+
+ public ConnectionGeneList(int count)
+ {
+ Capacity = (int)(count*1.5);
+ }
+
+ ///
+ /// Copy constructor.
+ ///
+ ///
+ public ConnectionGeneList(ConnectionGeneList copyFrom)
+ {
+ int count = copyFrom.Count;
+ Capacity = count;
+ for(int i=0; i<count; i++)
+ Add(new ConnectionGene(copyFrom[i]));
+ }
+
+ #endregion
+
+ ///
+ /// Inserts a ConnectionGene into its correct (sorted) location within the gene list.
+ /// Normally connection genes can safely be assumed to have a new Innovation ID higher
+ /// than all existing ID's, and so we can just call Add().
+ /// This routine handles genes with older ID's that need placing correctly.
+ ///
+ ///
+ ///
+ public void InsertIntoPosition(ConnectionGene connectionGene)
+ {
+ // Determine the insert idx with a linear search, starting from the end
+ // since mostly we expect to be adding genes that belong only 1 or 2 genes
+ // from the end at most.
+ int idx=Count-1;
+ for(; idx>-1; idx--)
+ {
+ if(this[idx].InnovationId < connectionGene.InnovationId)
+ { // Insert idx found.
+ break;
+ }
+ }
+ Insert(idx+1, connectionGene);
+ }
+
+ /*public void Remove(ConnectionGene connectionGene)
+ {
+ Remove(connectionGene.InnovationId);
+
+ // This invokes a linear search. Invoke our binary search instead.
+ //InnerList.Remove(connectionGene);
+ }*/
+
+ public void Remove(uint innovationId)
+ {
+ int idx = BinarySearch(innovationId);
+ if(idx<0)
+ throw new Exception("Attempt to remove connection with an unknown innovationId");
+ else
+ RemoveAt(idx);
+ }
+
+ public void SortByInnovationId()
+ {
+ Sort(connectionGeneComparer);
+ //OrderInvalidated=false;
+ }
+
+ public int BinarySearch(uint innovationId)
+ {
+ int lo = 0;
+ int hi = Count-1;
+
+ while (lo <= hi)
+ {
+ int i = (lo + hi) >> 1;
+ int c = (int)(this[i]).InnovationId - (int)innovationId;
+ if (c == 0) return i;
+
+ if (c < 0)
+ lo = i + 1;
+ else
+ hi = i - 1;
+ }
+
+ return ~lo;
+ }
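+
+ // Note: as with Array.BinarySearch, a negative result is the bitwise complement of the index at
+ // which the id would be inserted. For a hypothetical list with innovation ids {3, 7}, searching
+ // for 7 returns 1, while searching for the missing id 5 returns ~1 (i.e. -2).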
+
+ ///
+ /// For debug purposes only. Don't call this in normal circumstances as it is an
+ /// expensive O(n) operation.
+ ///
+ ///
+ public bool IsSorted()
+ {
+ uint prevId=0;
+ foreach(ConnectionGene gene in this)
+ {
+ if(gene.InnovationId < prevId)
+ return false;
+ prevId = gene.InnovationId;
+ }
+ return true;
+ }
+ /// Create a default minimal genome that describes a NN with the given number of inputs and outputs.
+ ///
+ ///
+ public static IGenome CreateGenome(NeatParameters neatParameters, IdGenerator idGenerator, int inputNeuronCount, int outputNeuronCount, float connectionProportion)
+ {
+ IActivationFunction actFunct;
+ NeuronGene neuronGene; // temp variable.
+ NeuronGeneList inputNeuronGeneList = new NeuronGeneList(); // includes bias neuron.
+ NeuronGeneList outputNeuronGeneList = new NeuronGeneList();
+ NeuronGeneList neuronGeneList = new NeuronGeneList();
+ ConnectionGeneList connectionGeneList = new ConnectionGeneList();
+
+ // IMPORTANT NOTE: The neurons must all be created prior to any connections. That way all of the genomes
+ // will obtain the same innovation ID's for the bias,input and output nodes in the initial population.
+ // Create a single bias neuron.
+ //TODO: DAVID proper activation function change to NULL?
+ actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
+ neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Bias, actFunct);
+ inputNeuronGeneList.Add(neuronGene);
+ neuronGeneList.Add(neuronGene);
+
+ // Create input neuron genes.
+ actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
+ for(int i=0; i
+ /// Construct a GenomeList. This can be used to construct a new Population object.
+ ///
+ ///
+ ///
+ ///
+ ///
+ ///
+ public static GenomeList CreateGenomeList(NeatParameters neatParameters, IdGenerator idGenerator, int inputNeuronCount, int outputNeuronCount, float connectionProportion, int length)
+ {
+ GenomeList genomeList = new GenomeList();
+
+ for(int i=0; i= seedPopulation.GenomeList.Count)
+ { // Back to first genome.
+ seedIdx=0;
+ }
+ }
+ return genomeList;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeatGenome/IdGeneratorFactory.cs b/SharpNeatWalker/SharpNeatLib/NeatGenome/IdGeneratorFactory.cs
new file mode 100644
index 000000000..c28deb1d5
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeatGenome/IdGeneratorFactory.cs
@@ -0,0 +1,131 @@
+using System;
+using System.Collections;
+
+using SharpNeatLib.Evolution;
+
+namespace SharpNeatLib.NeatGenome
+{
+ public class IdGeneratorFactory : IIdGeneratorFactory
+ {
+ ///
+ /// Create an IdGenerator by interrogating the provided population of Genomes.
+ /// This routine also fixes any duplicate IDs that are found in the
+ /// population.
+ ///
+ ///
+ ///
+ public IdGenerator CreateIdGenerator(GenomeList genomeList)
+ {
+ uint maxGenomeId=0;
+ uint maxInnovationId=0;
+
+ // First pass: Determine the current maximum genomeId and innovationId.
+ foreach(NeatGenome genome in genomeList)
+ {
+ if(genome.GenomeId > maxGenomeId)
+ maxGenomeId = genome.GenomeId;
+
+ // Neuron IDs actually come from the innovation IDs generator, so although they
+ // aren't used as historical markers we should count them as innovation IDs here.
+ foreach(NeuronGene neuronGene in genome.NeuronGeneList)
+ {
+ if(neuronGene.InnovationId > maxInnovationId)
+ maxInnovationId = neuronGene.InnovationId;
+ }
+
+ foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
+ {
+ if(connectionGene.InnovationId > maxInnovationId)
+ maxInnovationId = connectionGene.InnovationId;
+ }
+ }
+
+ if(maxGenomeId==uint.MaxValue)
+ { //reset to zero.
+ maxGenomeId=0;
+ }
+ else
+ { // Increment to next available ID.
+ maxGenomeId++;
+ }
+
+ if(maxInnovationId==uint.MaxValue)
+ { //reset to zero.
+ maxInnovationId=0;
+ }
+ else
+ { // Increment to next available ID.
+ maxInnovationId++;
+ }
+
+ // Create an IdGenerator using the discovered maximum IDs.
+ IdGenerator idGenerator = new IdGenerator(maxGenomeId, maxInnovationId);
+
+ // Second pass: Check for duplicate genome IDs.
+ Hashtable genomeIdTable = new Hashtable();
+ Hashtable innovationIdTable = new Hashtable();
+ foreach(NeatGenome genome in genomeList)
+ {
+ if(genomeIdTable.Contains(genome.GenomeId))
+ { // Assign this genome a new Id.
+ genome.GenomeId = idGenerator.NextGenomeId;
+ }
+ //Register the ID.
+ genomeIdTable.Add(genome.GenomeId, null);
+ }
+
+ return idGenerator;
+ }
+
+
+ /// <summary>
+ /// Create an IdGenerator by interrogating the provided Genome.
+ /// </summary>
+ /// <param name="genome"></param>
+ /// <returns></returns>
+ public IdGenerator CreateIdGenerator(NeatGenome genome)
+ {
+ uint maxGenomeId=0;
+ uint maxInnovationId=0;
+
+ // First pass: Determine the current maximum genomeId and innovationId.
+ if(genome.GenomeId > maxGenomeId)
+ maxGenomeId = genome.GenomeId;
+
+ // Neuron IDs actually come from the innovation ID generator, so although they
+ // aren't used as historical markers we should count them as innovation IDs here.
+ foreach(NeuronGene neuronGene in genome.NeuronGeneList)
+ {
+ if(neuronGene.InnovationId > maxInnovationId)
+ maxInnovationId = neuronGene.InnovationId;
+ }
+
+ foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
+ {
+ if(connectionGene.InnovationId > maxInnovationId)
+ maxInnovationId = connectionGene.InnovationId;
+ }
+
+ if(maxGenomeId==uint.MaxValue)
+ { //reset to zero.
+ maxGenomeId=0;
+ }
+ else
+ { // Increment to next available ID.
+ maxGenomeId++;
+ }
+
+ if(maxInnovationId==uint.MaxValue)
+ { //reset to zero.
+ maxInnovationId=0;
+ }
+ else
+ { // Increment to next available ID.
+ maxInnovationId++;
+ }
+
+ // Create an IdGenerator using the discovered maximum IDs.
+ return new IdGenerator(maxGenomeId, maxInnovationId);
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeatGenome/NeatGenome.cs b/SharpNeatWalker/SharpNeatLib/NeatGenome/NeatGenome.cs
new file mode 100644
index 000000000..44e472f47
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeatGenome/NeatGenome.cs
@@ -0,0 +1,1506 @@
+using System;
+using System.Collections;
+using System.Xml;
+using System.Diagnostics;
+
+using SharpNeatLib.Evolution;
+using SharpNeatLib.Maths;
+using SharpNeatLib.NeatGenome.Xml;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.NeatGenome
+{
+ public class NeatGenome : AbstractGenome
+ {
+ #region NeuronConnectionLookup Class [Pruning]
+
+ class NeuronConnectionLookup
+ {
+ public NeuronGene neuronGene;
+ public ConnectionGeneList incomingList = new ConnectionGeneList();
+ public ConnectionGeneList outgoingList = new ConnectionGeneList();
+ }
+
+ #endregion
+
+ #region Class Variables [General]
+
+ // Ensure that the connectionGenes are sorted by innovation ID at all times.
+ NeuronGeneList neuronGeneList;
+ ConnectionGeneList connectionGeneList;
+
+ // For efficiency we store the number of input and output neurons. These two quantities do not change
+ // throughout the life of a genome. Note that inputNeuronCount does NOT include the bias neuron; use inputAndBiasNeuronCount for that.
+ // We also keep all input(including bias) neurons at the start of the neuronGeneList followed by
+ // the output neurons.
+ int inputNeuronCount;
+ int inputAndBiasNeuronCount;
+ int outputNeuronCount;
+ int inputBiasOutputNeuronCount;
+ int inputBiasOutputNeuronCountMinus2;
+
+ // Built on demand to represent all of the ConnectionGenes that do not have the FixedWeight bit set,
+ // so that MutateConnectionWeights can operate more efficiently.
+ ConnectionGeneList mutableConnectionGeneList = null;
+
+ #endregion
+
+ #region Class Variables [Pruning]
+ // Temp tables.
+ Hashtable neuronConnectionLookupTable = null;
+ Hashtable neuronGeneTable = null;
+
+ #endregion
+
+ #region Constructors
+
+ /// <summary>
+ /// Default constructor.
+ /// </summary>
+ public NeatGenome( uint genomeId,
+ NeuronGeneList neuronGeneList,
+ ConnectionGeneList connectionGeneList,
+ int inputNeuronCount,
+ int outputNeuronCount)
+ {
+ this.genomeId = genomeId;
+
+ this.neuronGeneList = neuronGeneList;
+ this.connectionGeneList = connectionGeneList;
+
+ this.inputNeuronCount = inputNeuronCount;
+ this.inputAndBiasNeuronCount = inputNeuronCount+1;
+ this.outputNeuronCount = outputNeuronCount;
+ this.inputBiasOutputNeuronCount = inputAndBiasNeuronCount + outputNeuronCount;
+ this.inputBiasOutputNeuronCountMinus2 = inputBiasOutputNeuronCount-2;
+
+ Debug.Assert(connectionGeneList.IsSorted(), "ConnectionGeneList is not sorted by innovation ID");
+ }
+
+ /// <summary>
+ /// Copy constructor.
+ /// </summary>
+ /// <param name="copyFrom"></param>
+ /// <param name="genomeId"></param>
+ public NeatGenome(NeatGenome copyFrom, uint genomeId)
+ {
+ this.genomeId = genomeId;
+
+ // No need to loop the lists and clone each element here because the NeuronGeneList and
+ // ConnectionGeneList copy constructors perform the per-element copying for us.
+ neuronGeneList = new NeuronGeneList(copyFrom.neuronGeneList);
+ connectionGeneList = new ConnectionGeneList(copyFrom.connectionGeneList);
+
+ inputNeuronCount = copyFrom.inputNeuronCount;
+ inputAndBiasNeuronCount = copyFrom.inputNeuronCount+1;
+ outputNeuronCount = copyFrom.outputNeuronCount;
+ inputBiasOutputNeuronCount = copyFrom.inputBiasOutputNeuronCount;
+ inputBiasOutputNeuronCountMinus2 = copyFrom.inputBiasOutputNeuronCountMinus2;
+
+ Debug.Assert(connectionGeneList.IsSorted(), "ConnectionGeneList is not sorted by innovation ID");
+ }
+
+ #endregion
+
+ #region Properties
+
+ public NeuronGeneList NeuronGeneList
+ {
+ get
+ {
+ return neuronGeneList;
+ }
+ }
+
+ public ConnectionGeneList ConnectionGeneList
+ {
+ get
+ {
+ return connectionGeneList;
+ }
+ }
+
+ public int InputNeuronCount
+ {
+ get
+ {
+ return inputNeuronCount;
+ }
+ }
+
+ public int OutputNeuronCount
+ {
+ get
+ {
+ return outputNeuronCount;
+ }
+ }
+
+ #endregion
+
+ #region IGenome
+
+ /// <summary>
+ /// Some (most) types of network have fixed numbers of input and output nodes and will not work correctly or
+ /// throw an exception if we try to use inputs/outputs that do not exist. This method allows us to check
+ /// compatibility before we begin.
+ /// </summary>
+ /// <param name="inputCount"></param>
+ /// <param name="outputCount"></param>
+ /// <returns></returns>
+ public override bool IsCompatibleWithNetwork(int inputCount, int outputCount)
+ {
+ return (inputCount==inputNeuronCount) && (outputCount==outputNeuronCount);
+ }
+
+ /// <summary>
+ /// Asexual reproduction with built in mutation.
+ /// </summary>
+ /// <returns></returns>
+ public override IGenome CreateOffspring_Asexual(EvolutionAlgorithm ea)
+ {
+ // Make an exact copy this Genome.
+ NeatGenome offspring = new NeatGenome(this, ea.NextGenomeId);
+
+ // Mutate the new genome.
+ offspring.Mutate(ea);
+ return offspring;
+ }
+
+
+ private void CreateOffspring_Sexual_AddGene(ConnectionGene connectionGene, bool overwriteExisting)
+ {
+ ConnectionEndpointsStruct connectionKey = new ConnectionEndpointsStruct(
+ connectionGene.SourceNeuronId,
+ connectionGene.TargetNeuronId);
+
+ // Check if a matching gene has already been added.
+ object oIdx = newConnectionGeneTable[connectionKey];
+ if(oIdx==null)
+ { // No matching gene has been added.
+ // Register this new gene with the newConnectionGeneTable - store its index within newConnectionGeneList.
+ newConnectionGeneTable[connectionKey] = newConnectionGeneList.Count;
+
+ // Add the gene to the list.
+ newConnectionGeneList.Add(connectionGene);
+ }
+ else if(overwriteExisting)
+ {
+ // Overwrite the existing matching gene with this one. In fact only the weight value differs between two
+ // matching connection genes, so just overwrite the existing genes weight value.
+
+ // Remember that we stored the gene's index in newConnectionGeneTable. So use it here.
+ newConnectionGeneList[(int)oIdx].Weight = connectionGene.Weight;
+ }
+ }
+
+
+ private void CreateOffspring_Sexual_ProcessCorrelationItem(CorrelationItem correlationItem, byte fitSwitch, bool combineDisjointExcessFlag, NeatParameters np)
+ {
+ switch(correlationItem.CorrelationItemType)
+ {
+ // Disjoint and excess genes.
+ case CorrelationItemType.DisjointConnectionGene:
+ case CorrelationItemType.ExcessConnectionGene:
+ {
+ // If the gene is in the fittest parent then override any existing entry in the connectionGeneTable.
+ if(fitSwitch==1 && correlationItem.ConnectionGene1!=null)
+ {
+ CreateOffspring_Sexual_AddGene(correlationItem.ConnectionGene1, true);
+ return;
+ }
+
+ if(fitSwitch==2 && correlationItem.ConnectionGene2!=null)
+ {
+ CreateOffspring_Sexual_AddGene(correlationItem.ConnectionGene2, true);
+ return;
+ }
+
+ // The disjoint/excess gene is on the least fit parent.
+ //if(Utilities.NextDouble() < np.pDisjointExcessGenesRecombined)
+ if(combineDisjointExcessFlag)
+ { // Include the gene n% of the time from whichever parent contains it.
+ if(correlationItem.ConnectionGene1!=null)
+ {
+ // Gene is on least fit parent. Only add it to the genome if a matching gene hasn't already been added.
+ CreateOffspring_Sexual_AddGene(correlationItem.ConnectionGene1, false);
+ return;
+ }
+ if(correlationItem.ConnectionGene2!=null)
+ {
+ // Gene is on least fit parent. Only add it to the genome if a matching gene hasn't already been added.
+ CreateOffspring_Sexual_AddGene(correlationItem.ConnectionGene2, false);
+ return;
+ }
+ }
+ break;
+ }
+
+ case CorrelationItemType.MatchedConnectionGenes:
+ {
+ if(RouletteWheel.SingleThrow(0.5))
+ {
+ // Override any existing entries in the table.
+ CreateOffspring_Sexual_AddGene(correlationItem.ConnectionGene1, true);
+ }
+ else
+ {
+ // Override any existing entries in the table.
+ CreateOffspring_Sexual_AddGene(correlationItem.ConnectionGene2, true);
+ }
+ break;
+ }
+ }
+ }
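+
+ // Worked example (illustrative only, not part of the original source). Suppose the fitter parent
+ // (fitSwitch==1) carries connection innovation IDs {1,2,3,5,8} and the other parent carries {1,2,3,6,8,9}:
+ //   - 1,2,3,8 are matched genes: each is inherited from one parent or the other with probability 0.5.
+ //   - 5 is disjoint on the fitter parent: it is always inherited.
+ //   - 6 (disjoint) and 9 (excess) exist only on the less fit parent: they are inherited only when
+ //     combineDisjointExcessFlag was set for this crossover, and never overwrite an existing entry.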
+
+ /// <summary>
+ /// A table that keeps track of which connections have been added to the sexually reproduced child genome.
+ /// This is cleared on each call to CreateOffspring_Sexual() and is only declared at class level to
+ /// prevent having to re-allocate the table and its associated memory on each invocation.
+ /// </summary>
+ Hashtable newConnectionGeneTable;
+ Hashtable newNeuronGeneTable;
+ ConnectionGeneList newConnectionGeneList;
+
+ public override IGenome CreateOffspring_Sexual(EvolutionAlgorithm ea, IGenome parent)
+ {
+ CorrelationResults correlationResults = CorrelateConnectionGeneLists(connectionGeneList, ((NeatGenome)parent).connectionGeneList);
+
+ Debug.Assert(correlationResults.PerformIntegrityCheck(), "CorrelationResults failed integrity check.");
+
+ //----- Connection Genes.
+ // We will temporarily store the offspring's genes in newConnectionGeneList and keep track of which genes
+ // exist using newConnectionGeneTable. Here we ensure these objects are created, and if they already existed
+ // then ensure they are cleared. Clearing existing objects is more efficient than creating new ones because
+ // allocated memory can be re-used.
+ if(newConnectionGeneTable==null)
+ { // Provide a capacity figure to the new Hashtable. The offspring will be the same length (or thereabouts).
+ newConnectionGeneTable = new Hashtable(connectionGeneList.Count);
+ }
+ else
+ {
+ newConnectionGeneTable.Clear();
+ }
+ //TODO: No 'capacity' constructor on CollectionBase. Create modified/custom CollectionBase.
+ // newConnectionGeneList must be constructed on each call because it is passed to a new NeatGenome
+ // at construction time and a permanent reference to the list is kept.
+ newConnectionGeneList = new ConnectionGeneList(ConnectionGeneList.Count);
+
+ // A switch that stores which parent is fittest: 1 or 2. Ties are resolved randomly below. More efficient to calculate this just once.
+ byte fitSwitch;
+ if(Fitness > parent.Fitness)
+ fitSwitch = 1;
+ else if(Fitness < parent.Fitness)
+ fitSwitch = 2;
+ else
+ { // Select one of the parents at random to be the 'master' genome during crossover.
+ if(Utilities.NextDouble() < 0.5)
+ fitSwitch = 1;
+ else
+ fitSwitch = 2;
+ }
+
+ bool combineDisjointExcessFlag = Utilities.NextDouble() < ea.NeatParameters.pDisjointExcessGenesRecombined;
+
+ // Loop through the correlationResults, building a table of ConnectionGenes from the parents that will make it into our
+ // new [single] offspring. We use a table keyed on connection end points to prevent passing connections to the offspring
+ // that may have the same end points but a different innovation number - effectively we filter out duplicate connections.
+ int idxBound = correlationResults.CorrelationItemList.Count;
+ for(int i=0; i<idxBound; i++)
+ {
+ CreateOffspring_Sexual_ProcessCorrelationItem((CorrelationItem)correlationResults.CorrelationItemList[i], fitSwitch, combineDisjointExcessFlag, ea.NeatParameters);
+ }
+
+ /// <summary>
+ /// Decode the genome's 'DNA' into a working network.
+ /// </summary>
+ /// <returns></returns>
+ public override INetwork Decode(IActivationFunction activationFn)
+ {
+ lock (SyncRoot)
+ if (network == null)
+ {
+ //network = GenomeDecoder.DecodeToConcurrentNetwork(this, activationFn);
+ network = GenomeDecoder.DecodeToFloatFastConcurrentNetwork(this, activationFn);
+ //network = GenomeDecoder.DecodeToIntegerFastConcurrentNetwork(this);
+ //network = GenomeDecoder.DecodeToFastConcurrentMultiplicativeNetwork(this, activationFn);
+ }
+
+ return network;
+ }
+
+ /// <summary>
+ /// Clone this genome.
+ /// </summary>
+ /// <returns></returns>
+ public override IGenome Clone(EvolutionAlgorithm ea)
+ {
+ // Utilise the copy constructor for cloning.
+ return new NeatGenome(this, ea.NextGenomeId);
+ }
+
+
+ public override bool IsCompatibleWithGenome(IGenome comparisonGenome, NeatParameters neatParameters)
+ {
+ /* A very simple way of implementing this routine is to call CorrelateConnectionGeneLists and to then loop
+ * through the correlation items, calculating a compatibility score as we go. However, this routine
+ * is heavily used and in performance tests was shown to consume 40% of the CPU time for the core NEAT code.
+ * Therefore this new routine has been rewritten with its own version of the logic within
+ * CorrelateConnectionGeneLists. This allows us to only keep comparing genes up to the point where the
+ * threshold is passed. This also eliminates the need to build the correlation results list, this difference
+ * alone is responsible for a 200x performance improvement when testing with a 1664 length genome!!
+ *
+ * A further optimisation is achieved by comparing the genes starting at the end of the genomes which is
+ * where most disparities are located - new novel genes are always attached to the end of genomes. This
+ * has the result of complicating the routine because we must now invoke additional logic to determine
+ * which genes are excess and when the first disjoint gene is found. This is done with an extra integer:
+ *
+ * int excessGenesSwitch=0; // indicates to the loop that it is handling the first gene.
+ * =1; // Indicates that the first gene was excess and on genome 1.
+ * =2; // Indicates that the first gene was excess and on genome 2.
+ * =3; // Indicates that there are no more excess genes.
+ *
+ * This extra logic has a slight performance hit, but this is minor especially in comparison to the savings that
+ * are expected to be achieved overall during a NEAT search.
+ *
+ * If you have trouble understanding this logic then it might be best to work through the previous version of
+ * this routine (below) that scans through the genomes from start to end, and which is a lot simpler.
+ *
+ */
+ ConnectionGeneList list1 = this.connectionGeneList;
+ ConnectionGeneList list2 = ((NeatGenome)comparisonGenome).connectionGeneList;
+ int excessGenesSwitch=0;
+
+ // Store these heavily used values locally.
+ int list1Count = list1.Count;
+ int list2Count = list2.Count;
+
+ //----- Test for special cases.
+ if(list1Count==0 && list2Count==0)
+ { // Both lists are empty! No disparities, therefore the genomes are compatible!
+ return true;
+ }
+
+ if(list1Count==0)
+ { // All list2 genes are excess.
+ return ((list2.Count * neatParameters.compatibilityExcessCoeff) < neatParameters.compatibilityThreshold);
+ }
+
+ if(list2Count==0)
+ {
+ // All list1 genes are excess.
+ return ((list1Count * neatParameters.compatibilityExcessCoeff) < neatParameters.compatibilityThreshold);
+ }
+
+ //----- Both ConnectionGeneLists contain genes - compare the contents.
+ double compatibility=0;
+ int list1Idx=list1Count-1;
+ int list2Idx=list2Count-1;
+ ConnectionGene connectionGene1 = list1[list1Idx];
+ ConnectionGene connectionGene2 = list2[list2Idx];
+ for(;;)
+ {
+ if(connectionGene2.InnovationId > connectionGene1.InnovationId)
+ {
+ // Most common test case(s) at top for efficiency.
+ if(excessGenesSwitch==3)
+ { // No more excess genes. Therefore this mismatch is disjoint.
+ compatibility += neatParameters.compatibilityDisjointCoeff;
+ }
+ else if(excessGenesSwitch==2)
+ { // Another excess gene on genome 2.
+ compatibility += neatParameters.compatibilityExcessCoeff;
+ }
+ else if(excessGenesSwitch==1)
+ { // We have found the first non-excess gene.
+ excessGenesSwitch=3;
+ compatibility += neatParameters.compatibilityDisjointCoeff;
+ }
+ else //if(excessGenesSwitch==0)
+ { // First gene is excess, and is on genome 2.
+ excessGenesSwitch = 2;
+ compatibility += neatParameters.compatibilityExcessCoeff;
+ }
+
+ // Move to the next gene in list2.
+ list2Idx--;
+ }
+ else if(connectionGene1.InnovationId == connectionGene2.InnovationId)
+ {
+ // No more excess genes. It's quicker to set this every time than to test whether it is not yet 3.
+ excessGenesSwitch=3;
+
+ // Matching genes. Increase compatibility by weight difference * coeff.
+ compatibility += Math.Abs(connectionGene1.Weight-connectionGene2.Weight) * neatParameters.compatibilityWeightDeltaCoeff;
+
+ // Move to the next gene in both lists.
+ list1Idx--;
+ list2Idx--;
+ }
+ else // (connectionGene2.InnovationId < connectionGene1.InnovationId)
+ {
+ // Most common test case(s) at top for efficiency.
+ if(excessGenesSwitch==3)
+ { // No more excess genes. Therefore this mismatch is disjoint.
+ compatibility += neatParameters.compatibilityDisjointCoeff;
+ }
+ else if(excessGenesSwitch==1)
+ { // Another excess gene on genome 1.
+ compatibility += neatParameters.compatibilityExcessCoeff;
+ }
+ else if(excessGenesSwitch==2)
+ { // We have found the first non-excess gene.
+ excessGenesSwitch=3;
+ compatibility += neatParameters.compatibilityDisjointCoeff;
+ }
+ else //if(excessGenesSwitch==0)
+ { // First gene is excess, and is on genome 1.
+ excessGenesSwitch = 1;
+ compatibility += neatParameters.compatibilityExcessCoeff;
+ }
+
+ // Move to the next gene in list1.
+ list1Idx--;
+ }
+
+ if(compatibility >= neatParameters.compatibilityThreshold)
+ return false;
+
+ // Check if we have reached the end of one (or both) of the lists. If we have reached the end of both then
+ // we execute the first 'if' block - but it doesn't matter since the loop is not entered if both lists have
+ // been exhausted.
+ if(list1Idx < 0)
+ {
+ // All remaining list2 genes are disjoint.
+ compatibility += (list2Idx+1) * neatParameters.compatibilityDisjointCoeff;
+ return (compatibility < neatParameters.compatibilityThreshold);
+ }
+
+ if(list2Idx < 0)
+ {
+ // All remaining list1 genes are disjoint.
+ compatibility += (list1Idx+1) * neatParameters.compatibilityDisjointCoeff;
+ return (compatibility < neatParameters.compatibilityThreshold);
+ }
+
+ connectionGene1 = list1[list1Idx];
+ connectionGene2 = list2[list2Idx];
+ }
+ }
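+
+ // Numeric sketch (illustrative, not part of the original source). With, say, disjoint and excess
+ // coefficients of 1.0, a weight-delta coefficient of 3.0 and a threshold of 100, comparing genomes
+ // with innovation IDs {1,2,3,7} and {1,2,3,5,9} accumulates:
+ //   gene 9        -> excess   (+1.0)
+ //   genes 7 and 5 -> disjoint (+1.0 each)
+ //   genes 3,2,1   -> matched  (+|weight1-weight2| * 3.0 each)
+ // and the genomes are reported compatible as long as the running total stays below the threshold.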
+
+/* The first version of the optimised IsCompatibleWithGenome(). This version scans forward through the genomes,
+ * keeping a running total of the compatibility figure as it goes. This version has been superseded by the one above!
+ */
+// public override bool IsCompatibleWithGenome(IGenome comparisonGenome, NeatParameters neatParameters)
+// {
+// /* A very simple way of implementing this routine is to call CorrelateConnectionGeneLists and to then loop
+// * through the correlation items, calculating a compatibility score as we go. However, this routine
+// * is heavily used and in performance tests was shown consume 40% of the CPU time for the core NEAT code.
+// * Therefore this new routine has been rewritten with it's own version of the logic within
+// * CorrelateConnectionGeneLists. This allows us to only keep comparing genes up to the point where the
+// * threshold is passed.
+// */
+// ConnectionGeneList list1 = this.connectionGeneList;
+// ConnectionGeneList list2 = ((NeatGenome)comparisonGenome).connectionGeneList;
+//
+// // Store these heavily used values locally.
+// int list1Count = list1.Count;
+// int list2Count = list2.Count;
+//
+// //----- Test for special cases.
+// if(list1Count==0 && list2Count==0)
+// { // Both lists are empty! No disparities, therefore the genomes are compatible!
+// return true;
+// }
+//
+// if(list1Count==0)
+// { // All list2 genes are excess.
+// return ((list2Count * neatParameters.compatibilityExcessCoeff) < neatParameters.compatibilityThreshold);
+// }
+//
+// if(list2Count==0)
+// {
+// // All list1 genes are excess.
+// return ((list1Count * neatParameters.compatibilityExcessCoeff) < neatParameters.compatibilityThreshold);
+// }
+//
+// //----- Both ConnectionGeneLists contain genes - compare the contents.
+// double compatibility=0;
+// int list1Idx=0;
+// int list2Idx=0;
+// ConnectionGene connectionGene1 = list1[list1Idx];
+// ConnectionGene connectionGene2 = list2[list2Idx];
+// for(;;)
+// {
+// if(connectionGene2.InnovationId < connectionGene1.InnovationId)
+// {
+// // connectionGene2 is disjoint.
+// compatibility += neatParameters.compatibilityDisjointCoeff;
+//
+// // Move to the next gene in list2.
+// list2Idx++;
+// }
+// else if(connectionGene1.InnovationId == connectionGene2.InnovationId)
+// {
+// // Matching genes. Increase compatibility by weight difference * coeff.
+// compatibility += Math.Abs(connectionGene1.Weight-connectionGene2.Weight) * neatParameters.compatibilityWeightDeltaCoeff;
+//
+// // Move to the next gene in both lists.
+// list1Idx++;
+// list2Idx++;
+// }
+// else // (connectionGene2.InnovationId > connectionGene1.InnovationId)
+// {
+// // connectionGene1 is disjoint.
+// compatibility += neatParameters.compatibilityDisjointCoeff;
+//
+// // Move to the next gene in list1.
+// list1Idx++;
+// }
+//
+// if(compatibility >= neatParameters.compatibilityThreshold)
+// return false;
+//
+// // Check if we have reached the end of one (or both) of the lists. If we have reached the end of both then
+// // we execute the first 'if' block - but it doesn't matter since the loop is not entered if both lists have
+// // been exhausted.
+// if(list1Idx >= list1Count)
+// {
+// // All remaining list2 genes are excess.
+// compatibility += (list2Count - list2Idx) * neatParameters.compatibilityExcessCoeff;
+// return (compatibility < neatParameters.compatibilityThreshold);
+// }
+//
+// if(list2Idx >= list2Count)
+// {
+// // All remaining list1 genes are excess.
+// compatibility += (list1Count - list1Idx) * neatParameters.compatibilityExcessCoeff;
+// return (compatibility < neatParameters.compatibilityThreshold);
+// }
+//
+// connectionGene1 = list1[list1Idx];
+// connectionGene2 = list2[list2Idx];
+// }
+// }
+
+
+
+/* The original CalculateCompatibility function converted to IsCompatibleWithGenome(). This calls CorrelateConnectionGeneLists() and then calculates
+ * a compatibility score from the results. If the score is over the threshold then the genomes are incompatible.
+ * This routine is superseded by the far more efficient IsCompatibleWithGenome() method.
+ */
+// ///
+// /// Compare this IGenome with the provided one. This routine is utilized by the speciation logic.
+// ///
+// ///
+// ///
+// public override bool IsCompatibleWithGenome(IGenome comparisonGenome, NeatParameters neatParameters)
+// {
+// CorrelationResults correlationResults = CorrelateConnectionGeneLists(connectionGeneList, ((NeatGenome)comparisonGenome).connectionGeneList);
+//
+// double compatibilityVal = neatParameters.compatibilityDisjointCoeff * correlationResults.CorrelationStatistics.DisjointConnectionGeneCount +
+// neatParameters.compatibilityExcessCoeff * correlationResults.CorrelationStatistics.ExcessConnectionGeneCount;
+//
+// if(correlationResults.CorrelationStatistics.MatchingGeneCount > 0)
+// {
+// compatibilityVal += neatParameters.compatibilityWeightDeltaCoeff * correlationResults.CorrelationStatistics.ConnectionWeightDelta;
+// }
+//
+// return compatibilityVal < neatParameters.compatibilityThreshold;
+// }
+
+ public override void Write(XmlNode parentNode)
+ {
+ XmlGenomeWriterStatic.Write(parentNode, this);
+ }
+
+ /// <summary>
+ /// For debug purposes only.
+ /// </summary>
+ /// <returns>Returns true if genome integrity checks out OK.</returns>
+ public override bool PerformIntegrityCheck()
+ {
+ return connectionGeneList.IsSorted();
+ }
+
+ #endregion
+
+ #region Public Methods
+
+ public void FixConnectionWeights()
+ {
+ int bound = connectionGeneList.Count;
+ for(int i=0; i<bound; i++)
+ connectionGeneList[i].FixedWeight = true;
+ }
+
+ /// <summary>
+ /// Add a new node to the Genome. We do this by removing a connection at random and inserting
+ /// a new node and two new connections that make the same circuit as the original connection.
+ /// This way the new node is properly integrated into the network from the outset.
+ /// </summary>
+ /// <param name="ea"></param>
+ private void Mutate_AddNode(EvolutionAlgorithm ea)
+ {
+ if(connectionGeneList.Count==0)
+ return;
+
+ // Select a connection at random.
+ int connectionToReplaceIdx = (int)Math.Floor(Utilities.NextDouble() * connectionGeneList.Count);
+ ConnectionGene connectionToReplace = connectionGeneList[connectionToReplaceIdx];
+
+ // Delete the existing connection.
+ connectionGeneList.RemoveAt(connectionToReplaceIdx);
+
+ // Check if this connection has already been split on another genome. If so then we should re-use the
+ // neuron ID and two connection ID's so that matching structures within the population maintain the same ID.
+ object existingNeuronGeneStruct = ea.NewNeuronGeneStructTable[connectionToReplace.InnovationId];
+
+ NeuronGene newNeuronGene;
+ ConnectionGene newConnectionGene1;
+ ConnectionGene newConnectionGene2;
+ IActivationFunction actFunct;
+ if(existingNeuronGeneStruct==null)
+ { // No existing matching structure, so generate some new ID's.
+
+ //TODO: DAVID proper random activation function
+ // Replace connectionToReplace with two new connections and a neuron.
+ actFunct=ActivationFunctionFactory.GetRandomActivationFunction(ea.NeatParameters);
+ newNeuronGene = new NeuronGene(ea.NextInnovationId, NeuronType.Hidden, actFunct);
+ newConnectionGene1 = new ConnectionGene(ea.NextInnovationId, connectionToReplace.SourceNeuronId, newNeuronGene.InnovationId, 1.0);
+ newConnectionGene2 = new ConnectionGene(ea.NextInnovationId, newNeuronGene.InnovationId, connectionToReplace.TargetNeuronId, connectionToReplace.Weight);
+
+ // Register the new ID's with NewNeuronGeneStructTable.
+ ea.NewNeuronGeneStructTable.Add(connectionToReplace.InnovationId,
+ new NewNeuronGeneStruct(newNeuronGene, newConnectionGene1, newConnectionGene2));
+ }
+ else
+ { // An existing matching structure has been found. Re-use its ID's
+
+ //TODO: DAVID proper random activation function
+ // Replace connectionToReplace with two new connections and a neuron.
+ actFunct = ActivationFunctionFactory.GetRandomActivationFunction(ea.NeatParameters);
+ NewNeuronGeneStruct tmpStruct = (NewNeuronGeneStruct)existingNeuronGeneStruct;
+ newNeuronGene = new NeuronGene(tmpStruct.NewNeuronGene.InnovationId, NeuronType.Hidden, actFunct);
+ newConnectionGene1 = new ConnectionGene(tmpStruct.NewConnectionGene_Input.InnovationId, connectionToReplace.SourceNeuronId, newNeuronGene.InnovationId, 1.0);
+ newConnectionGene2 = new ConnectionGene(tmpStruct.NewConnectionGene_Output.InnovationId, newNeuronGene.InnovationId, connectionToReplace.TargetNeuronId, connectionToReplace.Weight);
+ }
+
+ // Add the new genes to the genome.
+ neuronGeneList.Add(newNeuronGene);
+ connectionGeneList.InsertIntoPosition(newConnectionGene1);
+ connectionGeneList.InsertIntoPosition(newConnectionGene2);
+ }
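+
+ // Illustrative sketch (not part of the original source): splitting an existing connection A->B of weight w
+ // produces a hidden node N plus connections A->N (weight 1.0) and N->B (weight w), so the behaviour of the
+ // original circuit is approximately preserved at the moment the node is added.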
+
+ private void Mutate_AddConnection(EvolutionAlgorithm ea)
+ {
+ // We are always guaranteed to have enough neurons to form connections - because the input/output neurons are
+ // fixed. Any domain that doesn't require input/outputs is a bit nonsensical!
+
+ // Make a fixed number of attempts at finding a suitable connection to add.
+
+ if(neuronGeneList.Count>1)
+ { // At least 2 neurons, so we have a chance at creating a connection.
+
+ for(int attempts=0; attempts<5; attempts++)
+ {
+ // Select candidate source and target neurons. Any neuron can be used as the source. Input neurons
+ // should not be used as a target
+ int srcNeuronIdx;
+ int tgtNeuronIdx;
+
+ /* Here's some code for adding connections that attempts to avoid any recursive connections
+ * within a network by only linking to neurons with innovation IDs greater than the source neuron's.
+ * Unfortunately this doesn't work because new neurons with large innovation IDs are inserted
+ * randomly through a network's topology! Hence this code remains here in readiness to be resurrected
+ * as part of some future work to support feedforward nets.
+// if(ea.NeatParameters.feedForwardOnly)
+// {
+// /* We can ensure that all networks are feedforward only by only adding feedforward connections here.
+// * Feed forward connections fall into one of the following categories. All references to indexes
+// * are indexes within this genome's neuronGeneList:
+// * 1) Source neuron is an input or hidden node, target is an output node.
+// * 2) Source is an input or hidden node, target is a hidden node with an index greater than the source node's index.
+// * 3) Source is an output node, target is an output node with an index greater than the source node's index.
+// *
+// * These rules are easier to understand if you understand how the different types if neuron are arranged within
+// * the neuronGeneList array. Neurons are arranged in the following order:
+// *
+// * 1) A single bias neuron is always first.
+// * 2) Experiment specific input neurons.
+// * 3) Output neurons.
+// * 4) Hidden neurons.
+// *
+// * The quantity and innovationID of all neurons within the first 3 categories remains fixed throughout the life
+// * of an experiment, hence we always know where to find the bias, input and output nodes. The number of hidden nodes
+// * can vary as ne nodes are created, pruned away or perhaps dropped during crossover, however they are always arranged
+// * newest to oldest, or in other words sorted by innovation idea, lowest ID first.
+// *
+// * If output neurons were at the end of the list with hidden nodes in the middle then generating feedforward
+// * connections would be as easy as selecting a target neuron with a higher index than the source neuron. However, that
+// * type of arrangement is not conducive to the operation of other routines, hence this routine is a little bit more
+// * complicated as a result.
+// */
+//
+// // Ok, for a source neuron we can pick any neuron except the last output neuron.
+// int neuronIdxCount = neuronGeneList.Count;
+// int neuronIdxBound = neuronIdxCount-1;
+//
+// // Generate count-1 possibilities and avoid the last output neuron's idx.
+// srcNeuronIdx = (int)Math.Floor(Utilities.NextDouble() * neuronIdxBound);
+// if(srcNeuronIdx>inputBiasOutputNeuronCountMinus2) srcNeuronIdx++;
+//
+//
+// // Now generate a target idx depending on what type of neuron srcNeuronIdx is pointing to.
+// if(srcNeuronIdx
+ /// <summary>
+ /// We define a simple neuron structure as a neuron that has a single outgoing or single incoming connection.
+ /// With such a structure we can easily eliminate the neuron and shift its connections to an adjacent neuron.
+ /// If the neuron's non-linearity was not being used then such a mutation is a simplification of the network
+ /// structure that shouldn't adversely affect its functionality.
+ /// </summary>
+ private void Mutate_DeleteSimpleNeuronStructure(EvolutionAlgorithm ea)
+ {
+ // We will use the NeuronConnectionLookupTable to find the simple structures.
+ EnsureNeuronConnectionLookupTable();
+
+ // Build a list of candidate simple neurons to choose from.
+ ArrayList simpleNeuronIdList = new ArrayList();
+
+ foreach(NeuronConnectionLookup lookup in neuronConnectionLookupTable.Values)
+ {
+ // If we test the connection count with <=1 then we also pick up neurons that are in dead-end circuits,
+ // RemoveSimpleNeuron is then able to delete these neurons from the network structure along with any
+ // associated connections.
+ if(lookup.neuronGene.NeuronType == NeuronType.Hidden)
+ {
+ if((lookup.incomingList.Count<=1) || (lookup.outgoingList.Count<=1))
+ simpleNeuronIdList.Add(lookup.neuronGene.InnovationId);
+ }
+ }
+
+ // Are there any candidate simple neurons?
+ if(simpleNeuronIdList.Count==0)
+ { // No candidate neurons. As a fallback lets delete a connection.
+ Mutate_DeleteConnection();
+ return;
+ }
+
+ // Pick a simple neuron at random.
+ int idx = (int)Math.Floor(Utilities.NextDouble() * simpleNeuronIdList.Count);
+ uint neuronId = (uint)simpleNeuronIdList[idx];
+ RemoveSimpleNeuron(neuronId, ea);
+ }
+
+ /// <summary>
+ /// Remove the specified simple neuron, bridging its incoming and outgoing connections so that the
+ /// surrounding network structure is preserved. The routine also deletes the neuron's old connections.
+ /// </summary>
+ /// <param name="neuronId"></param>
+ /// <param name="ea"></param>
+ private void RemoveSimpleNeuron(uint neuronId, EvolutionAlgorithm ea)
+ {
+ // Create new connections that connect all of the incoming and outgoing neurons
+ // that currently exist for the simple neuron.
+ NeuronConnectionLookup lookup = (NeuronConnectionLookup)neuronConnectionLookupTable[neuronId];
+ foreach(ConnectionGene incomingConnection in lookup.incomingList)
+ {
+ foreach(ConnectionGene outgoingConnection in lookup.outgoingList)
+ {
+ if(TestForExistingConnection(incomingConnection.SourceNeuronId, outgoingConnection.TargetNeuronId))
+ { // Connection already exists.
+ continue;
+ }
+
+ // Test for matching connection within NewConnectionGeneTable.
+ ConnectionEndpointsStruct connectionKey = new ConnectionEndpointsStruct(incomingConnection.SourceNeuronId,
+ outgoingConnection.TargetNeuronId);
+ ConnectionGene existingConnection = (ConnectionGene)ea.NewConnectionGeneTable[connectionKey];
+ ConnectionGene newConnectionGene;
+ if(existingConnection==null)
+ { // No matching connection found. Create a connection with a new ID.
+ newConnectionGene = new ConnectionGene(ea.NextInnovationId,
+ incomingConnection.SourceNeuronId,
+ outgoingConnection.TargetNeuronId,
+ (Utilities.NextDouble() * ea.NeatParameters.connectionWeightRange) - ea.NeatParameters.connectionWeightRange/2.0);
+
+ // Register the new ID with NewConnectionGeneTable.
+ ea.NewConnectionGeneTable.Add(connectionKey, newConnectionGene);
+
+ // Add the new gene to the genome.
+ connectionGeneList.Add(newConnectionGene);
+ }
+ else
+ { // Matching connection found. Re-use its ID.
+ newConnectionGene = new ConnectionGene(existingConnection.InnovationId,
+ incomingConnection.SourceNeuronId,
+ outgoingConnection.TargetNeuronId,
+ (Utilities.NextDouble() * ea.NeatParameters.connectionWeightRange) - ea.NeatParameters.connectionWeightRange/2.0);
+
+ // Add the new gene to the genome. Use InsertIntoPosition() to ensure we don't break the sort
+ // order of the connection genes.
+ connectionGeneList.InsertIntoPosition(newConnectionGene);
+ }
+
+
+ }
+ }
+
+ // Delete the old connections.
+ foreach(ConnectionGene incomingConnection in lookup.incomingList)
+ connectionGeneList.Remove(incomingConnection);
+
+ foreach(ConnectionGene outgoingConnection in lookup.outgoingList)
+ {
+ // Filter out recurrent connections - they will have already been
+ // deleted in the loop through 'lookup.incomingList'.
+ if(outgoingConnection.TargetNeuronId != neuronId)
+ connectionGeneList.Remove(outgoingConnection);
+ }
+
+ // Delete the simple neuron - it no longer has any connections to or from it.
+ neuronGeneList.Remove(neuronId);
+ }
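+
+ // Illustrative sketch (not part of the original source): if hidden neuron N has incoming connections
+ // A->N and B->N and a single outgoing connection N->C, removing N replaces that structure with the
+ // direct connections A->C and B->C (re-using innovation IDs where an equivalent connection has already
+ // been created elsewhere in the population), then deletes N and its old connections.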
+
+
+ private void MutateConnectionWeight(ConnectionGene connectionGene, NeatParameters np, ConnectionMutationParameterGroup paramGroup)
+ {
+ switch(paramGroup.PerturbationType)
+ {
+ case ConnectionPerturbationType.JiggleEven:
+ {
+ connectionGene.Weight += (Utilities.NextDouble()*2-1.0) * paramGroup.PerturbationFactor;
+
+ // Cap the connection weight. Large connection weights reduce the effectiveness of the search.
+ connectionGene.Weight = Math.Max(connectionGene.Weight, -np.connectionWeightRange/2.0);
+ connectionGene.Weight = Math.Min(connectionGene.Weight, np.connectionWeightRange/2.0);
+ break;
+ }
+ case ConnectionPerturbationType.JiggleND:
+ {
+ connectionGene.Weight += RandLib.gennor(0, paramGroup.Sigma);
+
+ // Cap the connection weight. Large connection weights reduce the effectiveness of the search.
+ connectionGene.Weight = Math.Max(connectionGene.Weight, -np.connectionWeightRange/2.0);
+ connectionGene.Weight = Math.Min(connectionGene.Weight, np.connectionWeightRange/2.0);
+ break;
+ }
+ case ConnectionPerturbationType.Reset:
+ {
+ // TODO: Precalculate connectionWeightRange / 2.
+ connectionGene.Weight = (Utilities.NextDouble()*np.connectionWeightRange) - np.connectionWeightRange/2.0;
+ break;
+ }
+ default:
+ {
+ throw new Exception("Unexpected ConnectionPerturbationType");
+ }
+ }
+ }
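+
+ // Example (illustrative, not part of the original source): with connectionWeightRange = 3.0 the jiggled
+ // weight is clamped to [-1.5, 1.5], and a Reset perturbation draws a fresh weight uniformly from that
+ // same interval.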
+
+ private void Mutate_ConnectionWeights(EvolutionAlgorithm ea)
+ {
+ // Determine the type of weight mutation to perform.
+ int groupCount = ea.NeatParameters.ConnectionMutationParameterGroupList.Count;
+ double[] probabilties = new double[groupCount];
+ for(int i=0; i<groupCount; i++)
+ probabilties[i] = ea.NeatParameters.ConnectionMutationParameterGroupList[i].ActivationProportion;
+
+ // Pick a parameter group, using the probabilities as roulette-wheel proportions.
+ ConnectionMutationParameterGroup paramGroup = ea.NeatParameters.ConnectionMutationParameterGroupList[RouletteWheel.SingleThrow(probabilties)];
+
+ // Perform mutations of the selected type.
+ if(paramGroup.SelectionType==ConnectionSelectionType.Proportional)
+ {
+ bool mutationOccured=false;
+ int connectionCount = connectionGeneList.Count;
+ for(int i=0; i<connectionCount; i++)
+ {
+ if(Utilities.NextDouble() < paramGroup.Proportion)
+ {
+ MutateConnectionWeight(connectionGeneList[i], ea.NeatParameters, paramGroup);
+ mutationOccured = true;
+ }
+ }
+ if(!mutationOccured && connectionCount>0)
+ { // Perform at least one mutation. Pick a gene at random.
+ MutateConnectionWeight( connectionGeneList[(int)(Utilities.NextDouble() * connectionCount)],
+ ea.NeatParameters,
+ paramGroup);
+ }
+ }
+ else // if(paramGroup.SelectionType==ConnectionSelectionType.FixedQuantity)
+ {
+ // Determine how many mutations to perform. At least one - if there are any genes.
+ int connectionCount = connectionGeneList.Count;
+ int mutations = Math.Min(connectionCount, Math.Max(1, paramGroup.Quantity));
+ if(mutations==0) return;
+
+ // The mutation loop. Here we pick an index at random and scan forward from that point
+ // for the first non-mutated gene. This prevents any gene from being mutated more than once without
+ // too much overhead. In fact it's optimal for small numbers of mutations where clashes are unlikely
+ // to occur.
+ bool[] mutated = new bool[connectionCount];
+ for(int i=0; i<mutations; i++)
+ {
+ // Pick an index at random.
+ int index = (int)(Utilities.NextDouble()*connectionCount);
+
+ // Scan forward from the random index to find the first non-mutated gene, wrapping around if necessary.
+ while(mutated[index])
+ {
+ if(++index==connectionCount)
+ index=0;
+ }
+
+ // Mutate the gene at 'index'.
+ MutateConnectionWeight(connectionGeneList[index], ea.NeatParameters, paramGroup);
+ mutated[index] = true;
+ }
+ }
+ }
+
+ /// <summary>
+ /// Correlate the ConnectionGenes within the two ConnectionGeneLists - based upon innovation number.
+ /// Return an ArrayList of ConnectionGene[2] structures - pairs of matching ConnectionGenes.
+ /// </summary>
+ /// <param name="list1"></param>
+ /// <param name="list2"></param>
+ /// <returns></returns>
+ private CorrelationResults CorrelateConnectionGeneLists(ConnectionGeneList list1, ConnectionGeneList list2)
+ {
+ CorrelationResults correlationResults = new CorrelationResults();
+
+ //----- Test for special cases.
+ if(list1.Count==0 && list2.Count==0)
+ { // Both lists are empty!
+ return correlationResults;
+ }
+
+ if(list1.Count==0)
+ { // All list2 genes are excess.
+ correlationResults.CorrelationStatistics.ExcessConnectionGeneCount = list2.Count;
+ foreach(ConnectionGene connectionGene in list2)
+ correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.ExcessConnectionGene, null, connectionGene));
+
+ return correlationResults;
+ }
+
+ if(list2.Count==0)
+ { // All list1 genes are excess.
+ correlationResults.CorrelationStatistics.ExcessConnectionGeneCount = list1.Count;
+ foreach(ConnectionGene connectionGene in list1)
+ correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.ExcessConnectionGene, connectionGene, null));
+
+ return correlationResults;
+ }
+
+ //----- Both ConnectionGeneLists contain genes - compare the contents.
+ int list1Idx=0;
+ int list2Idx=0;
+ ConnectionGene connectionGene1 = list1[list1Idx];
+ ConnectionGene connectionGene2 = list2[list2Idx];
+ for(;;)
+ {
+ if(connectionGene2.InnovationId < connectionGene1.InnovationId)
+ {
+ // connectionGene2 is disjoint.
+ correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.DisjointConnectionGene, null, connectionGene2));
+ correlationResults.CorrelationStatistics.DisjointConnectionGeneCount++;
+
+ // Move to the next gene in list2.
+ list2Idx++;
+ }
+ else if(connectionGene1.InnovationId == connectionGene2.InnovationId)
+ {
+ correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.MatchedConnectionGenes, connectionGene1, connectionGene2));
+ correlationResults.CorrelationStatistics.ConnectionWeightDelta += Math.Abs(connectionGene1.Weight-connectionGene2.Weight);
+ correlationResults.CorrelationStatistics.MatchingGeneCount++;
+
+ // Move to the next gene in both lists.
+ list1Idx++;
+ list2Idx++;
+ }
+ else // (connectionGene2.InnovationId > connectionGene1.InnovationId)
+ {
+ // connectionGene1 is disjoint.
+ correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.DisjointConnectionGene, connectionGene1, null));
+ correlationResults.CorrelationStatistics.DisjointConnectionGeneCount++;
+
+ // Move to the next gene in list1.
+ list1Idx++;
+ }
+
+ // Check if we have reached the end of one (or both) of the lists. If we have reached the end of both then
+ // we execute the first if block - but it doesn't matter since the loop is not entered if both lists have
+ // been exhausted.
+ if(list1Idx >= list1.Count)
+ {
+ // All remaining list2 genes are excess.
+ for(; list2Idx<list2.Count; list2Idx++)
+ {
+ correlationResults.CorrelationStatistics.ExcessConnectionGeneCount++;
+ correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.ExcessConnectionGene, null, list2[list2Idx]));
+ }
+ return correlationResults;
+ }
+
+ if(list2Idx >= list2.Count)
+ {
+ // All remaining list1 genes are excess.
+ for(; list1Idx<list1.Count; list1Idx++)
+ {
+ correlationResults.CorrelationStatistics.ExcessConnectionGeneCount++;
+ correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.ExcessConnectionGene, list1[list1Idx], null));
+ }
+ return correlationResults;
+ }
+
+ connectionGene1 = list1[list1Idx];
+ connectionGene2 = list2[list2Idx];
+ }
+ }
+
+ /// <summary>
+ /// If the neuron is a hidden neuron and no connections connect to it then it is redundant.
+ /// </summary>
+ private bool IsNeuronRedundant(uint neuronId)
+ {
+ NeuronGene neuronGene = neuronGeneList.GetNeuronById(neuronId);
+ if(neuronGene.NeuronType!=NeuronType.Hidden)
+ return false;
+
+ return !IsNeuronConnected(neuronId);
+ }
+
+
+ private bool IsNeuronConnected(uint neuronId)
+ {
+ int bound = connectionGeneList.Count;
+ for(int i=0; i<bound; i++)
+ {
+ ConnectionGene connectionGene = connectionGeneList[i];
+ if(connectionGene.SourceNeuronId == neuronId || connectionGene.TargetNeuronId == neuronId)
+ return true;
+ }
+ return false;
+ }
+
+ /// <summary>
+ /// Copy constructor.
+ /// </summary>
+ /// <param name="copyFrom"></param>
+ public NeuronGene(NeuronGene copyFrom)
+ {
+ this.innovationId = copyFrom.innovationId;
+ this.neuronType = copyFrom.neuronType;
+ this.activationFunction = copyFrom.activationFunction;
+ this.neuronBias = copyFrom.neuronBias;
+ this.timeConstant = copyFrom.timeConstant;
+ this.timeCounter = copyFrom.timeCounter;
+ this.xValue = copyFrom.xValue;
+ this.yValue = copyFrom.yValue;
+ }
+
+ public NeuronGene(uint innovationId, NeuronType neuronType, IActivationFunction activationFunction)
+ {
+ this.innovationId = innovationId;
+ this.neuronType = neuronType;
+ this.activationFunction = activationFunction;
+ this.neuronBias = 0;
+ this.timeConstant = 1;
+ }
+
+ #endregion
+
+ #region Properties
+
+ public uint InnovationId
+ {
+ get
+ {
+ return innovationId;
+ }
+ set
+ {
+ innovationId = value;
+ }
+ }
+
+ public NeuronType NeuronType
+ {
+ get
+ {
+ return neuronType;
+ }
+ set
+ {
+ neuronType = value;
+ }
+ }
+
+ public IActivationFunction ActivationFunction
+ {
+ get
+ {
+ return activationFunction;
+ }
+ set
+ {
+ activationFunction = value;
+ }
+ }
+
+ public double NeuronBias
+ {
+ get
+ {
+ return neuronBias;
+ }
+ set
+ {
+ neuronBias = value;
+ }
+ }
+
+ public double TimeConstant
+ {
+ get
+ {
+ return timeConstant;
+ }
+ set
+ {
+ timeConstant = value;
+ }
+ }
+
+ public int TimeCounter
+ {
+ get
+ {
+ return timeCounter;
+ }
+ set
+ {
+ timeCounter = value;
+ }
+ }
+
+ public double XValue
+ {
+ get
+ {
+ return xValue;
+ }
+ set
+ {
+ xValue = value;
+ }
+ }
+
+ public double YValue
+ {
+ get
+ {
+ return yValue;
+ }
+ set
+ {
+ yValue = value;
+ }
+ }
+
+ public Boolean FirstStepComplete
+ {
+ get
+ {
+ return firstStepComplete;
+ }
+ set
+ {
+ firstStepComplete = value;
+ }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeatGenome/NeuronGeneComparer.cs b/SharpNeatWalker/SharpNeatLib/NeatGenome/NeuronGeneComparer.cs
new file mode 100644
index 000000000..09872ad38
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeatGenome/NeuronGeneComparer.cs
@@ -0,0 +1,26 @@
+using System;
+using System.Collections.Generic;
+
+namespace SharpNeatLib.NeatGenome
+{
+ /// <summary>
+ /// Compares the innovation ID of NeuronGenes.
+ /// </summary>
+ public class NeuronGeneComparer : IComparer<NeuronGene>
+ {
+ #region IComparer Members
+
+ public int Compare(NeuronGene x, NeuronGene y)
+ {
+ // Test the most likely cases first.
+ if (x.InnovationId < y.InnovationId)
+ return -1;
+ else if (x.InnovationId > y.InnovationId)
+ return 1;
+ else
+ return 0;
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeatGenome/NeuronGeneList.cs b/SharpNeatWalker/SharpNeatLib/NeatGenome/NeuronGeneList.cs
new file mode 100644
index 000000000..3429603df
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeatGenome/NeuronGeneList.cs
@@ -0,0 +1,149 @@
+using System;
+using System.Collections.Generic;
+
+
+namespace SharpNeatLib.NeatGenome
+{
+ public class NeuronGeneList : List<NeuronGene>
+ {
+ static NeuronGeneComparer neuronGeneComparer = new NeuronGeneComparer();
+ public bool OrderInvalidated=false;
+
+ #region Constructors
+
+ /// <summary>
+ /// Default constructor.
+ /// </summary>
+ public NeuronGeneList()
+ {}
+
+ public NeuronGeneList(int count)
+ {
+ Capacity = (int)(count*1.5);
+ }
+
+ /// <summary>
+ /// Copy constructor.
+ /// </summary>
+ /// <param name="copyFrom"></param>
+ public NeuronGeneList(NeuronGeneList copyFrom)
+ {
+ int count = copyFrom.Count;
+ Capacity = count;
+
+ for(int i=0; i<count; i++)
+ Add(new NeuronGene(copyFrom[i]));
+ }
+
+ #endregion
+
+ public int BinarySearch(uint innovationId)
+ {
+ int lo = 0;
+ int hi = Count-1;
+
+ while (lo <= hi)
+ {
+ int i = (lo + hi) >> 1;
+
+ if(this[i].InnovationId<innovationId)
+ lo = i + 1;
+ else if(this[i].InnovationId>innovationId)
+ hi = i - 1;
+ else
+ return i;
+
+
+ // TODO: This is wrong. It will fail for large innovation numbers because they are of type uint.
+ // Fortunately it's very unlikely anyone has reached such large numbers!
+// int c = (int)((NeuronGene)InnerList[i]).InnovationId - (int)innovationId;
+// if (c == 0) return i;
+//
+// if (c < 0)
+// lo = i + 1;
+// else
+// hi = i - 1;
+ }
+
+ return ~lo;
+ }
+
+ // For debug purposes only.
+// public bool IsSorted()
+// {
+// uint prevId=0;
+// foreach(NeuronGene gene in InnerList)
+// {
+// if(gene.InnovationId<prevId)
+// return false;
+// prevId = gene.InnovationId;
+// }
+// return true;
+// }
+
+ /// <summary>
+ /// Write the provided NeatGenome into the given XmlNode.
+ /// </summary>
+ /// <param name="parentNode"></param>
+ /// <param name="genome"></param>
+ /// <param name="activationFn">Not strictly part of a genome. But it is useful to document which function
+ /// the genome is supposed to run against when decoded into a network.</param>
+ public static void Write(XmlNode parentNode, NeatGenome genome, IActivationFunction activationFn)
+ {
+ //----- Start writing. Create document root node.
+ XmlElement xmlGenome = XmlUtilities.AddElement(parentNode, "genome");
+ XmlUtilities.AddAttribute(xmlGenome, "id", genome.GenomeId.ToString());
+ XmlUtilities.AddAttribute(xmlGenome, "species-id", genome.SpeciesId.ToString());
+ XmlUtilities.AddAttribute(xmlGenome, "age", genome.GenomeAge.ToString());
+ XmlUtilities.AddAttribute(xmlGenome, "fitness", genome.Fitness.ToString("0.00"));
+ XmlUtilities.AddAttribute(xmlGenome, "objective-fitness", genome.ObjectiveFitness.ToString("0.00"));
+ XmlUtilities.AddAttribute(xmlGenome, "activation-fn-id", activationFn.FunctionId);
+
+ //----- Write neurons.
+ XmlElement xmlNeurons = XmlUtilities.AddElement(xmlGenome, "neurons");
+ foreach(NeuronGene neuronGene in genome.NeuronGeneList)
+ WriteNeuron(xmlNeurons, neuronGene);
+
+ //----- Write Connections.
+ XmlElement xmlConnections = XmlUtilities.AddElement(xmlGenome, "connections");
+ foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
+ WriteConnectionGene(xmlConnections, connectionGene);
+ }
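+
+ // Illustrative output sketch (assumed shape, abbreviated; exact attribute formatting comes from XmlUtilities):
+ //   <genome id="1" species-id="0" age="0" fitness="0.00" objective-fitness="0.00" activation-fn-id="BipolarSigmoid">
+ //     <neurons>
+ //       <neuron id="1" type="..." activationFunction="NullFn" />
+ //     </neurons>
+ //     <connections>
+ //       <connection innov-id="12" src-id="2" tgt-id="5" weight="0.25" />
+ //     </connections>
+ //   </genome>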
+
+ #region Private Static Methods
+
+ private static void WriteNeuron(XmlElement xmlNeurons, NeuronGene neuronGene)
+ {
+ XmlElement xmlNeuron = XmlUtilities.AddElement(xmlNeurons, "neuron");
+
+ XmlUtilities.AddAttribute(xmlNeuron, "id", neuronGene.InnovationId.ToString());
+ XmlUtilities.AddAttribute(xmlNeuron, "type", XmlUtilities.GetNeuronTypeString(neuronGene.NeuronType));
+ XmlUtilities.AddAttribute(xmlNeuron, "activationFunction", neuronGene.ActivationFunction.FunctionId);
+ }
+
+ private static void WriteConnectionGene(XmlElement xmlConnections, ConnectionGene connectionGene)
+ {
+ XmlElement xmlConnectionGene = XmlUtilities.AddElement(xmlConnections, "connection");
+
+ XmlUtilities.AddAttribute(xmlConnectionGene, "innov-id", connectionGene.InnovationId.ToString());
+ XmlUtilities.AddAttribute(xmlConnectionGene, "src-id", connectionGene.SourceNeuronId.ToString());
+ XmlUtilities.AddAttribute(xmlConnectionGene, "tgt-id", connectionGene.TargetNeuronId.ToString());
+ XmlUtilities.AddAttribute(xmlConnectionGene, "weight", connectionGene.Weight.ToString("R"));
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeatGenome/Xml/XmlNeatGenomeReader.cs b/SharpNeatWalker/SharpNeatLib/NeatGenome/Xml/XmlNeatGenomeReader.cs
new file mode 100644
index 000000000..428f98aa0
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeatGenome/Xml/XmlNeatGenomeReader.cs
@@ -0,0 +1,16 @@
+using System;
+using System.Xml;
+
+using SharpNeatLib.Evolution;
+using SharpNeatLib.Evolution.Xml;
+
+namespace SharpNeatLib.NeatGenome.Xml
+{
+ public class XmlNeatGenomeReader : IGenomeReader
+ {
+ public IGenome Read(XmlElement xmlGenome)
+ {
+ return XmlNeatGenomeReaderStatic.Read(xmlGenome);
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeatGenome/Xml/XmlNeatGenomeReaderStatic.cs b/SharpNeatWalker/SharpNeatLib/NeatGenome/Xml/XmlNeatGenomeReaderStatic.cs
new file mode 100644
index 000000000..10c6f3c79
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeatGenome/Xml/XmlNeatGenomeReaderStatic.cs
@@ -0,0 +1,78 @@
+using System;
+using System.Xml;
+
+using SharpNeatLib.Evolution;
+using SharpNeatLib.Evolution.Xml;
+using SharpNeatLib.NeatGenome;
+using SharpNeatLib.NeuralNetwork;
+using SharpNeatLib.Xml;
+
+namespace SharpNeatLib.NeatGenome.Xml
+{
+ public class XmlNeatGenomeReaderStatic
+ {
+ public static NeatGenome Read(XmlDocument doc)
+ {
+ XmlElement xmlGenome = (XmlElement)doc.SelectSingleNode("genome");
+ if(xmlGenome==null)
+ throw new Exception("The genome XML is missing the root 'genome' element.");
+
+ return Read(xmlGenome);
+ }
+
+ public static NeatGenome Read(XmlElement xmlGenome)
+ {
+ int inputNeuronCount=0;
+ int outputNeuronCount=0;
+
+ uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));
+
+ //--- Read neuron genes into a list.
+ NeuronGeneList neuronGeneList = new NeuronGeneList();
+ XmlNodeList listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");
+ foreach(XmlElement xmlNeuronGene in listNeuronGenes)
+ {
+ NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);
+
+ // Count the input and output neurons as we go.
+ switch(neuronGene.NeuronType)
+ {
+ case NeuronType.Input:
+ inputNeuronCount++;
+ break;
+ case NeuronType.Output:
+ outputNeuronCount++;
+ break;
+ }
+
+ neuronGeneList.Add(neuronGene);
+ }
+
+ //--- Read connection genes into a list.
+ ConnectionGeneList connectionGeneList = new ConnectionGeneList();
+ XmlNodeList listConnectionGenes = xmlGenome.SelectNodes("connections/connection");
+ foreach(XmlElement xmlConnectionGene in listConnectionGenes)
+ connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
+
+ return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
+ }
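+
+ // Usage sketch (illustrative, not part of the original source):
+ //   XmlDocument doc = new XmlDocument();
+ //   doc.Load("genome.xml");   // hypothetical file path
+ //   NeatGenome genome = XmlNeatGenomeReaderStatic.Read(doc);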
+
+ private static NeuronGene ReadNeuronGene(XmlElement xmlNeuronGene)
+ {
+ uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlNeuronGene, "id"));
+ NeuronType neuronType = XmlUtilities.GetNeuronType(XmlUtilities.GetAttributeValue(xmlNeuronGene, "type"));
+ string activationFn = XmlUtilities.GetAttributeValue(xmlNeuronGene, "activationFunction");
+ return new NeuronGene(id, neuronType, ActivationFunctionFactory.GetActivationFunction(activationFn));
+ }
+
+ private static ConnectionGene ReadConnectionGene(XmlElement xmlConnectionGene)
+ {
+ uint innovationId = uint.Parse(XmlUtilities.GetAttributeValue(xmlConnectionGene, "innov-id"));
+ uint sourceNeuronId = uint.Parse(XmlUtilities.GetAttributeValue(xmlConnectionGene, "src-id"));
+ uint targetNeuronId = uint.Parse(XmlUtilities.GetAttributeValue(xmlConnectionGene, "tgt-id"));
+ double weight = double.Parse(XmlUtilities.GetAttributeValue(xmlConnectionGene, "weight"));
+
+ return new ConnectionGene(innovationId, sourceNeuronId, targetNeuronId, weight);
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/AbstractNetwork.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/AbstractNetwork.cs
new file mode 100644
index 000000000..53bfe72f7
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/AbstractNetwork.cs
@@ -0,0 +1,147 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ /// <summary>
+ /// A base class for neural networks. This class provides the underlying data structures
+ /// for neurons and connections but not a technique for 'executing' the network.
+ /// </summary>
+ public abstract class AbstractNetwork : INetwork
+ {
+ // The master list of ALL neurons within the network.
+ protected NeuronList masterNeuronList;
+
+ // There follows a number of Lists that hold various neuron subsets. Perhaps not
+ // a particularly efficient way of doing things, but at least clear!
+
+ // All input neurons. *Not* including single bias neuron. Used by SetInputSignal().
+ NeuronList inputNeuronList;
+
+ // All output neurons. Used by GetOutputSignal().
+ NeuronList outputNeuronList;
+
+ #region Constructor
+
+ public AbstractNetwork(NeuronList neuronList)
+ {
+ inputNeuronList = new NeuronList();
+ outputNeuronList = new NeuronList();
+ LoadNeuronList(neuronList);
+ }
+
+ #endregion
+
+ #region Properties
+
+ public int InputNeuronCount
+ {
+ get
+ {
+ return inputNeuronList.Count;
+ }
+ }
+
+ public int OutputNeuronCount
+ {
+ get
+ {
+ return outputNeuronList.Count;
+ }
+ }
+
+ abstract public int TotalNeuronCount
+ {
+ get;
+
+ }
+
+ public NeuronList MasterNeuronList
+ {
+ get
+ {
+ return masterNeuronList;
+ }
+ }
+
+ #endregion
+
+ #region INetwork [Implemented]
+
+ public void SetInputSignal(int index, double signalValue)
+ {
+ inputNeuronList[index].OutputValue = signalValue;
+ }
+
+ public void SetInputSignals(double[] signalArray)
+ {
+ // For speed we don't bother with bounds checks.
+ for(int i=0; i<signalArray.Length; i++)
+ inputNeuronList[i].OutputValue = signalArray[i];
+ }
+
+ /// <summary>
+ /// Relax the network until all neuron output signals stabilise, or give up after maxSteps timesteps.
+ /// </summary>
+ /// <param name="maxSteps">The number of timesteps to run the network before we give up.</param>
+ /// <param name="maxAllowedSignalDelta"></param>
+ /// <returns>False if the network did not relax. E.g. due to oscillating signals.</returns>
+ abstract public bool RelaxNetwork(int maxSteps, double maxAllowedSignalDelta);
+
+ #endregion
+
+ #region Private Methods
+
+ ///
+ /// Accepts a list of interconnected neurons that describe the network and loads them into this class instance
+ /// so that the network can be run. This primarily means placing input and output nodes into their own Lists
+ /// for use during the run.
+ ///
+ ///
+ private void LoadNeuronList(NeuronList neuronList)
+ {
+ masterNeuronList = neuronList;
+
+ int loopBound = masterNeuronList.Count;
+ for(int j=0; j probs)
+ {
+ probabilities = new double[probs.Count];
+ functions = new IActivationFunction[probs.Count];
+ int counter = 0;
+ foreach (KeyValuePair<string, double> funct in probs)
+ {
+ probabilities[counter] = funct.Value;
+ functions[counter]= GetActivationFunction(funct.Key);
+ counter++;
+ }
+
+ }
+
+ public static Hashtable activationFunctionTable = new Hashtable();
+
+ public static IActivationFunction GetActivationFunction(string functionId)
+ {
+ IActivationFunction activationFunction = (IActivationFunction)ActivationFunctionFactory.activationFunctionTable[functionId];
+ if(activationFunction==null)
+ {
+ activationFunction = CreateActivationFunction(functionId);
+ activationFunctionTable.Add(functionId, activationFunction);
+ }
+ return activationFunction;
+ }
+
+ private static IActivationFunction CreateActivationFunction(string functionId)
+ {
+ // For now the function ID is the name of a class that implements IActivationFunction.
+ string className = typeof(ActivationFunctionFactory).Namespace + '.' + functionId;
+ return (IActivationFunction)Assembly.GetExecutingAssembly().CreateInstance(className);
+ }
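+
+ // Example (illustrative, not part of the original source): GetActivationFunction("BipolarSigmoid") resolves,
+ // via reflection, to an instance of SharpNeatLib.NeuralNetwork.BipolarSigmoid and caches it in
+ // activationFunctionTable so subsequent lookups re-use the same instance.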
+
+ public static IActivationFunction GetRandomActivationFunction(Evolution.NeatParameters np)
+ {
+ return functions[Maths.RouletteWheel.SingleThrow(probabilities)];
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/ActivationFunctionType.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/ActivationFunctionType.cs
new file mode 100644
index 000000000..56d0200bc
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/ActivationFunctionType.cs
@@ -0,0 +1,12 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public enum ActivationFunctionType
+ {
+ Sigmoid,
+ Gaussian,
+ Sin,
+ Linear
+ }
+}
\ No newline at end of file
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/BipolarSigmoid.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/BipolarSigmoid.cs
new file mode 100644
index 000000000..756cb6ea1
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/BipolarSigmoid.cs
@@ -0,0 +1,41 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ class BipolarSigmoid : IActivationFunction
+ {
+
+ #region IActivationFunction Members
+
+ public double Calculate(double inputSignal)
+ {
+ //return (2.0 / (1.0 + Math.Exp(-4.9 * inputSignal))) - 1.0;
+ return (2.0 / (1.0 + Math.Exp(-1 * inputSignal))) - 1.0;
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ //return (2.0F / (1.0F + (float)Math.Exp(-4.9F * inputSignal))) - 1.0F;
+ return (2.0F / (1.0F + (float)Math.Exp(-1 * inputSignal))) - 1.0F;
+ }
+
+ public string FunctionId
+ {
+ get { return this.GetType().Name; }
+ }
+
+ public string FunctionString
+ {
+ get { return "2.0/(1.0 + exp(-4.9*inputSignal)) - 1.0"; }
+ }
+
+ public string FunctionDescription
+ {
+ get { return "bipolar steepend sigmoid"; }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Gaussian.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Gaussian.cs
new file mode 100644
index 000000000..2c632505a
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Gaussian.cs
@@ -0,0 +1,38 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ class Gaussian : IActivationFunction
+ {
+ #region IActivationFunction Members
+
+ public double Calculate(double inputSignal)
+ {
+ return 2 * Math.Exp(-Math.Pow(inputSignal * 2.5, 2)) - 1;
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ return (float)(2 * Math.Exp(-Math.Pow(inputSignal * 2.5, 2)) - 1);
+ }
+
+ public string FunctionId
+ {
+ get { return this.GetType().Name; }
+ }
+
+ public string FunctionString
+ {
+ get { return "2*e^(-(input*2.5)^2) - 1"; }
+ }
+
+ public string FunctionDescription
+ {
+ get { return "bimodal gaussian"; }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/InverseAbsoluteSigmoid.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/InverseAbsoluteSigmoid.cs
new file mode 100644
index 000000000..f88c94537
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/InverseAbsoluteSigmoid.cs
@@ -0,0 +1,54 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public class InverseAbsoluteSigmoid : IActivationFunction
+ {
+ public double Calculate(double inputSignal)
+ {
+ //return 1.0+(inputSignal/(0.1+Math.Abs(inputSignal)));
+ return 0.5 + ((inputSignal / (1.0+Math.Abs(inputSignal)))*0.5);
+ }
+
+ public float Calculate(float inputSignal)
+ {
+
+ //return 1.0F+(inputSignal/(0.1F+Math.Abs(inputSignal)));
+ return 0.5F + ((inputSignal / (1.0F+Math.Abs(inputSignal)))*0.5F);
+ }
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ public string FunctionString
+ {
+ get
+ {
+ return "";
+ }
+ }
+
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ public string FunctionDescription
+ {
+ get
+ {
+ return "";
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Linear.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Linear.cs
new file mode 100644
index 000000000..ddce8ab9f
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Linear.cs
@@ -0,0 +1,62 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public class Linear : IActivationFunction
+ {
+ public double Calculate(double inputSignal)
+ {
+ /* if(inputSignal<-1.0)
+ return 0.0;
+ else if(inputSignal>1.0)
+ return 1.0;
+ else
+ return (inputSignal+1.0)*0.5; */
+ return Math.Abs(inputSignal);
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ /* if(inputSignal<0.0F)
+ return 0.0F;
+ else if(inputSignal>1.0F)
+ return 1.0F;
+ else
+ return (inputSignal+1.0F)*0.5F; */
+ return Math.Abs(inputSignal);
+ }
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ public string FunctionString
+ {
+ get
+ {
+ return "(x+1)/2 [min=0, max=1]";
+ }
+ }
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ public string FunctionDescription
+ {
+ get
+ {
+ return "Linear";
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Modulus.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Modulus.cs
new file mode 100644
index 000000000..d817be139
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Modulus.cs
@@ -0,0 +1,92 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ class Modulus : IActivationFunction
+ {
+
+ static int factor = 5;
+ double ddelta = (2.0 / factor);
+ float fdelta = 2.0f / factor;
+ static float constant = 10000000;
+
+ #region IActivationFunction Members
+
+ public float Calculate(float inputSignal, int fact)
+ {
+ float delta = 2.0f / fact;
+ inputSignal += 51;
+ inputSignal *= constant;
+ inputSignal = (int)inputSignal % (int)(delta * constant);
+ inputSignal /= constant;
+ inputSignal *= fact;
+ return inputSignal - 1;
+ }
+
+ public double Calculate(double inputSignal, int fact)
+ {
+ double delta = 2.0 / fact;
+ inputSignal += 51;
+ inputSignal *= constant;
+ inputSignal = (int)inputSignal % (int)(delta * constant);
+ inputSignal /= constant;
+ inputSignal *= fact;
+ return inputSignal - 1;
+ }
+
+
+ public double Calculate(double inputSignal)
+ {
+ //shift to 0-max#
+ inputSignal = ((51 + inputSignal));
+
+ //find modulus
+ inputSignal *= constant;
+ inputSignal = (int)inputSignal % (int)(ddelta * constant);
+ //while (inputSignal > ddelta)
+ // inputSignal -= ddelta;
+ inputSignal /= constant;
+ inputSignal = inputSignal * (factor);
+
+ return (inputSignal)-1;
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ //shift to 0-max#
+ inputSignal = ((51 + inputSignal));
+
+ //find modulus
+ inputSignal *= constant;
+ inputSignal = (int)inputSignal % (int)(fdelta * constant);
+ //while (inputSignal > fdelta)
+ // inputSignal -= ddelta;
+ inputSignal /= constant;
+ inputSignal = inputSignal * (factor);
+
+ return (inputSignal) - 1;
+ }
+
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ public string FunctionString
+ {
+ get { return "Mod " + factor.ToString(); }
+ }
+
+ public string FunctionDescription
+ {
+ get { return "Modulus"; }
+ }
+
+ #endregion
+ }
+}
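
Modulus is easiest to read as a fixed-point sawtooth: the input is shifted by +51 so it is positive, scaled by a large constant so the wrap can be taken in integer arithmetic, reduced modulo 2/factor, and finally rescaled so the output sweeps -1..1 once per period. A hedged sketch of how it could be exercised; the demo class is hypothetical:

    using SharpNeatLib.NeuralNetwork;

    class ModulusDemo
    {
        static void Main()
        {
            IActivationFunction fn = ActivationFunctionFactory.GetActivationFunction("Modulus");

            // With the default factor of 5 the output ramps from -1 up to +1 over every
            // input interval of width 0.4, then wraps back to -1 (a periodic sawtooth).
            for (double x = -1.0; x <= 1.0; x += 0.05)
                System.Console.WriteLine("{0,6:F2} -> {1,6:F3}", x, fn.Calculate(x));
        }
    }
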
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/NullFn.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/NullFn.cs
new file mode 100644
index 000000000..5cd525bae
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/NullFn.cs
@@ -0,0 +1,54 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ ///
+ /// Summary description for NullFn.
+ ///
+ public class NullFn : IActivationFunction
+ {
+ public double Calculate(double inputSignal)
+ {
+ return 0.0;
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ return 0.0F;
+ }
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ public string FunctionString
+ {
+ get
+ {
+ return "";
+ }
+ }
+
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ public string FunctionDescription
+ {
+ get
+ {
+ return "";
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/PlainSigmoid.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/PlainSigmoid.cs
new file mode 100644
index 000000000..176ef5812
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/PlainSigmoid.cs
@@ -0,0 +1,53 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public class PlainSigmoid : IActivationFunction
+ {
+ public double Calculate(double inputSignal)
+ {
+ // good for x input range -5.0->5.0 (y 0.0->1.0)
+ return 1.0D/(1.0D+(Math.Exp(-inputSignal)));
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ // good for x input range -5.0->5.0 (y 0.0->1.0)
+ return 1.0F/(1.0F+((float)Math.Exp(-inputSignal)));
+ }
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ public string FunctionString
+ {
+ get
+ {
+ return "1.0/(1.0+(exp(-inputSignal)))";
+ }
+ }
+
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ public string FunctionDescription
+ {
+ get
+ {
+ return "Plain sigmoid [xrange -5.0,5.0][yrange, 0.0,1.0]";
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/ReducedSigmoid.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/ReducedSigmoid.cs
new file mode 100644
index 000000000..f7706e121
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/ReducedSigmoid.cs
@@ -0,0 +1,51 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public class ReducedSigmoid : IActivationFunction
+ {
+ public double Calculate(double inputSignal)
+ {
+ return 1.0D/(1.0D+(Math.Exp(-0.5*inputSignal)));
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ return 1.0F/(1.0F+((float)Math.Exp(-0.5*inputSignal)));
+ }
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ public string FunctionString
+ {
+ get
+ {
+ return "1.0/(1.0+(exp(-0.5*inputSignal)))";
+ }
+ }
+
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ public string FunctionDescription
+ {
+ get
+ {
+ return "Plain sigmoid [xrange -5.0,5.0][yrange, 0.0,1.0]";
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/SigmoidApproximation.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/SigmoidApproximation.cs
new file mode 100644
index 000000000..fa7cd822d
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/SigmoidApproximation.cs
@@ -0,0 +1,91 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public class SigmoidApproximation : IActivationFunction
+ {
+ public double Calculate(double inputSignal)
+ {
+ const double four = 4.0F;
+ const double one_32nd = 0.03125F;
+
+ if(inputSignal<-4.0)
+ {
+ return 0.0;
+ }
+ else if(inputSignal<0.0)
+ {
+ return (inputSignal+four)*(inputSignal+four)*one_32nd;
+ }
+ else if(inputSignal<4.0)
+ {
+ return 1.0-(inputSignal-four)*(inputSignal-four)*one_32nd;
+ }
+ else
+ {
+ return 1.0;
+ }
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ const float four = 4.0F;
+ const float one_32nd = 0.03125F;
+
+ if(inputSignal<-4.0F)
+ {
+ return 0.0F;
+ }
+ else if(inputSignal<0.0F)
+ {
+// float d=inputSignal+four;
+// return d*d*one_32nd;
+ return (inputSignal+four)*(inputSignal+four)*one_32nd;
+ }
+ else if(inputSignal<4.0F)
+ {
+// float d=inputSignal-four;
+// return 1.0F-d*d*one_32nd;
+ return 1.0F-(inputSignal-four)*(inputSignal-four)*one_32nd;
+ }
+ else
+ {
+ return 1.0F;
+ }
+ }
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ public string FunctionString
+ {
+ get
+ {
+ return "";
+ }
+ }
+
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ public string FunctionDescription
+ {
+ get
+ {
+ return "";
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Sine.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Sine.cs
new file mode 100644
index 000000000..e00f136dc
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/Sine.cs
@@ -0,0 +1,39 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ class Sine : IActivationFunction
+ {
+ #region IActivationFunction Members
+
+ public double Calculate(double inputSignal)
+ {
+ return Math.Sin(2*inputSignal);
+
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ return (float)Math.Sin(2*inputSignal);
+ }
+
+ public string FunctionId
+ {
+ get { return this.GetType().Name; }
+ }
+
+ public string FunctionString
+ {
+ get { return "Sin(2*inputSignal)"; }
+ }
+
+ public string FunctionDescription
+ {
+ get { return "Sin function with doubled period"; }
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/SteepenedSigmoid.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/SteepenedSigmoid.cs
new file mode 100644
index 000000000..4b1d2c558
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/SteepenedSigmoid.cs
@@ -0,0 +1,61 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public class SteepenedSigmoid : IActivationFunction
+ {
+ public double Calculate(double inputSignal)
+ {
+ /*if (inputSignal == 0)
+ return 0;*/
+ // good for x input range -1.0->1.0 (y 0.0->1.0)
+ //if (inputSignal > -.25 && inputSignal < .25)
+ // return 0;
+ return 1.0/(1.0 + Math.Exp(-4.9*inputSignal));
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ /* if (inputSignal == 0)
+ return 0;*/
+ // good for x input range -1.0->1.0 (y 0.0->1.0)
+ //if (inputSignal > -.25f && inputSignal < .25f)
+ // return 0;
+ return 1.0F/(1.0F + (float)Math.Exp(-4.9F*inputSignal));
+ }
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ public string FunctionString
+ {
+ get
+ {
+ return "1.0/(1.0 + exp(-4.9*inputSignal))";
+ }
+ }
+
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ public string FunctionDescription
+ {
+ get
+ {
+ return "Steepened sigmoid [xrange -1.0,1.0][yrange, 0.0,1.0]";
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/SteepenedSigmoidApproximation.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/SteepenedSigmoidApproximation.cs
new file mode 100644
index 000000000..0db0740d7
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/SteepenedSigmoidApproximation.cs
@@ -0,0 +1,91 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public class SteepenedSigmoidApproximation : IActivationFunction
+ {
+ public double Calculate(double inputSignal)
+ {
+ const double one = 1.0;
+ const double one_half = 0.5;
+
+ if(inputSignal<-1.0)
+ {
+ return 0.0;
+ }
+ else if(inputSignal<0.0)
+ {
+ return (inputSignal+one)*(inputSignal+one)*one_half;
+ }
+ else if(inputSignal<1.0)
+ {
+ return 1.0-(inputSignal-one)*(inputSignal-one)*one_half;
+ }
+ else
+ {
+ return 1.0;
+ }
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ const float one = 1.0F;
+ const float one_half = 0.5F;
+
+ if(inputSignal<-1.0F)
+ {
+ return 0.0F;
+ }
+ else if(inputSignal<0.0F)
+ {
+// float d=inputSignal+four;
+// return d*d*one_32nd;
+ return (inputSignal+one)*(inputSignal+one)*one_half;
+ }
+ else if(inputSignal<1.0F)
+ {
+// float d=inputSignal-four;
+// return 1.0F-d*d*one_32nd;
+ return 1.0F-(inputSignal-one)*(inputSignal-one)*one_half;
+ }
+ else
+ {
+ return 1.0F;
+ }
+ }
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ public string FunctionString
+ {
+ get
+ {
+ return "";
+ }
+ }
+
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ public string FunctionDescription
+ {
+ get
+ {
+ return "";
+ }
+ }
+ }
+}
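
The two piecewise-quadratic classes above trade a little accuracy for avoiding Math.Exp. A quick hedged check of how closely SteepenedSigmoidApproximation tracks the exact SteepenedSigmoid; the comparison harness is hypothetical, not part of the patch:

    using System;
    using SharpNeatLib.NeuralNetwork;

    class ApproximationCheck
    {
        static void Main()
        {
            IActivationFunction exact = new SteepenedSigmoid();
            IActivationFunction approx = new SteepenedSigmoidApproximation();

            double worst = 0.0;
            for (double x = -2.0; x <= 2.0; x += 0.01)
                worst = Math.Max(worst, Math.Abs(exact.Calculate(x) - approx.Calculate(x)));

            // Both functions run 0..1; the approximation clamps to 0 and 1 outside [-1,1].
            Console.WriteLine("max |exact - approx| = {0:F4}", worst);
        }
    }
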
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/StepFunction.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/StepFunction.cs
new file mode 100644
index 000000000..661df2489
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ActivationFunctions/StepFunction.cs
@@ -0,0 +1,57 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public class StepFunction : IActivationFunction
+ {
+ public double Calculate(double inputSignal)
+ {
+ if(inputSignal<0.0F)
+ return 0.0;
+ else
+ return 1.0;
+ }
+
+ public float Calculate(float inputSignal)
+ {
+ if(inputSignal<0F)
+ return 0F;
+ else
+ return 1F;
+ }
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ public string FunctionId
+ {
+ get
+ {
+ return this.GetType().Name;
+ }
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ public string FunctionString
+ {
+ get
+ {
+ return "x<0 ? 0.0 : 1.0";
+ }
+ }
+
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ public string FunctionDescription
+ {
+ get
+ {
+ return "Step function [xrange -5.0,5.0][yrange, 0.0,1.0]";
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ConcurrentNetwork/ConcurrentNetwork.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ConcurrentNetwork/ConcurrentNetwork.cs
new file mode 100644
index 000000000..ff386baae
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ConcurrentNetwork/ConcurrentNetwork.cs
@@ -0,0 +1,96 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ ///
+ /// A network that simulates a network in real-time. That is, each neuron in the network
+ /// calculates its accumulated input and output from the previous timestep's outputs.
+ /// Each neuron then switches to the new 'next timestep' state in unison.
+ ///
+ /// This is opposed to an activation traversal network where the output signals
+ /// are updated by a traversal algorithm that follows the network's connections.
+ ///
+ public class ConcurrentNetwork : AbstractNetwork
+ {
+ #region Constructor
+
+ public ConcurrentNetwork(NeuronList neuronList)
+ : base(neuronList)
+ {
+ }
+
+ #endregion
+
+ #region INetwork
+
+ public override void SingleStep()
+ {
+ int loopBound = masterNeuronList.Count;
+ for(int j=0; j<loopBound; j++)
+ masterNeuronList[j].Recalc();
+
+ for(int j=0; j<loopBound; j++)
+ masterNeuronList[j].UseRecalculatedValue();
+ }
+
+ public override void MultipleSteps(int numberOfSteps)
+ {
+ for(int i=0; i<numberOfSteps; i++)
+ SingleStep();
+ }
+
+ ///
+ /// Relax the network. Relaxing refers to activating the network until the change in each neuron's
+ /// output signal between successive timesteps falls below maxAllowedSignalDelta.
+ ///
+ /// The number of timesteps to run the network before we give up.
+ ///
+ /// False if the network did not relax. E.g. due to oscillating signals.
+ public override bool RelaxNetwork(int maxSteps, double maxAllowedSignalDelta)
+ {
+ // Perform at least one step.
+ SingleStep();
+
+ // Now perform steps until the network is relaxed or maxSteps is reached.
+ int loopBound;
+ bool isRelaxed=false;
+ for(int i=0; i<maxSteps && !isRelaxed; i++)
+ {
+ isRelaxed=true; // Assume relaxed until a neuron's delta proves otherwise.
+
+ // Recalculate every neuron from the previous timestep's outputs.
+ loopBound = masterNeuronList.Count;
+ for(int j=0; j<loopBound; j++)
+ {
+ Neuron neuron = masterNeuronList[j];
+ neuron.Recalc();
+ if(neuron.OutputDelta > maxAllowedSignalDelta)
+ isRelaxed=false;
+ }
+
+ // Switch the whole network over to the newly calculated outputs in unison.
+ for(int j=0; j<loopBound; j++)
+ masterNeuronList[j].UseRecalculatedValue();
+ }
+
+ return isRelaxed;
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ConcurrentNetwork/Neuron.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ConcurrentNetwork/Neuron.cs
new file mode 100644
index 000000000..d8aba53b2
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/ConcurrentNetwork/Neuron.cs
@@ -0,0 +1,185 @@
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public enum NeuronType
+ {
+ Input,
+ Bias,
+ Hidden,
+ Output,
+ Undefined
+ }
+
+
+ public class Neuron
+ {
+ NeuronType neuronType;
+ uint id;
+
+ double outputValue; // Output signal. Can be initialised when neuron is created.
+ double outputRecalc; // The recalculated output is not updated immediately. A complete pass of the network is
+ // done using the existing output values, and then we switch the network over to the
+ // recalced values in a second pass. This way we simulate the workings of a parallel network.
+
+ ConnectionList connectionList; // All of the incoming connections to a neuron. The neuron can recalculate its own output value by iterating through this collection.
+
+ IActivationFunction activationFn;
+
+ // GWM - Note that placing the bias and time constant in here requires the use of the concurrentnetwork, which seems to be broken...
+ double neuronBias; // GWM - Bias added for CTRNN architecture
+ double timeConstant; // GWM - Added for CTRNN architecture - Time constant of 1 = normal neuron (no leak)
+
+ #region Constructor
+
+ public Neuron(IActivationFunction activationFn, NeuronType neuronType, uint id, double neuronBias, double timeConstant)
+ {
+ this.activationFn = activationFn;
+ this.neuronType = neuronType;
+ this.id = id;
+ this.neuronBias = neuronBias;
+ this.timeConstant = timeConstant;
+ connectionList = new ConnectionList();
+
+ if(neuronType == NeuronType.Bias)
+ this.outputValue = 1.0D;
+ else
+ this.outputValue = 0.0D;
+ }
+
+ #endregion
+
+ #region Properties
+
+ public NeuronType NeuronType
+ {
+ get
+ {
+ return neuronType;
+ }
+ }
+
+ public uint Id
+ {
+ get
+ {
+ return id;
+ }
+ }
+
+ public double OutputValue
+ {
+ get
+ {
+ return outputValue;
+ }
+ set
+ { // Set is required for input nodes.
+ outputValue = value;
+ }
+ }
+
+ // GWM - Property added
+ public double NeuronBias
+ {
+ set
+ {
+ neuronBias = value;
+ }
+ }
+
+ ///
+ /// The OutputValue delta between this timestep and the previous. This property is only valid
+ /// after calling Recalc() and before calling UseRecalculatedValue().
+ ///
+ public double OutputDelta
+ {
+ get
+ {
+ return Math.Abs(outputValue-outputRecalc);
+ }
+ }
+
+ public ConnectionList ConnectionList
+ {
+ get
+ {
+ return connectionList;
+ }
+ }
+
+ #endregion
+
+ #region Public methods
+
+ ///
+ /// Recalculate this neuron's output value.
+ ///
+ public void Recalc()
+ {
+ // No recalculation required for input or bias nodes.
+ if(neuronType==NeuronType.Input || neuronType==NeuronType.Bias)
+ return;
+
+ // Iterate the connections and total up the input signal from all of them.
+ double accumulator=0;
+ int loopBound = connectionList.Count;
+ for(int i=0; i<loopBound; i++)
+ {
+ Connection connection = connectionList[i];
+ accumulator += connection.SourceNeuron.OutputValue * connection.Weight;
+ }
+
+ // Leaky-integrator style update: the neuron's bias is added to the accumulated input and the output
+ // moves towards the activated value at a rate set by the time constant (a time constant of 1
+ // behaves as a standard, non-leaky neuron).
+ outputRecalc = outputValue + (1.0/timeConstant) * (activationFn.Calculate(accumulator + neuronBias) - outputValue);
+ }
+
+ ///
+ /// Switch the neuron over to the output value calculated by the last call to Recalc().
+ ///
+ public void UseRecalculatedValue()
+ {
+ // Input and bias neurons retain their existing values.
+ if(neuronType==NeuronType.Input || neuronType==NeuronType.Bias)
+ return;
+
+ outputValue = outputRecalc;
+ }
+
+ #endregion
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/FastConcurrentNetwork/FastConcurrentMultiplicativeNetwork.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/FastConcurrentNetwork/FastConcurrentMultiplicativeNetwork.cs
new file mode 100644
index 000000000..42e935a24
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/FastConcurrentNetwork/FastConcurrentMultiplicativeNetwork.cs
@@ -0,0 +1,278 @@
+using System;
+using System.Collections;
+using System.Collections.Specialized;
+
+using SharpNeatLib.NeatGenome;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ ///
+ /// A fast implementation of a network with concurrently activated neurons, that is, each
+ /// neuron's output signal is calculated for a given timestep using the output signals
+ /// from the previous timestep. This then simulates each neuron activating concurrently.
+ ///
+ public class FastConcurrentMultiplicativeNetwork : INetwork
+ {
+ #region Class Variables
+
+ IActivationFunction activationFn;
+
+ // Neurons are ordered with bias and input nodes at the head of the list, then output nodes and
+ // hidden nodes on the array's tail.
+ float[] neuronSignalArray;
+ float[] _neuronSignalArray;
+ BitArray neuronSignalFlagArray;
+ FloatFastConnection[] connectionArray;
+
+ ///
+ /// The number of input neurons. Also the index 1 after the last input neuron.
+ ///
+ int inputNeuronCount;
+ int totalInputNeuronCount;
+ int outputNeuronCount;
+
+ ///
+ /// This is the index of the first hidden neuron in the array (inputNeuronCount + outputNeuronCount).
+ ///
+ int biasNeuronCount;
+
+ #endregion
+
+ #region Constructor
+
+ public FastConcurrentMultiplicativeNetwork( int biasNeuronCount,
+ int inputNeuronCount,
+ int outputNeuronCount,
+ int totalNeuronCount,
+ FloatFastConnection[] connectionArray,
+ IActivationFunction activationFn)
+ {
+ this.biasNeuronCount = biasNeuronCount;
+ this.inputNeuronCount = inputNeuronCount;
+ this.totalInputNeuronCount = biasNeuronCount + inputNeuronCount;
+ this.outputNeuronCount = outputNeuronCount;
+
+ this.connectionArray = connectionArray;
+ this.activationFn = activationFn;
+
+ //----- Allocate the arrays that make up the neural network.
+ // The neurons signals are initialised to 0 by default. Only bias nodes need setting to 1.
+ neuronSignalArray = new float[totalNeuronCount];
+ _neuronSignalArray = new float[totalNeuronCount];
+ neuronSignalFlagArray = new BitArray(totalNeuronCount);
+
+ for(int i=0; i<biasNeuronCount; i++)
+ neuronSignalArray[i]=1.0F;
+ }
+
+ #endregion
+
+ ///
+ /// Using RelaxNetwork erodes some of the performance gain of FastConcurrentNetwork because of the slightly
+ /// more complex implementation of the third loop - when compared to SingleStep().
+ ///
+ ///
+ ///
+ ///
+ public bool RelaxNetwork(int maxSteps, double maxAllowedSignalDelta)
+ {
+ bool isRelaxed=false;
+ for(int j=0; j<maxSteps && !isRelaxed; j++)
+ {
+ isRelaxed=true; // Assume relaxed until a neuron's signal delta proves otherwise.
+
+ // Multiply each connection's weighted source signal into its target's pre-activation signal.
+ for(int i=0; i<connectionArray.Length; i++)
+ _neuronSignalArray[connectionArray[i].targetNeuronIdx] *=
+ neuronSignalArray[connectionArray[i].sourceNeuronIdx] * connectionArray[i].weight;
+
+ // Apply the activation function and check how far each neuron's signal has moved.
+ for(int i=totalInputNeuronCount; i<neuronSignalArray.Length; i++)
+ {
+ float oldSignal = neuronSignalArray[i];
+ neuronSignalArray[i] = activationFn.Calculate(_neuronSignalArray[i]);
+
+ if(Math.Abs(neuronSignalArray[i]-oldSignal) > maxAllowedSignalDelta)
+ isRelaxed=false;
+
+ // Take the opportunity to reset the pre-activation signal array.
+ // Reset to 1.0 for multiplicative network.
+ //_neuronSignalArray[i]=1.0F;
+ }
+ }
+
+ return isRelaxed;
+ }
+
+ public void SetInputSignal(int index, double signalValue)
+ {
+ neuronSignalArray[biasNeuronCount + index] = (float)signalValue;
+ }
+
+ public void SetInputSignals(double[] signalArray)
+ {
+ // For speed we don't bother with bounds checks.
+ for(int i=0; i<inputNeuronCount; i++)
+ neuronSignalArray[biasNeuronCount+i] = (float)signalArray[i];
+ }
+
+ public float GetOutputSignal(int index)
+ {
+ return neuronSignalArray[totalInputNeuronCount + index];
+ }
+
+ public void ClearSignals()
+ {
+ // Clear signals for input, hidden and output nodes. Only the bias node is untouched.
+ for(int i=biasNeuronCount; i<neuronSignalArray.Length; i++)
+ neuronSignalArray[i]=0.0F;
+ }
+
+ public int InputNeuronCount
+ {
+ get { return inputNeuronCount; }
+ }
+
+ public int OutputNeuronCount
+ {
+ get { return outputNeuronCount; }
+ }
+
+ public int TotalNeuronCount
+ {
+ get { return neuronSignalArray.Length; }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/FastConcurrentNetwork/FloatFastConcurrentNetwork.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/FastConcurrentNetwork/FloatFastConcurrentNetwork.cs
new file mode 100644
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/FastConcurrentNetwork/FloatFastConcurrentNetwork.cs
+using System;
+using System.Collections;
+
+using SharpNeatLib.NeatGenome;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ ///
+ /// A fast implementation of a network with concurrently activated neurons, that is, each
+ /// neuron's output signal is calculated for a given timestep using the output signals
+ /// from the previous timestep. This then simulates each neuron activating concurrently.
+ ///
+ //[Serializable()]
+ public class FloatFastConcurrentNetwork : INetwork
+ {
+ #region Class Variables
+
+ IActivationFunction[] activationFnArray;
+
+ Modulus mod = (Modulus)ActivationFunctionFactory.GetActivationFunction("Modulus");
+ // Neurons are ordered with bias and input nodes at the head of the list, then output nodes and
+ // hidden nodes on the array's tail.
+ public float[] neuronSignalArray;
+ public float[] _neuronSignalArray;
+ public FloatFastConnection[] connectionArray;
+
+ // GWM - added for leaky integrators
+ public double[] biasArray;
+ public double[] timeConstantArray;
+
+ // GWM - added for SUPG support
+ public float[] overrideSignals;
+
+ ///
+ /// The number of input neurons. Also the index 1 after the last input neuron.
+ ///
+ int inputNeuronCount;
+ int totalInputNeuronCount;
+ int outputNeuronCount;
+
+ ///
+ /// This is the index of the first hidden neuron in the array (inputNeuronCount + outputNeuronCount).
+ ///
+ int biasNeuronCount;
+
+ bool useSUPG;
+
+ #endregion
+
+ #region Constructor
+
+ public FloatFastConcurrentNetwork( int biasNeuronCount,
+ int inputNeuronCount,
+ int outputNeuronCount,
+ int totalNeuronCount,
+ FloatFastConnection[] connectionArray,
+ IActivationFunction[] activationFnArray,
+ double[] biasArray,
+ double[] timeConstantArray)
+ {
+ this.biasNeuronCount = biasNeuronCount;
+ this.inputNeuronCount = inputNeuronCount;
+ this.totalInputNeuronCount = biasNeuronCount + inputNeuronCount;
+ this.outputNeuronCount = outputNeuronCount;
+
+ this.biasArray = biasArray;
+ this.timeConstantArray = timeConstantArray;
+
+ this.connectionArray = connectionArray;
+ this.activationFnArray = activationFnArray;
+
+ //----- Allocate the arrays that make up the neural network.
+ // The neuron signals are initialised to 0 by default. Only bias nodes need setting to 1.
+ neuronSignalArray = new float[totalNeuronCount];
+ _neuronSignalArray = new float[totalNeuronCount];
+
+ for(int i=0; i<biasNeuronCount; i++)
+ neuronSignalArray[i]=1.0F;
+ }
+
+ #endregion
+
+ ///
+ /// Using RelaxNetwork erodes some of the performance gain of FastConcurrentNetwork because of the slightly
+ /// more complex implementation of the third loop - when compared to SingleStep().
+ ///
+ ///
+ ///
+ ///
+ public bool RelaxNetwork(int maxSteps, double maxAllowedSignalDelta)
+ {
+ bool isRelaxed=false;
+ for(int j=0; j<maxSteps && !isRelaxed; j++)
+ {
+ isRelaxed=true; // Assume relaxed until a neuron's signal delta proves otherwise.
+
+ // Accumulate each connection's weighted source signal into its target's pre-activation signal.
+ for(int i=0; i<connectionArray.Length; i++)
+ _neuronSignalArray[connectionArray[i].targetNeuronIdx] +=
+ neuronSignalArray[connectionArray[i].sourceNeuronIdx] * connectionArray[i].weight;
+
+ // Activate the hidden/output neurons (adding each neuron's bias) and check how far each signal moved.
+ for(int i=totalInputNeuronCount; i<neuronSignalArray.Length; i++)
+ {
+ float oldSignal = neuronSignalArray[i];
+ neuronSignalArray[i] = (float)activationFnArray[i].Calculate(_neuronSignalArray[i] + biasArray[i]);
+
+ if(Math.Abs(neuronSignalArray[i]-oldSignal) > maxAllowedSignalDelta)
+ isRelaxed=false;
+
+ // Take the opportunity to reset the pre-activation signal array.
+ _neuronSignalArray[i]=0.0F;
+ }
+ }
+
+ return isRelaxed;
+ }
+
+ public void SetInputSignal(int index, double signalValue)
+ {
+ neuronSignalArray[biasNeuronCount + index] = (float)signalValue;
+ }
+
+ public void SetInputSignals(double[] signalArray)
+ {
+ // For speed we don't bother with bounds checks.
+ for(int i=0; i<inputNeuronCount; i++)
+ neuronSignalArray[biasNeuronCount+i] = (float)signalArray[i];
+ }
+
+ public float GetOutputSignal(int index)
+ {
+ return neuronSignalArray[totalInputNeuronCount + index];
+ }
+
+ public void ClearSignals()
+ {
+ // Clear signals for input, hidden and output nodes. Only the bias node is untouched.
+ for(int i=biasNeuronCount; i<neuronSignalArray.Length; i++)
+ neuronSignalArray[i]=0.0F;
+ }
+
+ public int InputNeuronCount
+ {
+ get { return inputNeuronCount; }
+ }
+
+ public int OutputNeuronCount
+ {
+ get { return outputNeuronCount; }
+ }
+
+ public int TotalNeuronCount
+ {
+ get { return neuronSignalArray.Length; }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/IActivationFunction.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/IActivationFunction.cs
new file mode 100644
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/IActivationFunction.cs
+using System;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ public interface IActivationFunction
+ {
+ double Calculate(double inputSignal);
+
+ ///
+ /// A float equivalent should be implemented as this provides approx. a 60% speed boost
+ /// in the right circumstances. Partly through not having to cast to/from double and partly
+ /// because floats are [sometimes] faster to calculate. They are also small and require less
+ /// memory bus bandwidth and CPU cache.
+ ///
+ ///
+ ///
+ float Calculate(float inputSignal);
+
+ ///
+ /// Unique ID. Stored in network XML to identify which function the network is supposed to use.
+ ///
+ string FunctionId
+ {
+ get;
+ }
+
+ ///
+ /// The function as a string in a platform agnostic form. For documentation purposes only, this isn't actually compiled!
+ ///
+ string FunctionString
+ {
+ get;
+ }
+
+ ///
+ /// A human readable / verbose description of the activation function.
+ ///
+ string FunctionDescription
+ {
+ get;
+ }
+ }
+}
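
Any new activation function only has to satisfy this small contract; because the factory earlier in this patch resolves FunctionId by class name via reflection, dropping a class like the hypothetical one below into this namespace would be enough to make it loadable from network XML. A sketch only, not part of the changeset:

    using System;

    namespace SharpNeatLib.NeuralNetwork
    {
        // Hypothetical example implementation - not included in this patch.
        public class HyperbolicTangent : IActivationFunction
        {
            public double Calculate(double inputSignal)
            {
                return Math.Tanh(inputSignal);
            }

            public float Calculate(float inputSignal)
            {
                return (float)Math.Tanh(inputSignal);
            }

            public string FunctionId
            {
                get { return this.GetType().Name; }
            }

            public string FunctionString
            {
                get { return "tanh(inputSignal)"; }
            }

            public string FunctionDescription
            {
                get { return "Hyperbolic tangent [yrange -1.0,1.0]"; }
            }
        }
    }
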
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/INetwork.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/INetwork.cs
new file mode 100644
index 000000000..2d1948b38
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/INetwork.cs
@@ -0,0 +1,69 @@
+using System;
+using SharpNeatLib.CPPNs;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+
+ public interface INetwork
+ {
+ void SingleStep();
+ void MultipleSteps(int numberOfSteps);
+
+ ///
+ /// Relax the network. Relaxing refers to activating a network until the amount that signals within
+ /// it are changing within a certain limit, here defined by maxAllowedSignalDelta. Change is the
+ /// absolute difference between a neuron's output signals between two successive activations.
+ ///
+ /// The number of timesteps to run the network before we give up.
+ ///
+ /// False if the network did not relax. E.g. due to oscillating signals.
+ bool RelaxNetwork(int maxSteps, double maxAllowedSignalDelta);
+
+ ///
+ /// Assigns a single input signal value.
+ ///
+ ///
+ ///
+ void SetInputSignal(int index, double signalValue);
+
+ ///
+ /// Assigns an array of input signals. If the array is too long then excess signals are ignored.
+ /// If too short then the input neurons with no input keep their pre-existing value.
+ ///
+ ///
+ void SetInputSignals(double[] signalArray);
+
+ ///
+ /// If index is greater than the number of output neurons then we loop back to the first neuron.
+ /// Therefore we return a value for any given index number >=0.
+ ///
+ ///
+ ///
+ float GetOutputSignal(int index);
+
+ ///
+ /// Reset all inter-neuron signals to zero. This is all neurons except the bias neuron.
+ /// Useful when performing successive trials on a network.
+ ///
+ void ClearSignals();
+
+ #region Properties
+
+ int InputNeuronCount
+ {
+ get;
+ }
+
+ int OutputNeuronCount
+ {
+ get;
+ }
+
+ int TotalNeuronCount
+ {
+ get;
+ }
+
+ #endregion
+ }
+}
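
Putting the interface together, a caller treats any INetwork the same way: write the inputs, advance the network (either a fixed number of timesteps or until it relaxes), then read the outputs. A minimal hedged sketch, assuming the network object comes from elsewhere; the wrapper class is hypothetical:

    using SharpNeatLib.NeuralNetwork;

    class NetworkDriverSketch
    {
        static double Evaluate(INetwork net, double a, double b)
        {
            // Start from a clean state so earlier trials cannot leak into this one.
            net.ClearSignals();

            net.SetInputSignal(0, a);
            net.SetInputSignal(1, b);

            // Either run a fixed number of activations...
            net.MultipleSteps(3);

            // ...or keep activating until the signals settle (useful for recurrent networks):
            // bool settled = net.RelaxNetwork(10, 0.01);

            return net.GetOutputSignal(0);
        }
    }
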
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/IntegerFastConcurrentNetwork/IntegerFastConcurrentNetwork.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/IntegerFastConcurrentNetwork/IntegerFastConcurrentNetwork.cs
new file mode 100644
index 000000000..a8f2fc029
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/IntegerFastConcurrentNetwork/IntegerFastConcurrentNetwork.cs
@@ -0,0 +1,269 @@
+using System;
+using System.Collections;
+using System.Collections.Specialized;
+
+using SharpNeatLib.NeatGenome;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.NeuralNetwork
+{
+ ///
+ /// A fast implementation of a network with concurrently activated neurons, that is, each
+ /// neuron's output signal is calculated for a given timestep using the output signals
+ /// from the previous timestep. This then simulates each neuron activating concurrently.
+ ///
+ public class IntegerFastConcurrentNetwork : INetwork
+ {
+ #region Class Variables
+
+// IActivationFunction activationFn;
+
+ // Neurons are ordered with bias and input nodes at the head of the list, then output nodes and
+ // hidden nodes on the array's tail.
+ int[] neuronSignalArray;
+ int[] _neuronSignalArray;
+ IntegerFastConnection[] connectionArray;
+
+ ///
+ /// The number of input neurons. Also the index 1 after the last input neuron.
+ ///
+ int inputNeuronCount;
+ int totalInputNeuronCount;
+ int outputNeuronCount;
+
+ ///
+ /// This is the index of the first hidden neuron in the array (inputNeuronCount + outputNeuronCount).
+ ///
+ int biasNeuronCount;
+
+ #endregion
+
+ #region Constructor
+
+ public IntegerFastConcurrentNetwork(int biasNeuronCount,
+ int inputNeuronCount,
+ int outputNeuronCount,
+ int totalNeuronCount,
+ IntegerFastConnection[] connectionArray)
+ {
+ this.biasNeuronCount = biasNeuronCount;
+ this.inputNeuronCount = inputNeuronCount;
+ this.totalInputNeuronCount = biasNeuronCount + inputNeuronCount;
+ this.outputNeuronCount = outputNeuronCount;
+
+ this.connectionArray = connectionArray;
+
+ //----- Allocate the arrays that make up the neural network.
+ // The neuron signals are initialised to 0 by default. Only bias nodes need setting to 1.
+ neuronSignalArray = new int[totalNeuronCount];
+ _neuronSignalArray = new int[totalNeuronCount];
+
+ for(int i=0; i<biasNeuronCount; i++)
+ neuronSignalArray[i] = 0x1000; // 0x1000 represents 1.0 in the fixed-point signal scheme.
+ }
+
+ #endregion
+
+ public void SingleStep()
+ {
+ // Loop connections. Accumulate each connection's weighted signal into its target neuron's
+ // pre-activation signal (signals and weights both treat 0x1000 as 1.0, hence the rescaling shift).
+ for(int i=0; i<connectionArray.Length; i++)
+ _neuronSignalArray[connectionArray[i].targetNeuronIdx] +=
+ (neuronSignalArray[connectionArray[i].sourceNeuronIdx] * connectionArray[i].weight) >> 12;
+
+ // Loop the neurons, applying an integer approximation of the sigmoid to each pre-activation signal.
+ for(int i=totalInputNeuronCount; i<neuronSignalArray.Length; i++)
+ {
+ int x = _neuronSignalArray[i];
+ int result;
+
+ if( x < -0x800000 )
+ {
+ result = 0;
+ }
+ else if( x < 0 )
+ {
+ // Offset the signal and shift it into a 16-bit working range.
+ int tmp = (x>>8) + 0x8000;
+
+ // Square tmp to generate the curve. max result is 2^30. Expected max output
+ // for this half of the curve is 2^11. 30-11=19, so...
+ result = ((tmp*tmp)>>19);
+ }
+ else if( x < 0x800000 )
+ {
+ // Same thing again except we flip the curve and translate it at the same time
+ // by subtracting the result from 2^12.
+ int tmp = (x>>8) - 0x8000;
+ result = 0x1000 - ((tmp*tmp)>>19);
+ }
+ else
+ {
+ result = 0x1000;
+ }
+
+ neuronSignalArray[i] = result;
+
+ // Take the opportunity to reset the pre-activation signal array.
+ _neuronSignalArray[i]=0;
+ }
+ }
+
+ public void MultipleSteps(int numberOfSteps)
+ {
+ for(int i=0; i<numberOfSteps; i++)
+ SingleStep();
+ }
+
+ ///
+ /// Using RelaxNetwork erodes some of the performance gain of FastConcurrentNetwork because of the slightly
+ /// more complex implementation of the third loop - when compared to SingleStep().
+ ///
+ ///
+ ///
+ ///
+ public bool RelaxNetwork(int maxSteps, double maxAllowedSignalDelta)
+ {
+ bool isRelaxed=false;
+ int intMaxAllowedSignalDelta = (int)(maxAllowedSignalDelta * 0x1000D);
+
+ for(int j=0; j<maxSteps && !isRelaxed; j++)
+ {
+ isRelaxed=true; // Assume relaxed until a neuron's signal delta proves otherwise.
+
+ // Accumulate the weighted connection signals into the pre-activation array (0x1000 == 1.0).
+ for(int i=0; i<connectionArray.Length; i++)
+ _neuronSignalArray[connectionArray[i].targetNeuronIdx] +=
+ (neuronSignalArray[connectionArray[i].sourceNeuronIdx] * connectionArray[i].weight) >> 12;
+
+ // Apply the integer sigmoid approximation and note how far each neuron's signal has moved.
+ for(int i=totalInputNeuronCount; i<neuronSignalArray.Length; i++)
+ {
+ int oldSignal = neuronSignalArray[i];
+ int x = _neuronSignalArray[i];
+ int result;
+
+ if( x < -0x800000 )
+ {
+ result = 0;
+ }
+ else if( x < 0 )
+ {
+ // Offset the signal and shift it into a 16-bit working range.
+ int tmp = (x>>8) + 0x8000;
+
+ // Square tmp to generate the curve. max result is 2^30. Expected max output
+ // for this half of the curve is 2^11. 30-11=19, so...
+ result = ((tmp*tmp)>>19);
+ }
+ else if( x < 0x800000 )
+ {
+ // Same thing again except we flip the curve and translate it at the same time
+ // by subtracting the result from 2^12.
+ int tmp = (x>>8) - 0x8000;
+ result = 0x1000 - ((tmp*tmp)>>19);
+ }
+ else
+ {
+ result = 0x1000;
+ }
+ neuronSignalArray[i] = result;
+
+
+ if(Math.Abs(neuronSignalArray[i]-oldSignal) > intMaxAllowedSignalDelta)
+ isRelaxed=false;
+
+ // Take the opportunity to reset the pre-activation signal array.
+ _neuronSignalArray[i]=0;
+ }
+ }
+
+ return isRelaxed;
+ }
+
+ public void SetInputSignal(int index, double signalValue)
+ {
+ // Scale the signal into our expected range for the integer network.
+ // +-5 -> +-2^31
+ neuronSignalArray[biasNeuronCount + index] = (int)(signalValue * 0x19999999D);
+
+ //neuronSignalArray[biasNeuronCount + index] = (float)signalValue;
+ }
+
+ public void SetInputSignals(double[] signalArray)
+ {
+ // Scale the signal into our expected range for the integer network.
+ // +-5 -> +-2^31
+ // For speed we don't bother with bounds checks.
+ for(int i=0; i<inputNeuronCount; i++)
+ neuronSignalArray[biasNeuronCount + i] = (int)(signalArray[i] * 0x19999999D);
+ }
+
+ public float GetOutputSignal(int index)
+ {
+ // Scale the fixed-point signal back into the range 0 to 1.0
+ return neuronSignalArray[totalInputNeuronCount + index] / 0x1000D;
+ }
+
+ public void ClearSignals()
+ {
+ // Clear signals for input, hidden and output nodes. Only the bias node is untouched.
+ for(int i=biasNeuronCount; i<neuronSignalArray.Length; i++)
+ neuronSignalArray[i]=0;
+ }
+
+ public int InputNeuronCount
+ {
+ get { return inputNeuronCount; }
+ }
+
+ public int OutputNeuronCount
+ {
+ get { return outputNeuronCount; }
+ }
+
+ public int TotalNeuronCount
+ {
+ get { return neuronSignalArray.Length; }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/Xml/XmlNetworkReaderStatic.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/Xml/XmlNetworkReaderStatic.cs
new file mode 100644
index 000000000..f267dbd2f
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/Xml/XmlNetworkReaderStatic.cs
@@ -0,0 +1,114 @@
+using System;
+using System.Collections;
+using System.Xml;
+
+using SharpNeatLib.NeuralNetwork;
+using SharpNeatLib.Xml;
+
+
+namespace SharpNeatLib.NeuralNetwork.Xml
+{
+ public class XmlNetworkReaderStatic
+ {
+ public static ConcurrentNetwork Read(XmlDocument doc)
+ {
+ XmlElement network = (XmlElement)doc.SelectSingleNode("network");
+ if(network==null)
+ throw new Exception("The network XML is missing the root 'network' element.");
+
+ return Read(network);
+ }
+
+ public static ConcurrentNetwork Read(XmlElement xmlNetwork)
+ {
+ return ReadNetwork(xmlNetwork);
+ }
+
+ private static ConcurrentNetwork ReadNetwork(XmlElement xmlNetwork)
+ {
+ //--- Read the activation function id.
+ string activationFnId = XmlUtilities.GetAttributeValue(xmlNetwork, "activation-fn-id");
+ IActivationFunction activationFn = ActivationFunctionFactory.GetActivationFunction(activationFnId);
+
+ // Read the neurons into a list and also into a table keyed on id.
+ Hashtable neuronTable = new Hashtable();
+
+ NeuronList biasNeuronList = new NeuronList();
+ NeuronList inputNeuronList = new NeuronList();
+ NeuronList hiddenNeuronList = new NeuronList();
+ NeuronList outputNeuronList = new NeuronList();
+ NeuronList masterNeuronList = new NeuronList();
+
+ XmlNodeList listNeurons = xmlNetwork.SelectNodes("neurons/neuron");
+ foreach(XmlElement xmlNeuron in listNeurons)
+ {
+ Neuron neuron = ReadNeuron(xmlNeuron);
+ neuronTable.Add(neuron.Id, neuron);
+
+ switch(neuron.NeuronType)
+ {
+ case NeuronType.Bias:
+ biasNeuronList.Add(neuron);
+ break;
+ case NeuronType.Input:
+ inputNeuronList.Add(neuron);
+ break;
+ case NeuronType.Hidden:
+ hiddenNeuronList.Add(neuron);
+ break;
+ case NeuronType.Output:
+ outputNeuronList.Add(neuron);
+ break;
+ }
+ }
+
+ //----- Build a master list of neurons. Neurons must be ordered by type - bias,input,hidden,output.
+ if(biasNeuronList.Count != 1)
+ throw new SharpNeatLib.Xml.XmlException("Neural Network XML must contain exactly 1 bias node.");
+
+ foreach(Neuron neuron in biasNeuronList)
+ masterNeuronList.Add(neuron);
+
+ foreach(Neuron neuron in inputNeuronList)
+ masterNeuronList.Add(neuron);
+
+ foreach(Neuron neuron in hiddenNeuronList)
+ masterNeuronList.Add(neuron);
+
+ foreach(Neuron neuron in outputNeuronList)
+ masterNeuronList.Add(neuron);
+
+ //----- Read Connections and store against target neurons.
+ XmlNodeList listConnections = xmlNetwork.SelectNodes("connections/connection");
+ foreach(XmlElement xmlConnection in listConnections)
+ {
+ Connection connection = ReadConnection(xmlConnection);
+
+ // Store the connection with its target neuron.
+ ((Neuron)neuronTable[connection.TargetNeuronId]).ConnectionList.Add(connection);
+
+ // Bind the connection to its source neuron.
+ connection.SetSourceNeuron((Neuron)neuronTable[connection.SourceNeuronId]);
+ }
+
+ return new ConcurrentNetwork(masterNeuronList);
+ }
+
+ private static Neuron ReadNeuron(XmlElement xmlNeuron)
+ {
+ uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlNeuron, "id"));
+ NeuronType neuronType = XmlUtilities.GetNeuronType(XmlUtilities.GetAttributeValue(xmlNeuron, "type"));
+ string activationFn = XmlUtilities.GetAttributeValue(xmlNeuron, "activationFunction");
+ return new Neuron(ActivationFunctionFactory.GetActivationFunction(activationFn), neuronType, id, 0, 1);
+ }
+
+ private static Connection ReadConnection(XmlElement xmlConnection)
+ {
+ uint sourceNeuronId = uint.Parse(XmlUtilities.GetAttributeValue(xmlConnection, "src-id"));
+ uint targetNeuronId = uint.Parse(XmlUtilities.GetAttributeValue(xmlConnection, "tgt-id"));
+ double weight = double.Parse(XmlUtilities.GetAttributeValue(xmlConnection, "weight"));
+
+ return new Connection(sourceNeuronId, targetNeuronId, weight);
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/NeuralNetwork/Xml/XmlNetworkWriterStatic.cs b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/Xml/XmlNetworkWriterStatic.cs
new file mode 100644
index 000000000..7b116bd1b
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/NeuralNetwork/Xml/XmlNetworkWriterStatic.cs
@@ -0,0 +1,72 @@
+using System;
+using System.Xml;
+
+using SharpNeatLib.NeatGenome;
+using SharpNeatLib.NeuralNetwork;
+using SharpNeatLib.Xml;
+
+namespace SharpNeatLib.NeuralNetwork.Xml
+{
+ public class XmlNetworkWriterStatic
+ {
+ public static void Write(XmlNode parentNode, FloatFastConcurrentNetwork network, IActivationFunction activationFn)
+ {
+ //----- Start writing. Create document root node.
+ XmlElement xmlNetwork = XmlUtilities.AddElement(parentNode, "network");
+ XmlUtilities.AddAttribute(xmlNetwork, "activation-fn-id", activationFn.FunctionId);
+
+ //----- Write Connections.
+ XmlElement xmlConnections = XmlUtilities.AddElement(xmlNetwork, "connections");
+ foreach (FloatFastConnection connectionGene in network.connectionArray)
+ WriteConnection(xmlConnections, connectionGene);
+ }
+
+ public static void Write(XmlNode parentNode, NeatGenome.NeatGenome genome, IActivationFunction activationFn)
+ {
+ //----- Start writing. Create document root node.
+ XmlElement xmlNetwork = XmlUtilities.AddElement(parentNode, "network");
+ XmlUtilities.AddAttribute(xmlNetwork, "activation-fn-id", activationFn.FunctionId);
+
+ //----- Write neurons.
+ XmlElement xmlNeurons = XmlUtilities.AddElement(xmlNetwork, "neurons");
+ foreach(NeuronGene neuronGene in genome.NeuronGeneList)
+ WriteNeuron(xmlNeurons, neuronGene);
+
+ //----- Write Connections.
+ XmlElement xmlConnections = XmlUtilities.AddElement(xmlNetwork, "connections");
+ foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
+ WriteConnection(xmlConnections, connectionGene);
+ }
+
+ #region Private Methods
+
+ private static void WriteNeuron(XmlElement xmlNeurons, NeuronGene neuronGene)
+ {
+ XmlElement xmlNeuron = XmlUtilities.AddElement(xmlNeurons, "neuron");
+
+ XmlUtilities.AddAttribute(xmlNeuron, "id", neuronGene.InnovationId.ToString());
+ XmlUtilities.AddAttribute(xmlNeuron, "type", XmlUtilities.GetNeuronTypeString(neuronGene.NeuronType));
+ XmlUtilities.AddAttribute(xmlNeuron, "activationFunction", neuronGene.ActivationFunction.FunctionId);
+ }
+
+ private static void WriteConnection(XmlElement xmlConnections, ConnectionGene connectionGene)
+ {
+ XmlElement xmlConnection = XmlUtilities.AddElement(xmlConnections, "connection");
+
+ XmlUtilities.AddAttribute(xmlConnection, "src-id", connectionGene.SourceNeuronId.ToString() );
+ XmlUtilities.AddAttribute(xmlConnection, "tgt-id", connectionGene.TargetNeuronId.ToString());
+ XmlUtilities.AddAttribute(xmlConnection, "weight", connectionGene.Weight.ToString());
+ }
+
+ private static void WriteConnection(XmlElement xmlConnections, FloatFastConnection connectionGene)
+ {
+ XmlElement xmlConnection = XmlUtilities.AddElement(xmlConnections, "connection");
+
+ XmlUtilities.AddAttribute(xmlConnection, "src-id", connectionGene.sourceNeuronIdx.ToString());
+ XmlUtilities.AddAttribute(xmlConnection, "tgt-id", connectionGene.targetNeuronIdx.ToString());
+ XmlUtilities.AddAttribute(xmlConnection, "weight", connectionGene.weight.ToString());
+ }
+
+ #endregion
+ }
+}
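
The reader and writer above share one XML layout: a single 'network' element carrying an activation-fn-id attribute, a 'neurons' element whose neuron children have id/type/activationFunction attributes, and a 'connections' element whose connection children have src-id/tgt-id/weight attributes. A hedged round-trip sketch; the genome argument is assumed to come from elsewhere, everything else uses the two classes above:

    using System.Xml;
    using SharpNeatLib.NeuralNetwork;
    using SharpNeatLib.NeuralNetwork.Xml;

    class NetworkXmlRoundTrip
    {
        static ConcurrentNetwork SaveAndReload(SharpNeatLib.NeatGenome.NeatGenome genome,
                                               IActivationFunction activationFn,
                                               string path)
        {
            // Serialise the genome's neurons and connections under a 'network' root element.
            XmlDocument doc = new XmlDocument();
            XmlNetworkWriterStatic.Write(doc, genome, activationFn);
            doc.Save(path);

            // Load it back as a runnable ConcurrentNetwork.
            // Note: the reader requires the genome to contain exactly one bias neuron.
            XmlDocument loaded = new XmlDocument();
            loaded.Load(path);
            return XmlNetworkReaderStatic.Read(loaded);
        }
    }
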
diff --git a/SharpNeatWalker/SharpNeatLib/UtilityClasses/ByteCoord.cs b/SharpNeatWalker/SharpNeatLib/UtilityClasses/ByteCoord.cs
new file mode 100644
index 000000000..576e57402
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/UtilityClasses/ByteCoord.cs
@@ -0,0 +1,22 @@
+using System;
+
+namespace SharpNeatLib
+{
+ public struct ByteCoord
+ {
+ public byte x;
+ public byte y;
+
+ public ByteCoord(byte x, byte y)
+ {
+ this.x = x;
+ this.y = y;
+ }
+
+ public ByteCoord(int x, int y)
+ {
+ this.x = (byte)x;
+ this.y = (byte)y;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/UtilityClasses/CircularBuffer.cs b/SharpNeatWalker/SharpNeatLib/UtilityClasses/CircularBuffer.cs
new file mode 100644
index 000000000..20d14d650
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/UtilityClasses/CircularBuffer.cs
@@ -0,0 +1,158 @@
+using System;
+
+namespace SharpNeatLib
+{
+
+ public class CircularBuffer
+ {
+ public object[] buffer;
+
+ // The index of the previously enqueued item. -1 if buffer is empty.
+ int headIdx;
+
+ // The index of the next item to be dequeued. -1 if buffer is empty.
+ int tailIdx;
+
+ public CircularBuffer(int capacity)
+ {
+ buffer = new object[capacity];
+ headIdx = tailIdx=-1;
+ }
+
+ public int Capacity
+ {
+ get
+ {
+ return buffer.Length;
+ }
+ }
+
+ public bool IsEmpty
+ {
+ get
+ {
+ return headIdx==-1;
+ }
+ }
+
+ public int Length
+ {
+ get
+ {
+ if(headIdx==-1)
+ return 0;
+
+ if(headIdx>tailIdx)
+ return (headIdx-tailIdx)+1;
+
+ if(tailIdx>headIdx)
+ return (buffer.Length-tailIdx) + headIdx+1;
+
+ return 1;
+ }
+ }
+
+ ///
+ /// Enqueue a new item onto the head of the queue, overwriting old values if the buffer overflows.
+ ///
+ ///
+ /// True if the buffer overflowed and an old item was overwritten.
+ public bool Enqueue(object item)
+ {
+ if(headIdx==-1)
+ { // buffer is currently empty.
+ headIdx = tailIdx = 0;
+ buffer[0] = item;
+ return false;
+ }
+
+ // Determine the index to write to.
+ if(++headIdx==buffer.Length)
+ { // Wrap around.
+ headIdx=0;
+ }
+
+ if(headIdx==tailIdx)
+ { // Buffer overflow. Increment tailIdx.
+ if(++tailIdx==buffer.Length)
+ { // Wrap around.
+ tailIdx=0;
+ }
+ buffer[headIdx]=item;
+ return true;
+ }
+
+ buffer[headIdx]=item;
+ return false;
+ }
+
+ ///
+ /// Dequeue an old item from the tail of the queue.
+ ///
+ /// The dequeued item. Throws an exception if the buffer was empty,
+ /// check the buffer's length or IsEmpty property to avoid exceptions.
+ public object Dequeue()
+ {
+ if(tailIdx==-1)
+ { // buffer is currently empty.
+ throw new InvalidOperationException("buffer is empty.");
+ }
+
+ object o = buffer[tailIdx];
+
+ if(tailIdx==headIdx)
+ { // The buffer is now empty.
+ headIdx=tailIdx=-1;
+ return o;
+ }
+
+ if(++tailIdx==buffer.Length)
+ { // Wrap around.
+ tailIdx=0;
+ }
+
+ return o;
+ }
+
+ ///
+ /// Pop an item off the head of the queue.
+ ///
+ /// The popped item. Throws an exception if the buffer was empty.
+ public object Pop()
+ {
+ if(tailIdx==-1)
+ { // buffer is currently empty.
+ throw new InvalidOperationException("buffer is empty.");
+ }
+
+ object o = buffer[headIdx];
+
+ if(tailIdx==headIdx)
+ { // The buffer is now empty.
+ headIdx=tailIdx=-1;
+ return o;
+ }
+
+ if(--headIdx==-1)
+ { // Wrap around.
+ headIdx=buffer.Length-1;
+ }
+
+ return o;
+ }
+
+ ///
+ /// Peek at the item at the head of the queue.
+ ///
+ /// The item at the head of the queue. Throws an exception if the buffer was empty.
+ public object Peek()
+ {
+ if(tailIdx==-1)
+ { // buffer is currently empty.
+ throw new InvalidOperationException("buffer is empty.");
+ }
+
+ return buffer[headIdx];
+ }
+ }
+}
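
CircularBuffer behaves as a bounded deque: Enqueue writes at the head (overwriting the oldest entry and returning true once capacity is exceeded), Dequeue reads from the tail, and Pop/Peek work from the head. A short hedged usage sketch; the demo class is hypothetical:

    using SharpNeatLib;

    class CircularBufferDemo
    {
        static void Main()
        {
            CircularBuffer buffer = new CircularBuffer(3);

            buffer.Enqueue("a");                        // returns false - no overflow yet
            buffer.Enqueue("b");
            buffer.Enqueue("c");
            bool overflowed = buffer.Enqueue("d");      // true - "a" has been overwritten

            System.Console.WriteLine(overflowed);       // True
            System.Console.WriteLine(buffer.Dequeue()); // "b" - oldest surviving item
            System.Console.WriteLine(buffer.Pop());     // "d" - newest item
            System.Console.WriteLine(buffer.Length);    // 1 ("c" is still stored)
        }
    }
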
diff --git a/SharpNeatWalker/SharpNeatLib/UtilityClasses/Coord.cs b/SharpNeatWalker/SharpNeatLib/UtilityClasses/Coord.cs
new file mode 100644
index 000000000..55b7f547f
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/UtilityClasses/Coord.cs
@@ -0,0 +1,20 @@
+using System;
+
+namespace SharpNeatLib
+{
+ ///
+ /// Very similar to a System.Drawing.Point. Except it isn't a struct (which eliminates
+ /// need for boxing) and you don't need a reference to System.Drawing.
+ ///
+ public class Coord
+ {
+ public int x;
+ public int y;
+
+ public Coord(int x, int y)
+ {
+ this.x = x;
+ this.y = y;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/UtilityClasses/DoubleCircularBuffer.cs b/SharpNeatWalker/SharpNeatLib/UtilityClasses/DoubleCircularBuffer.cs
new file mode 100644
index 000000000..7f5020319
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/UtilityClasses/DoubleCircularBuffer.cs
@@ -0,0 +1,180 @@
+using System;
+
+namespace SharpNeatLib
+{
+ public class DoubleCircularBuffer
+ {
+ public double[] buffer;
+
+ // The index of the previously enqueued item. -1 if buffer is empty.
+ int headIdx;
+
+ // The index of the next item to be dequeued. -1 if buffer is empty.
+ int tailIdx;
+
+ public DoubleCircularBuffer(int capacity)
+ {
+ buffer = new double[capacity];
+ headIdx = tailIdx=-1;
+ }
+
+ public int Capacity
+ {
+ get
+ {
+ return buffer.Length;
+ }
+ }
+
+ public bool IsEmpty
+ {
+ get
+ {
+ return headIdx==-1;
+ }
+ }
+
+ public int Length
+ {
+ get
+ {
+ if(headIdx==-1)
+ return 0;
+
+ if(headIdx>tailIdx)
+ return (headIdx-tailIdx)+1;
+
+ if(tailIdx>headIdx)
+ return (buffer.Length-tailIdx) + headIdx+1;
+
+ return 1;
+ }
+ }
+
+ ///
+ /// Gets the double at the specified index in the buffer.
+ ///
+ public double this[int index]
+ {
+ get
+ {
+ if(index>=Length)
+ throw new ArgumentOutOfRangeException();
+
+ index += tailIdx;
+ if(index>=buffer.Length)
+ index-=buffer.Length;
+
+ return buffer[index];
+ }
+ }
+
+ public void Clear()
+ {
+ headIdx = tailIdx = -1;
+ }
+
+ ///
+ /// Enqueue a new item onto the head of the queue, overwriting old values if the buffer overflows.
+ ///
+ ///
+ /// True if the buffer overflowed and an old item was overwritten.
+ public bool Enqueue(double item)
+ {
+ if(headIdx==-1)
+ { // buffer is currently empty.
+ headIdx = tailIdx = 0;
+ buffer[0] = item;
+ return false;
+ }
+
+ // Determine the index to write to.
+ if(++headIdx==buffer.Length)
+ { // Wrap around.
+ headIdx=0;
+ }
+
+ if(headIdx==tailIdx)
+ { // Buffer overflow. Increment tailIdx.
+ if(++tailIdx==buffer.Length)
+ { // Wrap around.
+ tailIdx=0;
+ }
+ buffer[headIdx]=item;
+ return true;
+ }
+
+ buffer[headIdx]=item;
+ return false;
+ }
+
+ ///
+ /// Dequeue an old item from the tail of the queue.
+ ///
+ /// The dequeued item. Throws an exception if the buffer was empty,
+ /// check the buffer's length or IsEmpty property to avoid exceptions.
+ public double Dequeue()
+ {
+ if(tailIdx==-1)
+ { // buffer is currently empty.
+ throw new InvalidOperationException("buffer is empty.");
+ }
+
+ double o = buffer[tailIdx];
+
+ if(tailIdx==headIdx)
+ { // The buffer is now empty.
+ headIdx=tailIdx=-1;
+ return o;
+ }
+
+ if(++tailIdx==buffer.Length)
+ { // Wrap around.
+ tailIdx=0;
+ }
+
+ return o;
+ }
+
+ ///
+ /// Pop an item off the head of the queue.
+ ///
+ /// The popped item. Throws an exception if the buffer was empty.
+ public double Pop()
+ {
+ if(tailIdx==-1)
+ { // buffer is currently empty.
+ throw new InvalidOperationException("buffer is empty.");
+ }
+
+ double o = buffer[headIdx];
+
+ if(tailIdx==headIdx)
+ { // The buffer is now empty.
+ headIdx=tailIdx=-1;
+ return o;
+ }
+
+ if(--headIdx==-1)
+ { // Wrap around.
+ headIdx=buffer.Length-1;
+ }
+
+ return o;
+ }
+
+ ///
+ /// Peek at the item at the head of the queue.
+ ///
+ /// The item at the head of the queue. Throws an exception if the buffer was empty.
+ public double Peek()
+ {
+ if(headIdx==-1)
+ { // buffer is currently empty.
+ throw new InvalidOperationException("buffer is empty.");
+ }
+
+ return buffer[headIdx];
+ }
+ }
+}
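
DoubleCircularBuffer adds an indexer (index 0 is the oldest retained value), which makes it convenient for sliding-window statistics such as a moving average. A hedged sketch; the wrapper class is hypothetical:

    using SharpNeatLib;

    class MovingAverageSketch
    {
        // Pushes a new sample into a fixed-size window and returns the current window mean.
        static double Update(DoubleCircularBuffer window, double sample)
        {
            window.Enqueue(sample); // silently overwrites the oldest sample once the window is full

            double sum = 0.0;
            for (int i = 0; i < window.Length; i++)
                sum += window[i];

            return sum / window.Length;
        }
    }
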
diff --git a/SharpNeatWalker/SharpNeatLib/UtilityClasses/ListItem.cs b/SharpNeatWalker/SharpNeatLib/UtilityClasses/ListItem.cs
new file mode 100644
index 000000000..6b4dfbba3
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/UtilityClasses/ListItem.cs
@@ -0,0 +1,122 @@
+using System;
+
+
+namespace SharpNeatLib
+{
+ ///
+ /// This class is intended to be added to the Items collection of a combo box.
+ ///
+ public class ListItem : IComparable
+ {
+ private string itemCode;
+ private string itemDescription;
+ private object data;
+
+ #region Constructors
+
+ ///
+ /// Default constructor.
+ ///
+ public ListItem() {}
+
+ ///
+ /// Create a new ListItem with the provided itemCode and itemDescription.
+ ///
+ ///
+ ///
+ public ListItem(string itemCode, string itemDescription)
+ {
+ this.ItemCode = itemCode;
+ this.ItemDescription = itemDescription;
+ }
+
+ ///
+ /// Create a new ListItem with the provided itemCode, itemDescription and data.
+ ///
+ ///
+ ///
+ ///
+ public ListItem(string itemCode, string itemDescription, object data)
+ {
+ this.ItemCode = itemCode;
+ this.ItemDescription = itemDescription;
+ this.data = data;
+ }
+
+ #endregion
+
+ #region Properties
+
+ ///
+ /// Gets/Sets the ItemCode.
+ ///
+ public string ItemCode
+ {
+ get
+ {
+ return itemCode;
+ }
+ set
+ {
+ itemCode = (value==null ? "" : value);
+ }
+ }
+
+ ///
+ /// Gets/Sets the ItemDescription.
+ ///
+ public string ItemDescription
+ {
+ get
+ {
+ return itemDescription;
+ }
+ set
+ {
+ itemDescription = (value==null ? "" : value);
+ }
+ }
+
+ ///
+ /// Gets/Sets the Data. Data is declared as Object and is provided so that additional data can be attached to a ListItem object.
+ ///
+ public object Data
+ {
+ get
+ {
+ return data;
+ }
+ set
+ {
+ data = value;
+ }
+ }
+
+ #endregion
+
+
+ #region IComparable
+
+ ///
+ /// Compare ListItems based on the itemCode.
+ ///
+ ///
+ ///
+ public int CompareTo(object obj)
+ {
+ return itemCode.CompareTo(((ListItem)obj).ItemCode);
+ }
+
+ #endregion
+
+
+ ///
+ /// Returns the item's description.
+ ///
+ ///
+ public override string ToString()
+ {
+ return itemDescription;
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/UtilityClasses/Utilities.cs b/SharpNeatWalker/SharpNeatLib/UtilityClasses/Utilities.cs
new file mode 100644
index 000000000..02c3f6423
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/UtilityClasses/Utilities.cs
@@ -0,0 +1,67 @@
+using System;
+using SharpNeatLib.Maths;
+
+namespace SharpNeatLib
+{
+ ///
+ /// Summary description for Utilities.
+ ///
+ public class Utilities
+ {
+ //private static Random random = new Random();
+ private static FastRandom random = new FastRandom();
+
+ // Static/global method for generating random numbers.
+ public static double NextDouble()
+ {
+ return random.NextDouble();
+ }
+
+ public static int Next(int upperBound)
+ {
+ return random.Next(upperBound);
+ }
+
+ public static double LimitRange(double val, double lower, double upper)
+ {
+ val = Math.Min(val, upper);
+ return Math.Max(val, lower);
+ }
+
+ public static void NormalizeValueArray(double targetMin, double targetMax, double[] valueArray)
+ {
+ if(valueArray==null || valueArray.Length==0)
+ return;
+
+ if(targetMin>=targetMax)
+ throw new ArgumentException();
+
+ // Scan the array and make note of the min and max values.
+ double min = valueArray[0];
+ double max = valueArray[0];
+
+ for(int i=1; i<valueArray.Length; i++)
+ {
+ if(valueArray[i]<min)
+ min = valueArray[i];
+ else if(valueArray[i]>max)
+ max = valueArray[i];
+ }
+
+ // Now scale/translate the data into the target range.
+ double range = max-min;
+ double targetRange = targetMax-targetMin;
+
+ if(range>0)
+ {
+ for(int i=0; i<valueArray.Length; i++)
+ valueArray[i] = ((valueArray[i]-min)/range)*targetRange + targetMin;
+ }
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Xml/XmlException.cs b/SharpNeatWalker/SharpNeatLib/Xml/XmlException.cs
new file mode 100644
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Xml/XmlException.cs
+using System;
+
+namespace SharpNeatLib.Xml
+{
+ ///
+ /// Summary description for XmlException.
+ ///
+ public class XmlException : System.Exception
+ {
+ public XmlException() : base()
+ {
+ }
+
+ public XmlException(string message) : base(message)
+ {
+ }
+ }
+}
diff --git a/SharpNeatWalker/SharpNeatLib/Xml/XmlUtilities.cs b/SharpNeatWalker/SharpNeatLib/Xml/XmlUtilities.cs
new file mode 100644
index 000000000..ccd30e20a
--- /dev/null
+++ b/SharpNeatWalker/SharpNeatLib/Xml/XmlUtilities.cs
@@ -0,0 +1,118 @@
+using System;
+using System.Xml;
+using SharpNeatLib.NeuralNetwork;
+
+namespace SharpNeatLib.Xml
+{
+ public class XmlUtilities
+ {
+ #region Public Static Methods [General Xml Reader/Writer Support]
+
+ public static void AddAttribute(XmlElement parent, string name, string attrValue)
+ {
+ XmlDocument doc = parent.OwnerDocument;
+
+ XmlAttribute attr = doc.CreateAttribute(name);
+ attr.Value = attrValue;
+ parent.Attributes.Append(attr);
+ }
+
+ public static XmlElement AddElement(XmlNode parentNode, string name)
+ {
+ XmlDocument doc;
+ if(parentNode is XmlDocument)
+ doc = (XmlDocument)parentNode;
+ else
+ doc = parentNode.OwnerDocument;
+
+ XmlElement elem = doc.CreateElement(name);
+ parentNode.AppendChild(elem);
+
+ return elem;
+ }
+
+ static public string GetAttributeValue(XmlNode xmlNode, string attributeName)
+ {
+ return GetAttributeValue(xmlNode, attributeName, true);
+ }
+
+ static public string GetAttributeValue(XmlNode xmlNode, string attributeName, bool mandatory)
+ {
+ XmlAttribute attr = GetAttribute(xmlNode, attributeName, mandatory);
+
+ if(attr==null)
+ return "";
+ else
+ return attr.Value;
+ }
+
+
+ static public XmlAttribute GetAttribute(XmlNode xmlNode, string attributeName)
+ {
+ return GetAttribute(xmlNode, attributeName, true);
+ }
+
+ static public XmlAttribute GetAttribute(XmlNode xmlNode, string attributeName, bool mandatory)
+ {
+ XmlAttribute attr = (XmlAttribute)xmlNode.Attributes.GetNamedItem(attributeName);
+
+ if(attr==null)
+ {
+ if(mandatory)
+ throw new Exception("Missing mandatory '" + attributeName + "' attribute on '" + xmlNode.LocalName + "' element"); //TODO: tidy up exception.
+ else
+ return null;
+ }
+
+ return attr;
+ }
+
+ #endregion
+
+ #region Public Static Methods [Type Conversion]
+
+ public static string GetNeuronTypeString(NeuronType type)
+ {
+ switch(type)
+ {
+ case NeuronType.Bias:
+ return "bias";
+ case NeuronType.Hidden:
+ return "hid";
+ case NeuronType.Input:
+ return "in";
+ case NeuronType.Output:
+ return "out";
+ default:
+ return string.Empty;
+ }
+ }
+
+ public static NeuronType GetNeuronType(string typeIdentifier)
+ {
+ if(typeIdentifier=="bias")
+ {
+ return NeuronType.Bias;
+ }
+ else if(typeIdentifier=="hid")
+ {
+ return NeuronType.Hidden;
+ }
+ else if(typeIdentifier=="in")
+ {
+ return NeuronType.Input;
+ }
+ else if(typeIdentifier=="out")
+ {
+ return NeuronType.Output;
+ }
+ else
+ {
+ throw new XmlException("Unrecognised neuron type identifier - '" + typeIdentifier + "'");
+ }
+ }
+
+ #endregion
+
+ }
+}