diff --git a/src/Directory.Build.props b/src/Directory.Build.props
index 4fbef7f5..d00433c4 100644
--- a/src/Directory.Build.props
+++ b/src/Directory.Build.props
@@ -1,8 +1,8 @@
 <Project>
   <PropertyGroup>
-    <Version>0.31.5.0</Version>
-    <AssemblyVersion>0.31.5.0</AssemblyVersion>
-    <FileVersion>0.31.5.0</FileVersion>
+    <Version>0.31.6.0</Version>
+    <AssemblyVersion>0.31.6.0</AssemblyVersion>
+    <FileVersion>0.31.6.0</FileVersion>
     <NeutralLanguage>en</NeutralLanguage>
     <Authors>Mads Dabros</Authors>
     <Copyright>Copyright © Mads Dabros 2014</Copyright>
diff --git a/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs b/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs
index 2a330621..6f38a8dc 100644
--- a/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs
+++ b/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs
@@ -1,5 +1,6 @@
 using Microsoft.VisualStudio.TestTools.UnitTesting;
 using SharpLearning.Containers.Matrices;
+using System;
 
 namespace SharpLearning.Containers.Test.Matrices
 {
@@ -101,7 +102,7 @@ public void F64Matrix_Rows_Predefined()
         {
             var sut = CreateFeatures();
             var actual = new F64Matrix(2, 3);
-            sut.Rows(new int [] { 0, 2}, actual);
+            sut.Rows(new int[] { 0, 2 }, actual);
 
             var expected = GetExpectedRowSubMatrix();
             Assert.IsTrue(expected.Equals(actual));
@@ -128,6 +129,19 @@ public void F64Matrix_Columns_predefined()
             Assert.IsTrue(expected.Equals(actual));
         }
 
+        [TestMethod]
+        public void F64Matrix_Implicit_Conversion()
+        {
+            Func<double[][], F64Matrix> converter = m => m;
+
+            var actual = converter(new double[][] { new double[] { 0, 1 }, new double[] { 2, 3 } });
+
+            Assert.AreEqual(0, actual.At(0, 0));
+            Assert.AreEqual(1, actual.At(0, 1));
+            Assert.AreEqual(2, actual.At(1, 0));
+            Assert.AreEqual(3, actual.At(1, 1));
+        }
+
         double[] GetExpectedColumn()
         {
             return new double[3] { 2, 20, 200 };
diff --git a/src/SharpLearning.Containers/Extensions/ArrayExtensions.cs b/src/SharpLearning.Containers/Extensions/ArrayExtensions.cs
index 4121ed61..4801217a 100644
--- a/src/SharpLearning.Containers/Extensions/ArrayExtensions.cs
+++ b/src/SharpLearning.Containers/Extensions/ArrayExtensions.cs
@@ -195,10 +195,10 @@ public static void SortWith<TKey, TValues>(this TKey[] keys, Interval1D interval
         /// </summary>
         /// <param name="source"></param>
         /// <param name="interval"></param>
-        /// <param name="distination"></param>
-        public static void CopyTo<T>(this T[] source, Interval1D interval, T[] distination)
+        /// <param name="destination"></param>
+        public static void CopyTo<T>(this T[] source, Interval1D interval, T[] destination)
         {
-            Array.Copy(source, interval.FromInclusive, distination, interval.FromInclusive, interval.Length);
+            Array.Copy(source, interval.FromInclusive, destination, interval.FromInclusive, interval.Length);
         }
         /// <summary>
         /// 
@@ -225,7 +225,7 @@ public static void IndexedCopy<T>(this int[] indices, T[] source, Interval1D int
         /// <param name="source"></param>
         /// <param name="interval"></param>
         /// <param name="destination"></param>
-        public static void IndexedCopy(this int[] indices, F64MatrixColumnView source, 
+        public static void IndexedCopy(this int[] indices, F64MatrixColumnView source,
             Interval1D interval, double[] destination)
         {
             for (int i = interval.FromInclusive; i < interval.ToExclusive; i++)
@@ -354,8 +354,8 @@ public static double ScoreAtPercentile(this double[] values, double percentile)
             var index = percentile * (values.Length - 1.0);
             var i = (int)index;
             var diff = index - i;
-            
-            if(diff != 0.0)
+
+            if (diff != 0.0)
             {
                 var j = i + 1;
                 var v1 = array[i];
@@ -363,13 +363,23 @@ public static double ScoreAtPercentile(this double[] values, double percentile)
                var v2 = array[j];
                var w2 = index - i;
-                
+
                return (v1 * w1 + v2 * w2) / (w1 + w2);
            }
 
            return array[i];
        }
 
+        /// <summary>
+        /// Converts an array of arrays to an F64Matrix
+        /// </summary>
+        /// <param name="m"></param>
+        /// <returns></returns>
+        public static F64Matrix ToF64Matrix(this double[][] m)
+        {
+            return ToF64Matrix(m.ToList());
+        }
+
         /// <summary>
         /// Converts a list of arrays to an F64Matrix
         /// </summary>
@@ -456,7 +466,7 @@ public static int[] StratifiedIndexSampling<T>(this T[] data, int sampleSize, Ra
        {
            if (data.Length < sampleSize)
            {
-                throw new ArgumentException("SampleSize " + sampleSize + 
+                throw new ArgumentException("SampleSize " + sampleSize +
                    " is larger than data size " + data.Length);
            }
 
@@ -467,7 +477,7 @@ public static int[] StratifiedIndexSampling<T>(this T[] data, int sampleSize, Ra
            {
                if (kvp.Value == 0)
                {
-                    throw new ArgumentException("Sample size is too small for value: " + 
+                    throw new ArgumentException("Sample size is too small for value: " +
                        kvp.Key + " to be included.");
                }
            }
@@ -477,12 +487,12 @@ public static int[] StratifiedIndexSampling<T>(this T[] data, int sampleSize, Ra
            indices.Shuffle(random);
 
            var currentSampleCount = requiredSamples.ToDictionary(k => k.Key, k => 0);
-            
+
            // might be slightly different than the specified depending on data distribution
            var actualSampleSize = requiredSamples.Select(s => s.Value).Sum();
-            
+
            // if actual sample size is different from specified add/subtract difference from largest class
-            if(actualSampleSize != sampleSize)
+            if (actualSampleSize != sampleSize)
            {
                var diff = sampleSize - actualSampleSize;
                var largestClassKey = requiredSamples.OrderByDescending(s => s.Value).First().Key;
@@ -491,12 +501,12 @@ public static int[] StratifiedIndexSampling<T>(this T[] data, int sampleSize, Ra
            var sampleIndices = new int[sampleSize];
            var sampleIndex = 0;
-            
+
            for (int i = 0; i < data.Length; i++)
            {
                var index = indices[i];
                var value = data[index];
-                if(currentSampleCount[value] != requiredSamples[value])
+                if (currentSampleCount[value] != requiredSamples[value])
                {
                    sampleIndices[sampleIndex++] = index;
                    currentSampleCount[value]++;
                }
@@ -531,15 +541,15 @@ public static int[] StratifiedIndexSampling<T>(this T[] data, int sampleSize, Ra
        /// <returns></returns>
        public static int[] StratifiedIndexSampling<T>(this T[] data, int sampleSize, int[] dataIndices, Random random)
        {
-            if (dataIndices.Length < sampleSize) 
+            if (dataIndices.Length < sampleSize)
            {
-                throw new ArgumentException("SampleSize " + sampleSize + 
+                throw new ArgumentException("SampleSize " + sampleSize +
                    " is larger than dataIndices size " + dataIndices.Length);
            }
 
-            if (data.Length < dataIndices.Length) 
+            if (data.Length < dataIndices.Length)
            {
-                throw new ArgumentException("dataIndices " + dataIndices.Length + 
+                throw new ArgumentException("dataIndices " + dataIndices.Length +
                    " is larger than data size " + data.Length);
            }
 
@@ -550,7 +560,7 @@ public static int[] StratifiedIndexSampling<T>(this T[] data, int sampleSize, in
            {
                if (kvp.Value == 0)
                {
-                    throw new ArgumentException("Sample size is too small for value: " + 
+                    throw new ArgumentException("Sample size is too small for value: " +
                        kvp.Key + " to be included.");
                }
            }
@@ -558,7 +568,7 @@ public static int[] StratifiedIndexSampling<T>(this T[] data, int sampleSize, in
            var currentSampleCount = requiredSamples.ToDictionary(k => k.Key, k => 0);
            // might be slightly different than the specified depending on data distribution
            var actualSampleSize = requiredSamples.Select(s => s.Value).Sum();
-            
+
            // if actual sample size is different from specified add/subtract difference from largest class
            if (actualSampleSize != sampleSize)
            {
@@ -584,7 +594,7 @@ public static int[] StratifiedIndexSampling<T>(this T[] data, int sampleSize, in
                    currentSampleCount[value]++;
                }
 
-                if(sampleIndex == sampleSize)
+                if (sampleIndex == sampleSize)
                {
                    break;
                }
diff --git a/src/SharpLearning.Containers/Matrices/F64Matrix.cs b/src/SharpLearning.Containers/Matrices/F64Matrix.cs
index bfe60781..c6e578b2 100644
--- a/src/SharpLearning.Containers/Matrices/F64Matrix.cs
+++ b/src/SharpLearning.Containers/Matrices/F64Matrix.cs
@@ -1,12 +1,14 @@
 using System;
 using System.Linq;
 using SharpLearning.Containers.Views;
+using SharpLearning.Containers.Extensions;
 
 namespace SharpLearning.Containers.Matrices
 {
     /// <summary>
     /// Matrix of doubles
     /// </summary>
+    /// Can be implicitly converted from double[][]
     public sealed unsafe class F64Matrix : IMatrix<double>, IEquatable<F64Matrix>
     {
         double[] m_featureArray;
@@ -301,5 +303,7 @@ public override int GetHashCode()
                return hash;
            }
        }
+
+        public static implicit operator F64Matrix(double[][] b) => b.ToF64Matrix();
    }
 }
diff --git a/src/SharpLearning.Neural.Test/Learners/ClassificationNeuralNetLearnerTest.cs b/src/SharpLearning.Neural.Test/Learners/ClassificationNeuralNetLearnerTest.cs
index 7caff2d6..d2a3631f 100644
--- a/src/SharpLearning.Neural.Test/Learners/ClassificationNeuralNetLearnerTest.cs
+++ b/src/SharpLearning.Neural.Test/Learners/ClassificationNeuralNetLearnerTest.cs
@@ -20,7 +20,34 @@ public void ClassificationNeuralNetLearner_Learn()
            var numberOfClasses = 5;
 
            var random = new Random(32);
-            var (observations, targets) = CreateData(numberOfObservations, 
+            var (observations, targets) = CreateData(numberOfObservations,
+                numberOfFeatures, numberOfClasses, random);
+
+            var net = new NeuralNet();
+            net.Add(new InputLayer(numberOfFeatures));
+            net.Add(new DenseLayer(10));
+            net.Add(new SvmLayer(numberOfClasses));
+
+            var sut = new ClassificationNeuralNetLearner(net, new AccuracyLoss());
+            var model = sut.Learn(observations, targets);
+
+            var predictions = model.Predict(observations);
+
+            var evaluator = new TotalErrorClassificationMetric();
+            var actual = evaluator.Error(targets, predictions);
+
+            Assert.AreEqual(0.762, actual);
+        }
+
+        [TestMethod]
+        public void ClassificationNeuralNetLearner_Learn_Array()
+        {
+            var numberOfObservations = 500;
+            var numberOfFeatures = 5;
+            var numberOfClasses = 5;
+
+            var random = new Random(32);
+            var (observations, targets) = CreateArrayData(numberOfObservations,
                numberOfFeatures, numberOfClasses, random);
 
            var net = new NeuralNet();
@@ -48,10 +75,10 @@ public void ClassificationNeuralNetLearner_Learn_Early_Stopping()
 
            var random = new Random(32);
 
-            var (observations, targets) = CreateData(numberOfObservations, 
+            var (observations, targets) = CreateData(numberOfObservations,
                numberOfFeatures, numberOfClasses, random);
 
-            var (validationObservations, validationTargets) = CreateData(numberOfObservations, 
+            var (validationObservations, validationTargets) = CreateData(numberOfObservations,
                numberOfFeatures, numberOfClasses, random);
 
            var net = new NeuralNet();
@@ -92,5 +119,16 @@ public void ClassificationNeuralNetLearner_Constructor_Throw_On_Wrong_OutputLaye
 
            return (observations, targets);
        }
+
+        (double[][] observations, double[] targets) CreateArrayData(
+            int numberOfObservations, int numberOfFeatures, int numberOfClasses, Random random)
+        {
+            var observations = Enumerable.Range(0, numberOfObservations).Select(i => Enumerable.Range(0, numberOfFeatures)
+                .Select(ii => random.NextDouble()).ToArray()).ToArray();
+            var targets = Enumerable.Range(0, numberOfObservations)
+                .Select(i => (double)random.Next(0, numberOfClasses)).ToArray();
+
+            return (observations, targets);
+        }
    }
 }
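
Usage note (not part of the diff above): the implicit operator added to F64Matrix lets a double[][] be passed anywhere an F64Matrix is expected, for example directly to a learner's Learn method, which is what the new ClassificationNeuralNetLearner_Learn_Array test exercises. A minimal sketch of the conversion itself follows; variable names are illustrative.

    using System;
    using SharpLearning.Containers.Matrices;
    using SharpLearning.Containers.Extensions;

    class Example
    {
        static void Main()
        {
            // Jagged array: two rows, two columns.
            double[][] rows = { new double[] { 0, 1 }, new double[] { 2, 3 } };

            // Implicit conversion via the new operator (calls ToF64Matrix internally).
            F64Matrix matrix = rows;

            // The explicit extension-method equivalent added in ArrayExtensions.
            F64Matrix same = rows.ToF64Matrix();

            Console.WriteLine(matrix.At(1, 0)); // prints 2
        }
    }

Since ToF64Matrix copies the jagged array into a new matrix, later mutations of the source array should not affect the converted F64Matrix.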