// Ensure results are reproducible
Accord.Math.Random.Generator.Seed = 0;

// Example binary data: the XOR problem, repeated four times
double[][] inputs =
{
    new double[] { -1, -1 }, new double[] {  1, -1 },
    new double[] { -1,  1 }, new double[] {  1,  1 },
    new double[] { -1, -1 }, new double[] {  1, -1 },
    new double[] { -1,  1 }, new double[] {  1,  1 },
    new double[] { -1, -1 }, new double[] {  1, -1 },
    new double[] { -1,  1 }, new double[] {  1,  1 },
    new double[] { -1, -1 }, new double[] {  1, -1 },
    new double[] { -1,  1 }, new double[] {  1,  1 },
};

// Expected XOR outputs for the inputs above
int[] xor =
{
    -1,  1,  1, -1,
    -1,  1,  1, -1,
    -1,  1,  1, -1,
    -1,  1,  1, -1,
};

// Create a grid search wrapped around k-fold cross-validation
var gscv = GridSearch<double[], int>.CrossValidate(

    // The candidate values for each hyper-parameter
    ranges: new
    {
        Complexity = GridSearch.Values(0.00000001, 5.20, 0.30, 0.50),
        Degree = GridSearch.Values(1, 10, 2, 3, 4, 5),
        Constant = GridSearch.Values(0, 1, 2),
    },

    // How to create a learning algorithm from a given parameter combination
    learner: (p, ss) => new SequentialMinimalOptimization<Polynomial>
    {
        Complexity = p.Complexity,
        Kernel = new Polynomial(p.Degree, p.Constant)
    },

    // How to fit a model using the learning algorithm created above
    fit: (teacher, x, y, w) => teacher.Learn(x, y, w),

    // How to measure the performance of a learned model
    loss: (actual, expected, r) => new ZeroOneLoss(expected).Loss(actual),

    folds: 3 // use k = 3 in k-fold cross-validation
);

// Optionally restrict the degree of parallelism
gscv.ParallelOptions.MaxDegreeOfParallelism = 1;

// Run the search
var result = gscv.Learn(inputs, xor);

// The cross-validation result obtained with the best parameter combination
var crossValidation = result.BestModel;

// Estimated error of the best model
double bestError = result.BestModelError;

// Training and validation statistics of the best model
double trainError = result.BestModel.Training.Mean;
double trainErrorVar = result.BestModel.Training.Variance;
double valError = result.BestModel.Validation.Mean;
double valErrorVar = result.BestModel.Validation.Variance;

// The best hyper-parameter values found by the search
double bestC = result.BestParameters.Complexity;
double bestDegree = result.BestParameters.Degree;
double bestConstant = result.BestParameters.Constant;
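The search above only reports the best hyper-parameter values and their cross-validated error; it does not by itself yield a classifier for new inputs. The following is a minimal follow-up sketch (not part of the original sample), assuming the same Accord.NET types used above (SequentialMinimalOptimization&lt;Polynomial&gt;, Polynomial, ZeroOneLoss), showing how the best values could be used to train one final machine on the full data set. Note that bestDegree is stored as a double above, so it is cast back to int for the Polynomial constructor.

// Follow-up sketch: train a final machine with the best hyper-parameters
var best = new SequentialMinimalOptimization<Polynomial>()
{
    Complexity = bestC,
    Kernel = new Polynomial((int)bestDegree, bestConstant)
};

// Learn a support vector machine on the complete XOR data set
var svm = best.Learn(inputs, xor);

// Classify the training inputs; 'true' corresponds to the +1 class
bool[] predicted = svm.Decide(inputs);

// Fraction of misclassified samples
double finalError = new ZeroOneLoss(xor).Loss(predicted);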