From eae2947a55df82213e5873281dc8910fc0f71d0a Mon Sep 17 00:00:00 2001
From: Wei-Sheng Chin
Date: Thu, 29 Nov 2018 10:06:16 -0800
Subject: [PATCH 01/12] Remove auto-cache mechanism

---
 .../Training/TrainerEstimatorBase.cs          | 15 +++++----------
 1 file changed, 5 insertions(+), 10 deletions(-)

diff --git a/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs b/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs
index 1e49c32ed3..647d928393 100644
--- a/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs
+++ b/src/Microsoft.ML.Data/Training/TrainerEstimatorBase.cs
@@ -132,21 +132,16 @@ protected virtual void CheckLabelCompatible(SchemaShape.Column labelCol)
 
         protected TTransformer TrainTransformer(IDataView trainSet,
             IDataView validationSet = null, IPredictor initPredictor = null)
         {
-            var cachedTrain = Info.WantCaching ? new CacheDataView(Host, trainSet, prefetch: null) : trainSet;
+            var trainRoleMapped = MakeRoles(trainSet);
 
-            var trainRoles = MakeRoles(cachedTrain);
-
-            RoleMappedData validRoles;
+            RoleMappedData validRoleMapped;
 
             if (validationSet == null)
-                validRoles = null;
+                validRoleMapped = null;
             else
-            {
-                var cachedValid = Info.WantCaching ? new CacheDataView(Host, validationSet, prefetch: null) : validationSet;
-                validRoles = MakeRoles(cachedValid);
-            }
+                validRoleMapped = MakeRoles(validationSet);
 
-            var pred = TrainModelCore(new TrainContext(trainRoles, validRoles, null, initPredictor));
+            var pred = TrainModelCore(new TrainContext(trainRoleMapped, validRoleMapped, null, initPredictor));
             return MakeTransformer(pred, trainSet.Schema);
         }
 

From 426514c974bb9ee3c7b07f804081fdbd47481850 Mon Sep 17 00:00:00 2001
From: Wei-Sheng Chin
Date: Mon, 3 Dec 2018 16:08:56 -0800
Subject: [PATCH 02/12] Fix tests

---
 src/Microsoft.ML.Data/StaticPipe/DataView.cs  | 14 ++++++++++++
 src/Microsoft.ML.Data/StaticPipe/Estimator.cs |  9 ++++++++
 .../Training.cs                               |  5 +++--
 .../FeatureContributionTests.cs               | 22 ++++++++++---------
 .../Scenarios/Api/TestApi.cs                  |  6 ++++-
 test/Microsoft.ML.Tests/Scenarios/OvaTest.cs  |  4 ++--
 .../MatrixFactorizationTests.cs               |  5 ++++-
 7 files changed, 49 insertions(+), 16 deletions(-)

diff --git a/src/Microsoft.ML.Data/StaticPipe/DataView.cs b/src/Microsoft.ML.Data/StaticPipe/DataView.cs
index 153623c5cc..28d1621380 100644
--- a/src/Microsoft.ML.Data/StaticPipe/DataView.cs
+++ b/src/Microsoft.ML.Data/StaticPipe/DataView.cs
@@ -8,6 +8,7 @@
 using Microsoft.ML.StaticPipe.Runtime;
 using System.Collections.Generic;
 using System;
+using System.Linq;
 
 namespace Microsoft.ML.StaticPipe
 {
@@ -23,6 +24,19 @@ internal DataView(IHostEnvironment env, IDataView view, StaticSchemaShape shape)
             AsDynamic = view;
             Shape.Check(Env, AsDynamic.Schema);
         }
+
+        /// <summary>
+        /// This function returns a <see cref="DataView{TShape}"/> whose columns are all cached in memory.
+        /// The returned <see cref="DataView{TShape}"/> is almost the same as the source one;
+        /// the only differences are cache-related properties.
+        /// </summary>
+        public DataView<TShape> Cache()
+        {
+            // Generate all column indexes in the source data.
+            var prefetched = Enumerable.Range(0, AsDynamic.Schema.ColumnCount).ToArray();
+            // Create a cached version of the source data by caching all columns.
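+            // A usage sketch (hypothetical variable names; the static-pipeline test added in a later
+            // patch of this series does exactly this):
+            //     var data = reader.Read(dataSource);   // columns are re-read from the source on every pass
+            //     var cached = data.Cache();            // subsequent passes read the cached columns from memory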
+            return new DataView<TShape>(Env, new CacheDataView(Env, AsDynamic, prefetched), Shape);
+        }
     }
 
     public static class DataViewExtensions
diff --git a/src/Microsoft.ML.Data/StaticPipe/Estimator.cs b/src/Microsoft.ML.Data/StaticPipe/Estimator.cs
index 1575a0a2f1..c048f4113e 100644
--- a/src/Microsoft.ML.Data/StaticPipe/Estimator.cs
+++ b/src/Microsoft.ML.Data/StaticPipe/Estimator.cs
@@ -77,5 +77,14 @@ string NameMap(PipelineColumn col)
             return new Estimator(Env, est, _inShape, newOut);
         }
     }
+
+        /// 
+        /// Cache data produced in memory by this estimator. It may append an extra estimator the this estimator
+        /// for caching and return the new one.
+        /// 
+        public Estimator AppendCacheCheckpoint()
+        {
+            return new Estimator(Env, AsDynamic.AppendCacheCheckpoint(Env), _inShape, Shape);
+        }
 }
 }
diff --git a/test/Microsoft.ML.StaticPipelineTesting/Training.cs b/test/Microsoft.ML.StaticPipelineTesting/Training.cs
index 83836447e9..f5e5b92ba0 100644
--- a/test/Microsoft.ML.StaticPipelineTesting/Training.cs
+++ b/test/Microsoft.ML.StaticPipelineTesting/Training.cs
@@ -716,8 +716,9 @@ public void KMeans()
             KMeansPredictor pred = null;
 
             var est = reader.MakeNewEstimator()
-                .Append(r => (label: r.label.ToKey(), r.features))
-                .Append(r => (r.label, r.features, preds: env.Clustering.Trainers.KMeans(r.features, clustersCount: 3, onFit: p => pred = p, advancedSettings: s => s.NumThreads = 1)));
+                .AppendCacheCheckpoint()
+                .Append(r => (label: r.label.ToKey(), r.features))
+                .Append(r => (r.label, r.features, preds: env.Clustering.Trainers.KMeans(r.features, clustersCount: 3, onFit: p => pred = p, advancedSettings: s => s.NumThreads = 1)));
 
             var pipe = reader.Append(est);
 
diff --git a/test/Microsoft.ML.Tests/FeatureContributionTests.cs b/test/Microsoft.ML.Tests/FeatureContributionTests.cs
index 9229c55a08..7cbd452851 100644
--- a/test/Microsoft.ML.Tests/FeatureContributionTests.cs
+++ b/test/Microsoft.ML.Tests/FeatureContributionTests.cs
@@ -71,9 +71,10 @@ public void TestFeatureImportance()
             var srcDV = bldr.GetDataView();
 
             var pipeline = ML.Transforms.Concatenate("Features", "X1", "X2Important", "X3", "X4Rand")
+                .AppendCacheCheckpoint(ML)
                 .Append(ML.Transforms.Normalize("Features"));
             var data = pipeline.Fit(srcDV).Transform(srcDV);
-            var model = ML.Regression.Trainers.OnlineGradientDescent().Fit(data);
+            var model = ML.Regression.Trainers.OrdinaryLeastSquares().Fit(data);
             var args = new FeatureContributionCalculationTransform.Arguments()
             {
                 Bottom = 10,
@@ -85,19 +86,20 @@
             var enumerator = output.AsEnumerable<ScoreAndContribution>(Env, true).GetEnumerator();
             ScoreAndContribution row = null;
             var expectedValues = new List<float[]>();
-            expectedValues.Add(new float[4] { 0.15640761F, 1, 0.155862764F, 0.07276783F });
-            expectedValues.Add(new float[4] { 0.09507586F, 1, 0.1835608F, 0.0437548943F });
-            expectedValues.Add(new float[4] { 0.297142357F, 1, 0.2855884F, 0.193529665F });
-            expectedValues.Add(new float[4] { 0.45465675F, 0.8805887F, 0.4031663F, 1 });
-            expectedValues.Add(new float[4] { 0.0595234372F, 0.99999994F, 0.349647522F, 0.137912869F });
+            expectedValues.Add(new float[4] { 0.06319684F, 1, 0.1386623F, 4.46209469E-06F });
+            expectedValues.Add(new float[4] { 0.03841561F, 1, 0.1633037F, 2.68303256E-06F });
+            expectedValues.Add(new float[4] { 0.12006103F, 1, 0.254072F, 1.18671605E-05F });
+            expectedValues.Add(new float[4] { 0.20861618F, 0.99999994F, 0.407312155F, 6.963478E-05F });
+            expectedValues.Add(new float[4] { 0.024050576F, 0.99999994F, 0.31106182F, 8.456762E-06F });
             int index = 0;
             while (enumerator.MoveNext()
&& index < expectedValues.Count)
             {
                 row = enumerator.Current;
-                Assert.True(row.FeatureContributions[0] == expectedValues[index][0]);
-                Assert.True(row.FeatureContributions[1] == expectedValues[index][1]);
-                Assert.True(row.FeatureContributions[2] == expectedValues[index][2]);
-                Assert.True(row.FeatureContributions[3] == expectedValues[index++][3]);
+                // We set precision to 6 because a single-precision float only carries about 7 significant digits.
+                Assert.Equal(expectedValues[index][0], row.FeatureContributions[0], 6);
+                Assert.Equal(expectedValues[index][1], row.FeatureContributions[1], 6);
+                Assert.Equal(expectedValues[index][2], row.FeatureContributions[2], 6);
+                Assert.Equal(expectedValues[index++][3], row.FeatureContributions[3], 6);
             }
 
             Done();
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs
index b3086a68a1..a907f8dd70 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs
@@ -177,7 +177,11 @@ public void TrainAveragedPerceptronWithCache()
                 (i, s) => true,
                 s => { globalCounter++; });
 
-            new AveragedPerceptronTrainer(env, "Label", "Features", numIterations: 2).Fit(xf).Transform(xf);
+            // The baseline result of this test was generated with everything cached in memory. Now that auto-caching
+            // has been removed, an explicit caching step is required to keep this test passing.
+            var cached = env.Data.Cache(xf);
+
+            new AveragedPerceptronTrainer(env, "Label", "Features", numIterations: 2).Fit(cached).Transform(cached);
 
             // Make sure there were 2 cursoring events.
             Assert.Equal(1, globalCounter);
diff --git a/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs b/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs
index 0c7d436008..5b86785ff8 100644
--- a/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs
@@ -133,10 +133,10 @@ public void OvaLinearSvm()
             });
 
             // Data
-            var data = reader.Read(GetDataPath(dataPath));
+            var data = mlContext.Data.Cache(reader.Read(GetDataPath(dataPath)));
 
             // Pipeline
-            var pipeline = new Ova(mlContext, new LinearSvm(mlContext), useProbabilities: false);
+            var pipeline = new Ova(mlContext, new LinearSvm(mlContext, numIterations: 100), useProbabilities: false);
 
             var model = pipeline.Fit(data);
             var predictions = model.Transform(data);
diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs
index f94400c9fc..ce403b548a 100644
--- a/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs
+++ b/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs
@@ -379,7 +379,10 @@ public void OneClassMatrixFactorizationInMemoryDataZeroBaseIndex()
             // factorization problem, unspecified matrix elements are all a constant provided by user. If that constant is 0.15,
             // the following list means a 3-by-2 training matrix with elements:
             // (0, 0, 1), (1, 1, 1), (0, 2, 1), (0, 1, 0.15), (1, 0, 0.15), (1, 2, 0.15).
-            // because matrix elements at (0, 1), (1, 0), and (1, 2) are not specified.
+            // because matrix elements at (0, 1), (1, 0), and (1, 2) are not specified. Below is a visualization of the training matrix.
+            // [1, ?]
+            // |?, 1| where ? will be set to 0.15 by the user when creating the trainer.
+            // [1, ?]
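+            // (Reading the visualization: MatrixColumnIndex picks one of the 2 columns and MatrixRowIndex
+            // picks one of the 3 rows, so an element (column c, row r, value v) sits at row r, column c above.)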
             var dataMatrix = new List<OneClassMatrixElementZeroBased>();
             dataMatrix.Add(new OneClassMatrixElementZeroBased() { MatrixColumnIndex = 0, MatrixRowIndex = 0, Value = 1 });
             dataMatrix.Add(new OneClassMatrixElementZeroBased() { MatrixColumnIndex = 1, MatrixRowIndex = 1, Value = 1 });

From e368a1a20bb4884841c9c8986d2044157facb615 Mon Sep 17 00:00:00 2001
From: Wei-Sheng Chin
Date: Mon, 3 Dec 2018 16:25:33 -0800
Subject: [PATCH 03/12] Add caching usage into a sample

---
 docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs
index 32a33cdc43..c3702b6b3f 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs
@@ -38,11 +38,19 @@ public static void SDCA_BinaryClassification()
             // Read the data
             var data = reader.Read(dataFile);
 
+            // ML.NET doesn't cache data sets by default. Therefore, if one reads a data set from a file and accesses it many times, it can be slow due to
+            // expensive disk operations. When the data fits into memory, a solution is to cache it in memory. Caching is especially
+            // helpful when working with iterative algorithms, which need many passes over the data. Since SDCA is such an algorithm, we cache here. Inserting a
+            // cache step into a pipeline is also possible; see the construction of the pipeline below.
+            data = mlContext.Data.Cache(data);
+
+
             // Step 2: Pipeline
             // Featurize the text column through the FeaturizeText API.
             // Then append a binary classifier, setting the "Label" column as the label of the dataset, and
-            // the "Features" column produced by FeaturizeText as the features column.
+            // the "Features" column produced by FeaturizeText as the features column.
             var pipeline = mlContext.Transforms.Text.FeaturizeText("SentimentText", "Features")
+                .AppendCacheCheckpoint(mlContext) // Add a data-cache step within a pipeline.
                 .Append(mlContext.BinaryClassification.Trainers.StochasticDualCoordinateAscent(labelColumn: "Sentiment", featureColumn: "Features", l2Const: 0.001f));
 
             // Step 3: Run Cross-Validation on this pipeline.

From a17e3fed0b2b1569265f286b45afe8c20240ec25 Mon Sep 17 00:00:00 2001
From: Wei-Sheng Chin
Date: Mon, 3 Dec 2018 16:46:27 -0800
Subject: [PATCH 04/12] Add a test for new function

---
 test/Microsoft.ML.Tests/CachingTests.cs | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/test/Microsoft.ML.Tests/CachingTests.cs b/test/Microsoft.ML.Tests/CachingTests.cs
index ef77eab19f..42c85cce41 100644
--- a/test/Microsoft.ML.Tests/CachingTests.cs
+++ b/test/Microsoft.ML.Tests/CachingTests.cs
@@ -78,5 +78,25 @@ public void CacheTest()
             data.GetColumn(ML, "Features").ToArray();
             Assert.True(src.All(x => x.AccessCount == 1));
         }
+
+        [Fact]
+        public void StaticDataCacheTest()
+        {
+            var env = new MLContext(seed: 0);
+            var dataPath = GetDataPath(TestDatasets.breastCancer.trainFilename);
+            var dataSource = new MultiFileSource(dataPath);
+
+            var reader = TextLoader.CreateReader(env,
+                c => (label: c.LoadBool(0), features: c.LoadFloat(1, 9)));
+
+            var data = reader.Read(dataSource);
+
+            var cachedData = data.Cache();
+
+            // Before caching, we are not able to shuffle the data.
+            Assert.True(data.AsDynamic.CanShuffle == false);
+            // After caching, we are able to shuffle the data!
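+            // Caching materializes rows in memory, which is what makes shuffling possible;
+            // the assertion below checks exactly that.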
+            Assert.True(cachedData.AsDynamic.CanShuffle == true);
+        }
     }
 }

From 369bf11b236dc256ceefc26b8d82ce458fc83ac9 Mon Sep 17 00:00:00 2001
From: Wei-Sheng Chin
Date: Mon, 3 Dec 2018 16:48:02 -0800
Subject: [PATCH 05/12] Remove empty line

---
 docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs
index c3702b6b3f..05d6aebdb2 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs
@@ -44,7 +44,6 @@ public static void SDCA_BinaryClassification()
             // cache step into a pipeline is also possible; see the construction of the pipeline below.
             data = mlContext.Data.Cache(data);
 
-
             // Step 2: Pipeline

From 47d213e3f24920c0f4ecf699977f904e7d030518 Mon Sep 17 00:00:00 2001
From: Wei-Sheng Chin
Date: Mon, 3 Dec 2018 16:49:36 -0800
Subject: [PATCH 06/12] Fix doc

---
 src/Microsoft.ML.Data/StaticPipe/Estimator.cs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/Microsoft.ML.Data/StaticPipe/Estimator.cs b/src/Microsoft.ML.Data/StaticPipe/Estimator.cs
index c048f4113e..3a7eda5fd0 100644
--- a/src/Microsoft.ML.Data/StaticPipe/Estimator.cs
+++ b/src/Microsoft.ML.Data/StaticPipe/Estimator.cs
@@ -79,8 +79,8 @@ string NameMap(PipelineColumn col)
         }
 
         /// 
-        /// Cache data produced in memory by this estimator. It may append an extra estimator the this estimator
-        /// for caching and return the new one.
+        /// Cache data produced in memory by this estimator. It may append an extra estimator to this
+        /// estimator for caching; the newly extended estimator is returned.
/// public Estimator AppendCacheCheckpoint() { From fc8285c026474caf64c28509c5d37a767fe11769 Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Tue, 4 Dec 2018 08:49:55 -0800 Subject: [PATCH 07/12] Update baselines --- .../Common/OVA/OVA-CV-iris-out.txt | 32 +- .../Common/OVA/OVA-CV-iris-rp.txt | 2 +- .../BaselineOutput/Common/OVA/OVA-CV-iris.txt | 300 +++++++++--------- .../Common/OVA/OVA-TrainTest-iris-out.txt | 20 +- .../Common/OVA/OVA-TrainTest-iris-rp.txt | 2 +- .../Common/OVA/OVA-TrainTest-iris.txt | 300 +++++++++--------- .../Common/PKPD/PKPD-CV-iris-out.txt | 32 +- .../Common/PKPD/PKPD-CV-iris-rp.txt | 2 +- .../Common/PKPD/PKPD-CV-iris.txt | 300 +++++++++--------- .../Common/PKPD/PKPD-TrainTest-iris-out.txt | 22 +- .../Common/PKPD/PKPD-TrainTest-iris-rp.txt | 2 +- .../Common/PKPD/PKPD-TrainTest-iris.txt | 300 +++++++++--------- test/Microsoft.ML.Tests/Scenarios/OvaTest.cs | 2 +- 13 files changed, 658 insertions(+), 658 deletions(-) diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt index 21874465e6..e81b3a9f75 100644 --- a/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt @@ -21,35 +21,35 @@ Confusion table PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 21 | 0 | 0 | 1.0000 - 1 || 0 | 22 | 8 | 0.7333 + 1 || 0 | 20 | 10 | 0.6667 2 || 0 | 0 | 28 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.7778 | -Accuracy(micro-avg): 0.898734 -Accuracy(macro-avg): 0.911111 -Log-loss: 0.372620 -Log-loss reduction: 65.736556 +Precision ||1.0000 |1.0000 |0.7368 | +Accuracy(micro-avg): 0.873418 +Accuracy(macro-avg): 0.888889 +Log-loss: 0.393949 +Log-loss reduction: 63.775293 Confusion table ||======================== PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 29 | 0 | 0 | 1.0000 - 1 || 0 | 18 | 2 | 0.9000 + 1 || 0 | 19 | 1 | 0.9500 2 || 0 | 0 | 22 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.9167 | -Accuracy(micro-avg): 0.971831 -Accuracy(macro-avg): 0.966667 -Log-loss: 0.357704 -Log-loss reduction: 67.051654 +Precision ||1.0000 |1.0000 |0.9565 | +Accuracy(micro-avg): 0.985915 +Accuracy(macro-avg): 0.983333 +Log-loss: 0.299620 +Log-loss reduction: 72.401815 OVERALL RESULTS --------------------------------------- -Accuracy(micro-avg): 0.935283 (0.0365) -Accuracy(macro-avg): 0.938889 (0.0278) -Log-loss: 0.365162 (0.0075) -Log-loss reduction: 66.394105 (0.6575) +Accuracy(micro-avg): 0.929667 (0.0562) +Accuracy(macro-avg): 0.936111 (0.0472) +Log-loss: 0.346785 (0.0472) +Log-loss reduction: 68.088554 (4.3133) --------------------------------------- Physical memory usage(MB): %Number% diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt index 7f9d1ab2be..23b93e5bc7 100644 --- a/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt @@ -1,4 +1,4 @@ OVA Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings -0.935283 0.938889 0.365162 66.3941 AvgPer{lr=0.8} OVA %Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} +0.929667 0.936111 0.346785 68.08855 AvgPer{lr=0.8} OVA %Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 
/p:AvgPer{lr=0.8} diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt index ea773d1bba..42e07aea08 100644 --- a/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt @@ -1,151 +1,151 @@ Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class -5 0 0 0.043587643807703136 0.957348645 0.04264102 1.03425764E-05 0 1 2 -6 0 0 0.20844569128859777 0.8118451 0.188126311 2.8563165E-05 0 1 2 -8 0 0 0.44491771498326443 0.640877 0.359043151 7.987263E-05 0 1 2 -9 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2 -10 0 0 0.064111239185181926 0.937900662 0.0620922744 7.04143076E-06 0 1 2 -11 0 0 0.19511668953898065 0.822738647 0.17722775 3.361244E-05 0 1 2 -18 0 0 0.040957067767296483 0.959870338 0.0401218459 7.82396E-06 0 1 2 -20 0 0 0.12310363986545093 0.884172 0.115805365 2.26346256E-05 0 1 2 -21 0 0 0.080695089616231355 0.9224749 0.07751174 1.33279436E-05 0 1 2 -25 0 0 0.30682306393325992 0.7357808 0.2641595 5.97413928E-05 0 1 2 -28 0 0 0.13141817305409223 0.876851 0.12313617 1.279574E-05 0 1 2 -31 0 0 0.10895984751128654 0.8967664 0.103215657 1.78892569E-05 0 1 2 -32 0 0 0.035477802883361699 0.965144157 0.0348526426 3.21791072E-06 0 1 2 -35 0 0 0.20274726386806977 0.8164846 0.183501333 1.40994789E-05 0 1 2 -37 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2 -40 0 0 0.12298365201239185 0.8842781 0.115710318 1.158336E-05 0 1 2 -41 0 0 0.63401679194266458 0.5304568 0.4693291 0.000214140542 0 1 2 -44 0 0 0.077454598775344219 0.925469041 0.07449977 3.11931653E-05 0 1 2 -45 0 0 0.3215830979624606 0.7250004 0.274949133 5.0463128E-05 0 1 2 -46 0 0 0.072538640149662562 0.9300298 0.06995974 1.0462416E-05 0 1 2 -48 0 0 0.070505947028000213 0.9319222 0.0680698752 7.905172E-06 0 1 2 -50 1 1 0.97585559443809855 0.376869768 0.358608037 0.264522225 1 2 0 -51 1 1 0.820648723050456 0.440146029 0.3583106 0.201543346 1 2 0 -52 1 2 1.0835275133336952 0.5653485 0.3383997 0.0962518156 2 1 0 -54 1 2 0.75898112148691677 0.472428739 0.468143165 0.0594281144 2 1 0 -56 1 2 1.0174162545586878 0.5111817 0.36152783 0.127290443 2 1 0 -60 1 1 0.30253484094402477 0.738942742 0.209481522 0.051575724 1 2 0 -63 1 2 0.77949402405350943 0.499299049 0.458638 0.04206293 2 1 0 -64 1 1 0.45505022537231249 0.6344161 0.288069278 0.0775146 1 0 2 -66 1 1 0.7154835565078782 0.488955617 0.46298942 0.04805498 1 2 0 -68 1 1 0.68322766519277334 0.504984438 0.482306838 0.0127087329 1 2 0 -69 1 1 0.31084089328775633 0.732830465 0.183353335 0.08381622 1 2 0 -70 1 2 1.1682944017762613 0.6530067 0.310896754 0.0360965841 2 1 0 -71 1 1 0.43377030209255479 0.6480611 0.185485169 0.166453749 1 0 2 -72 1 2 0.88766165771254424 0.578003168 0.41161713 0.0103796953 2 1 0 -73 1 1 0.66368933400718488 0.514948 0.4451455 0.03990646 1 2 0 -74 1 1 0.54404239638263385 0.5803973 0.245238408 0.174364269 1 2 0 -76 1 2 0.84677727980192752 0.5165659 0.4287946 0.0546395145 2 1 0 -77 1 2 1.2789386167391619 0.688494444 0.278332561 0.0331729874 2 1 0 -79 1 1 0.34033011681215469 0.7115354 0.2354252 0.0530394167 1 0 2 -82 1 1 0.35118632841443026 0.7038526 0.15209128 0.144056112 1 2 0 -88 1 1 0.4571578145475656 0.6330804 0.217808262 0.149111286 1 2 0 -90 1 1 0.54303381152243435 0.580983 0.390706122 0.0283109024 1 2 0 -91 1 1 0.7255881783753686 0.484039783 0.444033325 0.0719268844 1 2 0 -92 1 1 0.35286862388238727 0.7026695 0.20336625 0.09396427 1 2 0 -93 1 1 0.24150358472221847 0.785446 0.122569308 0.0919846743 1 0 2 
-95 1 1 0.45747345580807686 0.6328806 0.223053813 0.144065529 1 2 0 -96 1 1 0.46692162584127184 0.6269292 0.2688824 0.1041884 1 2 0 -97 1 1 0.52181706235134551 0.593441248 0.265519917 0.14103885 1 2 0 -98 1 1 0.33964763199167614 0.7120212 0.255649149 0.03232969 1 0 2 -99 1 1 0.42298578084071409 0.655087948 0.2430654 0.10184665 1 2 0 -100 2 2 0.13591733259440952 0.8729148 0.125132382 0.00195282849 2 1 0 -102 2 2 0.13809510857610402 0.871015847 0.125785753 0.00319840666 2 1 0 -104 2 2 0.19932133588014422 0.8192866 0.178226635 0.00248679356 2 1 0 -105 2 2 0.09978434131070596 0.9050326 0.09390649 0.00106095837 2 1 0 -106 2 2 0.65516062299283195 0.519358635 0.4732639 0.00737748668 2 1 0 -108 2 2 0.36038464423836569 0.697408 0.300992548 0.00159944966 2 1 0 -109 2 2 0.042800052177573163 0.958102942 0.03757144 0.00432561943 2 1 0 -111 2 2 0.33893424257144178 0.7125293 0.282670647 0.004800048 2 1 0 -112 2 2 0.17819193567707683 0.8367818 0.156975582 0.006242614 2 1 0 -113 2 2 0.49781014911918742 0.6078603 0.388630718 0.00350892986 2 1 0 -115 2 2 0.1683952699484349 0.845019758 0.146008313 0.008971939 2 1 0 -117 2 2 0.023365514010699712 0.976905346 0.02015102 0.00294363522 2 1 0 -120 2 2 0.11133724227002473 0.894637 0.100207157 0.005155826 2 1 0 -121 2 2 0.43666882240878063 0.6461854 0.346793234 0.007021348 2 1 0 -122 2 2 0.13629671282280101 0.8725837 0.126684025 0.000732263 2 1 0 -123 2 2 0.4310483662194341 0.6498275 0.338038325 0.0121342046 2 1 0 -125 2 2 0.11330052370098145 0.8928823 0.101871319 0.0052463985 2 1 0 -128 2 2 0.27949760674881013 0.756163538 0.2411889 0.00264759478 2 1 0 -129 2 2 0.17530740569786113 0.839199 0.153847516 0.006953467 2 1 0 -131 2 2 0.031839393778411017 0.968662143 0.0223613773 0.008976495 2 1 0 -132 2 2 0.27137481365798816 0.7623307 0.235219285 0.00245000049 2 1 0 -133 2 2 0.43700297433440277 0.6459695 0.341389537 0.01264096 2 1 0 -137 2 2 0.23063259534491895 0.794031143 0.198468238 0.00750062242 2 1 0 -138 2 2 0.43845130281190237 0.6450346 0.3309319 0.0240335166 2 1 0 -141 2 2 0.17626166414917829 0.8383986 0.142890155 0.018711295 2 1 0 -144 2 2 0.099717233123952864 0.9050933 0.09041383 0.00449282629 2 1 0 -145 2 2 0.18787613378173548 0.828717351 0.161682889 0.009599784 2 1 0 -147 2 2 0.24798062433245444 0.780375063 0.20853655 0.01108838 2 1 0 -0 0 0 0.34881132522048625 0.705526233 0.294473559 1.92144441E-07 0 1 2 -1 0 0 0.36141580969752651 0.696689248 0.303309947 7.8389877E-07 0 1 2 -2 0 0 0.35568660847624228 0.7006922 0.299307227 5.929496E-07 0 1 2 -3 0 0 0.36470718348091719 0.694399953 0.30559817 1.84990029E-06 0 1 2 -4 0 0 0.34775770739677259 0.70627 0.293729782 2.00147142E-07 0 1 2 -7 0 0 0.35382023048081196 0.702001154 0.297998428 4.17606344E-07 0 1 2 -12 0 0 0.36098727532383801 0.696987867 0.303011417 7.32556146E-07 0 1 2 -13 0 0 0.35788558263546733 0.699153066 0.300846159 7.88259E-07 0 1 2 -14 0 0 0.33437356737542145 0.715786338 0.284213632 7.41558059E-09 0 1 2 -15 0 0 0.33259729807630167 0.7170589 0.2829411 2.302074E-08 0 1 2 -16 0 0 0.33963038748907248 0.712033451 0.2879665 5.7538923E-08 0 1 2 -17 0 0 0.34952968472792562 0.7050196 0.294980139 2.583559E-07 0 1 2 -19 0 0 0.34579385759256209 0.70765835 0.292341441 2.096021E-07 0 1 2 -22 0 0 0.34605819997965914 0.7074713 0.29252857 1.20912986E-07 0 1 2 -23 0 0 0.36811690986051288 0.6920363 0.307961673 2.02298725E-06 0 1 2 -24 0 0 0.37119922981165249 0.6899065 0.310090721 2.80658514E-06 0 1 2 -26 0 0 0.35941763729273518 0.698082745 0.301916152 1.08453048E-06 0 1 2 -27 0 0 0.35009366263991337 0.7046221 0.295377672 
2.18394433E-07 0 1 2 -29 0 0 0.36473963008629695 0.6943774 0.305620819 1.74348168E-06 0 1 2 -30 0 0 0.36694890288891646 0.692845047 0.307153255 1.67791779E-06 0 1 2 -33 0 0 0.33532989874849606 0.715102136 0.284897834 2.06016182E-08 0 1 2 -34 0 0 0.36074853902438908 0.6971543 0.302845 6.899852E-07 0 1 2 -36 0 0 0.3442039710581144 0.708784342 0.2912156 5.37456621E-08 0 1 2 -38 0 0 0.36249420192434484 0.695938349 0.304059923 1.72375007E-06 0 1 2 -39 0 0 0.35302378267720547 0.7025605 0.2974392 3.31980715E-07 0 1 2 -42 0 0 0.35802404250832931 0.699056268 0.30094254 1.18013247E-06 0 1 2 -43 0 0 0.35964093968844252 0.6979269 0.3020715 1.62606943E-06 0 1 2 -47 0 0 0.35894549345005311 0.6984124 0.301586539 1.06725963E-06 0 1 2 -49 0 0 0.35354270878931848 0.702196 0.2978036 3.52685419E-07 0 1 2 -53 1 1 0.43814530313713385 0.645232 0.30703035 0.0477376431 1 2 0 -55 1 1 0.52991693789558192 0.588653862 0.3812489 0.0300972275 1 2 0 -57 1 1 0.3144098829942828 0.730219662 0.222309768 0.047470592 1 0 2 -58 1 1 0.16338480577647163 0.8492643 0.10643021 0.0443054661 1 2 0 -59 1 1 0.43732800775193598 0.6457596 0.2802832 0.0739572 1 2 0 -61 1 1 0.26826825521335035 0.7647026 0.165891945 0.06940546 1 2 0 -62 1 1 0.16519114388964468 0.84773165 0.07620503 0.0760633051 1 2 0 -65 1 1 0.12274628026245138 0.884488046 0.0737581253 0.0417538173 1 0 2 -67 1 1 0.18131529311314101 0.8341723 0.09860581 0.0672218949 1 0 2 -75 1 1 0.13597202822637602 0.872867048 0.06550142 0.0616315342 1 0 2 -78 1 1 0.45281685214847861 0.6358346 0.332585216 0.0315802023 1 2 0 -80 1 1 0.23111914057709354 0.7936449 0.105801627 0.10055349 1 0 2 -81 1 1 0.21336965456040224 0.807857454 0.137252137 0.0548904277 1 0 2 -83 1 2 1.1418187241491273 0.6765539 0.3192379 0.00420819456 2 1 0 -84 1 2 0.79904921193122347 0.5272309 0.449756384 0.0230127368 2 1 0 -85 1 1 0.32376577391953759 0.723419666 0.223107859 0.05347247 1 2 0 -86 1 1 0.19858499419365563 0.8198901 0.142534047 0.037575867 1 2 0 -87 1 1 0.32077518585346926 0.725586355 0.24517718 0.0292364415 1 2 0 -89 1 1 0.36379979176298916 0.695030332 0.242874637 0.06209502 1 2 0 -94 1 1 0.37955450575815275 0.684166133 0.262924 0.0529098734 1 2 0 -101 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0 -103 2 2 0.33454245635946434 0.71566546 0.282688916 0.00164566189 2 1 0 -107 2 2 0.32176538137222749 0.724868238 0.27461502 0.000516714761 2 1 0 -110 2 2 0.4735643051120339 0.622778535 0.371121377 0.006100062 2 1 0 -114 2 2 0.3247388954422577 0.722716033 0.2752217 0.00206224318 2 1 0 -116 2 2 0.36482478597312679 0.6943183 0.3033064 0.00237530912 2 1 0 -118 2 2 0.30234231913723036 0.739085 0.260826528 8.847632E-05 2 1 0 -119 2 2 0.37792268388420569 0.6852835 0.3114479 0.0032686044 2 1 0 -124 2 2 0.33777190179266953 0.713358 0.284956157 0.00168584171 2 1 0 -126 2 2 0.51721400727433164 0.5961792 0.395325035 0.008495758 2 1 0 -127 2 2 0.48223827874761288 0.617399931 0.374503255 0.008096788 2 1 0 -130 2 2 0.33112825051245398 0.718113065 0.281247973 0.0006389589 2 1 0 -134 2 2 0.34240991810493487 0.7100571 0.288253874 0.00168902089 2 1 0 -135 2 2 0.3238549270121831 0.7233552 0.2760729 0.0005719304 2 1 0 -136 2 2 0.31869296062169472 0.727098763 0.271312952 0.00158829393 2 1 0 -139 2 2 0.39792518591800413 0.6717123 0.325529337 0.002758371 2 1 0 -140 2 2 0.32133155702629967 0.7251828 0.273566216 0.0012509838 2 1 0 -142 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0 -143 2 2 0.31559105025409723 0.72935766 0.269767135 0.0008751799 2 1 0 -146 2 2 0.3760214987664387 0.6865876 0.310142934 
0.00326948427 2 1 0 -148 2 2 0.3305544580259554 0.718525231 0.2789816 0.00249314215 2 1 0 -149 2 2 0.37408822283240173 0.6879162 0.307514042 0.00456974143 2 1 0 +5 0 0 0.14898499738372634 0.861582041 0.138407215 1.07481983E-05 0 1 2 +6 0 0 0.3065288836068612 0.73599726 0.2639699 3.28646565E-05 0 1 2 +8 0 0 0.42786795330515681 0.6518975 0.3480076 9.492641E-05 0 1 2 +9 0 0 0.32727882650425077 0.7208827 0.279081136 3.612931E-05 0 1 2 +10 0 0 0.17138787650099252 0.8424947 0.157498628 6.671493E-06 0 1 2 +11 0 0 0.29096005205092529 0.74754554 0.2524177 3.67338434E-05 0 1 2 +18 0 0 0.13536917585154803 0.8733934 0.1265994 7.166915E-06 0 1 2 +20 0 0 0.21879048630641887 0.803490043 0.196489558 2.03802119E-05 0 1 2 +21 0 0 0.19863559351573729 0.8198486 0.180137366 1.40261882E-05 0 1 2 +25 0 0 0.33531381208991595 0.71511364 0.2848282 5.815608E-05 0 1 2 +28 0 0 0.23158758890843326 0.7932732 0.2067149 1.18948565E-05 0 1 2 +31 0 0 0.20718141254425143 0.8128722 0.187111884 1.593467E-05 0 1 2 +32 0 0 0.14581809307946758 0.8643149 0.135681465 3.6142344E-06 0 1 2 +35 0 0 0.28094446239802623 0.755070269 0.244916424 1.32786363E-05 0 1 2 +37 0 0 0.32727882650425077 0.7208827 0.279081136 3.612931E-05 0 1 2 +40 0 0 0.23484944167000599 0.7906899 0.209298462 1.16704177E-05 0 1 2 +41 0 0 0.52141117030361683 0.59368217 0.406092882 0.000224944655 0 1 2 +44 0 0 0.19869528365124761 0.819799662 0.180165187 3.516637E-05 0 1 2 +45 0 0 0.35016624430023463 0.704570949 0.295377225 5.18644047E-05 0 1 2 +46 0 0 0.19245268368525237 0.82493335 0.17505537 1.12744219E-05 0 1 2 +48 0 0 0.18156112383605871 0.833967268 0.166025028 7.721445E-06 0 1 2 +50 1 1 0.64624484712856134 0.5240098 0.262612373 0.2133778 1 2 0 +51 1 1 0.63276740252217267 0.531119943 0.307034731 0.161845312 1 2 0 +52 1 2 0.83214667389569419 0.478386253 0.435114235 0.08649951 2 1 0 +54 1 1 0.74737111472015905 0.473609984 0.46261856 0.0637714639 1 2 0 +56 1 2 0.80606426845587942 0.4562977 0.446612358 0.09708993 2 1 0 +60 1 1 0.40475436673945725 0.667140663 0.2567001 0.07615923 1 2 0 +63 1 2 0.83848982664755245 0.5245598 0.432362974 0.0430772379 2 1 0 +64 1 1 0.4724140828680039 0.6234953 0.2927696 0.08373512 1 0 2 +66 1 2 0.81331552350806924 0.510974467 0.443385571 0.045639988 2 1 0 +68 1 2 0.88581390317653119 0.569699466 0.4123784 0.017922163 2 1 0 +69 1 1 0.4096502018565843 0.663882434 0.2231506 0.112966977 1 2 0 +70 1 2 1.1272952904290068 0.646777868 0.32390815 0.0293139871 2 1 0 +71 1 1 0.44999656255507409 0.637630343 0.198594451 0.1637752 1 0 2 +72 1 2 1.0945008493805846 0.6525861 0.334706634 0.0127072614 2 1 0 +73 1 2 0.75466565674392561 0.484873533 0.4701678 0.044958692 2 1 0 +74 1 1 0.50278888715808445 0.6048415 0.221343279 0.173815265 1 2 0 +76 1 2 0.78856604341887837 0.4854024 0.454496056 0.0601015352 2 1 0 +77 1 2 1.1512132035767342 0.6525395 0.316252857 0.0312076658 2 1 0 +79 1 1 0.42617249485652908 0.6530037 0.2899244 0.057071913 1 0 2 +82 1 1 0.41470700444302333 0.6605338 0.170520529 0.1689457 1 0 2 +88 1 1 0.49394371600700848 0.6102151 0.243849784 0.145935118 1 2 0 +90 1 1 0.71582408663720343 0.488789141 0.47624135 0.03496951 1 2 0 +91 1 1 0.73003131975183999 0.4818939 0.449290931 0.0688152 1 2 0 +92 1 1 0.43657621695425319 0.646245241 0.234681442 0.119073339 1 2 0 +93 1 1 0.34178431537548232 0.710501432 0.170813844 0.118684709 1 0 2 +95 1 1 0.49105836953030818 0.611978352 0.245221257 0.142800376 1 2 0 +96 1 1 0.52911389653841068 0.589126766 0.3021384 0.108734839 1 2 0 +97 1 1 0.51338818870184666 0.5984644 0.258210152 0.143325433 1 2 0 +98 1 1 
0.44598737716555714 0.640191853 0.320325464 0.0394826569 1 0 2 +99 1 1 0.49633411615743322 0.6087582 0.278045028 0.113196738 1 2 0 +100 2 2 0.1657485118529694 0.8472593 0.15114595 0.00159478688 2 1 0 +102 2 2 0.15062702864772329 0.860168457 0.1369899 0.002841651 2 1 0 +104 2 2 0.19216183181956109 0.8251733 0.172580525 0.00224616844 2 1 0 +105 2 2 0.10783693379121159 0.897774 0.101297595 0.0009284109 2 1 0 +106 2 2 0.49803864682427001 0.607721448 0.38346687 0.008811677 2 1 0 +108 2 2 0.24869617414609116 0.779816866 0.218381524 0.00180161942 2 1 0 +109 2 2 0.088986239489588018 0.914858162 0.0819557458 0.00318608852 2 1 0 +111 2 2 0.27248498685253925 0.761484861 0.23352161 0.004993557 2 1 0 +112 2 2 0.19350236090832176 0.8240679 0.170360163 0.00557197351 2 1 0 +113 2 2 0.36301900972746426 0.6955732 0.3003853 0.0040415 2 1 0 +115 2 2 0.20394542591432366 0.8155069 0.177217782 0.00727535738 2 1 0 +117 2 2 0.060604416045993939 0.9411955 0.05663682 0.002167699 2 1 0 +120 2 2 0.14770763531771011 0.8626833 0.133085579 0.004231106 2 1 0 +121 2 2 0.35116812164489036 0.7038654 0.288968384 0.007166216 2 1 0 +122 2 2 0.11938337402387501 0.8874675 0.111851335 0.0006811737 2 1 0 +123 2 2 0.36594708304609636 0.6935395 0.293548316 0.0129121933 2 1 0 +125 2 2 0.14890267602086421 0.861653 0.133782089 0.004564954 2 1 0 +128 2 2 0.23149637602267908 0.7933456 0.204050332 0.002604086 2 1 0 +129 2 2 0.19738168861304117 0.820877254 0.172454879 0.006667889 2 1 0 +131 2 2 0.082877993597994665 0.920463443 0.0726075843 0.006928972 2 1 0 +132 2 2 0.2261608752358937 0.7975898 0.200027317 0.002382908 2 1 0 +133 2 2 0.37668679350259709 0.686130941 0.300454319 0.01341475 2 1 0 +137 2 2 0.23730028214241819 0.7887544 0.2045337 0.0067119 2 1 0 +138 2 2 0.41466847394913298 0.660559237 0.317496151 0.0219445918 2 1 0 +141 2 2 0.2378343130973842 0.7883333 0.195801422 0.01586528 2 1 0 +144 2 2 0.14102681941995521 0.868466 0.1280038 0.00353016751 2 1 0 +145 2 2 0.21316426895669463 0.8080234 0.183620155 0.008356474 2 1 0 +147 2 2 0.25860642449489452 0.772126853 0.217881814 0.009991349 2 1 0 +0 0 0 0.14822243523905682 0.8622393 0.137760535 1.42600655E-07 0 1 2 +1 0 0 0.20081429998478928 0.818064332 0.181935042 6.369149E-07 0 1 2 +2 0 0 0.18386421756476942 0.8320488 0.167950869 3.90649234E-07 0 1 2 +3 0 0 0.21430091092093637 0.8071055 0.192893252 1.25093914E-06 0 1 2 +4 0 0 0.14464925459452174 0.865325749 0.134674087 1.351252E-07 0 1 2 +7 0 0 0.16671164417651543 0.846443653 0.153556049 3.12257328E-07 0 1 2 +12 0 0 0.20372786340069227 0.8156843 0.184315056 6.161207E-07 0 1 2 +13 0 0 0.2053889533214896 0.8143305 0.185669 4.731557E-07 0 1 2 +14 0 0 0.086467323769418583 0.9171655 0.0828345 6.221406E-09 0 1 2 +15 0 0 0.081957730046945412 0.9213109 0.07868907 1.38329819E-08 0 1 2 +16 0 0 0.10829029194802958 0.89736706 0.102632925 3.48168818E-08 0 1 2 +17 0 0 0.14958801772137612 0.861062646 0.13893716 1.75314966E-07 0 1 2 +19 0 0 0.13322714868487645 0.875266254 0.124733619 1.28628983E-07 0 1 2 +22 0 0 0.13862518012010799 0.870554268 0.129445657 6.036393E-08 0 1 2 +23 0 0 0.19310245722698802 0.8243975 0.175601169 1.32743924E-06 0 1 2 +24 0 0 0.20865693931030993 0.811673641 0.188324288 2.048797E-06 0 1 2 +26 0 0 0.17749546972040989 0.8373648 0.1626345 6.926975E-07 0 1 2 +27 0 0 0.15019147117595094 0.8605432 0.13945666 1.74842683E-07 0 1 2 +29 0 0 0.20747930702213993 0.812630057 0.187368736 1.223238E-06 0 1 2 +30 0 0 0.21292293576370525 0.8082184 0.1917803 1.29386513E-06 0 1 2 +33 0 0 0.091640067349721938 0.9124335 0.0875665 1.41758205E-08 0 1 2 
+34 0 0 0.19699061011964958 0.821198344 0.17880109 6.01980844E-07 0 1 2 +36 0 0 0.12779659882948383 0.880032361 0.119967587 4.84299072E-08 0 1 2 +38 0 0 0.21789921377726856 0.8042065 0.195792481 1.04123671E-06 0 1 2 +39 0 0 0.16240138202263071 0.8500999 0.149899811 2.621196E-07 0 1 2 +42 0 0 0.19811089534959545 0.8202789 0.179720446 6.594638E-07 0 1 2 +43 0 0 0.17222970502139762 0.8417858 0.158213392 8.341555E-07 0 1 2 +47 0 0 0.19599802886062165 0.822013855 0.17798546 6.796521E-07 0 1 2 +49 0 0 0.16848901720737497 0.844940543 0.1550592 2.68694436E-07 0 1 2 +53 1 1 0.33929501593555544 0.7122723 0.249259621 0.0384680778 1 2 0 +55 1 1 0.43740997493483258 0.645706654 0.327492118 0.026801208 1 2 0 +57 1 1 0.25341172081105845 0.77614826 0.19214052 0.0317112133 1 0 2 +58 1 1 0.20133568049613867 0.8176379 0.140906453 0.0414556079 1 2 0 +59 1 1 0.28494290112955128 0.7520572 0.176915661 0.07102712 1 2 0 +61 1 1 0.22031497886724014 0.802266061 0.126393482 0.07134045 1 2 0 +62 1 1 0.17283791289976866 0.841273963 0.100865625 0.05786044 1 2 0 +65 1 1 0.14734558889976437 0.8629957 0.08228925 0.054715097 1 0 2 +67 1 1 0.17615666464048946 0.8384866 0.0872618854 0.07425149 1 0 2 +75 1 1 0.15642215815585309 0.8551981 0.0761427358 0.0686592 1 2 0 +78 1 1 0.37642751820989312 0.686308861 0.28346312 0.030228015 1 2 0 +80 1 1 0.1944674886980301 0.823272943 0.08934537 0.08738167 1 2 0 +81 1 1 0.18446994007360493 0.831544936 0.115473866 0.05298119 1 0 2 +83 1 2 0.92853824559840126 0.6011451 0.395130873 0.0037240074 2 1 0 +84 1 1 0.56465177772236885 0.5685581 0.4072805 0.0241614021 1 2 0 +85 1 1 0.25727484156807667 0.7731557 0.1642441 0.0626002 1 2 0 +86 1 1 0.22862622189799611 0.795625865 0.166149229 0.03822488 1 2 0 +87 1 1 0.35292282916697471 0.7026314 0.275890559 0.02147802 1 2 0 +89 1 1 0.27794501284082396 0.757338464 0.189463019 0.0531985424 1 2 0 +94 1 1 0.29989849188818812 0.7408934 0.2118927 0.0472138971 1 2 0 +101 2 2 0.4738038899565814 0.622629344 0.3747406 0.002630036 2 1 0 +103 2 2 0.44910078675473908 0.6382018 0.360287219 0.00151102315 2 1 0 +107 2 2 0.42756627142640746 0.6520942 0.347433776 0.000472046959 2 1 0 +110 2 2 0.57916486299376979 0.560366154 0.432973266 0.00666057365 2 1 0 +114 2 2 0.45214138527005782 0.6362642 0.361806333 0.00192946056 2 1 0 +116 2 2 0.45497497267329101 0.634463847 0.363294363 0.00224180124 2 1 0 +118 2 2 0.470631821054253 0.6246075 0.375314832 7.76593442E-05 2 1 0 +119 2 2 0.51596566495735996 0.5969239 0.400479466 0.00259662978 2 1 0 +124 2 2 0.41382397334687865 0.6611173 0.33714956 0.00173310027 2 1 0 +126 2 2 0.64706268251636467 0.523581445 0.468205124 0.00821345 2 1 0 +127 2 2 0.6163570913261589 0.5399077 0.4518157 0.008276599 2 1 0 +130 2 2 0.42555053982455915 0.653409958 0.3460102 0.0005798615 2 1 0 +134 2 2 0.47967559175010499 0.618984163 0.379600972 0.00141487492 2 1 0 +135 2 2 0.39549652187626555 0.6733456 0.326093346 0.000561050954 2 1 0 +136 2 2 0.40373690532217765 0.6678198 0.330517977 0.001662206 2 1 0 +139 2 2 0.46630393097913669 0.6273166 0.3698809 0.00280250143 2 1 0 +140 2 2 0.41184706306881064 0.6624256 0.336315721 0.00125870958 2 1 0 +142 2 2 0.4738038899565814 0.622629344 0.3747406 0.002630036 2 1 0 +143 2 2 0.40730363349873294 0.6654421 0.333683163 0.0008747191 2 1 0 +146 2 2 0.49646297645874904 0.6086798 0.388417184 0.00290302956 2 1 0 +148 2 2 0.41691013162708651 0.659080148 0.338237643 0.00268219085 2 1 0 +149 2 2 0.49336894801249942 0.61056596 0.384958029 0.004475997 2 1 0 diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt 
b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt index 4d9f2f452b..f0bb6cd1b8 100644 --- a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt @@ -13,21 +13,21 @@ Confusion table PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 50 | 0 | 0 | 1.0000 - 1 || 0 | 48 | 2 | 0.9600 + 1 || 0 | 46 | 4 | 0.9200 2 || 0 | 0 | 50 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.9615 | -Accuracy(micro-avg): 0.986667 -Accuracy(macro-avg): 0.986667 -Log-loss: 0.246444 -Log-loss reduction: 77.567746 +Precision ||1.0000 |1.0000 |0.9259 | +Accuracy(micro-avg): 0.973333 +Accuracy(macro-avg): 0.973333 +Log-loss: 0.291849 +Log-loss reduction: 73.434718 OVERALL RESULTS --------------------------------------- -Accuracy(micro-avg): 0.986667 (0.0000) -Accuracy(macro-avg): 0.986667 (0.0000) -Log-loss: 0.246444 (0.0000) -Log-loss reduction: 77.567746 (0.0000) +Accuracy(micro-avg): 0.973333 (0.0000) +Accuracy(macro-avg): 0.973333 (0.0000) +Log-loss: 0.291849 (0.0000) +Log-loss reduction: 73.434718 (0.0000) --------------------------------------- Physical memory usage(MB): %Number% diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt index 971b18dd55..12797db1fb 100644 --- a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt @@ -1,4 +1,4 @@ OVA Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings -0.986667 0.986667 0.246444 77.56775 AvgPer{lr=0.8} OVA %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=OVA{p=AvgPer{ lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} +0.973333 0.973333 0.291849 73.43472 AvgPer{lr=0.8} OVA %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=OVA{p=AvgPer{ lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt index cbb0c14c4f..4e152462bd 100644 --- a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt @@ -1,151 +1,151 @@ Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class -0 0 0 0.064187757670407464 0.9378289 0.0621710941 8.6083265E-09 0 1 2 -1 0 0 0.1893002350721239 0.827538 0.172461912 5.35715046E-08 0 1 2 -2 0 0 0.13195967886821539 0.876376331 0.123623639 3.315678E-08 0 1 2 -3 0 0 0.18799826814680765 0.828616142 0.1713837 1.33305463E-07 0 1 2 -4 0 0 0.053931560367015983 0.947496951 0.0525030419 8.35157454E-09 0 1 2 -5 0 0 0.024550920036862604 0.975748 0.0242519863 8.78616E-09 0 1 2 -6 0 0 0.092104047221280363 0.912010252 0.08798967 5.086476E-08 0 1 2 -7 0 0 0.0890750453193484 0.9147769 0.08522307 2.25849668E-08 0 1 2 -8 0 0 0.2649651905225579 0.767232656 0.23276712 2.323326E-07 0 1 2 -9 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 -10 0 0 0.038712934227392123 0.962026834 0.037973173 3.67671626E-09 0 1 2 -11 0 0 0.10398316683257577 0.901240468 0.09875945 5.7562815E-08 0 1 2 -12 0 0 0.20701490311777249 0.813007534 0.186992422 5.15469765E-08 0 1 2 -13 0 0 0.20677930042755727 0.8131991 0.186800852 4.620836E-08 0 1 2 -14 0 0 0.012900958726204939 0.9871819 0.0128180888 1.53412311E-10 
0 1 2 -15 0 0 0.0056796126449842428 0.9943365 0.00566351926 4.0924758E-10 0 1 2 -16 0 0 0.018169258095637245 0.9819948 0.018005196 1.47014889E-09 0 1 2 -17 0 0 0.059814766253305368 0.941939 0.0580609739 1.13122178E-08 0 1 2 -18 0 0 0.028932621466359262 0.9714819 0.02851807 4.23276969E-09 0 1 2 -19 0 0 0.032923391844126633 0.9676127 0.0323873 7.642096E-09 0 1 2 -20 0 0 0.08651372619399908 0.91712296 0.08287702 2.06279154E-08 0 1 2 -21 0 0 0.038368820295834986 0.962357938 0.03764207 1.32810882E-08 0 1 2 -22 0 0 0.04847884988937378 0.9526775 0.0473225228 3.781359E-09 0 1 2 -23 0 0 0.099972123278544239 0.904862642 0.09513722 1.30113463E-07 0 1 2 -24 0 0 0.12884486436940729 0.879110336 0.120889448 2.201515E-07 0 1 2 -25 0 0 0.20701321690255622 0.8130089 0.186990991 1.01994196E-07 0 1 2 -26 0 0 0.083707914672920755 0.919699848 0.08030011 6.121948E-08 0 1 2 -27 0 0 0.065971839514429781 0.9361572 0.063842766 1.05041069E-08 0 1 2 -28 0 0 0.076179360015026509 0.92665 0.0733500347 8.859791E-09 0 1 2 -29 0 0 0.16075048860491603 0.8515045 0.148495391 1.25408391E-07 0 1 2 -30 0 0 0.18626475341925278 0.8300538 0.169946045 1.27813465E-07 0 1 2 -31 0 0 0.065274392613973067 0.9368104 0.0631895959 1.46311239E-08 0 1 2 -32 0 0 0.018341836022086215 0.981825352 0.018174639 1.48765988E-09 0 1 2 -33 0 0 0.010831873835231624 0.9892266 0.0107734147 4.45536885E-10 0 1 2 -34 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 -35 0 0 0.1094558082339183 0.8963218 0.103678234 1.02393658E-08 0 1 2 -36 0 0 0.049986936620370084 0.951241851 0.04875815 2.05979567E-09 0 1 2 -37 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 -38 0 0 0.2126495148617171 0.808439434 0.191560462 1.15782285E-07 0 1 2 -39 0 0 0.085343924605329857 0.91819644 0.08180357 1.76697252E-08 0 1 2 -40 0 0 0.058200059976790904 0.9434612 0.05653884 9.271867E-09 0 1 2 -41 0 0 0.49728774260829139 0.60817796 0.391821355 7.12752467E-07 0 1 2 -42 0 0 0.14839409412278926 0.8620913 0.137908638 6.888182E-08 0 1 2 -43 0 0 0.058733854938205904 0.9429577 0.0570422448 8.043525E-08 0 1 2 -44 0 0 0.041432529938052375 0.959414065 0.0405859053 6.019737E-08 0 1 2 -45 0 0 0.18582725015020429 0.830417037 0.169582859 9.04475E-08 0 1 2 -46 0 0 0.038175474655158276 0.962544 0.03745597 9.106087E-09 0 1 2 -47 0 0 0.14649366336893571 0.8637312 0.13626872 6.586965E-08 0 1 2 -48 0 0 0.040528072578562842 0.9602822 0.03971778 4.706963E-09 0 1 2 -49 0 0 0.10224582319060745 0.9028076 0.09719238 1.898811E-08 0 1 2 -50 1 1 0.13228530826110765 0.876091 0.07536332 0.04854567 1 0 2 -51 1 1 0.19869535635760471 0.8197996 0.101386108 0.0788142756 1 2 0 -52 1 1 0.22406365925144375 0.799264252 0.175007358 0.0257283952 1 2 0 -53 1 1 0.22846786324242829 0.79575187 0.190147012 0.0141011067 1 2 0 -54 1 1 0.2217374832444966 0.801125646 0.1814971 0.0173772536 1 2 0 -55 1 1 0.36194510206226777 0.6963206 0.290218145 0.0134612536 1 2 0 -56 1 1 0.38227740308279812 0.682305753 0.26903218 0.04866208 1 2 0 -57 1 1 0.11711832955706668 0.889479935 0.09261171 0.01790834 1 0 2 -58 1 1 0.11588080546894164 0.890581369 0.0796031356 0.0298154745 1 2 0 -59 1 1 0.22549808191140069 0.7981186 0.1629105 0.0389709137 1 2 0 -60 1 1 0.11452610106565105 0.891788661 0.08094173 0.027269613 1 2 0 -61 1 1 0.19606227523723149 0.821961045 0.1124982 0.0655407459 1 2 0 -62 1 1 0.065211469379248863 0.9368693 0.0428994 0.0202313047 1 2 0 -63 1 1 0.38674137023255867 0.679266751 0.3082701 0.0124631459 1 2 0 -64 1 1 0.23030100927747191 0.7942945 0.193664417 0.0120411161 1 0 2 -65 1 1 0.13921020030567513 
0.8700451 0.0984156 0.0315392464 1 0 2 -66 1 1 0.52761682473815386 0.5900094 0.392468572 0.0175220035 1 2 0 -67 1 1 0.084603908782305962 0.9188762 0.0453156643 0.03580814 1 0 2 -68 1 1 0.4618931830241112 0.630089641 0.3666358 0.00327458978 1 2 0 -69 1 1 0.090485397720604446 0.9134877 0.04816054 0.03835176 1 2 0 -70 1 2 1.0140603005132554 0.6271839 0.362743139 0.0100729465 2 1 0 -71 1 1 0.11008255968443192 0.8957602 0.07861323 0.0256266128 1 0 2 -72 1 1 0.64771052952961239 0.523242354 0.474618584 0.00213904912 1 2 0 -73 1 1 0.27755598196422471 0.75763315 0.230862036 0.011504828 1 2 0 -74 1 1 0.10244027519493581 0.902632058 0.0589159 0.0384520665 1 0 2 -75 1 1 0.11841763901117131 0.888325 0.0673675239 0.0443075225 1 0 2 -76 1 1 0.19025677181646344 0.8267468 0.160040483 0.0132126883 1 2 0 -77 1 1 0.61336152114875442 0.54152745 0.4505507 0.00792186148 1 2 0 -78 1 1 0.33449082060539098 0.7157024 0.264204919 0.0200926811 1 2 0 -79 1 1 0.1490904339680772 0.8614912 0.134173632 0.00433517434 1 0 2 -80 1 1 0.089822225657751095 0.9140937 0.04908715 0.0368192 1 2 0 -81 1 1 0.077764627112278675 0.925182164 0.0500315838 0.0247862767 1 0 2 -82 1 1 0.10062242034830922 0.9042744 0.06707825 0.028647339 1 0 2 -83 1 2 0.87242455033259836 0.580679059 0.417937 0.001383926 2 1 0 -84 1 1 0.64643118238444741 0.5239122 0.462312371 0.0137754688 1 2 0 -85 1 1 0.38078103791751566 0.6833275 0.219007626 0.09766489 1 2 0 -86 1 1 0.18654240234510375 0.8298234 0.129826292 0.0403503366 1 2 0 -87 1 1 0.19556197322229293 0.8223724 0.169585884 0.008041753 1 2 0 -88 1 1 0.16967653754303061 0.843937755 0.07920382 0.07685845 1 2 0 -89 1 1 0.17942883979161328 0.8357474 0.140764222 0.0234883726 1 2 0 -90 1 1 0.37073556760095389 0.690226436 0.2999358 0.009837814 1 2 0 -91 1 1 0.28153328535230204 0.7546258 0.221098259 0.02427597 1 2 0 -92 1 1 0.095651081105254609 0.908781052 0.0509764329 0.0402425081 1 2 0 -93 1 1 0.10201305873404877 0.903017759 0.07907509 0.0179071445 1 0 2 -94 1 1 0.21320461976679056 0.8079908 0.167838141 0.02417106 1 2 0 -95 1 1 0.14772366482527016 0.862669468 0.07083947 0.06649103 1 2 0 -96 1 1 0.1676055742511661 0.8456873 0.107580893 0.04673176 1 2 0 -97 1 1 0.11592859305433842 0.8905388 0.058125712 0.051335495 1 2 0 -98 1 1 0.24343470732109662 0.783930659 0.212896168 0.00317314919 1 0 2 -99 1 1 0.14288965000739814 0.8668497 0.0867102742 0.0464400165 1 2 0 -100 2 2 0.22003618725213686 0.802489758 0.197204947 0.000305285153 2 1 0 -101 2 2 0.4769116280424554 0.6206974 0.378338724 0.0009639265 2 1 0 -102 2 2 0.32248110542186914 0.7243496 0.2752483 0.000402083038 2 1 0 -103 2 2 0.4318088621640172 0.6493335 0.3500793 0.00058722886 2 1 0 -104 2 2 0.33701376028625557 0.713899 0.2857394 0.000361559069 2 1 0 -105 2 2 0.36111778351144497 0.6968969 0.303009778 9.33285046E-05 2 1 0 -106 2 2 0.57324912019977703 0.56369096 0.434159666 0.00214935932 2 1 0 -107 2 2 0.43241433229369408 0.648940444 0.3508895 0.000170046318 2 1 0 -108 2 2 0.5524780213978463 0.5755219 0.424283057 0.000195083121 2 1 0 -109 2 2 0.10018846863192231 0.9046669 0.0947069 0.000626212 2 1 0 -110 2 2 0.34368591687548161 0.7091516 0.2858574 0.00499094324 2 1 0 -111 2 2 0.46723123458330745 0.626735151 0.3724764 0.000788469857 2 1 0 -112 2 2 0.31582627414124387 0.7291861 0.2698556 0.0009583179 2 1 0 -113 2 2 0.52239827082819301 0.593096435 0.406178564 0.00072502665 2 1 0 -114 2 2 0.36058132074789678 0.6972709 0.301918417 0.000810695637 2 1 0 -115 2 2 0.21578587862382262 0.805907845 0.192418143 0.00167402264 2 1 0 -116 2 2 0.38868733060861138 0.6779462 
0.3210066 0.00104722043 2 1 0 -117 2 2 0.097428111747803359 0.907167554 0.0924843 0.000348151079 2 1 0 -118 2 2 0.51096864540412867 0.5999142 0.400067 1.88108879E-05 2 1 0 -119 2 2 0.64730667194394087 0.5234537 0.4758455 0.000700797769 2 1 0 -120 2 2 0.21506259321317989 0.806490958 0.192730322 0.0007787307 2 1 0 -121 2 2 0.42372860353635938 0.6546015 0.3437634 0.00163504237 2 1 0 -122 2 2 0.45634525198899917 0.633595049 0.366349727 5.52281235E-05 2 1 0 -123 2 2 0.56778894609208741 0.5667772 0.4307018 0.00252098124 2 1 0 -124 2 2 0.22282641548991947 0.800253749 0.198714435 0.00103183649 2 1 0 -125 2 2 0.31027244305630025 0.733247161 0.266095132 0.0006576972 2 1 0 -126 2 2 0.59157505509617891 0.5534549 0.442328155 0.004216949 2 1 0 -127 2 2 0.48357066476934346 0.616577864 0.3786479 0.00477422541 2 1 0 -128 2 2 0.41595305224046536 0.659711242 0.339890242 0.0003985048 2 1 0 -129 2 2 0.45550524433223816 0.6341275 0.364983171 0.0008893516 2 1 0 -130 2 2 0.43130825350797314 0.6496586 0.3501238 0.000217600667 2 1 0 -131 2 2 0.11700817008638008 0.8895779 0.109250523 0.00117157528 2 1 0 -132 2 2 0.40127211138702135 0.669467866 0.330161959 0.000370198279 2 1 0 -133 2 2 0.63157763358593388 0.5317522 0.465779215 0.002468573 2 1 0 -134 2 2 0.58314815792662678 0.5581385 0.441456854 0.000404651684 2 1 0 -135 2 2 0.27204979854142897 0.7618163 0.2379036 0.000280094158 2 1 0 -136 2 2 0.1629980271721547 0.849592865 0.149376482 0.00103064778 2 1 0 -137 2 2 0.35495650373339338 0.701203942 0.297558725 0.00123733515 2 1 0 -138 2 2 0.51228059795956926 0.59912765 0.394762278 0.00611006049 2 1 0 -139 2 2 0.29479453684898038 0.7446846 0.253554732 0.00176069664 2 1 0 -140 2 2 0.23636956568495673 0.789488852 0.209835038 0.000676139141 2 1 0 -141 2 2 0.28994324565081614 0.748306036 0.247889653 0.0038042888 2 1 0 -142 2 2 0.4769116280424554 0.6206974 0.378338724 0.0009639265 2 1 0 -143 2 2 0.23597765808861054 0.7897983 0.209748372 0.0004533111 2 1 0 -144 2 2 0.16769889506754851 0.8456084 0.153658465 0.000733090041 2 1 0 -145 2 2 0.27656426046073113 0.7583849 0.239901781 0.001713358 2 1 0 -146 2 2 0.5356947512214818 0.585262537 0.413683951 0.00105350767 2 1 0 -147 2 2 0.35725414522256133 0.6995947 0.298375219 0.00203008 2 1 0 -148 2 2 0.16789042737345514 0.845446467 0.152793139 0.00176038919 2 1 0 -149 2 2 0.4005334114812254 0.6699626 0.327896535 0.002140856 2 1 0 +0 0 0 0.10523233415072977 0.9001154 0.0998846442 1.0279285E-08 0 1 2 +1 0 0 0.15045297688295883 0.8603182 0.1396817 1.00128133E-07 0 1 2 +2 0 0 0.13461746792169346 0.8740502 0.12594974 5.544896E-08 0 1 2 +3 0 0 0.16018456513825421 0.8519865 0.1480132 2.45973467E-07 0 1 2 +4 0 0 0.10163504747526553 0.9033592 0.09664083 9.512855E-09 0 1 2 +5 0 0 0.087135297305318909 0.9165531 0.08344689 7.194732E-09 0 1 2 +6 0 0 0.12881272716605582 0.8791386 0.120861337 7.34464E-08 0 1 2 +7 0 0 0.12020947959674978 0.886734664 0.1132653 2.94635036E-08 0 1 2 +8 0 0 0.18126320476886801 0.83421576 0.1657837 5.45412831E-07 0 1 2 +9 0 0 0.14710229495967375 0.8632057 0.136794254 8.353311E-08 0 1 2 +10 0 0 0.089318764342498622 0.914554 0.08544598 3.523288E-09 0 1 2 +11 0 0 0.13264827155619371 0.8757731 0.124226868 7.822249E-08 0 1 2 +12 0 0 0.15328973362849263 0.8578811 0.142118782 1.00022937E-07 0 1 2 +13 0 0 0.15323095619900862 0.857931554 0.142068356 1.04181886E-07 0 1 2 +14 0 0 0.057481053232275779 0.9441398 0.0558602326 1.140168E-10 0 1 2 +15 0 0 0.052653845383986382 0.948708355 0.0512916744 2.335356E-10 0 1 2 +16 0 0 0.072733745980773104 0.9298484 0.0701516 1.26047239E-09 0 1 2 +17 
0 0 0.10587466502102688 0.8995374 0.100462623 1.34591636E-08 0 1 2 +18 0 0 0.084824480551934117 0.9186735 0.0813265 3.45837914E-09 0 1 2 +19 0 0 0.091700952027628138 0.912377954 0.0876220241 7.326582E-09 0 1 2 +20 0 0 0.11812077534840727 0.8885887 0.111411221 2.396447E-08 0 1 2 +21 0 0 0.098283485173918703 0.9063919 0.09360805 1.358749E-08 0 1 2 +22 0 0 0.095385487148060916 0.90902245 0.09097755 4.93118124E-09 0 1 2 +23 0 0 0.13988193670435536 0.8694609 0.130538926 1.726634E-07 0 1 2 +24 0 0 0.15291427062106341 0.8582033 0.141796425 2.92172246E-07 0 1 2 +25 0 0 0.16063310681505052 0.851604462 0.148395374 1.83643166E-07 0 1 2 +26 0 0 0.12759397101970846 0.8802107 0.11978925 7.834799E-08 0 1 2 +27 0 0 0.1070308001238898 0.898498 0.101502016 1.21276811E-08 0 1 2 +28 0 0 0.10894994409847403 0.8967753 0.103224695 1.11078133E-08 0 1 2 +29 0 0 0.15385148940418955 0.857399344 0.142600432 2.05544708E-07 0 1 2 +30 0 0 0.15925761030987443 0.852776647 0.1472231 2.22335842E-07 0 1 2 +31 0 0 0.10928146263621834 0.896478057 0.103521936 1.71705246E-08 0 1 2 +32 0 0 0.072791118439489405 0.929795 0.07020497 1.14868859E-09 0 1 2 +33 0 0 0.060183242535912201 0.941592 0.058408048 3.05011655E-10 0 1 2 +34 0 0 0.14710229495967375 0.8632057 0.136794254 8.353311E-08 0 1 2 +35 0 0 0.11911838542872015 0.8877027 0.112297252 1.59854281E-08 0 1 2 +36 0 0 0.090204603153647908 0.9137442 0.0862557739 2.25922059E-09 0 1 2 +37 0 0 0.14710229495967375 0.8632057 0.136794254 8.353311E-08 0 1 2 +38 0 0 0.16364290511877541 0.849045157 0.150954619 2.48957946E-07 0 1 2 +39 0 0 0.11701286032744909 0.889573753 0.110426195 2.24934382E-08 0 1 2 +40 0 0 0.1041189537265977 0.9011181 0.0988818854 1.141039E-08 0 1 2 +41 0 0 0.2409593240414564 0.7858736 0.214123532 2.847107E-06 0 1 2 +42 0 0 0.1455323565302056 0.8645619 0.13543798 1.24459433E-07 0 1 2 +43 0 0 0.12169677790176237 0.8854168 0.1145831 9.505505E-08 0 1 2 +44 0 0 0.11106771630833344 0.894878149 0.1051218 5.51358532E-08 0 1 2 +45 0 0 0.15554666072988901 0.855947137 0.144052714 1.71912419E-07 0 1 2 +46 0 0 0.095315460789796344 0.9090861 0.09091391 8.649329E-09 0 1 2 +47 0 0 0.14432694269533022 0.8656047 0.134395167 1.12255542E-07 0 1 2 +48 0 0 0.091843575178599535 0.912247837 0.087752156 4.616522E-09 0 1 2 +49 0 0 0.12227116687678849 0.8849084 0.115091577 2.69719624E-08 0 1 2 +50 1 1 0.078831361330132368 0.924195766 0.0509617627 0.02484247 1 0 2 +51 1 1 0.1196506498131379 0.887230337 0.06149472 0.0512749478 1 2 0 +52 1 1 0.14434030142402013 0.865593135 0.113934696 0.0204721466 1 2 0 +53 1 1 0.4934820006305104 0.610496938 0.371178329 0.0183247533 1 2 0 +54 1 1 0.24425490805116146 0.783287942 0.198487073 0.0182249676 1 2 0 +55 1 1 0.49207912028628681 0.611354 0.374479532 0.0141664632 1 2 0 +56 1 1 0.19891189934063311 0.8196221 0.151094437 0.02928347 1 2 0 +57 1 1 0.22857161004907667 0.7956693 0.147004321 0.05732636 1 0 2 +58 1 1 0.11986027517911223 0.88704437 0.08104583 0.0319098048 1 2 0 +59 1 1 0.36570630064133169 0.6937065 0.265697747 0.0405957177 1 2 0 +60 1 1 0.36092040289187055 0.6970345 0.259449363 0.0435161777 1 2 0 +61 1 1 0.17705837022933765 0.8377309 0.10888046 0.05338865 1 2 0 +62 1 1 0.16838200919468641 0.845030963 0.117188893 0.03778013 1 2 0 +63 1 1 0.42812390574384268 0.651730657 0.335861862 0.0124074444 1 2 0 +64 1 1 0.22844793909500513 0.7957677 0.188083753 0.0161485374 1 0 2 +65 1 1 0.099649536788089965 0.9051546 0.07398548 0.0208599083 1 0 2 +66 1 1 0.5453565395266291 0.5796351 0.405610561 0.0147543559 1 2 0 +67 1 1 0.13984594672930761 0.8694922 0.06589201 
0.06461582 1 2 0 +68 1 2 0.7572861919735806 0.526890635 0.4689373 0.0041720774 2 1 0 +69 1 1 0.183017695190754 0.8327534 0.109402813 0.05784374 1 2 0 +70 1 2 0.73640284624029584 0.5139542 0.478833258 0.007212497 2 1 0 +71 1 1 0.13047042795082142 0.877682447 0.0879009 0.0344166756 1 0 2 +72 1 2 0.85715197211787608 0.5731456 0.424368978 0.00248538121 2 1 0 +73 1 1 0.37062883826437742 0.6903001 0.296230942 0.0134689622 1 2 0 +74 1 1 0.10740222974602938 0.898164332 0.0608370751 0.0409985669 1 0 2 +75 1 1 0.099088584274589248 0.9056625 0.0582326576 0.03610486 1 0 2 +76 1 1 0.2052441115809647 0.8144485 0.170464963 0.0150865708 1 2 0 +77 1 1 0.47976004541463291 0.6189319 0.3741432 0.006924921 1 2 0 +78 1 1 0.36081958960905797 0.697104752 0.284223527 0.0186717287 1 2 0 +79 1 1 0.23303488425015825 0.79212594 0.198442191 0.009431858 1 0 2 +80 1 1 0.2009401381302133 0.8179614 0.124288775 0.0577498041 1 2 0 +81 1 1 0.16485110862941516 0.848019958 0.08404041 0.06793966 1 0 2 +82 1 1 0.14609328821283002 0.8640771 0.0861562 0.0497667044 1 0 2 +83 1 2 1.0026545840412577 0.6316748 0.366904169 0.00142097753 2 1 0 +84 1 1 0.68502526450725787 0.5040775 0.484458417 0.0114640659 1 2 0 +85 1 1 0.17462188818465721 0.8397745 0.110411562 0.0498139337 1 2 0 +86 1 1 0.12483819546905675 0.8826397 0.0866358355 0.0307244845 1 2 0 +87 1 1 0.38610223888203354 0.67970103 0.308468759 0.0118302088 1 2 0 +88 1 1 0.17645790131635949 0.838234067 0.09166675 0.07009915 1 2 0 +89 1 1 0.35301563815054371 0.7025662 0.267733276 0.0297005326 1 2 0 +90 1 1 0.60666196289210772 0.5451676 0.443510771 0.011321608 1 2 0 +91 1 1 0.27313941699903715 0.7609867 0.21682857 0.0221847668 1 2 0 +92 1 1 0.16583954882677754 0.847182155 0.0972737148 0.0555441566 1 2 0 +93 1 1 0.216110318178794 0.8056464 0.133047938 0.061305657 1 0 2 +94 1 1 0.33904533886610982 0.712450147 0.2600237 0.02752616 1 2 0 +95 1 1 0.1608852461683882 0.851389766 0.08360588 0.0650043562 1 2 0 +96 1 1 0.20448474944375503 0.8150672 0.137656 0.0472768024 1 2 0 +97 1 1 0.1266300196897841 0.8810596 0.06579876 0.0531416424 1 2 0 +98 1 1 0.37085267211427492 0.6901456 0.301162064 0.008692351 1 0 2 +99 1 1 0.19570795638070573 0.822252333 0.12653698 0.0512106866 1 2 0 +100 2 2 0.40655867620709907 0.665938 0.333874941 0.000187000172 2 1 0 +101 2 2 0.43796444518642835 0.6453487 0.353739828 0.000911408337 2 1 0 +102 2 2 0.40727129871168982 0.6654636 0.334205151 0.000331235438 2 1 0 +103 2 2 0.42867974653227631 0.6513685 0.348098725 0.000532808946 2 1 0 +104 2 2 0.41635889462312808 0.659443557 0.34027496 0.000281510758 2 1 0 +105 2 2 0.42038138990784635 0.6567963 0.343124479 7.92395E-05 2 1 0 +106 2 2 0.46017554613877809 0.631172836 0.3666074 0.00221975357 2 1 0 +107 2 2 0.42771902078488977 0.6519946 0.3478424 0.000162985671 2 1 0 +108 2 2 0.45942242941974976 0.631648362 0.3681335 0.0002181388 2 1 0 +109 2 2 0.35632063155674082 0.700248063 0.2993861 0.000365806918 2 1 0 +110 2 2 0.59813038533862872 0.549838662 0.446502864 0.003658446 2 1 0 +111 2 2 0.43582388496630797 0.6467316 0.352501541 0.000766820565 2 1 0 +112 2 2 0.41992964480905842 0.657093048 0.342131168 0.0007757888 2 1 0 +113 2 2 0.45203899927824553 0.636329353 0.3629379 0.0007327517 2 1 0 +114 2 2 0.42251257719321916 0.655398 0.343965828 0.0006361924 2 1 0 +115 2 2 0.39888443163474774 0.671068251 0.327796131 0.00113561912 2 1 0 +116 2 2 0.43433821762281899 0.647693157 0.351395071 0.0009117467 2 1 0 +117 2 2 0.35882090036153141 0.698499441 0.3012908 0.000209733931 2 1 0 +118 2 2 0.48012712133830948 0.618704736 0.381276459 
1.8805069E-05 2 1 0 +119 2 2 0.47851707234635865 0.6197017 0.379401237 0.000897047052 2 1 0 +120 2 2 0.39042472023472213 0.6767694 0.3226851 0.0005455372 2 1 0 +121 2 2 0.42800437996383239 0.65180856 0.3467855 0.00140595378 2 1 0 +122 2 2 0.443412170952244 0.6418426 0.3581035 5.38630848E-05 2 1 0 +123 2 2 0.51604974472334419 0.5968737 0.400675178 0.002451097 2 1 0 +124 2 2 0.39974298414777737 0.670492351 0.3287836 0.000724044454 2 1 0 +125 2 2 0.43554116923472252 0.6469145 0.3525337 0.0005518172 2 1 0 +126 2 2 0.57668850951089956 0.561755538 0.434410423 0.00383405667 2 1 0 +127 2 2 0.56072400509281628 0.570795655 0.425277829 0.003926514 2 1 0 +128 2 2 0.42931537169521405 0.6509546 0.34869352 0.00035188318 2 1 0 +129 2 2 0.51307680059299199 0.5986508 0.4004598 0.0008893604 2 1 0 +130 2 2 0.42609399915540624 0.653054953 0.346731 0.000214045882 2 1 0 +131 2 2 0.58811551010728713 0.5553729 0.4437172 0.0009098785 2 1 0 +132 2 2 0.42874407787955016 0.6513266 0.348355353 0.000318029517 2 1 0 +133 2 2 0.55828940663943993 0.572187 0.425267071 0.00254591252 2 1 0 +134 2 2 0.4642649883906762 0.628596961 0.3709423 0.0004607632 2 1 0 +135 2 2 0.39742590785588855 0.672047734 0.327726841 0.000225416516 2 1 0 +136 2 2 0.37475292580837244 0.6874591 0.311920583 0.000620306 2 1 0 +137 2 2 0.43104167040457064 0.649831831 0.349148631 0.00101951673 2 1 0 +138 2 2 0.58947794559538014 0.554616749 0.440451324 0.004931943 2 1 0 +139 2 2 0.47411410502523832 0.6224362 0.376182139 0.00138162647 2 1 0 +140 2 2 0.39041001226649513 0.6767793 0.322745562 0.0004751122 2 1 0 +141 2 2 0.57104908497416329 0.564932466 0.432308227 0.0027593167 2 1 0 +142 2 2 0.43796444518642835 0.6453487 0.353739828 0.000911408337 2 1 0 +143 2 2 0.39119688403383229 0.676247 0.323438883 0.000314130477 2 1 0 +144 2 2 0.37472258028119243 0.68748 0.3120606 0.0004594017 2 1 0 +145 2 2 0.43064486292504078 0.650089741 0.348618239 0.001292042 2 1 0 +146 2 2 0.45142977120547095 0.636717141 0.3621718 0.00111102767 2 1 0 +147 2 2 0.46269994854161944 0.6295815 0.3687677 0.00165078847 2 1 0 +148 2 2 0.38241482614892125 0.682212 0.3167025 0.00108551537 2 1 0 +149 2 2 0.44044916021338609 0.6437472 0.354472637 0.00178015162 2 1 0 diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt index f71990cd92..60bf133c7c 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt @@ -33,35 +33,35 @@ Confusion table PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 21 | 0 | 0 | 1.0000 - 1 || 0 | 26 | 4 | 0.8667 + 1 || 0 | 27 | 3 | 0.9000 2 || 0 | 0 | 28 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.8750 | -Accuracy(micro-avg): 0.949367 -Accuracy(macro-avg): 0.955556 -Log-loss: 0.343967 -Log-loss reduction: 68.371359 +Precision ||1.0000 |1.0000 |0.9032 | +Accuracy(micro-avg): 0.962025 +Accuracy(macro-avg): 0.966667 +Log-loss: 0.366057 +Log-loss reduction: 66.340104 Confusion table ||======================== PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 29 | 0 | 0 | 1.0000 - 1 || 0 | 19 | 1 | 0.9500 + 1 || 0 | 18 | 2 | 0.9000 2 || 0 | 0 | 22 | 1.0000 ||======================== -Precision ||1.0000 |1.0000 |0.9565 | -Accuracy(micro-avg): 0.985915 -Accuracy(macro-avg): 0.983333 -Log-loss: 0.277101 -Log-loss reduction: 74.475991 +Precision ||1.0000 |1.0000 |0.9167 | +Accuracy(micro-avg): 0.971831 +Accuracy(macro-avg): 0.966667 +Log-loss: 0.336734 +Log-loss reduction: 68.983181 
OVERALL RESULTS --------------------------------------- -Accuracy(micro-avg): 0.967641 (0.0183) -Accuracy(macro-avg): 0.969444 (0.0139) -Log-loss: 0.310534 (0.0334) -Log-loss reduction: 71.423675 (3.0523) +Accuracy(micro-avg): 0.966928 (0.0049) +Accuracy(macro-avg): 0.966667 (0.0000) +Log-loss: 0.351395 (0.0147) +Log-loss reduction: 67.661643 (1.3215) --------------------------------------- Physical memory usage(MB): %Number% diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt index 4ee447e298..d2d984758b 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt @@ -1,4 +1,4 @@ PKPD Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings -0.967641 0.969444 0.310534 71.42368 AvgPer{lr=0.8} PKPD %Data% %Output% 99 0 0 maml.exe CV tr=PKPD{p=AvgPer { lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} +0.966928 0.966667 0.351395 67.66164 AvgPer{lr=0.8} PKPD %Data% %Output% 99 0 0 maml.exe CV tr=PKPD{p=AvgPer { lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt index 100ad1843e..b436ddf13d 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt @@ -1,151 +1,151 @@ Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class -5 0 0 0.023753880830600982 0.976526 0.0234627537 1.12415537E-05 0 1 2 -6 0 0 0.1302157926188138 0.877905965 0.122053728 4.028206E-05 0 1 2 -8 0 0 0.38835109531985795 0.6781742 0.321734548 9.125436E-05 0 1 2 -9 0 0 0.25023100275372456 0.7786209 0.221342817 3.62790561E-05 0 1 2 -10 0 0 0.043651773791107358 0.957287252 0.0427056365 7.135807E-06 0 1 2 -11 0 0 0.13521420369658141 0.8735288 0.126428857 4.23762431E-05 0 1 2 -18 0 0 0.028430623462091707 0.9719697 0.02802308 7.212694E-06 0 1 2 -20 0 0 0.10458121712711159 0.900701642 0.0992766 2.17436227E-05 0 1 2 -21 0 0 0.045363134495307413 0.9556504 0.0443333276 1.62607685E-05 0 1 2 -25 0 0 0.28906012724616753 0.7489672 0.250976235 5.657253E-05 0 1 2 -28 0 0 0.10227830625980082 0.902778268 0.0972085 1.322162E-05 0 1 2 -31 0 0 0.08562861703311947 0.9179351 0.08204699 1.79389826E-05 0 1 2 -32 0 0 0.017136477098684464 0.9830095 0.0169868153 3.65674259E-06 0 1 2 -35 0 0 0.16731981579854963 0.845929 0.1540563 1.46633747E-05 0 1 2 -37 0 0 0.25023100275372456 0.7786209 0.221342817 3.62790561E-05 0 1 2 -40 0 0 0.079660704938754975 0.9234296 0.0765565857 1.37818333E-05 0 1 2 -41 0 0 0.64555225289552742 0.5243729 0.475452363 0.0001747969 0 1 2 -44 0 0 0.040687480851420016 0.960129142 0.0398315266 3.935343E-05 0 1 2 -45 0 0 0.27950273039157197 0.756159663 0.243786544 5.377483E-05 0 1 2 -46 0 0 0.040918754924550584 0.9599071 0.0400804244 1.24371918E-05 0 1 2 -48 0 0 0.046121164137562148 0.954926252 0.04506536 8.38812048E-06 0 1 2 -50 1 1 0.72887025154137375 0.482453734 0.272260427 0.245285824 1 0 2 -51 1 1 0.8088987568127185 0.445348233 0.336972356 0.2176794 1 2 0 -52 1 1 0.78136433840766417 0.457781017 0.438075066 0.104143932 1 2 0 -54 1 1 0.54403068907517893 0.5804041 0.36454007 0.0550558232 1 2 0 -56 1 2 1.1200462355861258 0.531909943 0.3262647 0.141825333 2 1 0 -60 1 1 0.23491005150903682 0.790641963 0.1784061 0.0309519637 1 2 0 
-63 1 1 0.68939365760121074 0.5018803 0.458269477 0.0398502052 1 2 0 -64 1 1 0.4797019767355476 0.618967831 0.291831881 0.0892002955 1 0 2 -66 1 2 0.86155959099850343 0.5307835 0.422502637 0.04671388 2 1 0 -68 1 1 0.49420884947307447 0.61005336 0.3815175 0.008429126 1 2 0 -69 1 1 0.23145347728671642 0.7933796 0.145455226 0.061165195 1 2 0 -70 1 2 1.4851413739165595 0.736525655 0.226470321 0.03700401 2 1 0 -71 1 1 0.3561851307666371 0.700342953 0.164127111 0.13552995 1 0 2 -72 1 1 0.67173548994464938 0.5108213 0.4814403 0.007738397 1 2 0 -73 1 1 0.50612015658391962 0.602829933 0.3625389 0.034631148 1 2 0 -74 1 1 0.41484498646830853 0.66044265 0.18213135 0.157426015 1 2 0 -76 1 1 0.50271064477753691 0.6048888 0.345186323 0.0499248542 1 2 0 -77 1 2 1.0467776624316247 0.6128986 0.3510672 0.0360342041 2 1 0 -79 1 1 0.25594672318320155 0.7741832 0.183944046 0.0418727621 1 0 2 -82 1 1 0.28889429116536303 0.7490914 0.130277559 0.120631076 1 2 0 -88 1 1 0.52394753213635203 0.5921783 0.2563273 0.151494384 1 2 0 -90 1 1 0.45035315262659448 0.637403 0.3411746 0.0214223787 1 2 0 -91 1 1 0.68738585386581597 0.502889 0.425832123 0.07127889 1 2 0 -92 1 1 0.2697499931611898 0.763570368 0.162871167 0.0735584348 1 2 0 -93 1 1 0.17823538741474004 0.836745441 0.0821188241 0.08113571 1 2 0 -95 1 1 0.48391980144761892 0.616362631 0.241359085 0.1422783 1 2 0 -96 1 1 0.47873437290577431 0.619567037 0.281721354 0.09871157 1 2 0 -97 1 1 0.4309836113144338 0.649869561 0.220002741 0.130127683 1 2 0 -98 1 1 0.27001090521188814 0.76337117 0.202765256 0.0338635966 1 0 2 -99 1 1 0.40366256068064504 0.667869449 0.2403792 0.09175138 1 2 0 -100 2 2 0.098423959135663375 0.9062646 0.09183126 0.001904126 2 1 0 -102 2 2 0.18784362467414789 0.8287443 0.16809994 0.00315576326 2 1 0 -104 2 2 0.20305260298998334 0.8162353 0.181571469 0.00219323137 2 1 0 -105 2 2 0.16738689651485589 0.8458723 0.153207168 0.000920576451 2 1 0 -106 2 2 0.58800293393782621 0.5554354 0.439967334 0.00459726434 2 1 0 -108 2 2 0.4731297912724482 0.6230492 0.37598455 0.0009662311 2 1 0 -109 2 2 0.035500468049352432 0.9651223 0.0299819969 0.00489571551 2 1 0 -111 2 2 0.39672088491078095 0.6725217 0.3237172 0.00376107777 2 1 0 -112 2 2 0.20709995070378101 0.8129384 0.180399537 0.00666203769 2 1 0 -113 2 2 0.49689364308238115 0.6084177 0.3893122 0.002270126 2 1 0 -115 2 2 0.12645623913049378 0.8812127 0.109346546 0.009440767 2 1 0 -117 2 2 0.025087766630295084 0.9752243 0.02114348 0.00363221765 2 1 0 -120 2 2 0.11093517828288008 0.894996762 0.09939285 0.005610376 2 1 0 -121 2 2 0.37719650665097176 0.6857813 0.3084725 0.00574616855 2 1 0 -122 2 2 0.25512580405327051 0.774819 0.224642664 0.0005383256 2 1 0 -123 2 2 0.53470400837092702 0.585842669 0.4030093 0.0111480216 2 1 0 -125 2 2 0.15798263383297981 0.8538646 0.139957428 0.00617795158 2 1 0 -128 2 2 0.30543205932131445 0.736804962 0.2611207 0.00207435014 2 1 0 -129 2 2 0.31304921188329871 0.7312139 0.2608706 0.00791547 2 1 0 -131 2 2 0.050064073835890829 0.9511685 0.03442935 0.01440218 2 1 0 -132 2 2 0.29000752749814246 0.748257935 0.249804661 0.00193742709 2 1 0 -133 2 2 0.57582423964286222 0.562241256 0.4260323 0.0117264669 2 1 0 -137 2 2 0.23369102254431959 0.791606367 0.2005785 0.007815139 2 1 0 -138 2 2 0.38238249988924056 0.682234049 0.29368493 0.0240810253 2 1 0 -141 2 2 0.18873995406873867 0.8280018 0.150254741 0.0217434336 2 1 0 -144 2 2 0.080216932325286969 0.9229161 0.0723539 0.00473001366 2 1 0 -145 2 2 0.18503665462933813 0.8310738 0.158534229 0.0103919385 2 1 0 -147 2 2 0.25370059016834895 
0.7759241 0.212361827 0.0117140962 2 1 0 -0 0 0 0.10211748577393479 0.902923465 0.09707638 1.31251113E-07 0 1 2 -1 0 0 0.18414005419872645 0.8318193 0.1681801 6.10566246E-07 0 1 2 -2 0 0 0.14492385261258312 0.865088165 0.134911478 3.69817315E-07 0 1 2 -3 0 0 0.18018182361642154 0.835118353 0.164880455 1.1808728E-06 0 1 2 -4 0 0 0.091854748075686457 0.912237644 0.08776226 1.22251592E-07 0 1 2 -7 0 0 0.12233785238606552 0.88484937 0.115150325 2.89829131E-07 0 1 2 -12 0 0 0.18838937109230286 0.828292131 0.1717073 5.786816E-07 0 1 2 -13 0 0 0.17483938525704146 0.839591861 0.1604077 4.46853E-07 0 1 2 -14 0 0 0.045135070620344352 0.955868363 0.0441316478 5.3407363E-09 0 1 2 -15 0 0 0.030612994376980014 0.969850838 0.0301491246 1.0891493E-08 0 1 2 -16 0 0 0.055112338614887763 0.9463788 0.05362115 3.08450865E-08 0 1 2 -17 0 0 0.10062993461057677 0.9042676 0.09573224 1.63700875E-07 0 1 2 -19 0 0 0.073413324188812371 0.9292167 0.07078323 1.13846653E-07 0 1 2 -22 0 0 0.080101141800871592 0.923023 0.07697697 5.50810348E-08 0 1 2 -23 0 0 0.14576182189759379 0.864363551 0.135635108 1.32143555E-06 0 1 2 -24 0 0 0.15527358603878322 0.8561809 0.143817171 1.903017E-06 0 1 2 -26 0 0 0.12564253084337382 0.881930053 0.118069261 6.645889E-07 0 1 2 -27 0 0 0.10587439997529158 0.8995376 0.100462213 1.60874819E-07 0 1 2 -29 0 0 0.16779715917645741 0.8455253 0.154473513 1.14837974E-06 0 1 2 -30 0 0 0.18480739195523371 0.8312644 0.168734416 1.2252525E-06 0 1 2 -33 0 0 0.04032543480505283 0.9604768 0.03952316 1.14692389E-08 0 1 2 -34 0 0 0.17552640171742001 0.839015245 0.160984188 5.622917E-07 0 1 2 -36 0 0 0.09204810470455442 0.9120613 0.0879386961 4.5151662E-08 0 1 2 -38 0 0 0.18661279637066258 0.829764962 0.170234054 9.896429E-07 0 1 2 -39 0 0 0.12071723979290006 0.88628453 0.113715246 2.43832517E-07 0 1 2 -42 0 0 0.15054831358788412 0.860236168 0.139763221 6.199213E-07 0 1 2 -43 0 0 0.10889597554551136 0.8968237 0.103175469 8.1666E-07 0 1 2 -47 0 0 0.15393456680529674 0.8573281 0.142671227 6.39328334E-07 0 1 2 -49 0 0 0.13066902060601812 0.877508163 0.122491583 2.52864339E-07 0 1 2 -53 1 1 0.2827592626915541 0.7537012 0.21389237 0.03240643 1 2 0 -55 1 1 0.40583815374571869 0.666418 0.308232874 0.0253491253 1 2 0 -57 1 1 0.22836674855401806 0.795832336 0.173427463 0.0307402182 1 0 2 -58 1 1 0.17705481273015428 0.837733865 0.126026779 0.0362393446 1 2 0 -59 1 1 0.28623948897938623 0.7510827 0.179181576 0.06973568 1 2 0 -61 1 1 0.23046176000020194 0.7941668 0.132671311 0.07316189 1 2 0 -62 1 1 0.13173560241334381 0.8765727 0.08075007 0.04267718 1 2 0 -65 1 1 0.14168260820038769 0.8678967 0.07845409 0.05364923 1 0 2 -67 1 1 0.15281502745602504 0.858288467 0.07580672 0.06590483 1 0 2 -75 1 1 0.14692167558386063 0.8633616 0.0731101856 0.06352824 1 2 0 -78 1 1 0.36772458144318781 0.69230783 0.278533429 0.0291587356 1 2 0 -80 1 1 0.16590807826044235 0.8471241 0.07986628 0.0730095953 1 2 0 -81 1 1 0.15418537033222618 0.8571131 0.09555078 0.0473361239 1 0 2 -83 1 2 0.87913156238340517 0.5822821 0.415143281 0.00257462286 2 1 0 -84 1 1 0.60329559653785991 0.547005951 0.428067416 0.0249266215 1 2 0 -85 1 1 0.30763923553748801 0.7351805 0.189976588 0.07484292 1 2 0 -86 1 1 0.22077802043432937 0.801894665 0.161515683 0.0365896225 1 2 0 -87 1 1 0.26521281231157245 0.7670427 0.216186985 0.0167703368 1 2 0 -89 1 1 0.24918952543906914 0.779432237 0.173652634 0.0469151475 1 2 0 -94 1 1 0.27882348553441511 0.756673455 0.19962126 0.04370527 1 2 0 -101 2 2 0.48364964736145688 0.616529167 0.3817476 0.00172325119 2 1 0 -103 2 2 
0.46298590319892402 0.6294015 0.369625777 0.000972708163 2 1 0 -107 2 2 0.47875168972271287 0.6195563 0.380170017 0.000273669633 2 1 0 -110 2 2 0.50353121321209937 0.604392648 0.38902545 0.00658191368 2 1 0 -114 2 2 0.42627427461954132 0.652937233 0.345770031 0.00129272381 2 1 0 -116 2 2 0.46330187402047324 0.629202664 0.369202226 0.00159509375 2 1 0 -118 2 2 0.54017556235004138 0.582645953 0.4173134 4.062326E-05 2 1 0 -119 2 2 0.59238374503353441 0.5530075 0.445467323 0.00152520277 2 1 0 -124 2 2 0.37578192391690202 0.6867521 0.311931521 0.0013163907 2 1 0 -126 2 2 0.65538868877846068 0.5192402 0.4731488 0.007610987 2 1 0 -127 2 2 0.58488203770280534 0.5571716 0.4347405 0.008087933 2 1 0 -130 2 2 0.48483716808745803 0.61579746 0.383864969 0.0003375506 2 1 0 -134 2 2 0.53564556246675443 0.5852913 0.4138755 0.0008331971 2 1 0 -135 2 2 0.4222335993197745 0.6555809 0.344069272 0.0003498588 2 1 0 -136 2 2 0.3225183821825246 0.7243226 0.2744004 0.00127698807 2 1 0 -139 2 2 0.45570650730743484 0.6339999 0.3636862 0.002313912 2 1 0 -140 2 2 0.38195300929629561 0.6825271 0.316598237 0.00087465957 2 1 0 -142 2 2 0.48364964736145688 0.616529167 0.3817476 0.00172325119 2 1 0 -143 2 2 0.37769091414905154 0.6854423 0.313964784 0.0005928655 2 1 0 -146 2 2 0.54054677541576512 0.5824297 0.415693641 0.00187665562 2 1 0 -148 2 2 0.33311820419531579 0.7166855 0.281080544 0.00223395228 2 1 0 -149 2 2 0.47519601475299389 0.62176317 0.374690771 0.00354602537 2 1 0 +5 0 0 0.042901834728573146 0.9580054 0.0419840477 1.05340787E-05 0 1 2 +6 0 0 0.083706488880306434 0.919701159 0.08026548 3.335983E-05 0 1 2 +8 0 0 0.15648858149565906 0.8551413 0.144762635 9.60744146E-05 0 1 2 +9 0 0 0.11740370169687092 0.889226139 0.110738449 3.540015E-05 0 1 2 +10 0 0 0.049963314146599096 0.9512643 0.04872921 6.479303E-06 0 1 2 +11 0 0 0.089614043688354766 0.914284 0.085680455 3.558085E-05 0 1 2 +18 0 0 0.045030317118332754 0.9559685 0.0440244265 7.0445376E-06 0 1 2 +20 0 0 0.080502881922273103 0.922652245 0.07732772 2.00166778E-05 0 1 2 +21 0 0 0.053251321578678124 0.9481417 0.0518441126 1.42192039E-05 0 1 2 +25 0 0 0.1345256835252219 0.8741304 0.125810727 5.882482E-05 0 1 2 +28 0 0 0.071694176117578209 0.9308155 0.06917247 1.20015538E-05 0 1 2 +31 0 0 0.070443013528733611 0.931980848 0.06800209 1.70340463E-05 0 1 2 +32 0 0 0.032654422158073904 0.967873 0.0321238 3.193805E-06 0 1 2 +35 0 0 0.085718229139385371 0.9178528 0.08213324 1.393153E-05 0 1 2 +37 0 0 0.11740370169687092 0.889226139 0.110738449 3.540015E-05 0 1 2 +40 0 0 0.062928870664993405 0.939010262 0.06097769 1.20330114E-05 0 1 2 +41 0 0 0.26464229432497571 0.767480433 0.232282773 0.000236815 0 1 2 +44 0 0 0.059370648640626932 0.9423574 0.0576089844 3.36135163E-05 0 1 2 +45 0 0 0.12572539265519678 0.881857 0.118088 5.50402838E-05 0 1 2 +46 0 0 0.050943143901380036 0.9503327 0.04965677 1.0502069E-05 0 1 2 +48 0 0 0.051390686845431854 0.9499075 0.0500850454 7.45635452E-06 0 1 2 +50 1 1 0.3736753570058497 0.6882003 0.17439957 0.137400135 1 2 0 +51 1 1 0.46974941198892683 0.6251589 0.245875 0.1289661 1 2 0 +52 1 1 0.48996150579033221 0.61265 0.3297582 0.05759184 1 2 0 +54 1 1 0.5115137576811869 0.599587262 0.353570044 0.04684268 1 2 0 +56 1 1 0.60702971635293523 0.5449672 0.3745032 0.0805296 1 2 0 +60 1 1 0.36506630072943136 0.6941506 0.230054215 0.07579513 1 2 0 +63 1 1 0.61679568772755644 0.539670944 0.423799872 0.0365292132 1 2 0 +64 1 1 0.51079160996051487 0.6000204 0.311807573 0.08817205 1 0 2 +66 1 1 0.71014226716045414 0.491574258 0.463784844 0.04464092 1 2 0 +68 1 
1 0.67782447270478297 0.507720351 0.479092419 0.0131872464 1 2 0 +69 1 1 0.35969209704682265 0.6978912 0.195766345 0.106342494 1 2 0 +70 1 2 0.94663324211722399 0.5851442 0.388045281 0.0268104747 2 1 0 +71 1 1 0.37710811805711025 0.6858419 0.171295851 0.142862245 1 0 2 +72 1 2 0.77788692984780239 0.531407952 0.459375679 0.0092163505 2 1 0 +73 1 1 0.52786345281280167 0.5898639 0.372436285 0.03769979 1 2 0 +74 1 1 0.36466649795555972 0.6944282 0.171437979 0.134133831 1 2 0 +76 1 1 0.4706543421646936 0.624593437 0.3352478 0.0401587524 1 2 0 +77 1 2 0.7535727724729181 0.5064204 0.4706819 0.0228977185 2 1 0 +79 1 1 0.42207386769765121 0.6556856 0.289230019 0.055084385 1 0 2 +82 1 1 0.37196204518518855 0.6893804 0.158601031 0.152018562 1 0 2 +88 1 1 0.46796960461157472 0.626272559 0.226254344 0.147473127 1 2 0 +90 1 1 0.57981657513372886 0.5600011 0.406342536 0.033656422 1 2 0 +91 1 1 0.55044769976504981 0.576691568 0.364665627 0.0586428121 1 2 0 +92 1 1 0.37206623633705027 0.6893086 0.203397572 0.107293814 1 2 0 +93 1 1 0.36403179497279287 0.6948691 0.1855017 0.119629189 1 0 2 +95 1 1 0.43682545951919971 0.6460842 0.214487135 0.139428675 1 2 0 +96 1 1 0.46570722322654323 0.627691031 0.26823175 0.10407722 1 2 0 +97 1 1 0.39291303807962741 0.675087452 0.207266659 0.117645919 1 2 0 +98 1 1 0.54273254198757837 0.581158042 0.372920245 0.0459217168 1 0 2 +99 1 1 0.44123101916283403 0.6432441 0.249400109 0.107355788 1 2 0 +100 2 2 0.33617401944991809 0.714498758 0.2845078 0.0009934271 2 1 0 +102 2 2 0.39689975338292421 0.6724014 0.325897127 0.00170145172 2 1 0 +104 2 2 0.39234380670092406 0.675471842 0.323201925 0.00132622325 2 1 0 +105 2 2 0.41028614985925072 0.6634604 0.336062819 0.000476817338 2 1 0 +106 2 2 0.5132441832999014 0.5985506 0.395236254 0.00621313741 2 1 0 +108 2 2 0.49929150642931341 0.606960535 0.392131835 0.0009075964 2 1 0 +109 2 2 0.26646094488200939 0.7660859 0.231548309 0.002365779 2 1 0 +111 2 2 0.46694178172420903 0.6269166 0.37000373 0.003079707 2 1 0 +112 2 2 0.40654775668194076 0.6659453 0.3302957 0.00375901931 2 1 0 +113 2 2 0.47945423566250328 0.6191212 0.378457546 0.002421245 2 1 0 +115 2 2 0.35276463261077462 0.7027426 0.291719645 0.00553779537 2 1 0 +117 2 2 0.26515686478225903 0.7670856 0.231291756 0.00162261887 2 1 0 +120 2 2 0.34688731344682866 0.706885 0.290222138 0.00289288373 2 1 0 +121 2 2 0.43965283179672848 0.644260049 0.3507073 0.005032635 2 1 0 +122 2 2 0.45541567136355199 0.6341843 0.365496635 0.000319047016 2 1 0 +123 2 2 0.54552139173103675 0.579539537 0.410656959 0.009803501 2 1 0 +125 2 2 0.41249593491283987 0.6619959 0.334856242 0.003147891 2 1 0 +128 2 2 0.42919597809006471 0.6510323 0.347469151 0.0014985021 2 1 0 +129 2 2 0.50220576404796058 0.6051943 0.390145928 0.00465982826 2 1 0 +131 2 2 0.29930157249559664 0.7413358 0.253664881 0.00499929441 2 1 0 +132 2 2 0.42241536257386447 0.6554617 0.343174279 0.00136402 2 1 0 +133 2 2 0.58981556575484373 0.554429531 0.434764743 0.0108057242 2 1 0 +137 2 2 0.42743776430605873 0.652178 0.342929065 0.00489296857 2 1 0 +138 2 2 0.53872416162362391 0.5834922 0.397239983 0.0192677956 2 1 0 +141 2 2 0.40652439645884153 0.665960848 0.32259953 0.0114396559 2 1 0 +144 2 2 0.31126961975707773 0.732516348 0.2650514 0.00243223668 2 1 0 +145 2 2 0.39667976209482653 0.672549367 0.3213434 0.00610723253 2 1 0 +147 2 2 0.44466626159250777 0.6410382 0.351262271 0.007699582 2 1 0 +0 0 0 0.18759581342652074 0.8289497 0.1710501 1.9547646E-07 0 1 2 +1 0 0 0.21987628671839457 0.8026181 0.197380453 1.43798934E-06 0 1 2 +2 0 0 
0.2038049585744301 0.815621436 0.184377745 8.43562248E-07 0 1 2 +3 0 0 0.2236521678622588 0.7995932 0.200403824 2.96490521E-06 0 1 2 +4 0 0 0.18330310696789939 0.8325158 0.167484045 1.79656709E-07 0 1 2 +7 0 0 0.19967126091978882 0.818999946 0.180999577 4.76393581E-07 0 1 2 +12 0 0 0.21914588210495925 0.803204536 0.196794033 1.406126E-06 0 1 2 +13 0 0 0.20770898542680327 0.812443435 0.187555075 1.46583784E-06 0 1 2 +14 0 0 0.14854109570902546 0.8619646 0.138035417 4.3592463E-09 0 1 2 +15 0 0 0.14513911963444476 0.86490196 0.13509801 7.44877537E-09 0 1 2 +16 0 0 0.16241624648537462 0.8500873 0.149912685 3.33129044E-08 0 1 2 +17 0 0 0.18903730098766516 0.82775563 0.172244146 2.49378132E-07 0 1 2 +19 0 0 0.17785962794139024 0.8370599 0.162939966 1.41271613E-07 0 1 2 +22 0 0 0.1699063130036905 0.843743861 0.15625605 1.06977211E-07 0 1 2 +23 0 0 0.2245420938262514 0.798881948 0.201115757 2.27329838E-06 0 1 2 +24 0 0 0.229204736236894 0.7951657 0.204831034 3.225305E-06 0 1 2 +26 0 0 0.20940154601520181 0.8110695 0.188929364 1.11967393E-06 0 1 2 +27 0 0 0.19162167579232847 0.825619161 0.17438063 2.234952E-07 0 1 2 +29 0 0 0.22241379486318399 0.800584 0.1994135 2.49890059E-06 0 1 2 +30 0 0 0.22817293653384596 0.7959866 0.20401068 2.7336057E-06 0 1 2 +33 0 0 0.1498761619987731 0.8608146 0.139185429 9.332131E-09 0 1 2 +34 0 0 0.21777818951761163 0.8043038 0.195694983 1.18367143E-06 0 1 2 +36 0 0 0.17861934062632651 0.836424232 0.1635757 5.6025E-08 0 1 2 +38 0 0 0.21971722850899228 0.802745759 0.1972512 3.06361426E-06 0 1 2 +39 0 0 0.19852240288511083 0.8199414 0.180058211 3.809114E-07 0 1 2 +42 0 0 0.20729030754644143 0.812783659 0.187214673 1.64959692E-06 0 1 2 +43 0 0 0.20694320482416118 0.8130658 0.186932817 1.34293532E-06 0 1 2 +47 0 0 0.21072198499042374 0.8099992 0.189999253 1.51110225E-06 0 1 2 +49 0 0 0.19999502697398505 0.8187348 0.181264728 4.53224345E-07 0 1 2 +53 1 1 0.66911039918186277 0.512164 0.455997 0.0318390019 1 2 0 +55 1 1 0.58505952859509458 0.5570727 0.417532742 0.0253945347 1 2 0 +57 1 1 0.37984692367562606 0.6839661 0.2027571 0.1132768 1 0 2 +58 1 1 0.1751224720380232 0.8393542 0.120186314 0.04045944 1 2 0 +59 1 1 0.52831240938040969 0.589599133 0.346245944 0.0641549453 1 2 0 +61 1 1 0.26188627198696379 0.769598544 0.160627723 0.0697737262 1 2 0 +62 1 1 0.28652910933785641 0.7508652 0.193148091 0.0559866764 1 2 0 +65 1 1 0.12089803023097732 0.8861243 0.07507464 0.0388010442 1 0 2 +67 1 1 0.22276578883221054 0.800302267 0.111182526 0.08851518 1 2 0 +75 1 1 0.13613517691306831 0.872724652 0.0644540042 0.0628213137 1 0 2 +78 1 1 0.45097416965240078 0.6370073 0.3337904 0.02920233 1 2 0 +80 1 1 0.3382419257011689 0.713022768 0.2016004 0.08537684 1 2 0 +81 1 1 0.27941823305216318 0.756223559 0.126084387 0.117692038 1 2 0 +83 1 2 1.022795857870648 0.6375606 0.359588176 0.002851213 2 1 0 +84 1 2 0.7625488488996448 0.511201739 0.466475934 0.0223223064 2 1 0 +85 1 1 0.23335036702118436 0.7918761 0.145803913 0.06232002 1 2 0 +86 1 1 0.17283040277952735 0.8412803 0.121625617 0.0370940939 1 2 0 +87 1 1 0.50874982273809755 0.6012468 0.3793505 0.0194027033 1 2 0 +89 1 1 0.5094391481829712 0.600832462 0.351426542 0.04774102 1 2 0 +94 1 1 0.46648105509590337 0.6272055 0.3286032 0.04419127 1 2 0 +101 2 2 0.4153786841345754 0.660090268 0.338023067 0.00188670226 2 1 0 +103 2 2 0.42346376004261183 0.6547749 0.344130754 0.001094352 2 1 0 +107 2 2 0.43830355758261946 0.6451299 0.354542017 0.000328052876 2 1 0 +110 2 2 0.64043030720854799 0.5270656 0.46593073 0.00700371573 2 1 0 +114 2 2 
0.40350630322747794 0.6679738 0.330664366 0.00136183063 2 1 0 +116 2 2 0.4558091754861342 0.6339348 0.364266962 0.00179821951 2 1 0 +118 2 2 0.46175810750647578 0.630174756 0.3697757 4.95243366E-05 2 1 0 +119 2 2 0.43781178515434294 0.645447254 0.35272187 0.00183087389 2 1 0 +124 2 2 0.44417506838660581 0.64135313 0.357245624 0.00140123651 2 1 0 +126 2 2 0.57398343636358973 0.5632772 0.4290593 0.007663534 2 1 0 +127 2 2 0.57237577260674821 0.5641835 0.427727461 0.008089082 2 1 0 +130 2 2 0.44368643858948603 0.6416666 0.357922435 0.000410959037 2 1 0 +134 2 2 0.43117898943914462 0.6497426 0.349251479 0.00100588414 2 1 0 +135 2 2 0.44319173399404627 0.6419841 0.3576087 0.000407162734 2 1 0 +136 2 2 0.40260894274567183 0.6685735 0.33020252 0.00122395344 2 1 0 +139 2 2 0.52407476588656432 0.592102945 0.405261934 0.002635112 2 1 0 +140 2 2 0.41465548039366562 0.6605678 0.338525534 0.000906673 2 1 0 +142 2 2 0.4153786841345754 0.660090268 0.338023067 0.00188670226 2 1 0 +143 2 2 0.41418493871989953 0.6608787 0.3385007 0.000620570441 2 1 0 +146 2 2 0.44112047883688321 0.6433152 0.354572773 0.00211200817 2 1 0 +148 2 2 0.41909754913633196 0.65764004 0.3402519 0.00210807845 2 1 0 +149 2 2 0.44896079794147509 0.6382911 0.358130246 0.0035786368 2 1 0 diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt index c319ba3d49..e4ceccca5e 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt @@ -19,21 +19,21 @@ Confusion table PREDICTED || 0 | 1 | 2 | Recall TRUTH ||======================== 0 || 50 | 0 | 0 | 1.0000 - 1 || 0 | 45 | 5 | 0.9000 - 2 || 0 | 1 | 49 | 0.9800 + 1 || 0 | 47 | 3 | 0.9400 + 2 || 0 | 0 | 50 | 1.0000 ||======================== -Precision ||1.0000 |0.9783 |0.9074 | -Accuracy(micro-avg): 0.960000 -Accuracy(macro-avg): 0.960000 -Log-loss: 0.293938 -Log-loss reduction: 73.244600 +Precision ||1.0000 |1.0000 |0.9434 | +Accuracy(micro-avg): 0.980000 +Accuracy(macro-avg): 0.980000 +Log-loss: 0.272667 +Log-loss reduction: 75.180769 OVERALL RESULTS --------------------------------------- -Accuracy(micro-avg): 0.960000 (0.0000) -Accuracy(macro-avg): 0.960000 (0.0000) -Log-loss: 0.293938 (0.0000) -Log-loss reduction: 73.244600 (0.0000) +Accuracy(micro-avg): 0.980000 (0.0000) +Accuracy(macro-avg): 0.980000 (0.0000) +Log-loss: 0.272667 (0.0000) +Log-loss reduction: 75.180769 (0.0000) --------------------------------------- Physical memory usage(MB): %Number% diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt index b8bbf72cd9..05d350bd96 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt @@ -1,4 +1,4 @@ PKPD Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings -0.96 0.96 0.293938 73.2446 AvgPer{lr=0.8} PKPD %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=PKPD{p=AvgPer { lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} +0.98 0.98 0.272667 75.18077 AvgPer{lr=0.8} PKPD %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=PKPD{p=AvgPer { lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt 
b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt index abaafba303..0e29a4e2ff 100644 --- a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt +++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt @@ -1,151 +1,151 @@ Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class -0 0 0 0.10709083790126951 0.898444057 0.101555951 1.09862652E-08 0 1 2 -1 0 0 0.14972937943085041 0.860940933 0.139058977 1.03412368E-07 0 1 2 -2 0 0 0.13145025814375275 0.8768229 0.123177059 6.145917E-08 0 1 2 -3 0 0 0.15528019966440448 0.856175244 0.143824473 2.62822567E-07 0 1 2 -4 0 0 0.10252830274490825 0.9025526 0.09744736 1.04376809E-08 0 1 2 -5 0 0 0.090053147743181336 0.9138826 0.08611736 8.101242E-09 0 1 2 -6 0 0 0.12357314644074496 0.883757 0.116242908 8.74405E-08 0 1 2 -7 0 0 0.12105140488742108 0.8859884 0.114011578 3.08569952E-08 0 1 2 -8 0 0 0.17274984965939472 0.841348052 0.158651352 5.97416147E-07 0 1 2 -9 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2 -10 0 0 0.093967949932628358 0.910311937 0.08968807 3.63372E-09 0 1 2 -11 0 0 0.13108575869963462 0.877142549 0.122857377 8.249825E-08 0 1 2 -12 0 0 0.15257914743793716 0.858490944 0.141508967 9.911802E-08 0 1 2 -13 0 0 0.14535615625152426 0.864714265 0.1352856 1.173372E-07 0 1 2 -14 0 0 0.06340435568818982 0.9385639 0.0614361 1.239382E-10 0 1 2 -15 0 0 0.056683708917569194 0.9448929 0.0551071428 2.807663E-10 0 1 2 -16 0 0 0.075170002932813201 0.9275858 0.07241419 1.54777513E-09 0 1 2 -17 0 0 0.10666806288443777 0.898824 0.101176038 1.5170011E-08 0 1 2 -18 0 0 0.091241664716928128 0.9127971 0.0872029141 3.50579832E-09 0 1 2 -19 0 0 0.092742902606664304 0.9114278 0.08857219 8.39613445E-09 0 1 2 -20 0 0 0.12374770832324052 0.883602738 0.116397209 2.285497E-08 0 1 2 -21 0 0 0.098256721071890374 0.9064162 0.093583785 1.62108957E-08 0 1 2 -22 0 0 0.092301570249172765 0.9118301 0.08816987 6.23606855E-09 0 1 2 -23 0 0 0.1380404335556189 0.8710635 0.12893635 2.00292092E-07 0 1 2 -24 0 0 0.15120025555722924 0.8596755 0.1403242 2.922401E-07 0 1 2 -25 0 0 0.16166208373075255 0.850728631 0.14927116 1.80764E-07 0 1 2 -26 0 0 0.12599234088657019 0.8816216 0.118378319 8.963828E-08 0 1 2 -27 0 0 0.11003318759164467 0.8958044 0.104195595 1.25205668E-08 0 1 2 -28 0 0 0.11184598225922286 0.894181967 0.105818 1.1564893E-08 0 1 2 -29 0 0 0.1505769302299601 0.860211551 0.139788255 2.15031562E-07 0 1 2 -30 0 0 0.15733441851850752 0.8544183 0.145581514 2.27139253E-07 0 1 2 -31 0 0 0.11219313224767015 0.8938716 0.1061284 1.89737328E-08 0 1 2 -32 0 0 0.076296562873109264 0.9265414 0.0734586 1.21595978E-09 0 1 2 -33 0 0 0.06454049206643403 0.937498152 0.06250186 3.385217E-10 0 1 2 -34 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2 -35 0 0 0.11957675391026261 0.8872959 0.112704061 1.73822077E-08 0 1 2 -36 0 0 0.095551393128538914 0.908871651 0.09112834 2.340219E-09 0 1 2 -37 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2 -38 0 0 0.15557611714722408 0.8559219 0.144077763 2.80571015E-07 0 1 2 -39 0 0 0.11898397051842637 0.887822032 0.112177923 2.32430128E-08 0 1 2 -40 0 0 0.10383152796029192 0.901377141 0.0986228138 1.3316324E-08 0 1 2 -41 0 0 0.23267843022808721 0.792408347 0.2075885 3.16824639E-06 0 1 2 -42 0 0 0.13825641388336071 0.870875359 0.129124492 1.43657843E-07 0 1 2 -43 0 0 0.11780844245896904 0.8888663 0.11113359 1.22518585E-07 0 1 2 -44 0 0 0.11106878201188827 0.8948772 0.105122752 6.136807E-08 0 1 2 -45 0 0 0.1519822314490463 0.859003544 0.140996248 
1.90226359E-07 0 1 2 -46 0 0 0.097403275899291555 0.9071901 0.09280992 9.201566E-09 0 1 2 -47 0 0 0.13975224176627257 0.869573653 0.130426213 1.23595186E-07 0 1 2 -48 0 0 0.095640521579255233 0.908790648 0.09120932 4.824783E-09 0 1 2 -49 0 0 0.12297630490939666 0.8842846 0.115715332 2.84802919E-08 0 1 2 -50 1 1 0.072140726875920644 0.930399954 0.05064142 0.018958617 1 0 2 -51 1 1 0.12052363925502865 0.886456132 0.0577918626 0.0557519831 1 2 0 -52 1 1 0.11754125629409608 0.88910383 0.0897584558 0.0211377256 1 2 0 -53 1 1 0.55234194460144326 0.5756002 0.407715857 0.01668393 1 2 0 -54 1 1 0.22033072960732095 0.8022534 0.179187492 0.018559115 1 2 0 -55 1 1 0.48471715245043789 0.61587137 0.369235158 0.0148934675 1 2 0 -56 1 1 0.19649325323392461 0.8216069 0.144622579 0.03377054 1 2 0 -57 1 1 0.23907513650017656 0.7873557 0.144207269 0.06843699 1 0 2 -58 1 1 0.10057641320176978 0.904316 0.06448696 0.03119705 1 2 0 -59 1 1 0.4382277992649205 0.6451788 0.3130982 0.0417229943 1 2 0 -60 1 1 0.40378849464830396 0.667785347 0.294686764 0.0375279263 1 2 0 -61 1 1 0.19328713066283934 0.8242453 0.117469594 0.058285147 1 2 0 -62 1 1 0.14347483184211016 0.8663426 0.101038948 0.0326184332 1 2 0 -63 1 1 0.39254042807911971 0.675339043 0.3112865 0.013374473 1 2 0 -64 1 1 0.24625037004233435 0.7817265 0.199262485 0.01901104 1 0 2 -65 1 1 0.096962049648401036 0.907590449 0.07445438 0.0179551709 1 0 2 -66 1 1 0.58127624794437671 0.559184253 0.424495757 0.01631998 1 2 0 -67 1 1 0.12717766992503393 0.8805772 0.06211802 0.0573047921 1 0 2 -68 1 2 0.76418847827461567 0.5308523 0.4657117 0.003435955 2 1 0 -69 1 1 0.17854309398479379 0.836488 0.108136833 0.05537514 1 2 0 -70 1 2 0.7782997543015937 0.532311857 0.459186077 0.008502028 2 1 0 -71 1 1 0.12963404144590399 0.878416836 0.08763818 0.0339449868 1 0 2 -72 1 2 0.82527364026390571 0.5597655 0.4381151 0.00211936235 2 1 0 -73 1 1 0.31236741131512197 0.731712639 0.2543256 0.0139617641 1 2 0 -74 1 1 0.10091901291460656 0.904006243 0.0600393079 0.03595447 1 0 2 -75 1 1 0.094227600597530603 0.9100756 0.0583470054 0.0315773822 1 0 2 -76 1 1 0.16139006589793164 0.8509601 0.13421455 0.0148253879 1 2 0 -77 1 1 0.42271312925322674 0.6552666 0.336988866 0.00774457539 1 2 0 -78 1 1 0.36102952185045717 0.6969584 0.28298 0.0200616084 1 2 0 -79 1 1 0.22191911287049176 0.800980151 0.189535379 0.009484478 1 0 2 -80 1 1 0.20087047707276379 0.8180184 0.127419531 0.05456211 1 2 0 -81 1 1 0.15758181984651309 0.8542069 0.07838817 0.0674049258 1 0 2 -82 1 1 0.14551884398560022 0.8645736 0.08538146 0.0500449426 1 0 2 -83 1 2 1.0041035759831962 0.6325462 0.3663729 0.00108090381 2 1 0 -84 1 2 0.75476486164367673 0.517363548 0.470121145 0.0125153111 2 1 0 -85 1 1 0.1936571586064853 0.823940337 0.11649999 0.05955967 1 2 0 -86 1 1 0.1117176732744818 0.8942967 0.07367582 0.032027483 1 2 0 -87 1 1 0.35151004083460119 0.7036248 0.285515 0.0108601972 1 2 0 -88 1 1 0.18955293586722866 0.8273289 0.09654 0.0761311054 1 2 0 -89 1 1 0.38389360242414333 0.6812039 0.289862335 0.0289337616 1 2 0 -90 1 1 0.62618827467785798 0.534625769 0.454234719 0.0111395335 1 2 0 -91 1 1 0.25387959125212917 0.7757852 0.200217441 0.02399734 1 2 0 -92 1 1 0.16226915412183221 0.850212336 0.09567342 0.0541142225 1 2 0 -93 1 1 0.22196815331677711 0.8009409 0.127181739 0.07187738 1 0 2 -94 1 1 0.35124306791543597 0.703812659 0.268056452 0.02813091 1 2 0 -95 1 1 0.16237179398556273 0.8501251 0.08078788 0.06908702 1 2 0 -96 1 1 0.20996992406982243 0.8106086 0.139289573 0.05010183 1 2 0 -97 1 1 0.12058893069745849 0.886398256 
0.0598800667 0.05372166 1 2 0 -98 1 1 0.37956731249976106 0.6841574 0.304465353 0.01137725 1 0 2 -99 1 1 0.20277814400339583 0.816459358 0.130446717 0.0530939251 1 2 0 -100 2 2 0.39358357371715191 0.674634933 0.32519117 0.00017387759 2 1 0 -101 2 2 0.42308350591214072 0.655023932 0.344296634 0.0006794688 2 1 0 -102 2 2 0.41936704076824455 0.657462835 0.342273951 0.00026323882 2 1 0 -103 2 2 0.42921904997620342 0.6510173 0.348579019 0.000403696467 2 1 0 -104 2 2 0.41208274684418961 0.6622695 0.337504864 0.00022567909 2 1 0 -105 2 2 0.44160572409172411 0.6430031 0.3569401 5.67683346E-05 2 1 0 -106 2 2 0.42934476448549436 0.6509355 0.347463518 0.00160101533 2 1 0 -107 2 2 0.45168489764494285 0.6365547 0.363331944 0.000113324531 2 1 0 -108 2 2 0.46645653711885576 0.627220869 0.372643322 0.000135785289 2 1 0 -109 2 2 0.35953291146180011 0.6980023 0.301614881 0.000382813538 2 1 0 -110 2 2 0.60830855615290891 0.5442707 0.451300323 0.004428956 2 1 0 -111 2 2 0.43425567366017515 0.6477466 0.35168618 0.0005671874 2 1 0 -112 2 2 0.42224814643798159 0.655571342 0.343755931 0.000672725844 2 1 0 -113 2 2 0.43378520195992787 0.648051441 0.351433426 0.000515127 2 1 0 -114 2 2 0.39828542770724013 0.671470344 0.328003943 0.00052572944 2 1 0 -115 2 2 0.38044631650760474 0.683556259 0.3153522 0.00109154719 2 1 0 -116 2 2 0.44214870680472357 0.642654061 0.3565694 0.0007765472 2 1 0 -117 2 2 0.39204321311235024 0.6756749 0.3240996 0.000225500859 2 1 0 -118 2 2 0.49534872735975943 0.6093584 0.390629977 1.165714E-05 2 1 0 -119 2 2 0.47792706875518332 0.6200674 0.379401267 0.0005313261 2 1 0 -120 2 2 0.38996755329866611 0.677078843 0.322423726 0.0004974509 2 1 0 -121 2 2 0.40418317857084346 0.667521834 0.331344754 0.00113342493 2 1 0 -122 2 2 0.46674185713969735 0.627041936 0.372923285 3.47993519E-05 2 1 0 -123 2 2 0.50860202326236292 0.601335645 0.396473944 0.00219042762 2 1 0 -124 2 2 0.40115690949067428 0.669545 0.3297638 0.0006912321 2 1 0 -125 2 2 0.47796417410708669 0.6200444 0.379430741 0.000524864765 2 1 0 -126 2 2 0.55674263926678424 0.573072731 0.422982275 0.00394499162 2 1 0 -127 2 2 0.53745603926068541 0.5842326 0.4114353 0.004332072 2 1 0 -128 2 2 0.42435197720848 0.6541936 0.3455436 0.000262821937 2 1 0 -129 2 2 0.59539649422184571 0.5513439 0.447734416 0.000921661733 2 1 0 -130 2 2 0.45136920579127138 0.6367557 0.363094628 0.000149650616 2 1 0 -131 2 1 0.87799286296834833 0.58302784 0.415616274 0.00135587773 1 2 0 -132 2 2 0.4217459387360496 0.655900657 0.343858719 0.0002406203 2 1 0 -133 2 2 0.58858419496856473 0.55511266 0.442362428 0.00252491026 2 1 0 -134 2 2 0.46954434981246995 0.6252871 0.3744188 0.000294059661 2 1 0 -135 2 2 0.41926206364690544 0.657531857 0.3422878 0.000180329866 2 1 0 -136 2 2 0.35983106354881655 0.6977942 0.301589876 0.000615945552 2 1 0 -137 2 2 0.43598868576509842 0.646625042 0.3524654 0.000909582246 2 1 0 -138 2 2 0.55146544914408413 0.576104939 0.418423772 0.00547132 2 1 0 -139 2 2 0.47595285901721079 0.62129277 0.377271771 0.00143543689 2 1 0 -140 2 2 0.38358127985275575 0.6814167 0.318166882 0.000416446419 2 1 0 -141 2 2 0.55824128131859518 0.572214544 0.4245338 0.00325164315 2 1 0 -142 2 2 0.42308350591214072 0.655023932 0.344296634 0.0006794688 2 1 0 -143 2 2 0.39101453828348787 0.6763703 0.32335642 0.000273291429 2 1 0 -144 2 2 0.36601283135925683 0.6934939 0.306069136 0.0004369896 2 1 0 -145 2 2 0.41354127957626624 0.661304235 0.337492228 0.00120350742 2 1 0 -146 2 2 0.44489957920965528 0.640888631 0.358320177 0.000791185652 2 1 0 -147 2 2 0.45420191711312169 
0.6349545 0.363470852 0.00157464272 2 1 0 -148 2 2 0.36235400852316502 0.6960359 0.302856565 0.00110750983 2 1 0 -149 2 2 0.42624816689431871 0.6529543 0.3454746 0.00157109811 2 1 0 +0 0 0 0.10104000891558908 0.903896868 0.0961031 7.488518E-09 0 1 2 +1 0 0 0.14638353214498703 0.863826334 0.136173636 5.1798505E-08 0 1 2 +2 0 0 0.1283544437118587 0.8795416 0.120458394 3.08966719E-08 0 1 2 +3 0 0 0.15434957050199044 0.8569724 0.14302747 1.21021856E-07 0 1 2 +4 0 0 0.096795646835533725 0.9077415 0.09225848 7.11907022E-09 0 1 2 +5 0 0 0.082328633275609589 0.920969248 0.07903077 7.635004E-09 0 1 2 +6 0 0 0.12045209925290842 0.886519551 0.113480389 4.64870524E-08 0 1 2 +7 0 0 0.11605228917489584 0.890428662 0.109571308 1.93115284E-08 0 1 2 +8 0 0 0.17410254265902697 0.840210736 0.159789056 2.30003792E-07 0 1 2 +9 0 0 0.14493659920883731 0.865077138 0.134922832 4.20849844E-08 0 1 2 +10 0 0 0.086479346599929008 0.9171545 0.08284551 3.07338577E-09 0 1 2 +11 0 0 0.12775338798466579 0.8800704 0.119929567 4.74386574E-08 0 1 2 +12 0 0 0.15036907978730446 0.860390365 0.139609575 4.71005E-08 0 1 2 +13 0 0 0.14562357098244386 0.864483058 0.135516867 4.49310065E-08 0 1 2 +14 0 0 0.054979518813470657 0.946504533 0.05349546 1.40412862E-10 0 1 2 +15 0 0 0.048704298424504931 0.952462733 0.04753725 3.692506E-10 0 1 2 +16 0 0 0.067260639592286578 0.9349515 0.06504852 1.4465189E-09 0 1 2 +17 0 0 0.10037197724394951 0.9045009 0.0954990759 1.04363931E-08 0 1 2 +18 0 0 0.082536533834129983 0.9207778 0.07922219 3.5612675E-09 0 1 2 +19 0 0 0.086233524701821371 0.91738 0.08262004 6.584751E-09 0 1 2 +20 0 0 0.11691155601423082 0.8896639 0.110336132 1.701226E-08 0 1 2 +21 0 0 0.091647579743620436 0.912426651 0.08757332 1.2294044E-08 0 1 2 +22 0 0 0.087599273238671213 0.9161279 0.08387205 3.566754E-09 0 1 2 +23 0 0 0.13253155638908179 0.8758753 0.124124587 1.29247553E-07 0 1 2 +24 0 0 0.14941269327639764 0.8612136 0.138786182 1.68698662E-07 0 1 2 +25 0 0 0.1582994624330345 0.8535941 0.1464058 9.44424556E-08 0 1 2 +26 0 0 0.12078415803856805 0.8862252 0.113774747 5.72726E-08 0 1 2 +27 0 0 0.10368361459150026 0.9015105 0.0984895155 8.918391E-09 0 1 2 +28 0 0 0.10545352972030293 0.8999163 0.100083686 7.876811E-09 0 1 2 +29 0 0 0.14886573737670886 0.8616848 0.138315111 1.08243654E-07 0 1 2 +30 0 0 0.15525082157761474 0.8562004 0.143799469 1.14021653E-07 0 1 2 +31 0 0 0.10444827879709964 0.9008214 0.0991786 1.43583634E-08 0 1 2 +32 0 0 0.069636357350842421 0.93273294 0.06726707 1.110497E-09 0 1 2 +33 0 0 0.056951459880491904 0.9446399 0.05536007 3.6885342E-10 0 1 2 +34 0 0 0.14493659920883731 0.865077138 0.134922832 4.20849844E-08 0 1 2 +35 0 0 0.11424442200129405 0.8920399 0.107960112 9.937101E-09 0 1 2 +36 0 0 0.087529269528966488 0.916192055 0.08380793 1.889867E-09 0 1 2 +37 0 0 0.14493659920883731 0.865077138 0.134922832 4.20849844E-08 0 1 2 +38 0 0 0.15576561995295157 0.85575974 0.144240141 1.13207719E-07 0 1 2 +39 0 0 0.11339071420829476 0.892801762 0.107198223 1.5183268E-08 0 1 2 +40 0 0 0.097824714741614124 0.90680784 0.09319213 8.766067E-09 0 1 2 +41 0 0 0.23410689248439309 0.7912772 0.208721831 9.598655E-07 0 1 2 +42 0 0 0.13732206665130264 0.871689439 0.1283105 6.345321E-08 0 1 2 +43 0 0 0.11186897963522327 0.8941614 0.105838537 8.346816E-08 0 1 2 +44 0 0 0.10529789300522154 0.900056362 0.09994357 4.90767036E-08 0 1 2 +45 0 0 0.14905521801941385 0.861521542 0.138478383 9.212065E-08 0 1 2 +46 0 0 0.091279342891190621 0.9127627 0.08723732 7.162675E-09 0 1 2 +47 0 0 0.1378151957858233 0.8712597 0.128740251 5.954E-08 0 1 
2 +48 0 0 0.088571634321459605 0.915237546 0.08476245 3.910685E-09 0 1 2 +49 0 0 0.11794819886458666 0.8887421 0.111257881 1.70392749E-08 0 1 2 +50 1 1 0.081209289486757055 0.9220007 0.04854119 0.029458113 1 0 2 +51 1 1 0.12866724127240567 0.8792665 0.0700123459 0.05072113 1 2 0 +52 1 1 0.15317350216907719 0.857980847 0.122576281 0.01944289 1 2 0 +53 1 1 0.32451954100807251 0.7228746 0.255729884 0.0213955659 1 2 0 +54 1 1 0.22103486276558745 0.801688731 0.1801593 0.018151952 1 2 0 +55 1 1 0.37419956812496991 0.6878396 0.296539336 0.0156210242 1 2 0 +56 1 1 0.22857827717944601 0.795664 0.175655216 0.0286807753 1 2 0 +57 1 1 0.19916289482341393 0.8194164 0.152322426 0.02826115 1 0 2 +58 1 1 0.1063902456719211 0.8990737 0.0701745749 0.0307517052 1 2 0 +59 1 1 0.29092696307238658 0.7475703 0.2072915 0.04513823 1 2 0 +60 1 1 0.18803891098121719 0.828582466 0.120945193 0.05047233 1 2 0 +61 1 1 0.17555318460619629 0.8389928 0.106392682 0.05461454 1 2 0 +62 1 1 0.10109658863590253 0.9038457 0.05832019 0.0378341079 1 2 0 +63 1 1 0.35751522850321965 0.699412048 0.287616372 0.01297156 1 2 0 +64 1 1 0.23270731497449093 0.792385459 0.193651378 0.0139631759 1 0 2 +65 1 1 0.10042898048975613 0.904449344 0.07210191 0.0234487578 1 0 2 +66 1 1 0.50046914869790193 0.6062462 0.378073484 0.0156803057 1 2 0 +67 1 1 0.1089625726286635 0.896764 0.06373632 0.0394997 1 0 2 +68 1 1 0.63447197276801259 0.5302154 0.46525687 0.004527755 1 2 0 +69 1 1 0.13171479543296832 0.876590967 0.06407214 0.05933687 1 2 0 +70 1 2 0.83290977262594312 0.558413148 0.434782326 0.00680455053 2 1 0 +71 1 1 0.12403842013034277 0.8833459 0.08760595 0.029048143 1 0 2 +72 1 2 0.78071021520697825 0.539567947 0.45808056 0.00235148682 2 1 0 +73 1 1 0.26216381029753316 0.769385 0.216382757 0.0142322574 1 2 0 +74 1 1 0.10090819981572396 0.904016 0.0594188645 0.03656511 1 0 2 +75 1 1 0.098780230652794412 0.9059418 0.05678719 0.037271034 1 0 2 +76 1 1 0.17661200292438084 0.8381049 0.14724794 0.014647142 1 2 0 +77 1 1 0.49709606267321277 0.608294547 0.385036558 0.006668892 1 2 0 +78 1 1 0.32465156022402802 0.722779155 0.257891983 0.0193288662 1 2 0 +79 1 1 0.22550988164835631 0.7981092 0.195976883 0.005913923 1 0 2 +80 1 1 0.1389190171826217 0.8702985 0.0696324259 0.06006905 1 2 0 +81 1 1 0.12784238539838114 0.879992068 0.0846114755 0.03539645 1 0 2 +82 1 1 0.13014830814989062 0.8779652 0.08652696 0.03550784 1 0 2 +83 1 2 0.97105425937888956 0.6202549 0.3786836 0.00106150063 2 1 0 +84 1 1 0.64076940168252139 0.5268869 0.460860282 0.0122528346 1 2 0 +85 1 1 0.20375190476499336 0.8156647 0.134265348 0.0500699468 1 2 0 +86 1 1 0.13263656539712049 0.8757833 0.09452618 0.0296905078 1 2 0 +87 1 1 0.24777251189508356 0.7805375 0.2066182 0.0128443064 1 2 0 +88 1 1 0.15989525404777322 0.852233052 0.0756609738 0.07210598 1 2 0 +89 1 1 0.24698619977615285 0.7811515 0.185964838 0.0328837 1 2 0 +90 1 1 0.41986134281607856 0.65713793 0.329622746 0.01323931 1 2 0 +91 1 1 0.24168899231074675 0.7853004 0.192192927 0.0225067027 1 2 0 +92 1 1 0.12954685205423969 0.8784934 0.06508461 0.056421984 1 2 0 +93 1 1 0.18237708602274647 0.83328706 0.137341574 0.0293713361 1 0 2 +94 1 1 0.25156542087626454 0.7775826 0.1925942 0.0298232343 1 2 0 +95 1 1 0.13971467998896822 0.8696063 0.06581085 0.06458283 1 0 2 +96 1 1 0.17168074450908882 0.842248 0.108960845 0.0487911142 1 2 0 +97 1 1 0.11489745057641973 0.891457558 0.05599328 0.05254919 1 2 0 +98 1 1 0.3779311207940162 0.6852777 0.309432238 0.00529004168 1 0 2 +99 1 1 0.16229109741496545 0.8501937 0.09691363 0.0528927 1 2 0 
+100 2 2 0.39443484405146978 0.6740609 0.3258114 0.00012772638 2 1 0 +101 2 2 0.4352277688797892 0.647117257 0.3522631 0.0006196466 2 1 0 +102 2 2 0.40761852687857175 0.6652326 0.334541261 0.000226154822 2 1 0 +103 2 2 0.43460641750346979 0.647519469 0.352121562 0.000358995982 2 1 0 +104 2 2 0.41317523057189731 0.66154635 0.33826533 0.000188354912 2 1 0 +105 2 2 0.43554144564522806 0.6469143 0.353034973 5.072583E-05 2 1 0 +106 2 2 0.45750820880118187 0.632858634 0.365612477 0.00152889371 2 1 0 +107 2 2 0.44622295891202246 0.640041053 0.359854043 0.000104909741 2 1 0 +108 2 2 0.47264277844659991 0.6233527 0.37651068 0.0001366027 2 1 0 +109 2 2 0.33415974887088012 0.7159394 0.283796668 0.000263924216 2 1 0 +110 2 2 0.48470805507816545 0.615877 0.3809107 0.00321232 2 1 0 +111 2 2 0.43818687368346548 0.6452052 0.3542619 0.000532875 2 1 0 +112 2 2 0.40582294900584642 0.666428149 0.3330102 0.0005616462 2 1 0 +113 2 2 0.44628805630319068 0.6399994 0.3595179 0.000482706848 2 1 0 +114 2 2 0.40388721797545668 0.6677194 0.331844 0.000436590431 2 1 0 +115 2 2 0.36892849159540098 0.691474855 0.3076841 0.0008410496 2 1 0 +116 2 2 0.43612474984448363 0.646537066 0.352793843 0.0006690618 2 1 0 +117 2 2 0.34956071258049848 0.7049977 0.294855028 0.000147259445 2 1 0 +118 2 2 0.49580602117459666 0.6090798 0.390908957 1.12794305E-05 2 1 0 +119 2 2 0.5010768375091309 0.6058779 0.393525 0.0005971396 2 1 0 +120 2 2 0.37305187680517504 0.6886295 0.310981363 0.000389128429 2 1 0 +121 2 2 0.41706976410191654 0.658974946 0.3400311 0.0009939764 2 1 0 +122 2 2 0.46398985361366846 0.628769934 0.371196777 3.33121025E-05 2 1 0 +123 2 2 0.51852277206219199 0.595399439 0.4024652 0.00213534082 2 1 0 +124 2 2 0.38329117846085392 0.6816144 0.3178601 0.0005255363 2 1 0 +125 2 2 0.43366656102208012 0.648128331 0.351467848 0.000403808546 2 1 0 +126 2 2 0.56925127061263225 0.565949 0.430288017 0.00376297347 2 1 0 +127 2 2 0.52548236233338275 0.5912701 0.4049958 0.00373412925 2 1 0 +128 2 2 0.42798362216044067 0.6518221 0.347944975 0.000232911741 2 1 0 +129 2 2 0.52354520155204987 0.5924166 0.4068511 0.000732291839 2 1 0 +130 2 2 0.44040981017782477 0.643772542 0.3560869 0.000140576958 2 1 0 +131 2 2 0.44523128335715673 0.6406761 0.358631045 0.0006928492 2 1 0 +132 2 2 0.42486506730523199 0.653858 0.3459313 0.000210723374 2 1 0 +133 2 2 0.60184142333607371 0.547802 0.449737847 0.00246017659 2 1 0 +134 2 2 0.4849108299783475 0.6157521 0.383954018 0.000293882535 2 1 0 +135 2 2 0.39443908851825471 0.674058 0.3257882 0.000153778965 2 1 0 +136 2 2 0.35418947324267314 0.701742 0.297810316 0.000447696162 2 1 0 +137 2 2 0.42977843142555361 0.650653243 0.348588884 0.00075789704 2 1 0 +138 2 2 0.54197530692752183 0.5815983 0.4136512 0.004750538 2 1 0 +139 2 2 0.42892098754456459 0.6512114 0.347700328 0.001088271 2 1 0 +140 2 2 0.37551325171827621 0.6869366 0.312728733 0.000334628654 2 1 0 +141 2 2 0.42866501404653645 0.6513781 0.346361071 0.00226086657 2 1 0 +142 2 2 0.4352277688797892 0.647117257 0.3522631 0.0006196466 2 1 0 +143 2 2 0.38428882589639007 0.6809347 0.318848133 0.000217100533 2 1 0 +144 2 2 0.35562893581316218 0.7007326 0.298938572 0.000328857132 2 1 0 +145 2 2 0.39169244368937323 0.675911963 0.323115468 0.000972554262 2 1 0 +146 2 2 0.4544126826514176 0.6348207 0.364385724 0.0007935546 2 1 0 +147 2 2 0.4375766068676662 0.645599067 0.3530911 0.00130985165 2 1 0 +148 2 2 0.35398182110350701 0.7018877 0.297308266 0.0008040049 2 1 0 +149 2 2 0.43712837931120685 0.6458885 0.352742374 0.00136915036 2 1 0 diff --git 
a/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs b/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs index 5b86785ff8..4fe2e95914 100644 --- a/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs +++ b/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs @@ -65,7 +65,7 @@ public void OvaAveragedPerceptron() }); // Data - var data = reader.Read(GetDataPath(dataPath)); + var data = mlContext.Data.Cache(reader.Read(GetDataPath(dataPath))); // Pipeline var pipeline = new Ova( From 504f1483b62acc230314611bf2e9cbb5936c5a7c Mon Sep 17 00:00:00 2001 From: Justin Ormont Date: Tue, 4 Dec 2018 08:52:34 -0800 Subject: [PATCH 08/12] Update docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs Co-Authored-By: wschin --- docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs index 05d6aebdb2..a6c1904f6a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/SDCA.cs @@ -39,7 +39,7 @@ public static void SDCA_BinaryClassification() var data = reader.Read(dataFile); // ML.NET doesn't cache data sets by default. Therefore, if one reads a data set from a file and accesses it many times, it can be slow due to - // expensive disk operations. When the considered data can fit into memory, a solution is to cache the data in memory. Caching is especially + // expensive featurization and disk operations. When the considered data can fit into memory, a solution is to cache the data in memory. Caching is especially // helpful when working with iterative algorithms which need many data passes. Since SDCA is one such algorithm, we cache. Inserting a // cache step in a pipeline is also possible; please see the construction of the pipeline below. data = mlContext.Data.Cache(data); From 5e08d1e3cba4df4882ad52ee50c736000ca01f91 Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Tue, 4 Dec 2018 09:24:46 -0800 Subject: [PATCH 09/12] Fix tests --- .../Scenarios/Api/CookbookSamples/CookbookSamples.cs | 4 ++-- .../Api/CookbookSamples/CookbookSamplesDynamicApi.cs | 2 ++ test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs index 6c5f99591a..944a855abe 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs @@ -552,8 +552,8 @@ private void CrossValidationOn(string dataPath) // Default separator is tab, but the dataset has comma. separator: ','); - // Read the data. - var data = reader.Read(dataPath); + // Read the data and cache it in memory to avoid accessing the data on disk many times during the training phase. + var data = reader.Read(dataPath).Cache(); // Build the training pipeline.
var learningPipeline = reader.MakeNewEstimator() diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs index 6807faffc5..2d0a096931 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs @@ -440,6 +440,8 @@ private void CrossValidationOn(string dataPath) mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") // Note that the label is text, so it needs to be converted to key. .Append(mlContext.Transforms.Conversion.MapValueToKey("Label"), TransformerScope.TrainTest) + // Cache data in memory so that the SDCA trainer can randomly access training examples without disk operations. + .AppendCacheCheckpoint(mlContext) // Use the multi-class SDCA model to predict the label using features. .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent()); diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs index f8553d8218..c1cc386517 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs @@ -16,7 +16,7 @@ public void SdcaWorkout() var dataPath = GetDataPath("breast-cancer.txt"); var data = TextLoader.CreateReader(Env, ctx => (Label: ctx.LoadFloat(0), Features: ctx.LoadFloat(1, 10))) - .Read(dataPath); + .Read(dataPath).Cache(); var binaryTrainer = new SdcaBinaryTrainer(Env, "Label", "Features", advancedSettings: (s) => s.ConvergenceTolerance = 1e-2f); TestEstimatorCore(binaryTrainer, data.AsDynamic); From 067c88d36f9443b75633c76ec63b8f907f60714b Mon Sep 17 00:00:00 2001 From: Wei-Sheng Chin Date: Tue, 4 Dec 2018 15:32:02 -0800 Subject: [PATCH 10/12] Avoid prefetch step but insert a caching step in pipeline so that loading time becomes much shorter. --- .../Scenarios/Api/CookbookSamples/CookbookSamples.cs | 7 +++++-- test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs index 944a855abe..bdd34d3794 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs @@ -552,8 +552,8 @@ private void CrossValidationOn(string dataPath) // Default separator is tab, but the dataset has comma. separator: ','); - // Read the data and cache it in memory to avoid accessing the data on disk many times during the training phase. - var data = reader.Read(dataPath).Cache(); + // Read the data. + var data = reader.Read(dataPath); // Build the training pipeline. var learningPipeline = reader.MakeNewEstimator() @@ -562,6 +562,9 @@ private void CrossValidationOn(string dataPath) Label: r.Label.ToKey(), // Concatenate all the features together into one column 'Features'. Features: r.SepalLength.ConcatWith(r.SepalWidth, r.PetalLength, r.PetalWidth))) + // Add a step for caching data in memory so that the downstream iterative training + // algorithm can efficiently scan through the data multiple times. + .AppendCacheCheckpoint() .Append(r => ( r.Label, // Train the multi-class SDCA model to predict the label using features.
diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs
index c1cc386517..691ea7447d 100644
--- a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs
+++ b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs
@@ -17,6 +17,7 @@ public void SdcaWorkout()
             var data = TextLoader.CreateReader(Env, ctx => (Label: ctx.LoadFloat(0), Features: ctx.LoadFloat(1, 10)))
                 .Read(dataPath).Cache();
+
             var binaryTrainer = new SdcaBinaryTrainer(Env, "Label", "Features", advancedSettings: (s) => s.ConvergenceTolerance = 1e-2f);
             TestEstimatorCore(binaryTrainer, data.AsDynamic);

From 1d963bd163078cc81a0d0dfc2355bb56e7816115 Mon Sep 17 00:00:00 2001
From: Wei-Sheng Chin
Date: Tue, 4 Dec 2018 22:08:44 -0800
Subject: [PATCH 11/12] Cache data in some tests. Otherwise, build time may
 exceed 45 minutes.

---
 .../Scenarios/Api/CookbookSamples/CookbookSamples.cs      | 7 +++++++
 .../Api/CookbookSamples/CookbookSamplesDynamicApi.cs      | 2 ++
 .../Scenarios/Api/Estimators/IntrospectiveTraining.cs     | 1 +
 .../Scenarios/Api/Estimators/MultithreadedPrediction.cs   | 1 +
 .../Scenarios/Api/Estimators/ReconfigurablePrediction.cs  | 2 +-
 .../Scenarios/Api/Estimators/SimpleTrainAndPredict.cs     | 1 +
 .../Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs  | 1 +
 .../Scenarios/Api/Estimators/TrainWithInitialPredictor.cs | 5 +++--
 .../Scenarios/Api/Estimators/TrainWithValidationSet.cs    | 2 +-
 test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs      | 1 +
 .../IrisPlantClassificationTests.cs                       | 1 +
 .../ScenariosWithDirectInstantiation/TensorflowTests.cs   | 1 +
 .../Transformers/FeatureSelectionTests.cs                 | 3 ++-
 13 files changed, 23 insertions(+), 5 deletions(-)

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs
index bdd34d3794..fb7f62c486 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs
@@ -185,6 +185,9 @@ private ITransformer TrainOnIris(string irisDataPath)
                     r.Label,
                     // Concatenate all the features together into one column 'Features'.
                     Features: r.SepalLength.ConcatWith(r.SepalWidth, r.PetalLength, r.PetalWidth)))
+                // Cache data in memory because the following step is an iterative algorithm which
+                // scans through some columns multiple times.
+                .AppendCacheCheckpoint()
                 .Append(r => (
                     r.Label,
                     // Train the multi-class SDCA model to predict the label using features.
@@ -267,6 +270,7 @@ private void TrainAndInspectWeights(string dataPath)
                         // When the normalizer is trained, the below delegate is going to be called.
                         // We use it to memorize the scales.
                         onFit: (scales, offsets) => normScales = scales)))
+                .AppendCacheCheckpoint() // Cache the data in memory because the subsequent trainer needs to access the data multiple times.
                 .Append(r => (
                     r.Label,
                     // Train the multi-class SDCA model to predict the label using features.
@@ -432,6 +436,7 @@ private void TextFeaturizationOn(string dataPath)
             // Apply various kinds of text operations supported by ML.NET.
             var learningPipeline = reader.MakeNewEstimator()
+                .AppendCacheCheckpoint()
                 .Append(r => (
                     // One-stop shop to run the full text featurization.
                     TextFeatures: r.Message.FeaturizeText(),
@@ -495,6 +500,7 @@ private void CategoricalFeaturizationOn(params string[] dataPath)

             // Build several alternative featurization pipelines.
             var learningPipeline = reader.MakeNewEstimator()
+                .AppendCacheCheckpoint()
                 .Append(r => (
                     r.Label,
                     r.NumericalFeatures,
@@ -557,6 +563,7 @@ private void CrossValidationOn(string dataPath)
             // Build the training pipeline.
             var learningPipeline = reader.MakeNewEstimator()
+                .AppendCacheCheckpoint()
                 .Append(r => (
                     // Convert string label to a key.
                     Label: r.Label.ToKey(),

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs
index 2d0a096931..9af16d4e1a 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs
@@ -179,6 +179,8 @@ private ITransformer TrainOnIris(string irisDataPath)
                 mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth")
                 // Note that the label is text, so it needs to be converted to key.
                 .Append(mlContext.Transforms.Conversion.MapValueToKey("Label"), TransformerScope.TrainTest)
+                // Cache data in memory for steps after the cache checkpoint stage.
+                .AppendCacheCheckpoint(mlContext)
                 // Use the multi-class SDCA model to predict the label using features.
                 .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent())
                 // Apply the inverse conversion from 'PredictedLabel' column back to string value.

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs
index d0ba9a82db..daf2777047 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/IntrospectiveTraining.cs
@@ -38,6 +38,7 @@ public void New_IntrospectiveTraining()
                 .Read(GetDataPath(TestDatasets.Sentiment.trainFilename));

             var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features")
+                .AppendCacheCheckpoint(ml)
                 .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1));

             // Train.

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs
index 4f52662d34..f2285b5200 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs
@@ -31,6 +31,7 @@ void New_MultithreadedPrediction()

             // Pipeline.
             var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features")
+                .AppendCacheCheckpoint(ml)
                 .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1));

             // Train.
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs
index 5755efe56e..ffda5b1ce1 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/ReconfigurablePrediction.cs
@@ -32,7 +32,7 @@ public void New_ReconfigurablePrediction()
                 .Fit(data);

             var trainer = ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: (s) => s.NumThreads = 1);
-            var trainData = pipeline.Transform(data);
+            var trainData = ml.Data.Cache(pipeline.Transform(data)); // Cache the data right before the trainer to boost training speed.
             var model = trainer.Fit(trainData);

             var scoredTest = model.Transform(pipeline.Transform(testData));

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs
index ad1f37e161..016acd6220 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs
@@ -26,6 +26,7 @@ public void New_SimpleTrainAndPredict()
             var data = reader.Read(GetDataPath(TestDatasets.Sentiment.trainFilename));

             // Pipeline.
             var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features")
+                .AppendCacheCheckpoint(ml)
                 .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1));

             // Train.

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs
index f917d84a1c..adab64dec1 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs
@@ -31,6 +31,7 @@ public void New_TrainSaveModelAndPredict()

             // Pipeline.
             var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features")
+                .AppendCacheCheckpoint(ml)
                 .Append(ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1));

             // Train.

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs
index 32486abb27..a117de429c 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs
@@ -27,8 +27,9 @@ public void New_TrainWithInitialPredictor()

             // Pipeline.
             var pipeline = ml.Transforms.Text.FeaturizeText("SentimentText", "Features");

-            // Train the pipeline, prepare train set.
-            var trainData = pipeline.Fit(data).Transform(data);
+            // Train the pipeline, prepare the train set. Since it will be scanned multiple times by the subsequent trainer,
+            // we cache the transformed data in memory.
+            var trainData = ml.Data.Cache(pipeline.Fit(data).Transform(data));

             // Train the first predictor.
             var trainer = ml.BinaryClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1);

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs
index 367192ff47..2a7030ea94 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithValidationSet.cs
@@ -31,7 +31,7 @@ public void New_TrainWithValidationSet()

             // Train model with validation set.
             var trainer = ml.BinaryClassification.Trainers.FastTree("Label", "Features");
-            var model = trainer.Train(trainData, validData);
+            var model = trainer.Train(ml.Data.Cache(trainData), ml.Data.Cache(validData));
         }
     }
 }

diff --git a/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs b/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs
index b9fc1c21d2..f93f06912b 100644
--- a/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/TensorflowTests.cs
@@ -41,6 +41,7 @@ public void TensorFlowTransforCifarEndToEndTest()
                 .Append(new TensorFlowEstimator(mlContext, model_location, new[] { "Input" }, new[] { "Output" }))
                 .Append(new ColumnConcatenatingEstimator(mlContext, "Features", "Output"))
                 .Append(new ValueToKeyMappingEstimator(mlContext, "Label"))
+                .AppendCacheCheckpoint(mlContext)
                 .Append(new SdcaMultiClassTrainer(mlContext));

diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs
index 01b3d75d13..4752119236 100644
--- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs
+++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs
@@ -41,6 +41,7 @@ public void TrainAndPredictIrisModelUsingDirectInstantiationTest()
             var pipe = mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth")
                 .Append(mlContext.Transforms.Normalize("Features"))
+                .AppendCacheCheckpoint(mlContext)
                 .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent("Label", "Features", advancedSettings: s => s.NumThreads = 1));

             // Read training and test data sets

diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs
index 8a98764996..10d0db46a9 100644
--- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs
+++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs
@@ -455,6 +455,7 @@ private void ExecuteTFTransformMNISTConvTrainingTest(bool shuffle, int? shuffleS
                     ReTrain = true
                 }))
                 .Append(mlContext.Transforms.Concatenate("Features", "Prediction"))
+                .AppendCacheCheckpoint(mlContext)
                 .Append(mlContext.MulticlassClassification.Trainers.LightGbm("Label", "Features"));

             var trainedModel = pipe.Fit(preprocessedTrainData);

diff --git a/test/Microsoft.ML.Tests/Transformers/FeatureSelectionTests.cs b/test/Microsoft.ML.Tests/Transformers/FeatureSelectionTests.cs
index f11fb666e3..ee2a20855d 100644
--- a/test/Microsoft.ML.Tests/Transformers/FeatureSelectionTests.cs
+++ b/test/Microsoft.ML.Tests/Transformers/FeatureSelectionTests.cs
@@ -39,6 +39,7 @@ public void FeatureSelectionWorkout()
                 .Read(sentimentDataPath);

             var est = new WordBagEstimator(ML, "text", "bag_of_words")
+                .AppendCacheCheckpoint(ML)
                 .Append(ML.Transforms.FeatureSelection.SelectFeaturesBasedOnCount("bag_of_words", "bag_of_words_count", 10)
                 .Append(ML.Transforms.FeatureSelection.SelectFeaturesBasedOnMutualInformation("bag_of_words", "bag_of_words_mi", labelColumn: "label")));
@@ -110,7 +111,7 @@ public void CountFeatureSelectionWorkout()
                 VectorDouble: ctx.LoadDouble(4, 8)
             ));

-            var data = reader.Read(new MultiFileSource(dataPath)).AsDynamic;
+            var data = ML.Data.Cache(reader.Read(new MultiFileSource(dataPath)).AsDynamic);

             var columns = new[] {
                 new CountFeatureSelectingEstimator.ColumnInfo("VectorDouble", "FeatureSelectDouble", minCount: 1),

From bca4ccf9c7a919a11aea7b465ed9ce5b53913956 Mon Sep 17 00:00:00 2001
From: Wei-Sheng Chin
Date: Wed, 5 Dec 2018 10:09:52 -0800
Subject: [PATCH 12/12] Update some code samples to use caching

---
 docs/code/MlNetCookBook.md                             | 67 ++++++++++++++++++-
 .../Api/CookbookSamples/CookbookSamples.cs             | 34 ++++++++--
 .../CookbookSamplesDynamicApi.cs                       | 29 +++++++-
 3 files changed, 123 insertions(+), 7 deletions(-)

diff --git a/docs/code/MlNetCookBook.md b/docs/code/MlNetCookBook.md
index 6ce3c2c67b..7ff7b1f8eb 100644
--- a/docs/code/MlNetCookBook.md
+++ b/docs/code/MlNetCookBook.md
@@ -443,10 +443,24 @@ var reader = mlContext.Data.TextReader(ctx => (
 // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed).
 var trainData = reader.Read(trainDataPath);

+// Sometimes, caching data in-memory after its first access can save some loading time when the data is going to be used
+// several times later. The caching mechanism is also lazy; it only caches things after being used.
+// Users can replace all subsequent uses of "trainData" with "cachedTrainData". We still use "trainData" below because
+// a caching step, which provides the same caching function, will be inserted in the "learningPipeline".
+var cachedTrainData = trainData.Cache();
+
 // Step two: define the learning pipeline.

 // We 'start' the pipeline with the output of the reader.
 var learningPipeline = reader.MakeNewEstimator()
+    // We add a step for caching data in memory so that the downstream iterative training
+    // algorithm can efficiently scan through the data multiple times. Otherwise, the following
+    // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy.
+    // The data accessed in any downstream step will be cached from its first use. In general, you only
+    // need to add a caching step before a trainable step, because caching is not helpful if the data is
+    // only scanned once. This step can be removed if the user doesn't have enough memory to store the whole
+    // data set.
+    .AppendCacheCheckpoint()
    // Now we can add any 'training steps' to it. In our case we want to 'normalize' the data (rescale to be
    // between -1 and 1 for all examples)
    .Append(r => (
@@ -486,6 +500,12 @@ var reader = mlContext.Data.TextReader(new TextLoader.Arguments
 // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed).
 var trainData = reader.Read(trainDataPath);

+// Sometimes, caching data in-memory after its first access can save some loading time when the data is going to be used
+// several times later. The caching mechanism is also lazy; it only caches things after being used.
+// Users can replace all subsequent uses of "trainData" with "cachedTrainData". We still use "trainData" below because
+// a caching step, which provides the same caching function, will be inserted in the "dynamicPipeline".
+var cachedTrainData = mlContext.Data.Cache(trainData);
+
 // Step two: define the learning pipeline.

 // We 'start' the pipeline with the output of the reader.
@@ -493,6 +513,15 @@ var dynamicPipeline =
     // First 'normalize' the data (rescale to be
     // between -1 and 1 for all examples)
     mlContext.Transforms.Normalize("FeatureVector")
+    // We add a step for caching data in memory so that the downstream iterative training
+    // algorithm can efficiently scan through the data multiple times. Otherwise, the following
+    // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy.
+    // The data accessed in any downstream step will be cached from its first use. In general, you only
+    // need to add a caching step before a trainable step, because caching is not helpful if the data is
+    // only scanned once. This step can be removed if the user doesn't have enough memory to store the whole
+    // data set. Notice that in the upstream Transforms.Normalize step, we only scan through the data
+    // once, so adding a caching step before it is not helpful.
+    .AppendCacheCheckpoint(mlContext)
     // Add the SDCA regression trainer.
     .Append(mlContext.Regression.Trainers.StochasticDualCoordinateAscent(label: "Target", features: "FeatureVector"));
@@ -595,6 +624,13 @@ var learningPipeline = reader.MakeNewEstimator()
         r.Label,
         // Concatenate all the features together into one column 'Features'.
         Features: r.SepalLength.ConcatWith(r.SepalWidth, r.PetalLength, r.PetalWidth)))
+    // We add a step for caching data in memory so that the downstream iterative training
+    // algorithm can efficiently scan through the data multiple times. Otherwise, the following
+    // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy.
+    // The data accessed in any downstream step will be cached from its first use. In general, you only
+    // need to add a caching step before a trainable step, because caching is not helpful if the data is
+    // only scanned once.
+    .AppendCacheCheckpoint()
     .Append(r => (
         r.Label,
         // Train the multi-class SDCA model to predict the label using features.
@@ -640,6 +676,8 @@ var dynamicPipeline =
     mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth")
     // Note that the label is text, so it needs to be converted to key.
     .Append(mlContext.Transforms.Categorical.MapValueToKey("Label"), TransformerScope.TrainTest)
+    // Cache data in memory for steps after the cache checkpoint stage.
+    .AppendCacheCheckpoint(mlContext)
     // Use the multi-class SDCA model to predict the label using features.
     .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent())
     // Apply the inverse conversion from 'PredictedLabel' column back to string value.
@@ -741,6 +779,7 @@ var trainData = mlContext.CreateStreamingDataView(churnData);

 var dynamicLearningPipeline = mlContext.Transforms.Categorical.OneHotEncoding("DemographicCategory")
     .Append(mlContext.Transforms.Concatenate("Features", "DemographicCategory", "LastVisits"))
+    .AppendCacheCheckpoint(mlContext) // FastTree will benefit from caching data in memory.
     .Append(mlContext.BinaryClassification.Trainers.FastTree("HasChurned", "Features", numTrees: 20));

 var dynamicModel = dynamicLearningPipeline.Fit(trainData);
@@ -757,6 +796,7 @@ var staticLearningPipeline = staticData.MakeNewEstimator()
     .Append(r => (
         r.HasChurned,
         Features: r.DemographicCategory.OneHotEncoding().ConcatWith(r.LastVisits)))
+    .AppendCacheCheckpoint() // FastTree will benefit from caching data in memory.
     .Append(r => mlContext.BinaryClassification.Trainers.FastTree(r.HasChurned, r.Features, numTrees: 20));

 var staticModel = staticLearningPipeline.Fit(staticData);
@@ -813,6 +853,8 @@ var learningPipeline = reader.MakeNewEstimator()
             // When the normalizer is trained, the below delegate is going to be called.
             // We use it to memorize the scales.
             onFit: (scales, offsets) => normScales = scales)))
+    // Cache the data in memory because the subsequent trainer needs to access the data multiple times.
+    .AppendCacheCheckpoint()
     .Append(r => (
         r.Label,
         // Train the multi-class SDCA model to predict the label using features.
@@ -987,6 +1029,10 @@ var catColumns = data.GetColumn(r => r.CategoricalFeatures).Take(10).ToArray();

 // Build several alternative featurization pipelines.
 var learningPipeline = reader.MakeNewEstimator()
+    // Cache data in memory in an on-demand manner. Columns used in any downstream step will be
+    // cached in memory at their first use. This step can be removed if the user's machine doesn't
+    // have enough memory.
+    .AppendCacheCheckpoint()
     .Append(r => (
         r.Label,
         r.NumericalFeatures,
@@ -1070,6 +1116,9 @@ var workclasses = transformedData.GetColumn(mlContext, "WorkclassOneHot
 var fullLearningPipeline = dynamicPipeline
     // Concatenate two of the 3 categorical pipelines, and the numeric features.
     .Append(mlContext.Transforms.Concatenate("Features", "NumericalFeatures", "CategoricalBag", "WorkclassOneHotTrimmed"))
+    // Cache data in memory so that the following trainer will be able to access training examples without
+    // reading them from disk multiple times.
+    .AppendCacheCheckpoint(mlContext)
     // Now we're ready to train. We chose our FastTree trainer for this classification task.
     .Append(mlContext.BinaryClassification.Trainers.FastTree(numTrees: 50));
@@ -1121,6 +1170,10 @@ var messageTexts = data.GetColumn(x => x.Message).Take(20).ToArray();

 // Apply various kinds of text operations supported by ML.NET.
 var learningPipeline = reader.MakeNewEstimator()
+    // Cache data in memory in an on-demand manner. Columns used in any downstream step will be
+    // cached in memory at their first use. This step can be removed if the user's machine doesn't
+    // have enough memory.
+    .AppendCacheCheckpoint()
     .Append(r => (
         // One-stop shop to run the full text featurization.
         TextFeatures: r.Message.FeaturizeText(),
@@ -1243,6 +1296,9 @@ var learningPipeline = reader.MakeNewEstimator()
         Label: r.Label.ToKey(),
         // Concatenate all the features together into one column 'Features'.
         Features: r.SepalLength.ConcatWith(r.SepalWidth, r.PetalLength, r.PetalWidth)))
+    // Add a step for caching data in memory so that the downstream iterative training
+    // algorithm can efficiently scan through the data multiple times.
+    .AppendCacheCheckpoint()
     .Append(r => (
         r.Label,
         // Train the multi-class SDCA model to predict the label using features.
@@ -1298,6 +1354,10 @@ var dynamicPipeline =
     mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth")
     // Note that the label is text, so it needs to be converted to key.
     .Append(mlContext.Transforms.Conversions.MapValueToKey("Label"), TransformerScope.TrainTest)
+    // Cache data in memory so that the SDCA trainer will be able to randomly access training examples without
+    // reading data from disk multiple times. Data will be cached at its first use in any downstream step.
+    // Notice that unused parts of the data may not be cached.
+    .AppendCacheCheckpoint(mlContext)
     // Use the multi-class SDCA model to predict the label using features.
     .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent());
@@ -1439,6 +1499,7 @@ public static ITransformer TrainModel(MLContext mlContext, IDataView trainData)
     Action mapping = (input, output) => output.Label = input.Income > 50000;
     // Construct the learning pipeline.
     var estimator = mlContext.Transforms.CustomMapping(mapping, null)
+        .AppendCacheCheckpoint(mlContext)
         .Append(mlContext.BinaryClassification.Trainers.FastTree(label: "Label"));

     return estimator.Fit(trainData);
@@ -1480,8 +1541,12 @@ public class CustomMappings
 var estimator = mlContext.Transforms.CustomMapping(CustomMappings.IncomeMapping, nameof(CustomMappings.IncomeMapping))
     .Append(mlContext.BinaryClassification.Trainers.FastTree(label: "Label"));

+// If there is enough memory, we can cache the data in-memory to avoid reading it from the file again
+// when it is accessed multiple times.
+var cachedTrainData = mlContext.Data.Cache(trainData);
+
 // Train the model.
-var model = estimator.Fit(trainData);
+var model = estimator.Fit(cachedTrainData);

 // Save the model.
 using (var fs = File.Create(modelPath))

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs
index fb7f62c486..104855c2a9 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs
@@ -114,10 +114,24 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m
             // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed).
             var trainData = reader.Read(trainDataPath);

+            // Sometimes, caching data in-memory after its first access can save some loading time when the data is going to be used
+            // several times later. The caching mechanism is also lazy; it only caches things after being used.
+            // Users can replace all subsequent uses of "trainData" with "cachedTrainData". We still use "trainData" below because
+            // a caching step, which provides the same caching function, will be inserted in the "learningPipeline".
+            var cachedTrainData = trainData.Cache();
+
             // Step two: define the learning pipeline.

             // We 'start' the pipeline with the output of the reader.
             var learningPipeline = reader.MakeNewEstimator()
+                // We add a step for caching data in memory so that the downstream iterative training
+                // algorithm can efficiently scan through the data multiple times. Otherwise, the following
+                // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy.
+                // The data accessed in any downstream step will be cached from its first use. In general, you only
+                // need to add a caching step before a trainable step, because caching is not helpful if the data is
+                // only scanned once. This step can be removed if the user doesn't have enough memory to store the whole
+                // data set.
+                .AppendCacheCheckpoint()
                 // Now we can add any 'training steps' to it. In our case we want to 'normalize' the data (rescale to be
                 // between -1 and 1 for all examples), and then train the model.
                 .Append(r => (
@@ -185,8 +199,12 @@ private ITransformer TrainOnIris(string irisDataPath)
                     r.Label,
                     // Concatenate all the features together into one column 'Features'.
                     Features: r.SepalLength.ConcatWith(r.SepalWidth, r.PetalLength, r.PetalWidth)))
-                // Cache data in memory because the following step is an iterative algorithm which
-                // scans through some columns multiple times.
+                // We add a step for caching data in memory so that the downstream iterative training
+                // algorithm can efficiently scan through the data multiple times. Otherwise, the following
+                // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy.
+                // The data accessed in any downstream step will be cached from its first use. In general, you only
+                // need to add a caching step before a trainable step, because caching is not helpful if the data is
+                // only scanned once.
                 .AppendCacheCheckpoint()
                 .Append(r => (
                     r.Label,
@@ -270,7 +288,8 @@ private void TrainAndInspectWeights(string dataPath)
                         // When the normalizer is trained, the below delegate is going to be called.
                         // We use it to memorize the scales.
                         onFit: (scales, offsets) => normScales = scales)))
-                .AppendCacheCheckpoint() // Cache the data in memory because the subsequent trainer needs to access the data multiple times.
+                // Cache the data in memory because the subsequent trainer needs to access the data multiple times.
+                .AppendCacheCheckpoint()
                 .Append(r => (
                     r.Label,
                     // Train the multi-class SDCA model to predict the label using features.
@@ -390,6 +409,7 @@ public void TrainOnAutoGeneratedData()
             var dynamicLearningPipeline = mlContext.Transforms.Categorical.OneHotEncoding("DemographicCategory")
                 .Append(new ColumnConcatenatingEstimator(mlContext, "Features", "DemographicCategory", "LastVisits"))
+                .AppendCacheCheckpoint(mlContext) // FastTree will benefit from caching data in memory.
                 .Append(mlContext.BinaryClassification.Trainers.FastTree("HasChurned", "Features", numTrees: 20));

             var dynamicModel = dynamicLearningPipeline.Fit(trainData);
@@ -406,6 +426,7 @@ public void TrainOnAutoGeneratedData()
                 .Append(r => (
                     r.HasChurned,
                     Features: r.DemographicCategory.OneHotEncoding().ConcatWith(r.LastVisits)))
+                .AppendCacheCheckpoint() // FastTree will benefit from caching data in memory.
                 .Append(r => mlContext.BinaryClassification.Trainers.FastTree(r.HasChurned, r.Features, numTrees: 20));

             var staticModel = staticLearningPipeline.Fit(staticData);
@@ -436,6 +457,9 @@ private void TextFeaturizationOn(string dataPath)
             // Apply various kinds of text operations supported by ML.NET.
             var learningPipeline = reader.MakeNewEstimator()
+                // Cache data in memory in an on-demand manner. Columns used in any downstream step will be
+                // cached in memory at their first use. This step can be removed if the user's machine doesn't
+                // have enough memory.
                 .AppendCacheCheckpoint()
                 .Append(r => (
                     // One-stop shop to run the full text featurization.
@@ -500,6 +524,9 @@ private void CategoricalFeaturizationOn(params string[] dataPath)

             // Build several alternative featurization pipelines.
             var learningPipeline = reader.MakeNewEstimator()
+                // Cache data in memory in an on-demand manner. Columns used in any downstream step will be
+                // cached in memory at their first use. This step can be removed if the user's machine doesn't
+                // have enough memory.
                 .AppendCacheCheckpoint()
                 .Append(r => (
                     r.Label,
@@ -563,7 +590,6 @@ private void CrossValidationOn(string dataPath)
             // Build the training pipeline.
             var learningPipeline = reader.MakeNewEstimator()
-                .AppendCacheCheckpoint()
                 .Append(r => (
                     // Convert string label to a key.
                     Label: r.Label.ToKey(),

diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs
index 9af16d4e1a..1c14a5b158 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs
@@ -111,6 +111,12 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m
             // Now read the file (remember though, readers are lazy, so the actual reading will happen when the data is accessed).
             var trainData = reader.Read(trainDataPath);

+            // Sometimes, caching data in-memory after its first access can save some loading time when the data is going to be used
+            // several times later. The caching mechanism is also lazy; it only caches things after being used.
+            // Users can replace all subsequent uses of "trainData" with "cachedTrainData". We still use "trainData" below because
+            // a caching step, which provides the same caching function, will be inserted in the "dynamicPipeline".
+            var cachedTrainData = mlContext.Data.Cache(trainData);
+
             // Step two: define the learning pipeline.

             // We 'start' the pipeline with the output of the reader.
@@ -118,6 +124,15 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m
                 // First 'normalize' the data (rescale to be
                 // between -1 and 1 for all examples), and then train the model.
                 mlContext.Transforms.Normalize("FeatureVector")
+                // We add a step for caching data in memory so that the downstream iterative training
+                // algorithm can efficiently scan through the data multiple times. Otherwise, the following
+                // trainer will read data from disk multiple times. The caching mechanism uses an on-demand strategy.
+                // The data accessed in any downstream step will be cached from its first use. In general, you only
+                // need to add a caching step before a trainable step, because caching is not helpful if the data is
+                // only scanned once. This step can be removed if the user doesn't have enough memory to store the whole
+                // data set. Notice that in the upstream Transforms.Normalize step, we only scan through the data
+                // once, so adding a caching step before it is not helpful.
+                .AppendCacheCheckpoint(mlContext)
                 // Add the SDCA regression trainer.
                 .Append(mlContext.Regression.Trainers.StochasticDualCoordinateAscent(labelColumn: "Target", featureColumn: "FeatureVector"));
@@ -399,6 +414,9 @@ private void CategoricalFeaturizationOn(params string[] dataPath)
             var fullLearningPipeline = dynamicPipeline
                 // Concatenate two of the 3 categorical pipelines, and the numeric features.
                 .Append(mlContext.Transforms.Concatenate("Features", "NumericalFeatures", "CategoricalBag", "WorkclassOneHotTrimmed"))
+                // Cache data in memory so that the following trainer will be able to access training examples without
+                // reading them from disk multiple times.
+                .AppendCacheCheckpoint(mlContext)
                 // Now we're ready to train. We chose our FastTree trainer for this classification task.
                 .Append(mlContext.BinaryClassification.Trainers.FastTree(numTrees: 50));
@@ -442,7 +460,9 @@ private void CrossValidationOn(string dataPath)
                 mlContext.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth")
                 // Note that the label is text, so it needs to be converted to key.
                 .Append(mlContext.Transforms.Conversion.MapValueToKey("Label"), TransformerScope.TrainTest)
-                // Cache data in memory so that the SDCA trainer will be able to randomly access training examples without disk operations.
+                // Cache data in memory so that the SDCA trainer will be able to randomly access training examples without
+                // reading data from disk multiple times. Data will be cached at its first use in any downstream step.
+                // Notice that unused parts of the data may not be cached.
                 .AppendCacheCheckpoint(mlContext)
                 // Use the multi-class SDCA model to predict the label using features.
                 .Append(mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent());
@@ -545,8 +565,12 @@ private static void RunEndToEnd(MLContext mlContext, IDataView trainData, string
             var estimator = mlContext.Transforms.CustomMapping(CustomMappings.IncomeMapping, nameof(CustomMappings.IncomeMapping))
                 .Append(mlContext.BinaryClassification.Trainers.FastTree(labelColumn: "Label"));

+            // If there is enough memory, we can cache the data in-memory to avoid reading it from the file again
+            // when it is accessed multiple times.
+            var cachedTrainData = mlContext.Data.Cache(trainData);
+
             // Train the model.
-            var model = estimator.Fit(trainData);
+            var model = estimator.Fit(cachedTrainData);

             // Save the model.
             using (var fs = File.Create(modelPath))
@@ -579,6 +603,7 @@ public static ITransformer TrainModel(MLContext mlContext, IDataView trainData)
             Action mapping = (input, output) => output.Label = input.Income > 50000;
             // Construct the learning pipeline.
             var estimator = mlContext.Transforms.CustomMapping(mapping, null)
+                .AppendCacheCheckpoint(mlContext)
                 .Append(mlContext.BinaryClassification.Trainers.FastTree(labelColumn: "Label"));

             return estimator.Fit(trainData);
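
Summary of the caching idioms introduced by this series: a minimal C# sketch assembled from the snippets in the patches above, assuming the cookbook's mlContext, reader, trainDataPath, and column names ("Target", "FeatureVector") are already defined as in those samples. It is illustrative only, not an additional change in this series.

    // Idiom 1: wrap an existing IDataView so its rows are cached in memory.
    // The cache is lazy; rows are stored the first time a downstream consumer reads them.
    var trainData = reader.Read(trainDataPath);
    var cachedTrainData = mlContext.Data.Cache(trainData);

    // Idiom 2: insert a cache checkpoint into the pipeline right before the iterative trainer,
    // so the one-pass Normalize step still streams its input while SDCA reads from the cache.
    var dynamicPipeline =
        mlContext.Transforms.Normalize("FeatureVector")
        .AppendCacheCheckpoint(mlContext)
        .Append(mlContext.Regression.Trainers.StochasticDualCoordinateAscent(labelColumn: "Target", featureColumn: "FeatureVector"));

    // Either idiom alone is typically enough for an iterative trainer such as SDCA;
    // using both, as some samples do, caches the raw rows and the transformed rows separately.
    var model = dynamicPipeline.Fit(cachedTrainData);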