
Commit b48c1fb

Rogan Carr committed: Addressing PR comments.
1 parent 11b9575 commit b48c1fb

File tree

1 file changed (+20, -12 lines)


test/Microsoft.ML.Functional.Tests/Training.cs

@@ -31,7 +31,8 @@ public void CompareTrainerEvaluations()
 
             // Get the dataset.
             var data = mlContext.Data.LoadFromTextFile<TweetSentiment>(GetDataPath(TestDatasets.Sentiment.trainFilename),
-                separatorChar: TestDatasets.Sentiment.fileSeparator, hasHeader: TestDatasets.Sentiment.fileHasHeader,
+                separatorChar: TestDatasets.Sentiment.fileSeparator,
+                hasHeader: TestDatasets.Sentiment.fileHasHeader,
                 allowQuoting: TestDatasets.Sentiment.allowQuoting);
             var trainTestSplit = mlContext.BinaryClassification.TrainTestSplit(data);
             var trainData = trainTestSplit.TrainSet;
@@ -85,7 +86,8 @@ public void ContinueTrainingAveragePerceptron()
 
             // Get the dataset.
             var data = mlContext.Data.LoadFromTextFile<TweetSentiment>(GetDataPath(TestDatasets.Sentiment.trainFilename),
-                separatorChar: TestDatasets.Sentiment.fileSeparator, hasHeader: TestDatasets.Sentiment.fileHasHeader,
+                separatorChar: TestDatasets.Sentiment.fileSeparator,
+                hasHeader: TestDatasets.Sentiment.fileHasHeader,
                 allowQuoting: TestDatasets.Sentiment.allowQuoting);
 
             // Create a transformation pipeline.
@@ -128,7 +130,8 @@ public void ContinueTrainingFieldAwareFactorizationMachines()
 
             // Get the dataset.
             var data = mlContext.Data.LoadFromTextFile<TweetSentiment>(GetDataPath(TestDatasets.Sentiment.trainFilename),
-                separatorChar: TestDatasets.Sentiment.fileSeparator, hasHeader: TestDatasets.Sentiment.fileHasHeader,
+                separatorChar: TestDatasets.Sentiment.fileSeparator,
+                hasHeader: TestDatasets.Sentiment.fileHasHeader,
                 allowQuoting: TestDatasets.Sentiment.allowQuoting);
 
             // Create a transformation pipeline.
@@ -171,7 +174,8 @@ public void ContinueTrainingLinearSupportVectorMachine()
 
             // Get the dataset.
             var data = mlContext.Data.LoadFromTextFile<TweetSentiment>(GetDataPath(TestDatasets.Sentiment.trainFilename),
-                separatorChar: TestDatasets.Sentiment.fileSeparator, hasHeader: TestDatasets.Sentiment.fileHasHeader,
+                separatorChar: TestDatasets.Sentiment.fileSeparator,
+                hasHeader: TestDatasets.Sentiment.fileHasHeader,
                 allowQuoting: TestDatasets.Sentiment.allowQuoting);
 
             // Create a transformation pipeline.
@@ -214,7 +218,8 @@ public void ContinueTrainingLogisticRegression()
 
             // Get the dataset.
             var data = mlContext.Data.LoadFromTextFile<TweetSentiment>(GetDataPath(TestDatasets.Sentiment.trainFilename),
-                separatorChar: TestDatasets.Sentiment.fileSeparator, hasHeader: TestDatasets.Sentiment.fileHasHeader,
+                separatorChar: TestDatasets.Sentiment.fileSeparator,
+                hasHeader: TestDatasets.Sentiment.fileHasHeader,
                 allowQuoting: TestDatasets.Sentiment.allowQuoting);
 
             // Create a transformation pipeline.
@@ -256,8 +261,8 @@ public void ContinueTrainingLogisticRegressionMulticlass()
             var mlContext = new MLContext(seed: 1);
 
             var data = mlContext.Data.LoadFromTextFile<Iris>(GetDataPath(TestDatasets.iris.trainFilename),
-                hasHeader: TestDatasets.iris.fileHasHeader,
-                separatorChar: TestDatasets.iris.fileSeparator);
+                hasHeader: TestDatasets.iris.fileHasHeader,
+                separatorChar: TestDatasets.iris.fileSeparator);
 
             // Create a training pipeline.
             var featurizationPipeline = mlContext.Transforms.Concatenate("Features", Iris.Features)
@@ -306,7 +311,8 @@ public void ContinueTrainingOnlineGradientDescent()
 
             // Get the dataset.
             var data = mlContext.Data.LoadFromTextFile<HousingRegression>(GetDataPath(TestDatasets.housing.trainFilename),
-                separatorChar: TestDatasets.housing.fileSeparator, hasHeader: TestDatasets.housing.fileHasHeader);
+                separatorChar: TestDatasets.housing.fileSeparator,
+                hasHeader: TestDatasets.housing.fileHasHeader);
 
             // Create a transformation pipeline.
             var featurizationPipeline = mlContext.Transforms.Concatenate("Features", HousingRegression.Features)
@@ -349,7 +355,8 @@ public void ContinueTrainingLinearSymbolicStochasticGradientDescent()
 
             // Get the dataset.
             var data = mlContext.Data.LoadFromTextFile<TweetSentiment>(GetDataPath(TestDatasets.Sentiment.trainFilename),
-                separatorChar: TestDatasets.Sentiment.fileSeparator, hasHeader: TestDatasets.Sentiment.fileHasHeader,
+                separatorChar: TestDatasets.Sentiment.fileSeparator,
+                hasHeader: TestDatasets.Sentiment.fileHasHeader,
                 allowQuoting: TestDatasets.Sentiment.allowQuoting);
 
             // Create a transformation pipeline.
@@ -396,7 +403,8 @@ public void ContinueTrainingPoissonRegression()
 
             // Get the dataset.
             var data = mlContext.Data.LoadFromTextFile<HousingRegression>(GetDataPath(TestDatasets.housing.trainFilename),
-                separatorChar: TestDatasets.housing.fileSeparator, hasHeader: TestDatasets.housing.fileHasHeader);
+                separatorChar: TestDatasets.housing.fileSeparator,
+                hasHeader: TestDatasets.housing.fileHasHeader);
 
             // Create a transformation pipeline.
             var featurizationPipeline = mlContext.Transforms.Concatenate("Features", HousingRegression.Features)
@@ -439,8 +447,8 @@ public void MetacomponentsFunctionAsExpectedOva()
             var mlContext = new MLContext(seed: 1);
 
             var data = mlContext.Data.LoadFromTextFile<Iris>(GetDataPath(TestDatasets.iris.trainFilename),
-                hasHeader: TestDatasets.iris.fileHasHeader,
-                separatorChar: TestDatasets.iris.fileSeparator);
+                hasHeader: TestDatasets.iris.fileHasHeader,
+                separatorChar: TestDatasets.iris.fileSeparator);
 
             // Create a model training an OVA trainer with a binary classifier.
             var anomalyDetectionTrainer = mlContext.AnomalyDetection.Trainers.AnalyzeRandomizedPrincipalComponents();
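
Note on the pattern being standardized: every hunk reformats a Data.LoadFromTextFile call so that each named argument sits on its own line. Below is a minimal, self-contained sketch of that call style; the TweetSentiment column layout, the file path, and the literal separator/header/quoting values are assumptions for illustration only and are not taken from this commit (the tests resolve them through TestDatasets and GetDataPath).

using Microsoft.ML;
using Microsoft.ML.Data;

// Hypothetical input schema, standing in for the test project's TweetSentiment class.
public class TweetSentiment
{
    [LoadColumn(0)]
    public bool Sentiment { get; set; }

    [LoadColumn(1)]
    public string SentimentText { get; set; }
}

public static class LoadFromTextFileSketch
{
    public static void Main()
    {
        var mlContext = new MLContext(seed: 1);

        // One named argument per line, matching the layout this commit applies to the tests.
        // Path and option values are placeholders; the tests take them from TestDatasets.Sentiment.
        var data = mlContext.Data.LoadFromTextFile<TweetSentiment>(
            "sentiment-train.tsv",
            separatorChar: '\t',
            hasHeader: true,
            allowQuoting: true);

        System.Console.WriteLine($"Loaded {data.Schema.Count} columns.");
    }
}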
