Commit 41f062f

rebase fix
1 parent c871f69 commit 41f062f

20 files changed: +60 -47 lines changed

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropy.cs

+3 -2

@@ -59,13 +59,14 @@ public static void Example()
 // Micro Accuracy: 0.91
 // Macro Accuracy: 0.91
 // Log Loss: 0.24
-// Log Loss Reduction: 0.78
+// Log Loss Reduction: 0.79
 }

+// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {
 var random = new Random(seed);
-float randomFloat() => (float)random.NextDouble();
+float randomFloat() => (float)(random.NextDouble() - 0.5);
 for (int i = 0; i < count; i++)
 {
 // Generate Labels that are integers 1, 2 or 3

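For context, here is a minimal, self-contained sketch of the data generator these samples share after this change. The DataPoint shape and the per-label feature offset are assumptions drawn from the surrounding sample code, not part of this diff; only the [-0.5, 0.5) shift and the added comment line are what the commit actually introduces.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Hypothetical stand-in for the samples' DataPoint class (the real one
    // carries a VectorType attribute from Microsoft.ML.Data).
    public class DataPoint
    {
        public uint Label { get; set; }
        public float[] Features { get; set; }
    }

    public static class DataGenerationSketch
    {
        // Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
        public static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed = 0)
        {
            var random = new Random(seed);
            // Shifted by -0.5 so features are centered around zero.
            float randomFloat() => (float)(random.NextDouble() - 0.5);
            for (int i = 0; i < count; i++)
            {
                // Generate labels that are integers 1, 2 or 3.
                var label = random.Next(1, 4);
                yield return new DataPoint
                {
                    Label = (uint)label,
                    // Features correlated with the label via a per-class offset
                    // (assumed 20-dimensional, as in the published samples).
                    Features = Enumerable.Repeat(label, 20)
                        .Select(x => randomFloat() + label * 0.2f).ToArray()
                };
            }
        }
    }
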
docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropy.tt

+2 -1

@@ -8,6 +8,7 @@ string TrainerOptions = null;
 string OptionsInclude = "";
 string Comments = "";
 bool CacheData = false;
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";

 string ExpectedOutputPerInstance = @"// Expected output:
 // Label: 1, Prediction: 1
@@ -20,5 +21,5 @@ string ExpectedOutput = @"// Expected output:
 // Micro Accuracy: 0.91
 // Macro Accuracy: 0.91
 // Log Loss: 0.24
-// Log Loss Reduction: 0.78";
+// Log Loss Reduction: 0.79";
 #>

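The .tt files are T4 text templates that generate the per-trainer .cs samples above; DataGenerationComments is one of the variables consumed by a shared template include that is not part of this commit. A hypothetical sketch of how such a variable could be emitted, purely for illustration:

    <#
    // Hypothetical: variable set per trainer, as in the .tt diffs in this commit.
    string DataGenerationComments = "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";
    #>
    <#= DataGenerationComments #>
    private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
    {
        // ... body produced by the shared template ...
    }
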
docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropyWithOptions.cs

+2 -1

@@ -72,10 +72,11 @@ public static void Example()
 // Log Loss Reduction: 0.80
 }

+// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {
 var random = new Random(seed);
-float randomFloat() => (float)random.NextDouble();
+float randomFloat() => (float)(random.NextDouble() - 0.5);
 for (int i = 0; i < count; i++)
 {
 // Generate Labels that are integers 1, 2 or 3

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropyWithOptions.tt

+1 -1

@@ -12,7 +12,7 @@ string TrainerOptions = @"LbfgsMaximumEntropyMulticlassTrainer.Options

 string OptionsInclude = "using Microsoft.ML.Trainers;";
 string Comments = "";
-
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";
 bool CacheData = false;

 string ExpectedOutputPerInstance = @"// Expected output:

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbm.cs

+3 -2

@@ -61,13 +61,14 @@ public static void Example()
 // Micro Accuracy: 0.99
 // Macro Accuracy: 0.99
 // Log Loss: 0.05
-// Log Loss Reduction: 0.96
+// Log Loss Reduction: 0.95
 }

+// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {
 var random = new Random(seed);
-float randomFloat() => (float)random.NextDouble();
+float randomFloat() => (float)(random.NextDouble() - 0.5);
 for (int i = 0; i < count; i++)
 {
 // Generate Labels that are integers 1, 2 or 3

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbm.tt

+2 -1

@@ -10,6 +10,7 @@ string Comments = @"
 // This example requires installation of additional NuGet package
 // <a href=""https://www.nuget.org/packages/Microsoft.ML.FastTree/"">Microsoft.ML.FastTree</a>.";
 bool CacheData = false;
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";

 string ExpectedOutputPerInstance = @"// Expected output:
 // Label: 1, Prediction: 1
@@ -22,5 +23,5 @@ string ExpectedOutput = @"// Expected output:
 // Micro Accuracy: 0.99
 // Macro Accuracy: 0.99
 // Log Loss: 0.05
-// Log Loss Reduction: 0.96";
+// Log Loss Reduction: 0.95";
 #>

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbmWithOptions.cs

+4 -3

@@ -72,14 +72,15 @@ public static void Example()
 // Expected output:
 // Micro Accuracy: 0.98
 // Macro Accuracy: 0.98
-// Log Loss: 0.06
-// Log Loss Reduction: 0.80
+// Log Loss: 0.07
+// Log Loss Reduction: 0.94
 }

+// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {
 var random = new Random(seed);
-float randomFloat() => (float)random.NextDouble();
+float randomFloat() => (float)(random.NextDouble() - 0.5);
 for (int i = 0; i < count; i++)
 {
 // Generate Labels that are integers 1, 2 or 3

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbmWithOptions.tt

+3 -2

@@ -18,6 +18,7 @@ string Comments = @"
 // <a href=""https://www.nuget.org/packages/Microsoft.ML.FastTree/"">Microsoft.ML.FastTree</a>.";

 bool CacheData = false;
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";

 string ExpectedOutputPerInstance = @"// Expected output:
 // Label: 1, Prediction: 1
@@ -29,6 +30,6 @@ string ExpectedOutputPerInstance = @"// Expected output:
 string ExpectedOutput = @"// Expected output:
 // Micro Accuracy: 0.98
 // Macro Accuracy: 0.98
-// Log Loss: 0.06
-// Log Loss Reduction: 0.80";
+// Log Loss: 0.07
+// Log Loss Reduction: 0.94";
 #>

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/OneVersusAll.cs

+2 -3

@@ -58,11 +58,10 @@ public static void Example()
 // Expected output:
 // Micro Accuracy: 0.90
 // Macro Accuracy: 0.90
-// Log Loss: 0.37
-// Log Loss Reduction: 0.67
+// Log Loss: 0.36
+// Log Loss Reduction: 0.68
 }

-
 // Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/OneVersusAll.tt

+4 -4

@@ -7,8 +7,8 @@ string TrainerOptions = null;

 string OptionsInclude = "";
 string Comments= "";
-string DataGenerationComments= @"
-// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";
+
 bool CacheData = false;

 string ExpectedOutputPerInstance= @"// Expected output:
@@ -21,6 +21,6 @@ string ExpectedOutputPerInstance= @"// Expected output:
 string ExpectedOutput = @"// Expected output:
 // Micro Accuracy: 0.90
 // Macro Accuracy: 0.90
-// Log Loss: 0.37
-// Log Loss Reduction: 0.67";
+// Log Loss: 0.36
+// Log Loss Reduction: 0.68";
 #>

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PairwiseCoupling.cs

+2 -3

@@ -58,11 +58,10 @@ public static void Example()
 // Expected output:
 // Micro Accuracy: 0.90
 // Macro Accuracy: 0.90
-// Log Loss: 0.38
-// Log Loss Reduction: 0.66
+// Log Loss: 0.36
+// Log Loss Reduction: 0.67
 }

-
 // Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PairwiseCoupling.tt

+3 -4

@@ -7,8 +7,7 @@ string TrainerOptions = null;

 string OptionsInclude = "";
 string Comments= "";
-string DataGenerationComments= @"
-// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";
 bool CacheData = false;

 string ExpectedOutputPerInstance= @"// Expected output:
@@ -21,6 +20,6 @@ string ExpectedOutputPerInstance= @"// Expected output:
 string ExpectedOutput = @"// Expected output:
 // Micro Accuracy: 0.90
 // Macro Accuracy: 0.90
-// Log Loss: 0.38
-// Log Loss Reduction: 0.66";
+// Log Loss: 0.36
+// Log Loss Reduction: 0.67";
 #>

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropy.cs

+2 -1

@@ -68,10 +68,11 @@ public static void Example()
 // Log Loss Reduction: 0.80
 }

+// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {
 var random = new Random(seed);
-float randomFloat() => (float)random.NextDouble();
+float randomFloat() => (float)(random.NextDouble() - 0.5);
 for (int i = 0; i < count; i++)
 {
 // Generate Labels that are integers 1, 2 or 3

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropy.tt

+1

@@ -8,6 +8,7 @@ string TrainerOptions = null;
 string OptionsInclude = "";
 string Comments = "";
 bool CacheData = true;
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";

 string ExpectedOutputPerInstance = @"// Expected output:
 // Label: 1, Prediction: 1

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropyWithOptions.cs

+6 -5

@@ -73,16 +73,17 @@ public static void Example()
 PrintMetrics(metrics);

 // Expected output:
-// Micro Accuracy: 0.91
-// Macro Accuracy: 0.91
-// Log Loss: 0.46
-// Log Loss Reduction: 0.58
+// Micro Accuracy: 0.92
+// Macro Accuracy: 0.92
+// Log Loss: 0.31
+// Log Loss Reduction: 0.72
 }

+// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {
 var random = new Random(seed);
-float randomFloat() => (float)random.NextDouble();
+float randomFloat() => (float)(random.NextDouble() - 0.5);
 for (int i = 0; i < count; i++)
 {
 // Generate Labels that are integers 1, 2 or 3

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropyWithOptions.tt

+6 -4

@@ -14,6 +14,8 @@ string TrainerOptions = @"SdcaMaximumEntropyMulticlassTrainer.Options
 string OptionsInclude = "using Microsoft.ML.Trainers;";
 string Comments = "";
 bool CacheData = true;
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";
+

 string ExpectedOutputPerInstance = @"// Expected output:
 // Label: 1, Prediction: 1
@@ -23,8 +25,8 @@ string ExpectedOutputPerInstance = @"// Expected output:
 // Label: 3, Prediction: 3";

 string ExpectedOutput = @"// Expected output:
-// Micro Accuracy: 0.91
-// Macro Accuracy: 0.91
-// Log Loss: 0.46
-// Log Loss Reduction: 0.58";
+// Micro Accuracy: 0.92
+// Macro Accuracy: 0.92
+// Log Loss: 0.31
+// Log Loss Reduction: 0.72";
 #>

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibrated.cs

+4 -3

@@ -64,14 +64,15 @@ public static void Example()
 // Expected output:
 // Micro Accuracy: 0.91
 // Macro Accuracy: 0.91
-// Log Loss: 0.00
-// Log Loss Reduction: 1.00
+// Log Loss: 0.57
+// Log Loss Reduction: 0.48
 }

+// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {
 var random = new Random(seed);
-float randomFloat() => (float)random.NextDouble();
+float randomFloat() => (float)(random.NextDouble() - 0.5);
 for (int i = 0; i < count; i++)
 {
 // Generate Labels that are integers 1, 2 or 3

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibrated.tt

+3 -2

@@ -8,6 +8,7 @@ string TrainerOptions = null;
 string OptionsInclude = "";
 string Comments = "";
 bool CacheData = true;
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";

 string ExpectedOutputPerInstance = @"// Expected output:
 // Label: 1, Prediction: 1
@@ -19,6 +20,6 @@ string ExpectedOutputPerInstance = @"// Expected output:
 string ExpectedOutput = @"// Expected output:
 // Micro Accuracy: 0.91
 // Macro Accuracy: 0.91
-// Log Loss: 0.00
-// Log Loss Reduction: 1.00";
+// Log Loss: 0.57
+// Log Loss Reduction: 0.48";
 #>

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibratedWithOptions.cs

+4 -3

@@ -75,14 +75,15 @@ public static void Example()
 // Expected output:
 // Micro Accuracy: 0.91
 // Macro Accuracy: 0.91
-// Log Loss: 0.36
-// Log Loss Reduction: 0.67
+// Log Loss: 0.22
+// Log Loss Reduction: 0.80
 }

+// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.
 private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
 {
 var random = new Random(seed);
-float randomFloat() => (float)random.NextDouble();
+float randomFloat() => (float)(random.NextDouble() - 0.5);
 for (int i = 0; i < count; i++)
 {
 // Generate Labels that are integers 1, 2 or 3

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibratedWithOptions.tt

+3 -2

@@ -14,6 +14,7 @@ string TrainerOptions = @"SdcaNonCalibratedMulticlassTrainer.Options
 string OptionsInclude = "using Microsoft.ML.Trainers;";
 string Comments = "";
 bool CacheData = true;
+string DataGenerationComments= "// Generates random uniform doubles in [-0.5, 0.5) range with labels 1, 2 or 3.";

 string ExpectedOutputPerInstance = @"// Expected output:
 // Label: 1, Prediction: 1
@@ -25,6 +26,6 @@ string ExpectedOutputPerInstance = @"// Expected output:
 string ExpectedOutput = @"// Expected output:
 // Micro Accuracy: 0.91
 // Macro Accuracy: 0.91
-// Log Loss: 0.36
-// Log Loss Reduction: 0.67";
+// Log Loss: 0.22
+// Log Loss Reduction: 0.80";
 #>

0 commit comments
