Skip to content

Commit de92658

Browse files
committed
PR feedback.
1 parent b3b6595 commit de92658

File tree

5 files changed

+134
-101
lines changed

5 files changed

+134
-101
lines changed

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/Calibrators/FixedPlatt.cs

Lines changed: 31 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
using System;
2+
using System.Collections.Generic;
23
using Microsoft.ML;
34

45
namespace Samples.Dynamic.Trainers.BinaryClassification.Calibrators
@@ -26,9 +27,8 @@ public static void Example()
2627
// Let's score the new data. The score will give us a numerical estimation of the chance that the particular sample
2728
// bears positive sentiment. This estimate is relative to the numbers obtained.
2829
var scoredData = transformer.Transform(trainTestData.TestSet);
29-
var scoredDataPreview = scoredData;
30-
31-
PrintRowViewValues(scoredDataPreview);
30+
var outScores = mlContext.Data.CreateEnumerable<ScoreValue>(scoredData, reuseRowObject: false);
31+
PrintScore(outScores, 5);
3232
// Preview of scoredDataPreview.RowView
3333
// Score 4.18144
3434
// Score -14.10248
@@ -45,39 +45,46 @@ public static void Example()
4545
// This column is a calibrated version of the "Score" column, meaning its values are a valid probability value in the [0, 1] interval
4646
// representing the chance that the respective sample bears positive sentiment.
4747
var finalData = calibratorTransformer.Transform(scoredData);
48-
PrintRowViewValues(finalData);
48+
var outScoresAndProbabilities = mlContext.Data.CreateEnumerable<ScoreAndProbabilityValue>(finalData, reuseRowObject: false);
49+
PrintScoreAndProbability(outScoresAndProbabilities, 5);
4950
// Score 4.18144 Probability 0.9856767
5051
// Score -14.10248 Probability 7.890148E-07
5152
// Score 2.731951 Probability 0.9416927
5253
// Score -2.554229 Probability 0.07556222
5354
// Score 5.36571 Probability 0.9955735
5455
}
5556

56-
private static void PrintRowViewValues(IDataView transformedData)
57+
private static void PrintScore(IEnumerable<ScoreValue> values, int rows)
5758
{
58-
int count = 0;
59-
using (var cursor = transformedData.GetRowCursor(transformedData.Schema))
59+
foreach (var value in values)
6060
{
61-
var scoreGetter = cursor.GetGetter<float>(cursor.Schema["Score"]);
62-
var probabilityGetter = cursor.Schema.GetColumnOrNull("Probability") != null ? cursor.GetGetter<float>(cursor.Schema["Probability"]) : null;
63-
while (cursor.MoveNext())
64-
{
65-
float score = default;
66-
scoreGetter(ref score);
67-
Console.Write("{0, -10} {1, -10}", "Score", score);
61+
if (rows-- <= 0)
62+
break;
63+
64+
Console.WriteLine("{0, -10} {1, -10}", "Score", value.Score);
65+
}
66+
}
6867

69-
if (probabilityGetter != null)
70-
{
71-
float probability = default;
72-
probabilityGetter(ref probability);
73-
Console.Write("{0, -10} {1, -10}", "Probability", probability);
74-
}
68+
private static void PrintScoreAndProbability(IEnumerable<ScoreAndProbabilityValue> values, int rows)
69+
{
70+
foreach (var value in values)
71+
{
72+
if (rows-- <= 0)
73+
break;
7574

76-
Console.WriteLine();
77-
if (count++ >= 4)
78-
break;
79-
}
75+
Console.WriteLine("{0, -10} {1, -10} {2, -10} {3, -10}", "Score", value.Score, "Probability", value.Probability);
8076
}
8177
}
78+
79+
private class ScoreValue
80+
{
81+
public float Score { get; set; }
82+
}
83+
84+
private class ScoreAndProbabilityValue
85+
{
86+
public float Score { get; set; }
87+
public float Probability { get; set; }
88+
}
8289
}
8390
}

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/Calibrators/Isotonic.cs

Lines changed: 31 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
using System;
2+
using System.Collections.Generic;
23
using Microsoft.ML;
34

45
namespace Samples.Dynamic.Trainers.BinaryClassification.Calibrators
@@ -26,9 +27,9 @@ public static void Example()
2627
// Let's score the new data. The score will give us a numerical estimation of the chance that the particular sample
2728
// bears positive sentiment. This estimate is relative to the numbers obtained.
2829
var scoredData = transformer.Transform(trainTestData.TestSet);
29-
var scoredDataPreview = scoredData;
30+
var outScores = mlContext.Data.CreateEnumerable<ScoreValue>(scoredData, reuseRowObject: false);
3031

31-
PrintRowViewValues(scoredDataPreview);
32+
PrintScore(outScores, 5);
3233
// Preview of scoredDataPreview.RowView
3334
// Score 4.18144
3435
// Score -14.10248
@@ -45,39 +46,46 @@ public static void Example()
4546
// This column is a calibrated version of the "Score" column, meaning its values are a valid probability value in the [0, 1] interval
4647
// representing the chance that the respective sample bears positive sentiment.
4748
var finalData = calibratorTransformer.Transform(scoredData);
48-
PrintRowViewValues(finalData);
49+
var outScoresAndProbabilities = mlContext.Data.CreateEnumerable<ScoreAndProbabilityValue>(finalData, reuseRowObject: false);
50+
PrintScoreAndProbability(outScoresAndProbabilities, 5);
4951
// Score 4.18144 Probability 0.8
5052
// Score -14.10248 Probability 1E-15
5153
// Score 2.731951 Probability 0.7370371
5254
// Score -2.554229 Probability 0.2063954
5355
// Score 5.36571 Probability 0.8958333
5456
}
5557

56-
private static void PrintRowViewValues(IDataView transformedData)
58+
private static void PrintScore(IEnumerable<ScoreValue> values, int rows)
5759
{
58-
int count = 0;
59-
using (var cursor = transformedData.GetRowCursor(transformedData.Schema))
60+
foreach (var value in values)
6061
{
61-
var scoreGetter = cursor.GetGetter<float>(cursor.Schema["Score"]);
62-
var probabilityGetter = cursor.Schema.GetColumnOrNull("Probability") != null ? cursor.GetGetter<float>(cursor.Schema["Probability"]) : null;
63-
while (cursor.MoveNext())
64-
{
65-
float score = default;
66-
scoreGetter(ref score);
67-
Console.Write("{0, -10} {1, -10}", "Score", score);
62+
if (rows-- <= 0)
63+
break;
6864

69-
if (probabilityGetter != null)
70-
{
71-
float probability = default;
72-
probabilityGetter(ref probability);
73-
Console.Write("{0, -10} {1, -10}", "Probability", probability);
74-
}
65+
Console.WriteLine("{0, -10} {1, -10}", "Score", value.Score);
66+
}
67+
}
68+
69+
private static void PrintScoreAndProbability(IEnumerable<ScoreAndProbabilityValue> values, int rows)
70+
{
71+
foreach (var value in values)
72+
{
73+
if (rows-- <= 0)
74+
break;
7575

76-
Console.WriteLine();
77-
if (count++ >= 4)
78-
break;
79-
}
76+
Console.WriteLine("{0, -10} {1, -10} {2, -10} {3, -10}", "Score", value.Score, "Probability", value.Probability);
8077
}
8178
}
79+
80+
private class ScoreValue
81+
{
82+
public float Score { get; set; }
83+
}
84+
85+
private class ScoreAndProbabilityValue
86+
{
87+
public float Score { get; set; }
88+
public float Probability { get; set; }
89+
}
8290
}
8391
}
docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/Calibrators/Naive.cs (filename reconstructed from surrounding context; header lost in extraction)

Lines changed: 36 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
using System;
2+
using System.Collections.Generic;
23
using Microsoft.ML;
34

45
namespace Samples.Dynamic.Trainers.BinaryClassification.Calibrators
@@ -26,9 +27,8 @@ public static void Example()
2627
// Let's score the new data. The score will give us a numerical estimation of the chance that the particular sample
2728
// bears positive sentiment. This estimate is relative to the numbers obtained.
2829
var scoredData = transformer.Transform(trainTestData.TestSet);
29-
var scoredDataPreview = scoredData;
30-
31-
PrintRowViewValues(scoredDataPreview);
30+
var outScores = mlContext.Data.CreateEnumerable<ScoreValue>(scoredData, reuseRowObject: false);
31+
PrintScore(outScores, 5);
3232
// Preview of scoredDataPreview.RowView
3333
// Score 4.18144
3434
// Score -14.10248
@@ -45,39 +45,46 @@ public static void Example()
4545
// This column is a calibrated version of the "Score" column, meaning its values are a valid probability value in the [0, 1] interval
4646
// representing the chance that the respective sample bears positive sentiment.
4747
var finalData = calibratorTransformer.Transform(scoredData);
48-
PrintRowViewValues(finalData);
49-
// Score 4.18144 Probability 0.775
50-
// Score -14.10248 Probability 0.01923077
51-
// Score 2.731951 Probability 0.7738096
52-
// Score -2.554229 Probability 0.2011494
53-
// Score 5.36571 Probability 0.9117647
48+
var outScoresAndProbabilities = mlContext.Data.CreateEnumerable<ScoreAndProbabilityValue>(finalData, reuseRowObject: false);
49+
PrintScoreAndProbability(outScoresAndProbabilities, 5);
50+
// Score 4.18144 Probability 0.775
51+
// Score -14.10248 Probability 0.01923077
52+
// Score 2.731951 Probability 0.7738096
53+
// Score -2.554229 Probability 0.2011494
54+
// Score 5.36571 Probability 0.9117647
5455
}
5556

56-
private static void PrintRowViewValues(IDataView transformedData)
57+
private static void PrintScore(IEnumerable<ScoreValue> values, int rows)
5758
{
58-
int count = 0;
59-
using (var cursor = transformedData.GetRowCursor(transformedData.Schema))
59+
foreach (var value in values)
6060
{
61-
var scoreGetter = cursor.GetGetter<float>(cursor.Schema["Score"]);
62-
var probabilityGetter = cursor.Schema.GetColumnOrNull("Probability") != null ? cursor.GetGetter<float>(cursor.Schema["Probability"]) : null;
63-
while (cursor.MoveNext())
64-
{
65-
float score = default;
66-
scoreGetter(ref score);
67-
Console.Write("{0, -10} {1, -10}", "Score", score);
61+
if (rows-- <= 0)
62+
break;
63+
64+
Console.WriteLine("{0, -10} {1, -10}", "Score", value.Score);
65+
}
66+
}
6867

69-
if (probabilityGetter != null)
70-
{
71-
float probability = default;
72-
probabilityGetter(ref probability);
73-
Console.Write("{0, -10} {1, -10}", "Probability", probability);
74-
}
68+
private static void PrintScoreAndProbability(IEnumerable<ScoreAndProbabilityValue> values, int rows)
69+
{
70+
foreach (var value in values)
71+
{
72+
if (rows-- <= 0)
73+
break;
7574

76-
Console.WriteLine();
77-
if (count++ >= 4)
78-
break;
79-
}
75+
Console.WriteLine("{0, -10} {1, -10} {2, -10} {3, -10}", "Score", value.Score, "Probability", value.Probability);
8076
}
8177
}
78+
79+
private class ScoreValue
80+
{
81+
public float Score { get; set; }
82+
}
83+
84+
private class ScoreAndProbabilityValue
85+
{
86+
public float Score { get; set; }
87+
public float Probability { get; set; }
88+
}
8289
}
8390
}

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/Calibrators/Platt.cs

Lines changed: 31 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
using System;
2+
using System.Collections.Generic;
23
using Microsoft.ML;
34

45
namespace Samples.Dynamic.Trainers.BinaryClassification.Calibrators
@@ -26,9 +27,8 @@ public static void Example()
2627
// Let's score the new data. The score will give us a numerical estimation of the chance that the particular sample
2728
// bears positive sentiment. This estimate is relative to the numbers obtained.
2829
var scoredData = transformer.Transform(trainTestData.TestSet);
29-
var scoredDataPreview = scoredData;
30-
31-
PrintRowViewValues(scoredDataPreview);
30+
var outScores = mlContext.Data.CreateEnumerable<ScoreValue>(scoredData, reuseRowObject: false);
31+
PrintScore(outScores, 5);
3232
// Preview of scoredDataPreview.RowView
3333
// Score 4.18144
3434
// Score -14.10248
@@ -45,39 +45,46 @@ public static void Example()
4545
// This column is a calibrated version of the "Score" column, meaning its values are a valid probability value in the [0, 1] interval
4646
// representing the chance that the respective sample bears positive sentiment.
4747
var finalData = calibratorTransformer.Transform(scoredData);
48-
PrintRowViewValues(finalData);
48+
var outScoresAndProbabilities = mlContext.Data.CreateEnumerable<ScoreAndProbabilityValue>(finalData, reuseRowObject: false);
49+
PrintScoreAndProbability(outScoresAndProbabilities, 5);
4950
// Score 4.18144 Probability 0.8511352
5051
// Score -14.10248 Probability 0.001633563
5152
// Score 2.731951 Probability 0.7496456
5253
// Score -2.554229 Probability 0.2206048
5354
// Score 5.36571 Probability 0.9065308
5455
}
5556

56-
private static void PrintRowViewValues(IDataView transformedData)
57+
private static void PrintScore(IEnumerable<ScoreValue> values, int rows)
5758
{
58-
int count = 0;
59-
using (var cursor = transformedData.GetRowCursor(transformedData.Schema))
59+
foreach (var value in values)
6060
{
61-
var scoreGetter = cursor.GetGetter<float>(cursor.Schema["Score"]);
62-
var probabilityGetter = cursor.Schema.GetColumnOrNull("Probability") != null ? cursor.GetGetter<float>(cursor.Schema["Probability"]) : null;
63-
while (cursor.MoveNext())
64-
{
65-
float score = default;
66-
scoreGetter(ref score);
67-
Console.Write("{0, -10} {1, -10}", "Score", score);
61+
if (rows-- <= 0)
62+
break;
63+
64+
Console.WriteLine("{0, -10} {1, -10}", "Score", value.Score);
65+
}
66+
}
6867

69-
if (probabilityGetter != null)
70-
{
71-
float probability = default;
72-
probabilityGetter(ref probability);
73-
Console.Write("{0, -10} {1, -10}", "Probability", probability);
74-
}
68+
private static void PrintScoreAndProbability(IEnumerable<ScoreAndProbabilityValue> values, int rows)
69+
{
70+
foreach (var value in values)
71+
{
72+
if (rows-- <= 0)
73+
break;
7574

76-
Console.WriteLine();
77-
if (count++ >= 4)
78-
break;
79-
}
75+
Console.WriteLine("{0, -10} {1, -10} {2, -10} {3, -10}", "Score", value.Score, "Probability", value.Probability);
8076
}
8177
}
78+
79+
private class ScoreValue
80+
{
81+
public float Score { get; set; }
82+
}
83+
84+
private class ScoreAndProbabilityValue
85+
{
86+
public float Score { get; set; }
87+
public float Probability { get; set; }
88+
}
8289
}
8390
}
docs/samples/Microsoft.ML.Samples/Program.cs (filename reconstructed from surrounding context; header lost in extraction)

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,16 @@
11
using Samples.Dynamic;
2+
using Samples.Dynamic.Trainers.BinaryClassification.Calibrators;
23

34
namespace Microsoft.ML.Samples
45
{
56
internal static class Program
67
{
78
static void Main(string[] args)
89
{
9-
CalculateFeatureContribution.Example();
10+
FixedPlatt.Example();
11+
Isotonic.Example();
12+
Naive.Example();
13+
Platt.Example();
1014
}
1115
}
1216
}

0 commit comments

Comments
 (0)