docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicStochasticGradientDescent.cs (+1 −1)

@@ -4,7 +4,7 @@ public static class SymbolicStochasticGradientDescent
     {
         // This example requires installation of additional nuget package <a href="https://www.nuget.org/packages/Microsoft.ML.HalLearners/">Microsoft.ML.HalLearners</a>.
         // In this example we will use the adult income dataset. The goal is to predict
-        // if a person's income is above $50K or not, based on different pieces of information about that person.
+        // if a person's income is above $50K or not, based on demographic information about that person.
         // For more details about this dataset, please see https://archive.ics.uci.edu/ml/datasets/adult
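For readers skimming this diff, the sample these comments introduce follows the usual ML.NET pattern: load the data, fit the trainer, evaluate the scored output. The sketch below is an illustration only, not code from this PR: the catalog extension name `SymbolicStochasticGradientDescent` is assumed from the sample's file name, its parameter names may differ between ML.NET versions, and `LoadAdultDataset` is a hypothetical helper.

using Microsoft.ML;

public static class SymbolicSgdSketch
{
    public static void Run()
    {
        var mlContext = new MLContext(seed: 0);

        // Hypothetical helper: loads the UCI adult income dataset into an IDataView
        // with a boolean "Label" column (income above $50K) and a "Features" vector.
        IDataView data = LoadAdultDataset(mlContext);

        // Symbolic SGD binary classification trainer, shipped in Microsoft.ML.HalLearners.
        // The extension-method and parameter names here are assumptions, not this repo's code.
        var pipeline = mlContext.BinaryClassification.Trainers
            .SymbolicStochasticGradientDescent(labelColumnName: "Label", featureColumnName: "Features");

        var model = pipeline.Fit(data);
        var scored = model.Transform(data);
        var metrics = mlContext.BinaryClassification.Evaluate(scored);
        System.Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
    }

    // Placeholder so the sketch stays self-contained; the real sample reads the dataset from disk.
    private static IDataView LoadAdultDataset(MLContext mlContext) =>
        throw new System.NotImplementedException();
}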
docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicStochasticGradientDescentWithOptions.cs (+1 −1)

@@ -4,7 +4,7 @@ public static class SymbolicStochasticGradientDescentWithOptions
     {
         // This example requires installation of additional nuget package <a href="https://www.nuget.org/packages/Microsoft.ML.HalLearners/">Microsoft.ML.HalLearners</a>.
        // In this example we will use the adult income dataset. The goal is to predict
-        // if a person's income is above $50K or not, based on different pieces of information about that person.
+        // if a person's income is above $50K or not, based on demographic information about that person.
         // For more details about this dataset, please see https://archive.ics.uci.edu/ml/datasets/adult
[Argument(ArgumentType.AtMostOnce,HelpText="Column to use for example weight",ShortName="weight",SortOrder=4,Visibility=ArgumentAttribute.VisibilityType.EntryPointsOnly)]
/// The degree of lock-free parallelism used by SGD.
1736
+
/// </summary>
1737
+
/// <value>
1738
+
/// Defaults to automatic depending on data sparseness. Determinism is not guaranteed.
1739
+
/// </value>
1731
1740
[Argument(ArgumentType.AtMostOnce,HelpText="Degree of lock-free parallelism. Defaults to automatic depending on data sparseness. Determinism not guaranteed.",ShortName="nt,t,threads",SortOrder=50)]
1732
1741
[TGUI(Label="Number of threads",SuggestedSweeps="1,2,4")]
1733
1742
publicint?NumThreads;
1734
1743
1744
+
/// <summary>
1745
+
/// The convergence tolerance. If the exponential moving average of loss reductions falls below this tolerance,
1746
+
/// the algorithm is deemed to have converged and will stop.
1747
+
/// </summary>
1735
1748
[Argument(ArgumentType.AtMostOnce,HelpText="Exponential moving averaged improvement tolerance for convergence",ShortName="tol")]
/// The weight to be applied to the positive class. This is useful for training with imbalanced data.
1784
+
/// </summary>
1785
+
/// <value>
1786
+
/// Default value is 1, which means no extra weight.
1787
+
/// </value>
1753
1788
[Argument(ArgumentType.AtMostOnce,HelpText="Apply weight to the positive class, for imbalanced data",ShortName="piw")]
1754
1789
publicfloatPositiveInstanceWeight=1;
1755
1790
1791
+
/// <summary>
1792
+
/// Determines the frequency of checking for convergence in terms of number of iterations.
1793
+
/// </summary>
1794
+
/// <value>
1795
+
/// Default equals <see cref="NumThreads"/>."
1796
+
/// </value>
1756
1797
[Argument(ArgumentType.AtMostOnce,HelpText="Convergence check frequency (in terms of number of iterations). Default equals number of threads",ShortName="checkFreq")]
1757
1798
publicint?CheckFrequency;
1758
1799
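The doc comments added above describe three user-facing knobs on the trainer's options class (the file name is not visible in this excerpt). As a self-contained sketch of how they fit together, the snippet below mirrors those fields on a placeholder type; `SgdTrainerOptionsSketch` is an invented name, and only the field names, defaults, and semantics are taken from the diff.

// Placeholder type that mirrors the documented fields; the real options class lives in
// the ML.NET trainer source this hunk belongs to.
public sealed class SgdTrainerOptionsSketch
{
    public int? NumThreads;                  // degree of lock-free parallelism; default chosen automatically
    public float PositiveInstanceWeight = 1; // weight applied to the positive class; 1 means no extra weight
    public int? CheckFrequency;              // convergence check frequency in iterations; default equals NumThreads
}

public static class OptionsSketch
{
    public static void Main()
    {
        var options = new SgdTrainerOptionsSketch
        {
            NumThreads = 2,              // cap lock-free parallelism at two threads
            PositiveInstanceWeight = 2f, // up-weight positives for an imbalanced label
            CheckFrequency = 4           // check for convergence every four iterations
        };
        System.Console.WriteLine(
            $"Threads={options.NumThreads}, PositiveWeight={options.PositiveInstanceWeight}, CheckFrequency={options.CheckFrequency}");
        // In the real API these values would be set on the trainer's options object and
        // passed to the corresponding BinaryClassification catalog extension.
    }
}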
@@ -1802,7 +1843,7 @@ internal static class Defaults
         /// <param name="env">The environment to use.</param>
         /// <param name="featureColumn">The name of the feature column.</param>
         /// <param name="labelColumn">The name of the label column.</param>
-        /// <param name="weightColumn">The name for the example weight column.</param>
+        /// <param name="weightColumn">The name of the example weight column.</param>
         /// <param name="maxIterations">The maximum number of iterations; set to 1 to simulate online learning.</param>
         /// <param name="initLearningRate">The initial learning rate used by SGD.</param>