Skip to content

API reference - Fix placeholder links #3359

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into from
Apr 17, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ namespace Microsoft.ML.Trainers
public abstract class AveragedLinearOptions : OnlineLinearOptions
{
/// <summary>
/// <a href="tmpurl_lr">Learning rate</a>.
/// Learning rate.
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should the URI that the `tmpurl_lr` placeholder referenced be restored here?

/// </summary>
[Argument(ArgumentType.AtMostOnce, HelpText = "Learning rate", ShortName = "lr", SortOrder = 50)]
[TGUI(Label = "Learning rate", SuggestedSweeps = "0.01,0.1,0.5,1.0")]
Expand Down Expand Up @@ -61,7 +61,7 @@ public abstract class AveragedLinearOptions : OnlineLinearOptions
public bool LazyUpdate = true;

/// <summary>
/// The L2 weight for <a href='tmpurl_regularization'>regularization</a>.
/// The L2 weight for <a href='https://en.wikipedia.org/wiki/Regularization_(mathematics)'>regularization</a>.
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
/// The L2 weight for <a href='https://en.wikipedia.org/wiki/Regularization_(mathematics)'>regularization</a>.
/// The coefficient of L2-norm for <a href='https://en.wikipedia.org/wiki/Regularization_(mathematics)'>regularization on the weight vector</a>.

/// </summary>
[Argument(ArgumentType.AtMostOnce, HelpText = "L2 Regularization Weight", ShortName = "reg,L2RegularizerWeight", SortOrder = 50)]
[TGUI(Label = "L2 Regularization Weight")]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,18 +79,18 @@ public sealed class AveragedPerceptronTrainer : AveragedLinearTrainer<BinaryPred
public sealed class Options : AveragedLinearOptions
{
/// <summary>
/// A custom <a href="tmpurl_loss">loss</a>.
/// A custom <a href="https://en.wikipedia.org/wiki/Loss_function">loss</a>.
/// </summary>
[Argument(ArgumentType.Multiple, Name = "LossFunction", HelpText = "Loss Function", ShortName = "loss", SortOrder = 50)]
internal ISupportClassificationLossFactory ClassificationLossFunctionFactory = new HingeLoss.Options();

/// <summary>
/// A custom <a href="tmpurl_loss">loss</a>.
/// A custom <a href="https://en.wikipedia.org/wiki/Loss_function">loss</a>.
/// </summary>
public IClassificationLoss LossFunction { get; set; }

/// <summary>
/// The <a href="tmpurl_calib">calibrator</a> for producing probabilities. Default is exponential (aka Platt) calibration.
/// The <a href="https://en.wikipedia.org/wiki/Calibration_(statistics)">calibrator</a> for producing probabilities. Default is exponential (aka Platt) calibration.
/// </summary>
[Argument(ArgumentType.AtMostOnce, HelpText = "The calibrator kind to apply to the predictor. Specify null for no calibration", Visibility = ArgumentAttribute.VisibilityType.EntryPointsOnly)]
internal ICalibratorTrainerFactory Calibrator = new PlattCalibratorTrainerFactory();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ public sealed class Options : AveragedLinearOptions
internal ISupportRegressionLossFactory RegressionLossFunctionFactory = new SquaredLossFactory();

/// <summary>
/// A custom <a href="tmpurl_loss">loss</a>.
/// A custom <a href="https://en.wikipedia.org/wiki/Loss_function">loss</a>.
/// </summary>
public IRegressionLoss LossFunction { get; set; }

Expand Down
16 changes: 8 additions & 8 deletions src/Microsoft.ML.StandardTrainers/Standard/SdcaBinary.cs
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ public abstract class SdcaTrainerBase<TOptions, TTransformer, TModel> : Stochast
public abstract class OptionsBase : TrainerInputBaseWithWeight
{
/// <summary>
/// The L2 <a href='tmpurl_regularization'>regularization</a> hyperparameter.
/// The L2 <a href='https://en.wikipedia.org/wiki/Regularization_(mathematics)'>regularization</a> hyperparameter.
/// </summary>
[Argument(ArgumentType.AtMostOnce, HelpText = "L2 regularizer constant. By default the l2 constant is automatically inferred based on data set.", NullName = "<Auto>", ShortName = "l2, L2Const", SortOrder = 1)]
[TGUI(Label = "L2 Regularizer Constant", SuggestedSweeps = "<Auto>,1e-7,1e-6,1e-5,1e-4,1e-3,1e-2")]
Expand All @@ -166,7 +166,7 @@ public abstract class OptionsBase : TrainerInputBaseWithWeight

// REVIEW: make the default positive when we know how to consume a sparse model
/// <summary>
/// The L1 <a href='tmpurl_regularization'>regularization</a> hyperparameter.
/// The L1 <a href='https://en.wikipedia.org/wiki/Regularization_(mathematics)'>regularization</a> hyperparameter.
/// </summary>
[Argument(ArgumentType.AtMostOnce, HelpText = "L1 soft threshold (L1/L2). Note that it is easier to control and sweep using the threshold parameter than the raw L1-regularizer constant. By default the l1 threshold is automatically inferred based on data set.",
NullName = "<Auto>", Name = "L1Threshold", ShortName = "l1", SortOrder = 2)]
Expand Down Expand Up @@ -1547,7 +1547,7 @@ private protected override BinaryPredictionTransformer<TModelParameters> MakeTra

/// <summary>
/// The <see cref="IEstimator{TTransformer}"/> for training a binary logistic regression classification model using the stochastic dual coordinate ascent method.
/// The trained model is <a href='tmpurl_calib'>calibrated</a> and can produce probability by feeding the output value of the
/// The trained model is <a href='https://en.wikipedia.org/wiki/Calibration_(statistics)'>calibrated</a> and can produce probability by feeding the output value of the
/// linear function to a <see cref="PlattCalibrator"/>.
/// </summary>
/// <include file='doc.xml' path='doc/members/member[@name="SDCA_remarks"]/*' />
Expand Down Expand Up @@ -1623,7 +1623,7 @@ public sealed class SdcaNonCalibratedBinaryTrainer : SdcaBinaryTrainerBase<Linea
public sealed class Options : BinaryOptionsBase
{
/// <summary>
/// The custom <a href="tmpurl_loss">loss</a>.
/// The custom <a href="https://en.wikipedia.org/wiki/Loss_function">loss</a>.
/// </summary>
/// <value>
/// If unspecified, <see cref="LogLoss"/> will be used.
Expand All @@ -1632,7 +1632,7 @@ public sealed class Options : BinaryOptionsBase
internal ISupportSdcaClassificationLossFactory LossFunctionFactory = new LogLossFactory();

/// <summary>
/// The custom <a href="tmpurl_loss">loss</a>.
/// The custom <a href="https://en.wikipedia.org/wiki/Loss_function">loss</a>.
/// </summary>
/// <value>
/// If unspecified, <see cref="LogLoss"/> will be used.
Expand Down Expand Up @@ -1776,7 +1776,7 @@ public abstract class SgdBinaryTrainerBase<TModel> :
public class OptionsBase : TrainerInputBaseWithWeight
{
/// <summary>
/// The L2 weight for <a href='tmpurl_regularization'>regularization</a>.
/// The L2 weight for <a href='https://en.wikipedia.org/wiki/Regularization_(mathematics)'>regularization</a>.
/// </summary>
[Argument(ArgumentType.AtMostOnce, HelpText = "L2 Regularization constant", ShortName = "l2, L2Weight", SortOrder = 50)]
[TGUI(Label = "L2 Regularization Constant", SuggestedSweeps = "1e-7,5e-7,1e-6,5e-6,1e-5")]
Expand Down Expand Up @@ -1814,7 +1814,7 @@ public class OptionsBase : TrainerInputBaseWithWeight
public int NumberOfIterations = Defaults.NumberOfIterations;

/// <summary>
/// The initial <a href="tmpurl_lr">learning rate</a> used by SGD.
/// The initial learning rate used by SGD.
/// </summary>
[Argument(ArgumentType.AtMostOnce, HelpText = "Initial learning rate (only used by SGD)", Name = "InitialLearningRate", ShortName = "ilr,lr,InitLearningRate")]
[TGUI(Label = "Initial Learning Rate (for SGD)")]
Expand Down Expand Up @@ -2171,7 +2171,7 @@ private protected override void CheckLabel(RoleMappedData examples, out int weig

/// <summary>
/// The <see cref="IEstimator{TTransformer}"/> for training logistic regression using a parallel stochastic gradient method.
/// The trained model is <a href='tmpurl_calib'>calibrated</a> and can produce probability by feeding the output value of the
/// The trained model is <a href='https://en.wikipedia.org/wiki/Calibration_(statistics)'>calibrated</a> and can produce probability by feeding the output value of the
/// linear function to a <see cref="PlattCalibrator"/>.
/// </summary>
/// <remarks>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ public abstract class SdcaMulticlassTrainerBase<TModel> : SdcaTrainerBase<SdcaMu
public class MulticlassOptions : OptionsBase
{
/// <summary>
/// The custom <a href="tmpurl_loss">loss</a>.
/// The custom <a href="https://en.wikipedia.org/wiki/Loss_function">loss</a>.
/// </summary>
/// <value>
/// If unspecified, <see cref="LogLoss"/> will be used.
Expand Down
4 changes: 2 additions & 2 deletions src/Microsoft.ML.StandardTrainers/Standard/SdcaRegression.cs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ public sealed class SdcaRegressionTrainer : SdcaTrainerBase<SdcaRegressionTraine
public sealed class Options : OptionsBase
{
/// <summary>
/// A custom <a href="tmpurl_loss">loss</a>.
/// A custom <a href="https://en.wikipedia.org/wiki/Loss_function">loss</a>.
/// </summary>
/// <value>
/// Defaults to <see cref="SquaredLoss"/>
Expand All @@ -47,7 +47,7 @@ public sealed class Options : OptionsBase
internal ISupportSdcaRegressionLossFactory LossFunctionFactory = new SquaredLossFactory();

/// <summary>
/// A custom <a href="tmpurl_loss">loss</a>.
/// A custom <a href="https://en.wikipedia.org/wiki/Loss_function">loss</a>.
/// </summary>
/// <value>
/// Defaults to <see cref="SquaredLoss"/>
Expand Down
Loading