disable ols #286
Changes from 1 commit
@@ -562,18 +562,6 @@ public void Add(Microsoft.ML.Trainers.OnlineGradientDescentRegressor input, Micr
     _jsonNodes.Add(Serialize("Trainers.OnlineGradientDescentRegressor", input, output));
 }

-public Microsoft.ML.Trainers.OrdinaryLeastSquaresRegressor.Output Add(Microsoft.ML.Trainers.OrdinaryLeastSquaresRegressor input)
-{
-    var output = new Microsoft.ML.Trainers.OrdinaryLeastSquaresRegressor.Output();
-    Add(input, output);
-    return output;
-}
-
-public void Add(Microsoft.ML.Trainers.OrdinaryLeastSquaresRegressor input, Microsoft.ML.Trainers.OrdinaryLeastSquaresRegressor.Output output)
-{
-    _jsonNodes.Add(Serialize("Trainers.OrdinaryLeastSquaresRegressor", input, output));
-}
-
 public Microsoft.ML.Trainers.PcaAnomalyDetector.Output Add(Microsoft.ML.Trainers.PcaAnomalyDetector input)
 {
     var output = new Microsoft.ML.Trainers.PcaAnomalyDetector.Output();
@@ -6173,7 +6161,7 @@ public enum KMeansPlusPlusTrainerInitAlgorithm
 /// <summary>
 /// K-means is a popular clustering algorithm. With K-means, the data is clustered into a specified number of clusters in order to minimize the within-cluster sum of squares. K-means++ improves upon K-means by using a better method for choosing the initial cluster centers.
 /// </summary>
-public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInput, Microsoft.ML.ILearningPipelineItem
+public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.EntryPoints.CommonInputs.IUnsupervisedTrainerWithWeight, Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInput, Microsoft.ML.ILearningPipelineItem
 {

Review comment: Is this intentionally included in this PR?

Reply: Yes and no.
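Aside for readers of the class summary above: the "within-cluster sum of squares" is the standard K-means objective, and K-means++ only changes how the initial centers are sampled. In standard notation (mine, not taken from the generated file):

```latex
% K-means objective: partition the data into clusters C_1..C_k with means \mu_i
% so as to minimize the within-cluster sum of squares.
\min_{C_1,\dots,C_k} \sum_{i=1}^{k} \sum_{x \in C_i} \lVert x - \mu_i \rVert^2
% K-means++ seeding: each subsequent center is drawn with probability proportional
% to D(x)^2, the squared distance from x to the nearest center already chosen.
P(\text{pick } x) = \frac{D(x)^2}{\sum_{x'} D(x')^2}
```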
@@ -6208,6 +6196,11 @@ public sealed partial class KMeansPlusPlusClusterer : Microsoft.ML.Runtime.Entry
 /// </summary>
 public int? NumThreads { get; set; }

+/// <summary>
+/// Column to use for example weight
+/// </summary>
+public Microsoft.ML.Runtime.EntryPoints.Optional<string> WeightColumn { get; set; }
+
 /// <summary>
 /// The data to be used for training
 /// </summary>
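The hunk above surfaces a WeightColumn on the clusterer, i.e. per-example weights. Purely to illustrate what such a weight means for the clustering objective (a self-contained sketch, not ML.NET code; the helper name and array layout are my own):

```csharp
// Illustrative helper, not part of the PR: weighted within-cluster sum of squares.
// points[i] is example i, assignments[i] its cluster index, weights[i] the value the
// new WeightColumn would supply for it, and centers[c] the mean of cluster c.
static double WeightedWcss(double[][] points, int[] assignments, double[] weights, double[][] centers)
{
    double total = 0.0;
    for (int i = 0; i < points.Length; i++)
    {
        double[] center = centers[assignments[i]];
        double squaredDistance = 0.0;
        for (int d = 0; d < points[i].Length; d++)
        {
            double diff = points[i][d] - center[d];
            squaredDistance += diff * diff;
        }
        // An unweighted run is the special case weights[i] == 1 for every example.
        total += weights[i] * squaredDistance;
    }
    return total;
}
```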
@@ -6929,102 +6922,13 @@ public OnlineGradientDescentRegressorPipelineStep(Output output)
     }
 }

-namespace Trainers
-{
-
-    /// <summary>
-    /// Train an OLS regression model.
-    /// </summary>
-    public sealed partial class OrdinaryLeastSquaresRegressor : Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInputWithWeight, Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInputWithLabel, Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInput, Microsoft.ML.ILearningPipelineItem
-    {
-
-        /// <summary>
-        /// L2 regularization weight
-        /// </summary>
-        [TlcModule.SweepableDiscreteParamAttribute("L2Weight", new object[]{1E-06f, 0.1f, 1f})]
-        public float L2Weight { get; set; } = 1E-06f;
-
-        /// <summary>
-        /// Whether to calculate per parameter significance statistics
-        /// </summary>
-        public bool PerParameterSignificance { get; set; } = true;
-
-        /// <summary>
-        /// Column to use for example weight
-        /// </summary>
-        public Microsoft.ML.Runtime.EntryPoints.Optional<string> WeightColumn { get; set; }
-
-        /// <summary>
-        /// Column to use for labels
-        /// </summary>
-        public string LabelColumn { get; set; } = "Label";
-
-        /// <summary>
-        /// The data to be used for training
-        /// </summary>
-        public Var<Microsoft.ML.Runtime.Data.IDataView> TrainingData { get; set; } = new Var<Microsoft.ML.Runtime.Data.IDataView>();
-
-        /// <summary>
-        /// Column to use for features
-        /// </summary>
-        public string FeatureColumn { get; set; } = "Features";
-
-        /// <summary>
-        /// Normalize option for the feature column
-        /// </summary>
-        public Models.NormalizeOption NormalizeFeatures { get; set; } = Models.NormalizeOption.Auto;
-
-        /// <summary>
-        /// Whether learner should cache input training data
-        /// </summary>
-        public Models.CachingOptions Caching { get; set; } = Models.CachingOptions.Auto;
-
-
-        public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IRegressionOutput, Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITrainerOutput
-        {
-            /// <summary>
-            /// The trained model
-            /// </summary>
-            public Var<Microsoft.ML.Runtime.EntryPoints.IPredictorModel> PredictorModel { get; set; } = new Var<Microsoft.ML.Runtime.EntryPoints.IPredictorModel>();
-
-        }
-        public Var<IDataView> GetInputData() => TrainingData;
-
-        public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment)
-        {
-            if (previousStep != null)
-            {
-                if (!(previousStep is ILearningPipelineDataStep dataStep))
-                {
-                    throw new InvalidOperationException($"{ nameof(OrdinaryLeastSquaresRegressor)} only supports an { nameof(ILearningPipelineDataStep)} as an input.");
-                }
-
-                TrainingData = dataStep.Data;
-            }
-            Output output = experiment.Add(this);
-            return new OrdinaryLeastSquaresRegressorPipelineStep(output);
-        }
-
-        private class OrdinaryLeastSquaresRegressorPipelineStep : ILearningPipelinePredictorStep
-        {
-            public OrdinaryLeastSquaresRegressorPipelineStep(Output output)
-            {
-                Model = output.PredictorModel;
-            }
-
-            public Var<IPredictorModel> Model { get; }
-        }
-    }
-}
-
 namespace Trainers
 {

     /// <summary>
     /// Train an PCA Anomaly model.
     /// </summary>
-    public sealed partial class PcaAnomalyDetector : Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInput, Microsoft.ML.ILearningPipelineItem
+    public sealed partial class PcaAnomalyDetector : Microsoft.ML.Runtime.EntryPoints.CommonInputs.IUnsupervisedTrainerWithWeight, Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInput, Microsoft.ML.ILearningPipelineItem
     {

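For context on the block removed above (my summary, not text from the PR): ordinary least squares has a closed-form solution that requires a dense linear solve, which is presumably the "underlying math library" dependency discussed in the comments below; L2Weight plays the role of the ridge penalty lambda, up to the library's exact scaling. In standard notation:

```latex
% Ridge-regularized least squares (standard form; notation is mine, not the library's):
\min_{w}\; \lVert X w - y \rVert_2^2 + \lambda \lVert w \rVert_2^2
% Closed form via the normal equations; the symmetric solve below is the step
% that needs an underlying linear algebra library:
\hat{w} = \left(X^{\top} X + \lambda I\right)^{-1} X^{\top} y
```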
Review comment: Rather than just removing the entry-point, could we temporarily remove the entire file from this repo, since none of it is useful now, and only reintroduce it once we decide exactly what we're going to do about the underlying math library?

Reply: @glebuk I don't mind deleting it, but if we plan to enable it again one day, maybe it makes more sense to keep it here instead of shuffling it around.

Reply: Very well. @glebk is the one whose opinion on this matter is most relevant. If he has reasoned through the implications (e.g., everything that uses entry-points will no longer have access to this) and has concluded there are no reasons for concern, then I have no concerns either.