Skip to content

Commit 20b758a

Browse files
committed
Internalization: reduce the accessibility of public surface area (e.g. `protected` → `private protected`, `internal`/`public` → `private`/`internal`) across the standard learners.
1 parent ec3e626 commit 20b758a

15 files changed

+87
-86
lines changed

src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineModelParameters.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -290,12 +290,12 @@ public sealed class FieldAwareFactorizationMachinePredictionTransformer : Predic
290290
/// <summary>
291291
/// The name of the feature column used by the prediction transformer.
292292
/// </summary>
293-
internal IReadOnlyList<string> FeatureColumns { get; }
293+
private IReadOnlyList<string> FeatureColumns { get; }
294294

295295
/// <summary>
296296
/// The type of the feature columns.
297297
/// </summary>
298-
internal IReadOnlyList<DataViewType> FeatureColumnTypes { get; }
298+
private IReadOnlyList<DataViewType> FeatureColumnTypes { get; }
299299

300300
private readonly string _thresholdColumn;
301301
private readonly float _threshold;

src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,8 @@ public abstract class LinearModelParameters : ModelParametersBase<float>,
5151
ISingleCanSavePfa,
5252
ISingleCanSaveOnnx
5353
{
54-
protected readonly VBuffer<float> Weight;
54+
[BestFriend]
55+
private protected readonly VBuffer<float> Weight;
5556

5657
// _weightsDense is not persisted and is used for performance when the input instance is sparse.
5758
private VBuffer<float> _weightsDense;
@@ -250,7 +251,7 @@ bool ISingleCanSaveOnnx.SaveAsOnnx(OnnxContext ctx, string[] outputs, string fea
250251
}
251252

252253
// Generate the score from the given values, assuming they have already been normalized.
253-
protected virtual float Score(in VBuffer<float> src)
254+
private protected virtual float Score(in VBuffer<float> src)
254255
{
255256
if (src.IsDense)
256257
{
@@ -711,7 +712,7 @@ private protected override void SaveCore(ModelSaveContext ctx)
711712
ctx.SetVersionInfo(GetVersionInfo());
712713
}
713714

714-
protected override float Score(in VBuffer<float> src)
715+
private protected override float Score(in VBuffer<float> src)
715716
{
716717
return MathUtils.ExpSlow(base.Score(in src));
717718
}

src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs

Lines changed: 32 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -105,28 +105,28 @@ internal static class Defaults
105105

106106
private const string RegisterName = nameof(LbfgsTrainerBase<TArgs, TTransformer, TModel>);
107107

108-
protected int NumFeatures;
109-
protected VBuffer<float> CurrentWeights;
110-
protected long NumGoodRows;
111-
protected Double WeightSum;
112-
protected bool ShowTrainingStats;
108+
private protected int NumFeatures;
109+
private protected VBuffer<float> CurrentWeights;
110+
private protected long NumGoodRows;
111+
private protected Double WeightSum;
112+
private protected bool ShowTrainingStats;
113113

114114
private TModel _srcPredictor;
115115

116-
protected readonly TArgs Args;
117-
protected readonly float L2Weight;
118-
protected readonly float L1Weight;
119-
protected readonly float OptTol;
120-
protected readonly int MemorySize;
121-
protected readonly int MaxIterations;
122-
protected readonly float SgdInitializationTolerance;
123-
protected readonly bool Quiet;
124-
protected readonly float InitWtsDiameter;
125-
protected readonly bool UseThreads;
126-
protected readonly int? NumThreads;
127-
protected readonly bool DenseOptimizer;
128-
protected readonly long MaxNormalizationExamples;
129-
protected readonly bool EnforceNonNegativity;
116+
private protected readonly TArgs Args;
117+
private protected readonly float L2Weight;
118+
private protected readonly float L1Weight;
119+
private protected readonly float OptTol;
120+
private protected readonly int MemorySize;
121+
private protected readonly int MaxIterations;
122+
private protected readonly float SgdInitializationTolerance;
123+
private protected readonly bool Quiet;
124+
private protected readonly float InitWtsDiameter;
125+
private protected readonly bool UseThreads;
126+
private protected readonly int? NumThreads;
127+
private protected readonly bool DenseOptimizer;
128+
private protected readonly long MaxNormalizationExamples;
129+
private protected readonly bool EnforceNonNegativity;
130130

131131
// The training data, when NOT using multiple threads.
132132
private RoleMappedData _data;
@@ -256,9 +256,9 @@ private static TArgs ArgsInit(string featureColumn, SchemaShape.Column labelColu
256256
return args;
257257
}
258258

259-
protected virtual int ClassCount => 1;
260-
protected int BiasCount => ClassCount;
261-
protected int WeightCount => ClassCount * NumFeatures;
259+
private protected virtual int ClassCount => 1;
260+
private protected int BiasCount => ClassCount;
261+
private protected int WeightCount => ClassCount * NumFeatures;
262262
private protected virtual Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Factory cursorFactory,
263263
out VBuffer<float> init, out ITerminationCriterion terminationCriterion)
264264
{
@@ -364,15 +364,15 @@ private protected virtual VBuffer<float> InitializeWeightsSgd(IChannel ch, Float
364364
return result;
365365
}
366366

367-
protected abstract VBuffer<float> InitializeWeightsFromPredictor(TModel srcPredictor);
367+
private protected abstract VBuffer<float> InitializeWeightsFromPredictor(TModel srcPredictor);
368368

369369
private protected abstract void CheckLabel(RoleMappedData data);
370370

371-
protected virtual void PreTrainingProcessInstance(float label, in VBuffer<float> feat, float weight)
371+
private protected virtual void PreTrainingProcessInstance(float label, in VBuffer<float> feat, float weight)
372372
{
373373
}
374374

375-
protected abstract TModel CreatePredictor();
375+
private protected abstract TModel CreatePredictor();
376376

377377
/// <summary>
378378
/// The basic training calls the optimizer
@@ -570,24 +570,24 @@ private protected virtual void TrainCore(IChannel ch, RoleMappedData data)
570570

571571
// Ensure that the bias portion of vec is represented in vec.
572572
// REVIEW: Is this really necessary?
573-
protected void EnsureBiases(ref VBuffer<float> vec)
573+
private protected void EnsureBiases(ref VBuffer<float> vec)
574574
{
575575
// REVIEW: Consider promoting this "densify first n entries" to a general purpose utility,
576576
// if we ever encounter other situations where this becomes useful.
577577
Contracts.Assert(vec.Length == BiasCount + WeightCount);
578578
VBufferUtils.DensifyFirst(ref vec, BiasCount);
579579
}
580580

581-
protected abstract float AccumulateOneGradient(in VBuffer<float> feat, float label, float weight,
581+
private protected abstract float AccumulateOneGradient(in VBuffer<float> feat, float label, float weight,
582582
in VBuffer<float> xDense, ref VBuffer<float> grad, ref float[] scratch);
583583

584584
private protected abstract void ComputeTrainingStatistics(IChannel ch, FloatLabelCursor.Factory cursorFactory, float loss, int numParams);
585585

586-
protected abstract void ProcessPriorDistribution(float label, float weight);
586+
private protected abstract void ProcessPriorDistribution(float label, float weight);
587587
/// <summary>
588588
/// The gradient being used by the optimizer
589589
/// </summary>
590-
protected virtual float DifferentiableFunction(in VBuffer<float> x, ref VBuffer<float> gradient,
590+
private protected virtual float DifferentiableFunction(in VBuffer<float> x, ref VBuffer<float> gradient,
591591
IProgressChannelProvider progress)
592592
{
593593
Contracts.Assert((_numChunks == 0) != (_data == null));
@@ -647,7 +647,7 @@ protected virtual float DifferentiableFunction(in VBuffer<float> x, ref VBuffer<
647647
/// REVIEW: consider getting rid of multithread-targeted members
648648
/// Using TPL, the distinction between Multithreaded and Sequential implementations is unnecessary
649649
/// </remarks>
650-
protected virtual float DifferentiableFunctionMultithreaded(in VBuffer<float> xDense, ref VBuffer<float> gradient, IProgressChannel pch)
650+
private protected virtual float DifferentiableFunctionMultithreaded(in VBuffer<float> xDense, ref VBuffer<float> gradient, IProgressChannel pch)
651651
{
652652
Contracts.Assert(_data == null);
653653
Contracts.Assert(_cursorFactory == null);
@@ -679,7 +679,7 @@ protected virtual float DifferentiableFunctionMultithreaded(in VBuffer<float> xD
679679
return loss;
680680
}
681681

682-
protected float DifferentiableFunctionComputeChunk(int ichk, in VBuffer<float> xDense, ref VBuffer<float> grad, IProgressChannel pch)
682+
private protected float DifferentiableFunctionComputeChunk(int ichk, in VBuffer<float> xDense, ref VBuffer<float> grad, IProgressChannel pch)
683683
{
684684
Contracts.Assert(0 <= ichk && ichk < _numChunks);
685685
Contracts.AssertValueOrNull(pch);
@@ -733,7 +733,7 @@ private protected float DifferentiableFunctionStream(FloatLabelCursor.Factory cu
733733
return (float)loss;
734734
}
735735

736-
protected VBuffer<float> InitializeWeights(IEnumerable<float> weights, IEnumerable<float> biases)
736+
private protected VBuffer<float> InitializeWeights(IEnumerable<float> weights, IEnumerable<float> biases)
737737
{
738738
Contracts.AssertValue(biases);
739739
Contracts.AssertValue(weights);

src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,7 @@ protected override BinaryPredictionTransformer<CalibratedModelParametersBase<Lin
134134
public BinaryPredictionTransformer<CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrator>> Fit(IDataView trainData, IPredictor initialPredictor)
135135
=> TrainTransformer(trainData, initPredictor: initialPredictor);
136136

137-
protected override float AccumulateOneGradient(in VBuffer<float> feat, float label, float weight,
137+
private protected override float AccumulateOneGradient(in VBuffer<float> feat, float label, float weight,
138138
in VBuffer<float> x, ref VBuffer<float> grad, ref float[] scratch)
139139
{
140140
float bias = 0;
@@ -364,7 +364,7 @@ private protected override void ComputeTrainingStatistics(IChannel ch, FloatLabe
364364
}
365365
}
366366

367-
protected override void ProcessPriorDistribution(float label, float weight)
367+
private protected override void ProcessPriorDistribution(float label, float weight)
368368
{
369369
if (label > 0)
370370
_posWeight += weight;
@@ -384,7 +384,7 @@ private protected override Optimizer InitializeOptimizer(IChannel ch, FloatLabel
384384
return opt;
385385
}
386386

387-
protected override VBuffer<float> InitializeWeightsFromPredictor(CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrator> srcPredictor)
387+
private protected override VBuffer<float> InitializeWeightsFromPredictor(CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrator> srcPredictor)
388388
{
389389
Contracts.AssertValue(srcPredictor);
390390

@@ -393,7 +393,7 @@ protected override VBuffer<float> InitializeWeightsFromPredictor(CalibratedModel
393393
return InitializeWeights(pred.Weights, new[] { pred.Bias });
394394
}
395395

396-
protected override CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrator> CreatePredictor()
396+
private protected override CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrator> CreatePredictor()
397397
{
398398
// Logistic regression is naturally calibrated to
399399
// output probabilities when transformed using

src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ public sealed class Options : OptionsBase
6767

6868
private LinearModelStatistics _stats;
6969

70-
protected override int ClassCount => _numClasses;
70+
private protected override int ClassCount => _numClasses;
7171

7272
/// <summary>
7373
/// Initializes a new instance of <see cref="MulticlassLogisticRegression"/>
@@ -184,7 +184,7 @@ private protected override Optimizer InitializeOptimizer(IChannel ch, FloatLabel
184184
return opt;
185185
}
186186

187-
protected override float AccumulateOneGradient(in VBuffer<float> feat, float label, float weight,
187+
private protected override float AccumulateOneGradient(in VBuffer<float> feat, float label, float weight,
188188
in VBuffer<float> x, ref VBuffer<float> grad, ref float[] scores)
189189
{
190190
if (Utils.Size(scores) < _numClasses)
@@ -220,7 +220,7 @@ protected override float AccumulateOneGradient(in VBuffer<float> feat, float lab
220220
return weight * datumLoss;
221221
}
222222

223-
protected override VBuffer<float> InitializeWeightsFromPredictor(MulticlassLogisticRegressionModelParameters srcPredictor)
223+
private protected override VBuffer<float> InitializeWeightsFromPredictor(MulticlassLogisticRegressionModelParameters srcPredictor)
224224
{
225225
Contracts.AssertValue(srcPredictor);
226226
Contracts.Assert(srcPredictor.InputType.GetVectorSize() > 0);
@@ -232,7 +232,7 @@ protected override VBuffer<float> InitializeWeightsFromPredictor(MulticlassLogis
232232
return InitializeWeights(srcPredictor.DenseWeightsEnumerable(), srcPredictor.GetBiases());
233233
}
234234

235-
protected override MulticlassLogisticRegressionModelParameters CreatePredictor()
235+
private protected override MulticlassLogisticRegressionModelParameters CreatePredictor()
236236
{
237237
if (_numClasses < 1)
238238
throw Contracts.Except("Cannot create a multiclass predictor with {0} classes", _numClasses);
@@ -301,7 +301,7 @@ private protected override void ComputeTrainingStatistics(IChannel ch, FloatLabe
301301
_stats = new LinearModelStatistics(Host, NumGoodRows, numParams, deviance, nullDeviance);
302302
}
303303

304-
protected override void ProcessPriorDistribution(float label, float weight)
304+
private protected override void ProcessPriorDistribution(float label, float weight)
305305
{
306306
int iLabel = (int)label;
307307
Contracts.Assert(0 <= iLabel && iLabel < _numClasses);

src/Microsoft.ML.StandardLearners/Standard/ModelStatistics.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ public CoefficientStatistics(string name, float estimate, float stdError, float
5151
/// </summary>
5252
public sealed class LinearModelStatistics : ICanSaveModel
5353
{
54-
public const string LoaderSignature = "LinearModelStats";
54+
internal const string LoaderSignature = "LinearModelStats";
5555

5656
private static VersionInfo GetVersionInfo()
5757
{

src/Microsoft.ML.StandardLearners/Standard/Online/AveragedLinear.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -118,8 +118,8 @@ public abstract class AveragedLinearTrainer<TTransformer, TModel> : OnlineLinear
118118
where TTransformer : ISingleFeaturePredictionTransformer<TModel>
119119
where TModel : class
120120
{
121-
protected readonly AveragedLinearOptions AveragedLinearTrainerOptions;
122-
protected IScalarOutputLoss LossFunction;
121+
private protected readonly AveragedLinearOptions AveragedLinearTrainerOptions;
122+
private protected IScalarOutputLoss LossFunction;
123123

124124
private protected abstract class AveragedTrainStateBase : TrainStateBase
125125
{
@@ -295,7 +295,7 @@ private void IncrementAverageNonLazy()
295295
}
296296
}
297297

298-
protected AveragedLinearTrainer(AveragedLinearOptions options, IHostEnvironment env, string name, SchemaShape.Column label)
298+
private protected AveragedLinearTrainer(AveragedLinearOptions options, IHostEnvironment env, string name, SchemaShape.Column label)
299299
: base(options, env, name, label)
300300
{
301301
Contracts.CheckUserArg(options.LearningRate > 0, nameof(options.LearningRate), UserErrorPositive);

src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -163,7 +163,7 @@ public TrivialFactory(IClassificationLoss loss)
163163

164164
public override PredictionKind PredictionKind => PredictionKind.BinaryClassification;
165165

166-
protected override bool NeedCalibration => true;
166+
private protected override bool NeedCalibration => true;
167167

168168
protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema)
169169
{

src/Microsoft.ML.StandardLearners/Standard/Online/LinearSvm.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -223,7 +223,7 @@ public override LinearBinaryModelParameters CreatePredictor()
223223
}
224224
}
225225

226-
protected override bool NeedCalibration => true;
226+
private protected override bool NeedCalibration => true;
227227

228228
/// <summary>
229229
/// Initializes a new instance of <see cref="LinearSvmTrainer"/>.

src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -70,8 +70,8 @@ public abstract class OnlineLinearTrainer<TTransformer, TModel> : TrainerEstimat
7070
where TTransformer : ISingleFeaturePredictionTransformer<TModel>
7171
where TModel : class
7272
{
73-
protected readonly OnlineLinearOptions OnlineLinearTrainerOptions;
74-
protected readonly string Name;
73+
private protected readonly OnlineLinearOptions OnlineLinearTrainerOptions;
74+
private protected readonly string Name;
7575

7676
/// <summary>
7777
/// An object to hold the mutable updatable state for the online linear trainers. Specific algorithms should subclass
@@ -246,12 +246,12 @@ public virtual float Margin(in VBuffer<float> feat)
246246
private const float _maxWeightScale = 1 << 10; // Exponent ranges 127 to -128, tolerate 10 being cut off that.
247247
private const float _minWeightScale = 1 / _maxWeightScale;
248248

249-
protected const string UserErrorPositive = "must be positive";
250-
protected const string UserErrorNonNegative = "must be non-negative";
249+
private protected const string UserErrorPositive = "must be positive";
250+
private protected const string UserErrorNonNegative = "must be non-negative";
251251

252252
public override TrainerInfo Info { get; }
253253

254-
protected virtual bool NeedCalibration => false;
254+
private protected virtual bool NeedCalibration => false;
255255

256256
private protected OnlineLinearTrainer(OnlineLinearOptions options, IHostEnvironment env, string name, SchemaShape.Column label)
257257
: base(Contracts.CheckRef(env, nameof(env)).Register(name), TrainerUtils.MakeR4VecFeature(options.FeatureColumn), label, TrainerUtils.MakeR4ScalarWeightColumn(options.InitialWeights))

0 commit comments

Comments (0)