Skip to content

Converted LpNorm, GcNorm and Whitening transforms into transformers/estimators… #961

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Sep 21, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 6 additions & 6 deletions src/Microsoft.ML.Data/DataLoadSave/TransformWrapper.cs
Original file line number Diff line number Diff line change
Expand Up @@ -110,24 +110,24 @@ public TransformWrapper(IHostEnvironment env, ModelLoadContext ctx)
/// <summary>
/// Estimator for trained wrapped transformers.
/// </summary>
internal abstract class TrainedWrapperEstimatorBase : IEstimator<TransformWrapper>
public abstract class TrainedWrapperEstimatorBase : IEstimator<TransformWrapper>
{
private readonly IHost _host;
protected readonly IHost Host;

protected TrainedWrapperEstimatorBase(IHost host)
{
Contracts.CheckValue(host, nameof(host));
_host = host;
Host = host;
}

public abstract TransformWrapper Fit(IDataView input);

public SchemaShape GetOutputSchema(SchemaShape inputSchema)
{
_host.CheckValue(inputSchema, nameof(inputSchema));
Host.CheckValue(inputSchema, nameof(inputSchema));

var fakeSchema = new FakeSchema(_host, inputSchema);
var transformer = Fit(new EmptyDataView(_host, fakeSchema));
var fakeSchema = new FakeSchema(Host, inputSchema);
var transformer = Fit(new EmptyDataView(Host, fakeSchema));
return SchemaShape.Create(transformer.GetOutputSchema(fakeSchema));
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/Microsoft.ML.Transforms/GcnTransform.cs
Original file line number Diff line number Diff line change
Expand Up @@ -311,7 +311,7 @@ public LpNormNormalizerTransform(IHostEnvironment env, GcnArguments args, IDataV
/// <param name="input">Input <see cref="IDataView"/>. This is the output from previous transform or loader.</param>
/// <param name="name">Name of the output column.</param>
/// <param name="source">Name of the column to be transformed. If this is null '<paramref name="name"/>' will be used.</param>
/// /// <param name="normKind">The norm to use to normalize each sample.</param>
/// <param name="normKind">The norm to use to normalize each sample.</param>
/// <param name="subMean">Subtract mean from each value before normalizing.</param>
public static IDataTransform CreateLpNormNormalizer(IHostEnvironment env,
IDataView input,
Expand Down
1 change: 1 addition & 0 deletions src/Microsoft.ML.Transforms/Microsoft.ML.Transforms.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
<ProjectReference Include="..\Microsoft.ML.Api\Microsoft.ML.Api.csproj" />
<ProjectReference Include="..\Microsoft.ML.Core\Microsoft.ML.Core.csproj" />
<ProjectReference Include="..\Microsoft.ML.Data\Microsoft.ML.Data.csproj" />
<PackageReference Include="MlNetMklDeps" Version="$(MlNetMklDepsPackageVersion)" />
</ItemGroup>

<ItemGroup>
Expand Down
11 changes: 1 addition & 10 deletions src/Microsoft.ML.Transforms/WhiteningTransform.cs
Original file line number Diff line number Diff line change
Expand Up @@ -34,16 +34,7 @@ public enum WhiteningKind
Zca
}

/// <summary>
/// Implements PCA (Principal Component Analysis) and ZCA (Zero phase Component Analysis) whitening.
/// The whitening process consists of 2 steps:
/// 1. Decorrelation of the input data. Input data is assumed to have zero mean.
/// 2. Rescale decorrelated features to have unit variance.
/// That is, PCA whitening is essentially just a PCA + rescale.
/// ZCA whitening tries to make resulting data to look more like input data by rotating it back to the
/// original input space.
/// More information: <a href="http://ufldl.stanford.edu/wiki/index.php/Whitening">http://ufldl.stanford.edu/wiki/index.php/Whitening</a>
/// </summary>
/// <include file='doc.xml' path='doc/members/member[@name="Whitening"]/*'/>
public sealed class WhiteningTransform : OneToOneTransformBase
{
private static class Defaults
Expand Down
221 changes: 221 additions & 0 deletions src/Microsoft.ML.Transforms/WrappedGcnTransformers.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,221 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.ML.Core.Data;
using Microsoft.ML.Data.StaticPipe.Runtime;
using Microsoft.ML.Runtime;
using Microsoft.ML.Runtime.Data;
using System.Collections.Generic;
using System.Linq;
using static Microsoft.ML.Runtime.Data.LpNormNormalizerTransform;

namespace Microsoft.ML.Transforms
{
/// <include file='doc.xml' path='doc/members/member[@name="LpNormalize"]/*'/>
public sealed class LpNormalizer : TrivialWrapperEstimator
{
    /// <include file='doc.xml' path='doc/members/member[@name="LpNormalize"]/*'/>
    /// <param name="env">The environment.</param>
    /// <param name="inputColumn">The column containing the vector to be normalized.</param>
    /// <param name="outputColumn">The column containing the normalized vector. Null means <paramref name="inputColumn"/> is replaced.</param>
    /// <param name="normKind">Type of norm to use to normalize each sample.</param>
    /// <param name="subMean">Subtract mean from each value before normalizing.</param>
    public LpNormalizer(IHostEnvironment env, string inputColumn, string outputColumn = null, NormalizerKind normKind = NormalizerKind.L2Norm, bool subMean = false)
        : this(env, new[] { (inputColumn, outputColumn ?? inputColumn) }, normKind, subMean)
    {
    }

    /// <include file='doc.xml' path='doc/members/member[@name="LpNormalize"]/*'/>
    /// <param name="env">The environment.</param>
    /// <param name="columns">Pairs of input and output columns to normalize.</param>
    /// <param name="normKind">Type of norm to use to normalize each sample.</param>
    /// <param name="subMean">Subtract mean from each value before normalizing.</param>
    public LpNormalizer(IHostEnvironment env, (string input, string output)[] columns, NormalizerKind normKind = NormalizerKind.L2Norm, bool subMean = false)
        : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(LpNormalizer)), MakeTransformer(env, columns, normKind, subMean))
    {
    }

    // Validates the column pairs and builds the wrapped LpNormNormalizerTransform
    // over an empty data view carrying the expected (vector of R4) input schema.
    private static TransformWrapper MakeTransformer(IHostEnvironment env, (string input, string output)[] columns, NormalizerKind normKind, bool subMean)
    {
        Contracts.AssertValue(env);
        env.CheckNonEmpty(columns, nameof(columns));
        foreach (var (input, output) in columns)
        {
            env.CheckValue(input, nameof(input));
            // BUGFIX: was nameof(input) — a null output column blamed the input parameter in the error message.
            env.CheckValue(output, nameof(output));
        }

        var args = new LpNormNormalizerTransform.Arguments
        {
            Column = columns.Select(x => new LpNormNormalizerTransform.Column { Source = x.input, Name = x.output }).ToArray(),
            SubMean = subMean,
            NormKind = normKind
        };

        // Create a valid instance of data. The transform only needs the schema, not actual rows.
        var schema = new SimpleSchema(env, columns.Select(x => new KeyValuePair<string, ColumnType>(x.input, new VectorType(NumberType.R4))).ToArray());
        var emptyData = new EmptyDataView(env, schema);

        return new TransformWrapper(env, new LpNormNormalizerTransform(env, args, emptyData));
    }
}

/// <include file='doc.xml' path='doc/members/member[@name="GcNormalize"]/*'/>
public sealed class GlobalContrastNormalizer : TrivialWrapperEstimator
{
    /// <include file='doc.xml' path='doc/members/member[@name="GcNormalize"]/*'/>
    /// <param name="env">The environment.</param>
    /// <param name="inputColumn">The column containing the vector to be normalized.</param>
    /// <param name="outputColumn">The column containing the normalized vector. Null means <paramref name="inputColumn"/> is replaced.</param>
    /// <param name="subMean">Subtract mean from each value before normalizing.</param>
    /// <param name="useStdDev">Normalize by standard deviation rather than L2 norm.</param>
    /// <param name="scale">Scale features by this value.</param>
    public GlobalContrastNormalizer(IHostEnvironment env, string inputColumn, string outputColumn = null, bool subMean = true, bool useStdDev = false, float scale = 1)
        : this(env, new[] { (inputColumn, outputColumn ?? inputColumn) }, subMean, useStdDev, scale)
    {
    }

    /// <include file='doc.xml' path='doc/members/member[@name="GcNormalize"]/*'/>
    /// <param name="env">The environment.</param>
    /// <param name="columns">Pairs of input and output columns to normalize.</param>
    /// <param name="subMean">Subtract mean from each value before normalizing.</param>
    /// <param name="useStdDev">Normalize by standard deviation rather than L2 norm.</param>
    /// <param name="scale">Scale features by this value.</param>
    public GlobalContrastNormalizer(IHostEnvironment env, (string input, string output)[] columns, bool subMean = true, bool useStdDev = false, float scale = 1)
        : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(GlobalContrastNormalizer)), MakeTransformer(env, columns, subMean, useStdDev, scale))
    {
    }

    // Validates the column pairs and builds the wrapped LpNormNormalizerTransform (GCN flavor)
    // over an empty data view carrying the expected (vector of R4) input schema.
    private static TransformWrapper MakeTransformer(IHostEnvironment env, (string input, string output)[] columns, bool subMean, bool useStdDev, float scale)
    {
        Contracts.AssertValue(env);
        env.CheckNonEmpty(columns, nameof(columns));
        foreach (var (input, output) in columns)
        {
            env.CheckValue(input, nameof(input));
            // BUGFIX: was nameof(input) — a null output column blamed the input parameter in the error message.
            env.CheckValue(output, nameof(output));
        }

        var args = new LpNormNormalizerTransform.GcnArguments
        {
            Column = columns.Select(x => new LpNormNormalizerTransform.GcnColumn { Source = x.input, Name = x.output }).ToArray(),
            SubMean = subMean,
            UseStdDev = useStdDev,
            Scale = scale
        };

        // Create a valid instance of data. The transform only needs the schema, not actual rows.
        var schema = new SimpleSchema(env, columns.Select(x => new KeyValuePair<string, ColumnType>(x.input, new VectorType(NumberType.R4))).ToArray());
        var emptyData = new EmptyDataView(env, schema);

        return new TransformWrapper(env, new LpNormNormalizerTransform(env, args, emptyData));
    }
}

/// <summary>
/// Extensions for statically typed LpNormalizer estimator.
/// </summary>
public static class LpNormNormalizerExtensions
{
    // Output column that remembers its source so the reconciler can pair input/output names.
    private sealed class OutPipelineColumn : Vector<float>
    {
        public readonly Vector<float> Input;

        public OutPipelineColumn(Vector<float> input, NormalizerKind normKind, bool subMean)
            : base(new Reconciler(normKind, subMean), input)
        {
            Input = input;
        }
    }

    // Turns the declared pipeline columns into a single LpNormalizer estimator.
    private sealed class Reconciler : EstimatorReconciler
    {
        private readonly NormalizerKind _kind;
        private readonly bool _subtractMean;

        public Reconciler(NormalizerKind normKind, bool subMean)
        {
            _kind = normKind;
            _subtractMean = subMean;
        }

        public override IEstimator<ITransformer> Reconcile(IHostEnvironment env,
            PipelineColumn[] toOutput,
            IReadOnlyDictionary<PipelineColumn, string> inputNames,
            IReadOnlyDictionary<PipelineColumn, string> outputNames,
            IReadOnlyCollection<string> usedNames)
        {
            Contracts.Assert(toOutput.Length == 1);

            var columnPairs = toOutput
                .Select(col => (input: inputNames[((OutPipelineColumn)col).Input], output: outputNames[col]))
                .ToArray();

            return new LpNormalizer(env, columnPairs, _kind, _subtractMean);
        }
    }

    /// <include file='doc.xml' path='doc/members/member[@name="LpNormalize"]/*'/>
    /// <param name="input">The column to apply to.</param>
    /// <param name="normKind">Type of norm to use to normalize each sample.</param>
    /// <param name="subMean">Subtract mean from each value before normalizing.</param>
    public static Vector<float> LpNormalize(this Vector<float> input, NormalizerKind normKind = NormalizerKind.L2Norm, bool subMean = false)
        => new OutPipelineColumn(input, normKind, subMean);
}

/// <summary>
/// Extensions for statically typed GcNormalizer estimator.
/// </summary>
public static class GcNormalizerExtensions
{
    // Output column that remembers its source so the reconciler can pair input/output names.
    private sealed class OutPipelineColumn : Vector<float>
    {
        public readonly Vector<float> Input;

        public OutPipelineColumn(Vector<float> input, bool subMean, bool useStdDev, float scale)
            : base(new Reconciler(subMean, useStdDev, scale), input)
        {
            Input = input;
        }
    }

    // Turns the declared pipeline columns into a single GlobalContrastNormalizer estimator.
    private sealed class Reconciler : EstimatorReconciler
    {
        private readonly bool _subtractMean;
        private readonly bool _byStdDev;
        private readonly float _scaleFactor;

        public Reconciler(bool subMean, bool useStdDev, float scale)
        {
            _subtractMean = subMean;
            _byStdDev = useStdDev;
            _scaleFactor = scale;
        }

        public override IEstimator<ITransformer> Reconcile(IHostEnvironment env,
            PipelineColumn[] toOutput,
            IReadOnlyDictionary<PipelineColumn, string> inputNames,
            IReadOnlyDictionary<PipelineColumn, string> outputNames,
            IReadOnlyCollection<string> usedNames)
        {
            Contracts.Assert(toOutput.Length == 1);

            var columnPairs = toOutput
                .Select(col => (input: inputNames[((OutPipelineColumn)col).Input], output: outputNames[col]))
                .ToArray();

            return new GlobalContrastNormalizer(env, columnPairs, _subtractMean, _byStdDev, _scaleFactor);
        }
    }

    /// <include file='doc.xml' path='doc/members/member[@name="GcNormalize"]/*'/>
    /// <param name="input">The column to apply to.</param>
    /// <param name="subMean">Subtract mean from each value before normalizing.</param>
    /// <param name="useStdDev">Normalize by standard deviation rather than L2 norm.</param>
    /// <param name="scale">Scale features by this value.</param>
    public static Vector<float> GlobalContrastNormalize(this Vector<float> input,
        bool subMean = true,
        bool useStdDev = false,
        float scale = 1)
        => new OutPipelineColumn(input, subMean, useStdDev, scale);
}
}
Loading