diff --git a/build/Dependencies.props b/build/Dependencies.props index 49e2d5ce87..23763ed7fb 100644 --- a/build/Dependencies.props +++ b/build/Dependencies.props @@ -15,7 +15,7 @@ 3.5.1 2.2.1.1 - 0.1.5 + 0.2.0 0.0.0.7 2.1.3 4.5.0 diff --git a/pkg/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.nupkgproj b/pkg/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.nupkgproj index 27c03c1848..b817e809d1 100644 --- a/pkg/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.nupkgproj +++ b/pkg/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.nupkgproj @@ -7,7 +7,7 @@ - + diff --git a/src/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.csproj b/src/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.csproj index 81f014d392..ce2ac23746 100644 --- a/src/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.csproj +++ b/src/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.csproj @@ -9,7 +9,7 @@ - + diff --git a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs index 79d7894db3..4141e46d05 100644 --- a/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs +++ b/src/Microsoft.ML.OnnxTransform/OnnxTransform.cs @@ -44,16 +44,19 @@ namespace Microsoft.ML.Transforms /// /// ///

Supports inferencing of models in ONNX 1.2 and 1.3 format (opset 7, 8 and 9), using the - /// Microsoft.ML.OnnxRuntime.Gpu library. + /// Microsoft.ML.OnnxRuntime library. ///

- ///

Models are scored on CPU by default. If GPU execution is needed (optional), install - /// CUDA 10.0 Toolkit + ///

Models are scored on CPU by default. If GPU execution is needed (optional), use the + /// NuGet package available at + /// Microsoft.ML.OnnxRuntime.Gpu + /// and download + /// CUDA 9.1 Toolkit /// and - /// cuDNN - /// , and set the parameter 'gpuDeviceId' to a valid non-negative integer. Typical device ID values are 0 or 1. + /// cuDNN. + /// Set parameter 'gpuDeviceId' to a valid non-negative integer. Typical device ID values are 0 or 1. ///

///

The inputs and outputs of the ONNX models must be Tensor type. Sequence and Maps are not yet supported.

- ///

OnnxRuntime currently works on Windows 64-bit platforms only. Linux and OSX to be supported soon.

+ ///

OnnxRuntime currently works on Windows and Ubuntu 16.04 Linux 64-bit platforms. Mac OS to be supported soon.

///

Visit https://github.com/onnx/models to see a list of readily available models to get started with.

///

Refer to http://onnx.ai' for more information about ONNX.

///
@@ -70,10 +73,10 @@ public sealed class Arguments : TransformInputBase [Argument(ArgumentType.Multiple | ArgumentType.Required, HelpText = "Name of the output column.", SortOrder = 2)] public string[] OutputColumns; - [Argument(ArgumentType.AtMostOnce | ArgumentType.Required, HelpText = "GPU device id to run on (e.g. 0,1,..). Null for CPU. Requires CUDA 10.0.", SortOrder = 3)] + [Argument(ArgumentType.AtMostOnce, HelpText = "GPU device id to run on (e.g. 0,1,..). Null for CPU. Requires CUDA 9.1.", SortOrder = 3)] public int? GpuDeviceId = null; - [Argument(ArgumentType.AtMostOnce | ArgumentType.Required, HelpText = "If true, resumes execution on CPU upon GPU error. If false, will raise the GPU execption.", SortOrder = 4)] + [Argument(ArgumentType.AtMostOnce, HelpText = "If true, resumes execution on CPU upon GPU error. If false, will raise the GPU exception.", SortOrder = 4)] public bool FallbackToCpu = false; } @@ -581,5 +584,4 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema) return new SchemaShape(resultDic.Values); } } -} - +} \ No newline at end of file diff --git a/src/Microsoft.ML.OnnxTransform/OnnxUtils.cs b/src/Microsoft.ML.OnnxTransform/OnnxUtils.cs index 96562b2f6b..e5e5d489a7 100644 --- a/src/Microsoft.ML.OnnxTransform/OnnxUtils.cs +++ b/src/Microsoft.ML.OnnxTransform/OnnxUtils.cs @@ -82,11 +82,12 @@ public OnnxModel(string modelFile, int? 
gpuDeviceId = null, bool fallbackToCpu = { _modelFile = modelFile; - if (gpuDeviceId.HasValue) + if (gpuDeviceId != null) { try { - _session = new InferenceSession(modelFile, SessionOptions.MakeSessionOptionWithCudaProvider(gpuDeviceId.Value)); + _session = new InferenceSession(modelFile, + SessionOptions.MakeSessionOptionWithCudaProvider(gpuDeviceId.Value)); } catch (OnnxRuntimeException) { diff --git a/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs b/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs index 722b634547..70ec3dffc8 100644 --- a/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs +++ b/test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs @@ -49,7 +49,7 @@ private float[] GetSampleArrayData() { var samplevector = new float[inputSize]; for (int i = 0; i < inputSize; i++) - samplevector[i] = (i / ((float) inputSize)); + samplevector[i] = (i / ((float)inputSize)); return samplevector; } @@ -61,9 +61,11 @@ public DnnImageFeaturizerTests(ITestOutputHelper helper) : base(helper) [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] void TestDnnImageFeaturizer() { + // Onnxruntime supports Ubuntu 16.04, but not CentOS + // Do not execute on CentOS image if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) return; - + var samplevector = GetSampleArrayData(); @@ -112,7 +114,7 @@ public void OnnxStatic() imagePath: ctx.LoadText(0), name: ctx.LoadText(1))) .Read(dataFile); - + var pipe = data.MakeNewEstimator() .Append(row => ( row.name, @@ -144,7 +146,7 @@ public void TestOldSavingAndLoading() { if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) return; - + var samplevector = GetSampleArrayData(); @@ -158,7 +160,7 @@ public void TestOldSavingAndLoading() var inputNames = "data_0"; var outputNames = "output_1"; - var est = new DnnImageFeaturizerEstimator(Env, outputNames, m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn ,m.InputColumn), inputNames); + var est = new 
DnnImageFeaturizerEstimator(Env, outputNames, m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn, m.InputColumn), inputNames); var transformer = est.Fit(dataView); var result = transformer.Transform(dataView); var resultRoles = new RoleMappedData(result); diff --git a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs index e06222ac2a..17a83041db 100644 --- a/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs +++ b/test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs @@ -21,9 +21,26 @@ namespace Microsoft.ML.Tests { - public class OnnxTransformTests : TestDataPipeBase + + /// + /// A Fact attribute for Onnx unit tests. Onnxruntime only supported + /// on Windows, Linux (Ubuntu 16.04) and 64-bit platforms. + /// + public class OnnxFact : FactAttribute { + public OnnxFact() + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux) || + RuntimeInformation.IsOSPlatform(OSPlatform.OSX) || + !Environment.Is64BitProcess) + { + Skip = "Require 64 bit and Windows or Linux (Ubuntu 16.04)."; + } + } + } + public class OnnxTransformTests : TestDataPipeBase + { private const int inputSize = 150528; private class TestData @@ -83,16 +100,11 @@ public OnnxTransformTests(ITestOutputHelper output) : base(output) { } - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 fails with "An attempt was made to load a program with an incorrect format." + [OnnxFact] void TestSimpleCase() { - if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - return; - var modelFile = "squeezenet/00000001/model.onnx"; - var samplevector = GetSampleArrayData(); - var dataView = ML.Data.ReadFromEnumerable( new TestData[] { new TestData() @@ -126,7 +138,8 @@ void TestSimpleCase() catch (InvalidOperationException) { } } - [ConditionalTheory(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 fails with "An attempt was made to load a program with an incorrect format." 
+ // x86 not supported + [ConditionalTheory(typeof(Environment), nameof(Environment.Is64BitProcess))] [InlineData(null, false)] [InlineData(null, true)] void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu) @@ -135,7 +148,6 @@ void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu) return; var modelFile = "squeezenet/00000001/model.onnx"; - var samplevector = GetSampleArrayData(); var dataView = ML.Data.ReadFromEnumerable( @@ -187,13 +199,10 @@ void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu) } } - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 fails with "An attempt was made to load a program with an incorrect format." + [OnnxFact] public void OnnxStatic() { - if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - return; - - var modelFile = "squeezenet/00000001/model.onnx"; + var modelFile = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet", "00000001", "model.onnx"); var env = new MLContext(conc: 1); var imageHeight = 224; @@ -233,23 +242,17 @@ public void OnnxStatic() } } - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline + [OnnxFact] void TestCommandLine() { - if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - return; - var env = new MLContext(); - var x = Maml.Main(new[] { @"showschema loader=Text{col=data_0:R4:0-150527} xf=Onnx{InputColumns={data_0} OutputColumns={softmaxout_1} model={squeezenet/00000001/model.onnx} GpuDeviceId=0 FallbackToCpu=+}" }); + var x = Maml.Main(new[] { @"showschema loader=Text{col=data_0:R4:0-150527} xf=Onnx{InputColumns={data_0} OutputColumns={softmaxout_1} model={squeezenet/00000001/model.onnx}}" }); Assert.Equal(0, x); } - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline + [OnnxFact] public void OnnxModelScenario() { - if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - return; - var modelFile = 
"squeezenet/00000001/model.onnx"; using (var env = new ConsoleEnvironment(seed: 1, conc: 1)) { @@ -280,13 +283,10 @@ public void OnnxModelScenario() } } - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline + [OnnxFact] public void OnnxModelMultiInput() { - if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - return; - - var modelFile = @"twoinput\twoinput.onnx"; + var modelFile = Path.Combine(Directory.GetCurrentDirectory(), "twoinput", "twoinput.onnx"); using (var env = new ConsoleEnvironment(seed: 1, conc: 1)) { var samplevector = GetSampleArrayData(); @@ -323,12 +323,9 @@ public void OnnxModelMultiInput() } } - [ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline + [OnnxFact] public void TestUnknownDimensions() { - if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - return; - // model contains -1 in input and output shape dimensions // model: input dims = [-1, 3], output argmax dims = [-1] var modelFile = @"unknowndimensions/test_unknowndimensions_float.onnx"; @@ -350,4 +347,3 @@ public void TestUnknownDimensions() } } } -