Skip to content

Commit 46efb8e

Browse files
authored
OnnxTransform -- Update to OnnxRuntime 0.2.0 (#2085)
* Initial check-in to run unit tests on Linux * Test Linux execution on dev package of onnxruntime * Added fix to onnxruntime nuget prop file to handle Linux dlls correctly * Test package copying all native dlls to output folder * Add rpath to dll * Update to OnnxRuntime 0.2.0 package * Use CPU package * Uncomment device id in unit test * Folded PR comments * Minor fix * Minor change to kick off build * Minor change to kick off build * Remove reference to MyGet, and use NuGet instead * Removed null gpuDeviceId; use AtMostOnce for gpuDeviceId and fallBackToCpu variables * Folded more PR comments * Minor change to kick off build status * Minor change to retry a random build failure * Minor change to kick off build * Minor change to kick off build
1 parent a04dbf8 commit 46efb8e

File tree

7 files changed

+54
-53
lines changed

7 files changed

+54
-53
lines changed

build/Dependencies.props

+1-1
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
<PropertyGroup>
1616
<GoogleProtobufPackageVersion>3.5.1</GoogleProtobufPackageVersion>
1717
<LightGBMPackageVersion>2.2.1.1</LightGBMPackageVersion>
18-
<MicrosoftMLOnnxRuntimeGpuPackageVersion>0.1.5</MicrosoftMLOnnxRuntimeGpuPackageVersion>
18+
<MicrosoftMLOnnxRuntimePackageVersion>0.2.0</MicrosoftMLOnnxRuntimePackageVersion>
1919
<MlNetMklDepsPackageVersion>0.0.0.7</MlNetMklDepsPackageVersion>
2020
<ParquetDotNetPackageVersion>2.1.3</ParquetDotNetPackageVersion>
2121
<SystemDrawingCommonPackageVersion>4.5.0</SystemDrawingCommonPackageVersion>

pkg/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.nupkgproj

+1-1
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
<ItemGroup>
99
<ProjectReference Include="../Microsoft.ML/Microsoft.ML.nupkgproj" />
10-
<PackageReference Include="Microsoft.ML.OnnxRuntime.Gpu" Version="$(MicrosoftMLOnnxRuntimeGpuPackageVersion)"/>
10+
<PackageReference Include="Microsoft.ML.OnnxRuntime" Version="$(MicrosoftMLOnnxRuntimePackageVersion)"/>
1111
</ItemGroup>
1212

1313
</Project>

src/Microsoft.ML.OnnxTransform/Microsoft.ML.OnnxTransform.csproj

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
<ItemGroup>
1010
<ProjectReference Include="..\Microsoft.ML.Core\Microsoft.ML.Core.csproj" />
1111
<ProjectReference Include="..\Microsoft.ML.Data\Microsoft.ML.Data.csproj" />
12-
<PackageReference Include="Microsoft.ML.OnnxRuntime.Gpu" Version="$(MicrosoftMLOnnxRuntimeGpuPackageVersion)" />
12+
<PackageReference Include="Microsoft.ML.OnnxRuntime" Version="$(MicrosoftMLOnnxRuntimePackageVersion)" />
1313
</ItemGroup>
1414

1515
</Project>

src/Microsoft.ML.OnnxTransform/OnnxTransform.cs

+12-10
Original file line numberDiff line numberDiff line change
@@ -44,16 +44,19 @@ namespace Microsoft.ML.Transforms
4444
/// </summary>
4545
/// <remarks>
4646
/// <p>Supports inferencing of models in ONNX 1.2 and 1.3 format (opset 7, 8 and 9), using the
47-
/// <a href='https://www.nuget.org/packages/Microsoft.ML.OnnxRuntime.Gpu/'>Microsoft.ML.OnnxRuntime.Gpu</a> library.
47+
/// <a href='https://www.nuget.org/packages/Microsoft.ML.OnnxRuntime/'>Microsoft.ML.OnnxRuntime</a> library.
4848
/// </p>
49-
/// <p>Models are scored on CPU by default. If GPU execution is needed (optional), install
50-
/// <a href='https://developer.nvidia.com/cuda-downloads'>CUDA 10.0 Toolkit</a>
49+
/// <p>Models are scored on CPU by default. If GPU execution is needed (optional), use the
50+
/// NuGet package available at
51+
/// <a href='https://www.nuget.org/packages/Microsoft.ML.OnnxRuntime.Gpu/'>Microsoft.ML.OnnxRuntime.Gpu</a>
52+
/// and download
53+
/// <a href='https://developer.nvidia.com/cuda-downloads'>CUDA 9.1 Toolkit</a>
5154
/// and
52-
/// <a href='https://developer.nvidia.com/cudnn'>cuDNN</a>
53-
/// , and set the parameter 'gpuDeviceId' to a valid non-negative integer. Typical device ID values are 0 or 1.
55+
/// <a href='https://developer.nvidia.com/cudnn'>cuDNN</a>.
56+
/// Set parameter 'gpuDeviceId' to a valid non-negative integer. Typical device ID values are 0 or 1.
5457
/// </p>
5558
/// <p>The inputs and outputs of the ONNX models must be Tensor type. Sequence and Maps are not yet supported.</p>
56-
/// <p>OnnxRuntime currently works on Windows 64-bit platforms only. Linux and OSX to be supported soon.</p>
59+
/// <p>OnnxRuntime currently works on Windows and Ubuntu 16.04 Linux 64-bit platforms. Mac OS to be supported soon.</p>
5760
/// <p>Visit https://github.com/onnx/models to see a list of readily available models to get started with.</p>
5861
/// <p>Refer to http://onnx.ai' for more information about ONNX.</p>
5962
/// </remarks>
@@ -70,10 +73,10 @@ public sealed class Arguments : TransformInputBase
7073
[Argument(ArgumentType.Multiple | ArgumentType.Required, HelpText = "Name of the output column.", SortOrder = 2)]
7174
public string[] OutputColumns;
7275

73-
[Argument(ArgumentType.AtMostOnce | ArgumentType.Required, HelpText = "GPU device id to run on (e.g. 0,1,..). Null for CPU. Requires CUDA 10.0.", SortOrder = 3)]
76+
[Argument(ArgumentType.AtMostOnce, HelpText = "GPU device id to run on (e.g. 0,1,..). Null for CPU. Requires CUDA 9.1.", SortOrder = 3)]
7477
public int? GpuDeviceId = null;
7578

76-
[Argument(ArgumentType.AtMostOnce | ArgumentType.Required, HelpText = "If true, resumes execution on CPU upon GPU error. If false, will raise the GPU execption.", SortOrder = 4)]
79+
[Argument(ArgumentType.AtMostOnce, HelpText = "If true, resumes execution on CPU upon GPU error. If false, will raise the GPU execption.", SortOrder = 4)]
7780
public bool FallbackToCpu = false;
7881
}
7982

@@ -581,5 +584,4 @@ public override SchemaShape GetOutputSchema(SchemaShape inputSchema)
581584
return new SchemaShape(resultDic.Values);
582585
}
583586
}
584-
}
585-
587+
}

src/Microsoft.ML.OnnxTransform/OnnxUtils.cs

+3-2
Original file line numberDiff line numberDiff line change
@@ -82,11 +82,12 @@ public OnnxModel(string modelFile, int? gpuDeviceId = null, bool fallbackToCpu =
8282
{
8383
_modelFile = modelFile;
8484

85-
if (gpuDeviceId.HasValue)
85+
if (gpuDeviceId != null)
8686
{
8787
try
8888
{
89-
_session = new InferenceSession(modelFile, SessionOptions.MakeSessionOptionWithCudaProvider(gpuDeviceId.Value));
89+
_session = new InferenceSession(modelFile,
90+
SessionOptions.MakeSessionOptionWithCudaProvider(gpuDeviceId.Value));
9091
}
9192
catch (OnnxRuntimeException)
9293
{

test/Microsoft.ML.OnnxTransformTest/DnnImageFeaturizerTest.cs

+7-5
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ private float[] GetSampleArrayData()
4949
{
5050
var samplevector = new float[inputSize];
5151
for (int i = 0; i < inputSize; i++)
52-
samplevector[i] = (i / ((float) inputSize));
52+
samplevector[i] = (i / ((float)inputSize));
5353
return samplevector;
5454
}
5555

@@ -61,9 +61,11 @@ public DnnImageFeaturizerTests(ITestOutputHelper helper) : base(helper)
6161
[ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))]
6262
void TestDnnImageFeaturizer()
6363
{
64+
// Onnxruntime supports Ubuntu 16.04, but not CentOS
65+
// Do not execute on CentOS image
6466
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
6567
return;
66-
68+
6769

6870
var samplevector = GetSampleArrayData();
6971

@@ -112,7 +114,7 @@ public void OnnxStatic()
112114
imagePath: ctx.LoadText(0),
113115
name: ctx.LoadText(1)))
114116
.Read(dataFile);
115-
117+
116118
var pipe = data.MakeNewEstimator()
117119
.Append(row => (
118120
row.name,
@@ -144,7 +146,7 @@ public void TestOldSavingAndLoading()
144146
{
145147
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
146148
return;
147-
149+
148150

149151
var samplevector = GetSampleArrayData();
150152

@@ -158,7 +160,7 @@ public void TestOldSavingAndLoading()
158160

159161
var inputNames = "data_0";
160162
var outputNames = "output_1";
161-
var est = new DnnImageFeaturizerEstimator(Env, outputNames, m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn ,m.InputColumn), inputNames);
163+
var est = new DnnImageFeaturizerEstimator(Env, outputNames, m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn, m.InputColumn), inputNames);
162164
var transformer = est.Fit(dataView);
163165
var result = transformer.Transform(dataView);
164166
var resultRoles = new RoleMappedData(result);

test/Microsoft.ML.OnnxTransformTest/OnnxTransformTests.cs

+29-33
Original file line numberDiff line numberDiff line change
@@ -21,9 +21,26 @@
2121

2222
namespace Microsoft.ML.Tests
2323
{
24-
public class OnnxTransformTests : TestDataPipeBase
24+
25+
/// <summary>
26+
/// A Fact attribute for Onnx unit tests. Onnxruntime only supported
27+
/// on Windows, Linux (Ubuntu 16.04) and 64-bit platforms.
28+
/// </summary>
29+
public class OnnxFact : FactAttribute
2530
{
31+
public OnnxFact()
32+
{
33+
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ||
34+
RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ||
35+
!Environment.Is64BitProcess)
36+
{
37+
Skip = "Require 64 bit and Windows or Linux (Ubuntu 16.04).";
38+
}
39+
}
40+
}
2641

42+
public class OnnxTransformTests : TestDataPipeBase
43+
{
2744
private const int inputSize = 150528;
2845

2946
private class TestData
@@ -83,16 +100,11 @@ public OnnxTransformTests(ITestOutputHelper output) : base(output)
83100
{
84101
}
85102

86-
[ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 fails with "An attempt was made to load a program with an incorrect format."
103+
[OnnxFact]
87104
void TestSimpleCase()
88105
{
89-
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
90-
return;
91-
92106
var modelFile = "squeezenet/00000001/model.onnx";
93-
94107
var samplevector = GetSampleArrayData();
95-
96108
var dataView = ML.Data.ReadFromEnumerable(
97109
new TestData[] {
98110
new TestData()
@@ -126,7 +138,8 @@ void TestSimpleCase()
126138
catch (InvalidOperationException) { }
127139
}
128140

129-
[ConditionalTheory(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 fails with "An attempt was made to load a program with an incorrect format."
141+
// x86 not supported
142+
[ConditionalTheory(typeof(Environment), nameof(Environment.Is64BitProcess))]
130143
[InlineData(null, false)]
131144
[InlineData(null, true)]
132145
void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu)
@@ -135,7 +148,6 @@ void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu)
135148
return;
136149

137150
var modelFile = "squeezenet/00000001/model.onnx";
138-
139151
var samplevector = GetSampleArrayData();
140152

141153
var dataView = ML.Data.ReadFromEnumerable(
@@ -187,13 +199,10 @@ void TestOldSavingAndLoading(int? gpuDeviceId, bool fallbackToCpu)
187199
}
188200
}
189201

190-
[ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 fails with "An attempt was made to load a program with an incorrect format."
202+
[OnnxFact]
191203
public void OnnxStatic()
192204
{
193-
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
194-
return;
195-
196-
var modelFile = "squeezenet/00000001/model.onnx";
205+
var modelFile = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet", "00000001", "model.onnx");
197206

198207
var env = new MLContext(conc: 1);
199208
var imageHeight = 224;
@@ -233,23 +242,17 @@ public void OnnxStatic()
233242
}
234243
}
235244

236-
[ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline
245+
[OnnxFact]
237246
void TestCommandLine()
238247
{
239-
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
240-
return;
241-
242248
var env = new MLContext();
243-
var x = Maml.Main(new[] { @"showschema loader=Text{col=data_0:R4:0-150527} xf=Onnx{InputColumns={data_0} OutputColumns={softmaxout_1} model={squeezenet/00000001/model.onnx} GpuDeviceId=0 FallbackToCpu=+}" });
249+
var x = Maml.Main(new[] { @"showschema loader=Text{col=data_0:R4:0-150527} xf=Onnx{InputColumns={data_0} OutputColumns={softmaxout_1} model={squeezenet/00000001/model.onnx}}" });
244250
Assert.Equal(0, x);
245251
}
246252

247-
[ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline
253+
[OnnxFact]
248254
public void OnnxModelScenario()
249255
{
250-
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
251-
return;
252-
253256
var modelFile = "squeezenet/00000001/model.onnx";
254257
using (var env = new ConsoleEnvironment(seed: 1, conc: 1))
255258
{
@@ -280,13 +283,10 @@ public void OnnxModelScenario()
280283
}
281284
}
282285

283-
[ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline
286+
[OnnxFact]
284287
public void OnnxModelMultiInput()
285288
{
286-
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
287-
return;
288-
289-
var modelFile = @"twoinput\twoinput.onnx";
289+
var modelFile = Path.Combine(Directory.GetCurrentDirectory(), "twoinput", "twoinput.onnx");
290290
using (var env = new ConsoleEnvironment(seed: 1, conc: 1))
291291
{
292292
var samplevector = GetSampleArrayData();
@@ -323,12 +323,9 @@ public void OnnxModelMultiInput()
323323
}
324324
}
325325

326-
[ConditionalFact(typeof(Environment), nameof(Environment.Is64BitProcess))] // x86 output differs from Baseline
326+
[OnnxFact]
327327
public void TestUnknownDimensions()
328328
{
329-
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
330-
return;
331-
332329
// model contains -1 in input and output shape dimensions
333330
// model: input dims = [-1, 3], output argmax dims = [-1]
334331
var modelFile = @"unknowndimensions/test_unknowndimensions_float.onnx";
@@ -350,4 +347,3 @@ public void TestUnknownDimensions()
350347
}
351348
}
352349
}
353-

0 commit comments

Comments
 (0)