
Support DisposableNamedOnnxValue inputs in c# Run() #3175

Merged: 13 commits, Mar 24, 2020
40 changes: 38 additions & 2 deletions csharp/src/Microsoft.ML.OnnxRuntime/DisposableNamedOnnxValue.cs
@@ -2,6 +2,7 @@
// Licensed under the MIT License.

using System;
+ using System.Buffers;
using System.Collections.Generic;
using Microsoft.ML.OnnxRuntime.Tensors;
using System.Runtime.InteropServices;
@@ -60,13 +61,48 @@ public void Dispose()

public class DisposableNamedOnnxValue : NamedOnnxValue, IDisposable
{
- protected IDisposable _nativeMemoryManager;
- protected DisposableNamedOnnxValue(string name, Object value, IDisposable nativeMemoryManager)
+ private NativeMemoryHandler _nativeMemoryManager;
+ private DisposableNamedOnnxValue(string name, Object value, NativeMemoryHandler nativeMemoryManager)
Member Author: ctor is private now, as per @fs-eire's suggestion.
: base(name, value)
{
_nativeMemoryManager = nativeMemoryManager;
}

/// <summary>
/// Overrides the base class method. Since this instance already holds the underlying
/// OrtValue handle (provided it hasn't been disposed), it simply assigns that handle to
/// the output onnxValue. It does nothing with pinnedMemoryHandle, because this class
/// keeps no managed buffer to pin: the data is already held by the native OrtValue.
/// </summary>
/// <param name="onnxValue"></param>
/// <param name="pinnedMemoryHandle"></param>
/// <param name="disposeOnnxValueAfterUse"></param>
internal override void ToNativeOnnxValue(out IntPtr onnxValue,
out MemoryHandle pinnedMemoryHandle,
out bool disposeOnnxValueAfterUse)
{
// Make sure that this instance hasn't been disposed yet
if (disposedValue)
{
throw new Exception("This instance of DisposableNamedOnnxValue has already been disposed");
Member (quoting "This instance of DisposableNamedOnnxValue has already been disposed"): Should this be a bug?

Member Author: A bug in the user code? Probably. But they get a user-friendly error message this way...

}

// If not already disposed, _nativeMemoryManager can only be null
// for Maps and SequenceTensors
if (_nativeMemoryManager == null)
{
throw new NotSupportedException("Use of Maps and SequenceTensors is not yet supported");
}

// 'disposeOnnxValueAfterUse' is always 'false' for DisposableNamedOnnxValue as the onus
// to dispose the onnxValue after use is on the user, not the internal caller
disposeOnnxValueAfterUse = false;

// Assign the onnxValue by querying this instance's NativeOnnxTensorMemory instance
onnxValue = _nativeMemoryManager.GetOnnxValue();
}

internal static DisposableNamedOnnxValue CreateTensorFromOnnxValue(string name, IntPtr nativeOnnxValue)
{
DisposableNamedOnnxValue result = null;
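Taken together, the changes in this file let an output of Run() be fed back into a later Run() call as an input. A minimal sketch of that flow, adapted from the new tests at the end of this diff (the model file and the "input" node name are the placeholders those tests use):

// Sketch adapted from TestReusingRunOutputNonStringType below.
using (var session = new InferenceSession("test_types_BOOL.pb"))
{
    var tensorIn = new DenseTensor<bool>(new bool[] { true, false, true, false, true }, new int[] { 1, 5 });
    var container = new List<NamedOnnxValue> { NamedOnnxValue.CreateFromTensor("input", tensorIn) };

    // res1 holds DisposableNamedOnnxValues that wrap native OrtValues.
    var res1 = session.Run(container);
    res1.First().Name = "input"; // rename the output to match the model's input node

    // Feed the output straight back in -- no copy back into managed memory.
    using (var res2 = session.Run(res1))
    {
        var tensorOut = res2.First().AsTensor<bool>();
    }

    res1.Dispose(); // the user, not Run(), releases the native values
}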
21 changes: 14 additions & 7 deletions csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.cs
@@ -136,14 +136,17 @@ public IDisposableReadOnlyCollection<DisposableNamedOnnxValue> Run(IReadOnlyColl
var inputNames = new string[inputs.Count];
var inputTensors = new IntPtr[inputs.Count];
var pinnedBufferHandles = new System.Buffers.MemoryHandle[inputs.Count];
+ var disposeOnnxValues = new bool[inputs.Count];

int inputIndex = 0;
foreach (var input in inputs)
{
inputNames[inputIndex] = input.Name;

// create Tensor from the input if feasible, else throw notsupported exception for now
- input.ToNativeOnnxValue(out inputTensors[inputIndex], out pinnedBufferHandles[inputIndex]);
+ input.ToNativeOnnxValue(out inputTensors[inputIndex],
+                         out pinnedBufferHandles[inputIndex],
+                         out disposeOnnxValues[inputIndex]);

inputIndex++;
}
@@ -187,12 +190,16 @@ public IDisposableReadOnlyCollection<DisposableNamedOnnxValue> Run(IReadOnlyColl
}
finally
{
- // always unpin the input buffers, and delete the native Onnx value objects
+ // For NamedOnnxValue, always unpin the input buffers, and delete the native Onnx value objects
+ // For DisposableNamedOnnxValue, the user needs to do this by invoking Dispose
for (int i = 0; i < inputs.Count; i++)
{
-     NativeMethods.OrtReleaseValue(inputTensors[i]); // For elementary type Tensors, this should not release the buffer, but should delete the native tensor object.
-     // For string tensors, this releases the native memory allocated for the tensor, including the buffer
-     pinnedBufferHandles[i].Dispose();
+     if (disposeOnnxValues[i])
+     {
+         NativeMethods.OrtReleaseValue(inputTensors[i]); // For elementary type Tensors, this should not release the buffer, but should delete the native tensor object.
+         // For string tensors, this releases the native memory allocated for the tensor, including the buffer
+         pinnedBufferHandles[i].Dispose();
+     }
}
}

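The ownership rule that falls out of this hunk, condensed into a single-input sketch (a paraphrase of Run()'s cleanup above, not a new API):

// The out-flag from ToNativeOnnxValue decides who cleans up: NamedOnnxValue
// reports true (Run() releases the native value and unpins the buffer), while
// DisposableNamedOnnxValue reports false (the user's Dispose() does it later).
input.ToNativeOnnxValue(out IntPtr ortValue, out MemoryHandle pin, out bool disposeAfterUse);
try
{
    // ... hand ortValue to the native run call ...
}
finally
{
    if (disposeAfterUse)
    {
        NativeMethods.OrtReleaseValue(ortValue);
        pin.Dispose();
    }
}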
@@ -429,7 +436,7 @@ internal static NodeMetadata GetMetadataFromTypeInfo(IntPtr typeInfo)
}
if (valueType != OnnxValueType.ONNX_TYPE_TENSOR && valueType != OnnxValueType.ONNX_TYPE_SPARSETENSOR)
{
- return new NodeMetadata(valueType, new int[] { }, new string[] { }, typeof(NamedOnnxValue));
+ return new NodeMetadata(valueType, new int[] { }, new string[] { }, typeof(NamedOnnxValue));
}

IntPtr tensorInfo;
@@ -467,7 +474,7 @@ internal static NodeMetadata GetMetadataFromTypeInfo(IntPtr typeInfo)
{
symbolicDimensions[i] = Marshal.PtrToStringAnsi(dimensionNamePtrs[i]); //assumes charset = ANSI
}

return new NodeMetadata(valueType, intDimensions, symbolicDimensions, dotnetType);
}

11 changes: 9 additions & 2 deletions csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.cs
@@ -28,7 +28,7 @@ public static NamedOnnxValue CreateFromTensor<T>(string name, Tensor<T> value)
return new NamedOnnxValue(name, value);
}

- public string Name { get { return _name; } }
+ public string Name { get { return _name; } set { _name = value; } }

/// <summary>
/// Try-get value as a Tensor&lt;T&gt;.
@@ -71,8 +71,15 @@ public IDictionary<K, V> AsDictionary<K, V>()
/// </summary>
/// <param name="onnxValue"></param>
/// <param name="pinnedMemoryHandle"></param>
- internal void ToNativeOnnxValue(out IntPtr onnxValue, out MemoryHandle pinnedMemoryHandle)
+ /// <param name="disposeOnnxValueAfterUse"></param>
+ internal virtual void ToNativeOnnxValue(out IntPtr onnxValue,
+                                         out MemoryHandle pinnedMemoryHandle,
+                                         out bool disposeOnnxValueAfterUse)
{
+     // 'disposeOnnxValueAfterUse' is always 'true' for NamedOnnxValue as the onus
+     // to dispose the onnxValue after use is on the internal caller
+     disposeOnnxValueAfterUse = true;

//try to cast _value to Tensor<T>
TensorElementType nativeElementType = TensorElementType.DataTypeMax; //invalid
IntPtr dataBufferPointer = IntPtr.Zero;
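For contrast with the DisposableNamedOnnxValue override earlier in this diff: NamedOnnxValue wraps a managed Tensor<T>, so its implementation must pin that buffer before handing a pointer to native code. A simplified sketch of the pinning pattern only (the real method also handles string tensors and element-type mapping; _value is the field holding the wrapped tensor):

// Pin the managed buffer so the GC cannot move it while the native OrtValue
// refers to it; the MemoryHandle goes back to the caller, which unpins it
// (Dispose) once the native call has completed.
var tensor = (DenseTensor<float>)_value;
MemoryHandle pinnedMemoryHandle = tensor.Buffer.Pin();
IntPtr dataBufferPointer;
unsafe
{
    dataBufferPointer = (IntPtr)pinnedMemoryHandle.Pointer;
}
// dataBufferPointer plus shape and element type then back the native OrtValue.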
12 changes: 11 additions & 1 deletion csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxTensorMemory.cs
@@ -12,7 +12,12 @@

namespace Microsoft.ML.OnnxRuntime
{
- internal class NativeOnnxTensorMemory<T> : MemoryManager<T>
+ internal interface NativeMemoryHandler : IDisposable
+ {
+     IntPtr GetOnnxValue();
+ }
+
+ internal class NativeOnnxTensorMemory<T> : MemoryManager<T>, NativeMemoryHandler
{
private bool _disposed;
private int _referenceCount;
@@ -122,6 +127,11 @@ public NativeOnnxTensorMemory(IntPtr onnxValueHandle)
}
}

+ public IntPtr GetOnnxValue()
+ {
+     return _onnxValueHandle;
+ }

~NativeOnnxTensorMemory()
{
Dispose(false);
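The point of the small non-generic interface: DisposableNamedOnnxValue stores a NativeOnnxTensorMemory<T> without knowing T, and NativeMemoryHandler lets it recover the native handle anyway. An illustrative fragment (onnxValueHandle stands in for a native OrtValue obtained elsewhere):

// Any NativeOnnxTensorMemory<T> can sit behind the non-generic interface,
// so the owner never needs the element type to reach the OrtValue.
NativeMemoryHandler handler = new NativeOnnxTensorMemory<float>(onnxValueHandle);
IntPtr ortValue = handler.GetOnnxValue(); // same call for float, bool, string, ...
handler.Dispose();                        // IDisposable rides along on the interface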
108 changes: 108 additions & 0 deletions csharp/test/Microsoft.ML.OnnxRuntime.Tests/InferenceTest.cs
@@ -731,6 +731,114 @@ private void TestModelInputBOOL()
}
}

[Fact]
private void TestReusingRunOutputNonStringType()
{
// model takes 1x5 input of fixed type, echoes back
string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_BOOL.pb");
using (var session = new InferenceSession(modelPath))
{
var container = new List<NamedOnnxValue>();
var tensorIn = new DenseTensor<bool>(new bool[] { true, false, true, false, true }, new int[] { 1, 5 });
var nov = NamedOnnxValue.CreateFromTensor("input", tensorIn);
container.Add(nov);
var res1 = session.Run(container);

// change the name of the DisposableNamedOnnxValue
res1.First().Name = "input";

// Run inferencing 2 times using the output of the first Run()
for (int i = 0; i < 2; ++i)
{
using (var res2 = session.Run(res1))
{
GC.Collect();
var tensorOut = res2.First().AsTensor<bool>();
Assert.True(tensorOut.SequenceEqual(tensorIn));
}
}

/*
// Dispose the result tensor
res1.First().Dispose();

bool succeeded = false;

// Now try using the disposed output as input to another Run()
try
{
// Run() should fail with a user friendly error message.
session.Run(res1);
}

catch(Exception e)
{
var errorString = "This instance of DisposableNamedOnnxValue has already been disposed";

Assert.True(e.Message.Contains(errorString));

succeeded = true;
}

Assert.True(succeeded);
*/
}
}

[Fact]
private void TestReusingRunOutputStringType()
{
// model takes 1x5 input of fixed type, echoes back
string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "test_types_STRING.pb");
using (var session = new InferenceSession(modelPath))
{
var container = new List<NamedOnnxValue>();
var tensorIn = new DenseTensor<string>(new string[] { "a", "b", "c", "d", "e" }, new int[] { 1, 5 });
var nov = NamedOnnxValue.CreateFromTensor("input", tensorIn);
container.Add(nov);
var res1 = session.Run(container);

// change the name of the DisposableNamedOnnxValue
res1.First().Name = "input";

// Run inferencing 2 times using the output of the first Run()
for (int i = 0; i < 2; ++i)
{
using (var res2 = session.Run(res1))
{
var tensorOut = res2.First().AsTensor<string>();
Assert.True(tensorOut.SequenceEqual(tensorIn));
}
}

/*
// Dispose the result tensor
res1.First().Dispose();

bool succeeded = false;

// Now try using the disposed output as input to another Run()
try
{
// Run() should fail with a user friendly error message.
session.Run(res1);
}

catch (Exception e)
{
var errorString = "This instance of DisposableNamedOnnxValue has already been disposed";

Assert.True(e.Message.Contains(errorString));

succeeded = true;
}

Assert.True(succeeded);
*/
}

}

[Fact]
private void TestModelInputINT32()
{