Revert "add load from buffer" #10590

Merged · 1 commit · Mar 8, 2022

28 changes: 4 additions & 24 deletions winml/api/Windows.AI.MachineLearning.idl
@@ -33,7 +33,7 @@ import "windows.storage.idl";
 
 namespace ROOT_NS.AI.MachineLearning
 {
-[contractversion(6)]
+[contractversion(5)]
 apicontract MachineLearningContract{};
 
 //! Forward declarations
@@ -104,42 +104,22 @@ namespace ROOT_NS.AI.MachineLearning
 //! Loads an ONNX model from a stream asynchronously.
 [remote_async]
 static Windows.Foundation.IAsyncOperation<LearningModel> LoadFromStreamAsync(Windows.Storage.Streams.IRandomAccessStreamReference modelStream);
-//! Loads an ONNX model from a buffer asynchronously.
-[contract(MachineLearningContract, 6)]
-{
-[remote_async]
-static Windows.Foundation.IAsyncOperation<LearningModel> LoadFromBufferAsync(Windows.Storage.Streams.IBuffer modelBuffer);
-}
 //! Loads an ONNX model from a file on disk.
 static LearningModel LoadFromFilePath(String filePath);
 //! Loads an ONNX model from a stream.
 static LearningModel LoadFromStream(Windows.Storage.Streams.IRandomAccessStreamReference modelStream);
-//! Loads an ONNX model from a buffer.
-[contract(MachineLearningContract, 6)]
-{
-static LearningModel LoadFromBuffer(Windows.Storage.Streams.IBuffer modelBuffer);
-}
+
 //! Loads an ONNX model from a StorageFile asynchronously.
 [remote_async]
 [method_name("LoadFromStorageFileWithOperatorProviderAsync")] static Windows.Foundation.IAsyncOperation<LearningModel> LoadFromStorageFileAsync(Windows.Storage.IStorageFile modelFile, ILearningModelOperatorProvider operatorProvider);
 //! Loads an ONNX model from a stream asynchronously.
 [remote_async]
 [method_name("LoadFromStreamWithOperatorProviderAsync")] static Windows.Foundation.IAsyncOperation<LearningModel> LoadFromStreamAsync(Windows.Storage.Streams.IRandomAccessStreamReference modelStream, ILearningModelOperatorProvider operatorProvider);
-//! Loads an ONNX model from a buffer asynchronously.
-[contract(MachineLearningContract, 6)]
-{
-[remote_async]
-[method_name("LoadFromBufferWithOperatorProviderAsync")] static Windows.Foundation.IAsyncOperation<LearningModel> LoadFromBufferAsync(Windows.Storage.Streams.IBuffer modelBuffer, ILearningModelOperatorProvider operatorProvider);
-}
-//! Loads an ONNX model from a file on disk.
+//! Loads an ONNX model from a file on disk.
 [method_name("LoadFromFilePathWithOperatorProvider")] static LearningModel LoadFromFilePath(String filePath, ILearningModelOperatorProvider operatorProvider);
 //! Loads an ONNX model from a stream.
 [method_name("LoadFromStreamWithOperatorProvider")] static LearningModel LoadFromStream(Windows.Storage.Streams.IRandomAccessStreamReference modelStream, ILearningModelOperatorProvider operatorProvider);
-//! Loads an ONNX model from a buffer.
-[contract(MachineLearningContract, 6)]
-{
-[method_name("LoadFromBufferWithOperatorProvider")] static LearningModel LoadFromBuffer(Windows.Storage.Streams.IBuffer modelBuffer, ILearningModelOperatorProvider operatorProvider);
-}
+
 //! The name of the model author.
 String Author{ get; };
 //! The name of the model.
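
A minimal C++/WinRT sketch (not part of this PR) contrasting what the revert removes with what remains: the buffer overloads below are the contract-6 additions being rolled back, while the stream overload stays available in contract 5. The model path and function name are hypothetical.

// Sketch only; assumes the standard C++/WinRT projection headers.
#include <winrt/Windows.Foundation.h>
#include <winrt/Windows.AI.MachineLearning.h>
#include <winrt/Windows.Storage.h>
#include <winrt/Windows.Storage.Streams.h>

using namespace winrt;
using namespace winrt::Windows::Foundation;
using namespace winrt::Windows::AI::MachineLearning;
using namespace winrt::Windows::Storage;
using namespace winrt::Windows::Storage::Streams;

IAsyncAction LoadModelExamplesAsync()
{
    // Hypothetical model file used only for illustration.
    StorageFile file = co_await StorageFile::GetFileFromPathAsync(L"C:\\models\\squeezenet.onnx");

    // Removed by this revert (was gated behind MachineLearningContract 6):
    // IBuffer buffer = co_await FileIO::ReadBufferAsync(file);
    // LearningModel fromBuffer = co_await LearningModel::LoadFromBufferAsync(buffer);

    // Still part of contract 5 after the revert:
    auto streamRef = RandomAccessStreamReference::CreateFromFile(file);
    LearningModel fromStream = co_await LearningModel::LoadFromStreamAsync(streamRef);
}
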
53 changes: 0 additions & 53 deletions winml/lib/Api/LearningModel.cpp
@@ -165,30 +165,6 @@ LearningModel::LearningModel(
 }
 WINML_CATCH_ALL
 
-static HRESULT CreateModelFromBuffer(
-_winml::IEngineFactory* engine_factory,
-const wss::IBuffer buffer,
-_winml::IModel** model) {
-
-size_t len = buffer.Length();
-if (FAILED(engine_factory->CreateModel((void*)buffer.data(), len, model))) {
-WINML_THROW_HR(E_INVALIDARG);
-}
-
-return S_OK;
-}
-
-LearningModel::LearningModel(
-const wss::IBuffer buffer,
-const winml::ILearningModelOperatorProvider operator_provider) try : operator_provider_(operator_provider) {
-_winmlt::TelemetryEvent loadModel_event(_winmlt::EventCategory::kModelLoad);
-
-WINML_THROW_IF_FAILED(CreateOnnxruntimeEngineFactory(engine_factory_.put()));
-WINML_THROW_IF_FAILED(CreateModelFromBuffer(engine_factory_.get(), buffer, model_.put()));
-WINML_THROW_IF_FAILED(model_->GetModelInfo(model_info_.put()));
-}
-WINML_CATCH_ALL
-
 hstring
 LearningModel::Author() try {
 const char* out;
@@ -317,20 +293,6 @@ LearningModel::LoadFromStreamAsync(
 return make<LearningModel>(model_stream, provider);
 }
 
-wf::IAsyncOperation<winml::LearningModel>
-LearningModel::LoadFromBufferAsync(
-wss::IBuffer const model_buffer) {
-return LoadFromBufferAsync(model_buffer, nullptr);
-}
-
-wf::IAsyncOperation<winml::LearningModel>
-LearningModel::LoadFromBufferAsync(
-wss::IBuffer const model_buffer,
-winml::ILearningModelOperatorProvider const provider) {
-co_await resume_background();
-return make<LearningModel>(model_buffer, provider);
-}
-
 winml::LearningModel
 LearningModel::LoadFromFilePath(
 hstring const& path) try {
@@ -361,21 +323,6 @@ LearningModel::LoadFromStream(
 }
 WINML_CATCH_ALL
 
-winml::LearningModel
-LearningModel::LoadFromBuffer(
-wss::IBuffer const model_buffer) try {
-return LoadFromBuffer(model_buffer, nullptr);
-}
-WINML_CATCH_ALL
-
-winml::LearningModel
-LearningModel::LoadFromBuffer(
-wss::IBuffer const model_buffer,
-winml::ILearningModelOperatorProvider const provider) try {
-return make<LearningModel>(model_buffer, provider);
-}
-WINML_CATCH_ALL
-
 _winml::IModel*
 LearningModel::DetachModel() {
 com_ptr<_winml::IModel> detached_model;
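
With the IBuffer constructor and the CreateModelFromBuffer helper above removed, a caller that already holds model bytes in an IBuffer has to go through the surviving stream overloads. A minimal sketch of that workaround, assuming the standard C++/WinRT projections; the function name is hypothetical and nothing below comes from this PR.

// Sketch only: copy the buffer into an in-memory stream, then use LoadFromStreamAsync.
#include <winrt/Windows.Foundation.h>
#include <winrt/Windows.AI.MachineLearning.h>
#include <winrt/Windows.Storage.Streams.h>

using namespace winrt;
using namespace winrt::Windows::Foundation;
using namespace winrt::Windows::AI::MachineLearning;
using namespace winrt::Windows::Storage::Streams;

IAsyncOperation<LearningModel> LoadModelFromBufferViaStreamAsync(IBuffer buffer)
{
    InMemoryRandomAccessStream stream;
    co_await stream.WriteAsync(buffer);  // copy the model bytes into the stream
    stream.Seek(0);                      // rewind so the loader reads from the beginning
    auto reference = RandomAccessStreamReference::CreateFromStream(stream);
    co_return co_await LearningModel::LoadFromStreamAsync(reference);
}
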
22 changes: 0 additions & 22 deletions winml/lib/Api/LearningModel.h
@@ -24,10 +24,6 @@ struct LearningModel : LearningModelT<LearningModel> {
 LearningModel(
 const wss::IRandomAccessStreamReference stream,
 const winml::ILearningModelOperatorProvider operator_provider);
 
-LearningModel(
-const wss::IBuffer stream,
-const winml::ILearningModelOperatorProvider operator_provider);
-
 LearningModel(
 _winml::IEngineFactory* engine_factory,
@@ -81,15 +77,6 @@ struct LearningModel : LearningModelT<LearningModel> {
 wss::IRandomAccessStreamReference const stream,
 winml::ILearningModelOperatorProvider const operator_provider);
 
-static wf::IAsyncOperation<winml::LearningModel>
-LoadFromBufferAsync(
-wss::IBuffer const buffer);
-
-static wf::IAsyncOperation<winml::LearningModel>
-LoadFromBufferAsync(
-wss::IBuffer const buffer,
-winml::ILearningModelOperatorProvider const operator_provider);
-
 static winml::LearningModel
 LoadFromFilePath(
 hstring const& path);
@@ -102,20 +89,11 @@ struct LearningModel : LearningModelT<LearningModel> {
 static winml::LearningModel
 LoadFromStream(
 wss::IRandomAccessStreamReference const stream);
 
-static winml::LearningModel
-LoadFromBuffer(
-wss::IBuffer const buffer);
-
 static winml::LearningModel
 LoadFromStream(
 wss::IRandomAccessStreamReference const stream,
 winml::ILearningModelOperatorProvider const operator_provider);
 
-static winml::LearningModel
-LoadFromBuffer(
-wss::IBuffer const buffer,
-winml::ILearningModelOperatorProvider const operator_provider);
-
 public:
 /* Non-ABI methods */
15 changes: 0 additions & 15 deletions winml/test/api/LearningModelAPITest.cpp
@@ -90,20 +90,6 @@ static void CreateModelFromIStream() {
 WINML_EXPECT_EQUAL(L"onnx-caffe2", author);
 }
 
-static void CreateModelFromIBuffer() {
-std::wstring path = FileHelpers::GetModulePath() + L"squeezenet_modifiedforruntimestests.onnx";
-auto storageFile = ws::StorageFile::GetFileFromPathAsync(path).get();
-IBuffer buffer = FileIO::ReadBufferAsync(storageFile).get();
-
-LearningModel learningModel = nullptr;
-WINML_EXPECT_NO_THROW(learningModel = LearningModel::LoadFromBufferAsync(buffer).get());
-WINML_EXPECT_TRUE(learningModel != nullptr);
-
-// check the author so we know the model was populated correctly.
-std::wstring author(learningModel.Author());
-WINML_EXPECT_EQUAL(L"onnx-caffe2", author);
-}
-
 static void ModelGetAuthor() {
 LearningModel learningModel = nullptr;
 WINML_EXPECT_NO_THROW(APITest::LoadModel(L"squeezenet_modifiedforruntimestests.onnx", learningModel));
@@ -337,7 +323,6 @@ const LearningModelApiTestsApi& getapi() {
 CreateModelFromIStorage,
 CreateModelFromIStorageOutsideCwd,
 CreateModelFromIStream,
-CreateModelFromIBuffer,
 ModelGetAuthor,
 ModelGetName,
 ModelGetDomain,
2 changes: 0 additions & 2 deletions winml/test/api/LearningModelAPITest.h
@@ -11,7 +11,6 @@ struct LearningModelApiTestsApi
 VoidTest CreateModelFromIStorage;
 VoidTest CreateModelFromIStorageOutsideCwd;
 VoidTest CreateModelFromIStream;
-VoidTest CreateModelFromIBuffer;
 VoidTest ModelGetAuthor;
 VoidTest ModelGetName;
 VoidTest ModelGetDomain;
@@ -37,7 +36,6 @@ WINML_TEST(LearningModelAPITests, CreateModelFileNotFound)
 WINML_TEST(LearningModelAPITests, CreateModelFromIStorage)
 WINML_TEST(LearningModelAPITests, CreateModelFromIStorageOutsideCwd)
 WINML_TEST(LearningModelAPITests, CreateModelFromIStream)
-WINML_TEST(LearningModelAPITests, CreateModelFromIBuffer)
 WINML_TEST(LearningModelAPITests, ModelGetAuthor)
 WINML_TEST(LearningModelAPITests, ModelGetName)
 WINML_TEST(LearningModelAPITests, ModelGetDomain)