Cherry pick PRs to Rel-1.3.1 (#4198)
* link to folder instead of READMEs inside folder (#3938)

otherwise the source code is hard to find

* [Node.js binding] fix linux build (#3927)

* [Node.js binding] add build flag for node.js binding (#3948)

* [Node.js binding] create a new pipeline to generate signed binaries (#4104)

* add yml files

* update pipeline

* fix yaml syntax

* yaml pop BuildCSharp

* update yaml

* do not stage codesign summary

* fix build: pipeline Node.js version to 12.16.3 (#4145)

* [Node.js binding] upgrade node-addon-api to 3.0 (#4148)

* [Node.js binding] add linux and mac package (#4157)

* try mac pipeline

* fix path separator

* copy prebuilds folder

* split esrp yaml for win/mac

* disable mac signing temporarily

* add linux

* fix indent

* add nodetool in linux

* add nodetool in win-ci-2019

* replace linux build by custom docker scripts

* use manylinux since node 12.16 does not work on centos6

* try ubuntu

* loosen timeout for test case with multiple run calls

* add script to support update nodejs binding version (#4164)

* [java] Adds a CUDA test (#3956)

* [java] - adding a CUDA-enabled test.

* Adding --build_java to the Windows GPU CI pipeline.

* Removing a stray line from the unit tests that always enabled CUDA for Java.

* Update OnnxRuntime.java for OS X environment. (#3985)

Fixes an onnxruntime init failure caused by a wrong path when loading the native libraries. On a 64-bit OS X system the arch name is detected as x86, which generates an invalid path for reading the native libraries.

Exception java.lang.UnsatisfiedLinkError: no onnxruntime in java.library.path
	at java.lang.ClassLoader.loadLibrary(ClassLoader.java:1867)
	at java.lang.Runtime.loadLibrary0(Runtime.java:870)
	at java.lang.System.loadLibrary(System.java:1122)
	at ai.onnxruntime.OnnxRuntime.load(OnnxRuntime.java:174)
	at ai.onnxruntime.OnnxRuntime.init(OnnxRuntime.java:81)
	at ai.onnxruntime.OrtEnvironment.<clinit>(OrtEnvironment.java:24)
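
A minimal sketch of the corrected detection logic, as a hypothetical standalone helper (the actual fix is in ai.onnxruntime.OnnxRuntime.initOsArch, shown in the diff below). On OS X, os.arch reports x86_64, which also matches a plain x86 prefix check, so the 64-bit name has to be tested first:

import java.util.Locale;

public class ArchProbe {
  // Sketch only: mirrors the fix by testing "x86_64" before plain "x86".
  static String detectArch() {
    String arch = System.getProperty("os.arch", "generic").toLowerCase(Locale.ENGLISH);
    if (arch.startsWith("amd64") || arch.startsWith("x86_64")) {
      return "x64";   // 64-bit x86: native libraries live under the x64 path
    } else if (arch.startsWith("x86")) {
      return "x86";   // genuinely 32-bit x86
    }
    return arch;      // other architectures pass through unchanged
  }

  public static void main(String[] args) {
    System.out.println("detected arch: " + detectArch());
  }
}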

* Create Java publishing pipeline (#3944)

Create CPU and GPU Java publishing pipelines. The final jars are tested on all platforms; however, signing and publishing to Maven are still manual steps.

* Change group id to com.microsoft.onnxruntime per requirements.

* Java GPU artifact naming (#4179)

Modify the Gradle build so the artifactId has a _gpu suffix for GPU builds.
  Pass the USE_CUDA flag on CUDA builds.
  Adjust the publishing pipelines to extract the POM from the correct path.

Co-Authored-By: @Craigacp

* bump up ORT version to 1.3.1 (#4181)

* move back to toolset 14.16 to possibly work around nvcc bug (#4180)

* Symbolic shape inference exit on models without onnx opset used (#4090)

* Symbolic shape inference exit on models without onnx opset used

* Temporary fix for ConvTranspose with symbolic input dims

Co-authored-by: Changming Sun <me@sunchangming.com>

* Fix Nuphar test failure

* Enlarge the read buffer size in C#/Java test code (#4150)

1. Enlarge the read buffer size further, so that the code runs even faster. TODO: apply similar changes to the Python and other language bindings. See the sketch after this list.
2. Add coreml_VGG16_ImageNet to the x86_32 test exclusion set. It is not a new model, but previously the test was not run against x86_32.
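
Both test changes follow the same pattern: hand the protobuf parser a stream with a large explicit buffer (4 MB) instead of the 8 KB BufferedInputStream default, so parsing issues a handful of large reads rather than many small ones. A minimal, self-contained Java sketch of that pattern, assuming the tensor .pb file name arrives on the command line (the real test instead passes the stream to OnnxMl.TensorProto.parseFrom):

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class BufferedReadDemo {
  // 4 MB read buffer; BufferedInputStream defaults to 8 KB, so parsing a
  // multi-megabyte tensor .pb file with the default issues many small reads
  // where a handful of large ones would do.
  private static final int READ_BUFFER_SIZE = 4 * 1024 * 1024;

  public static void main(String[] args) throws IOException {
    long total = 0;
    try (InputStream is =
        new BufferedInputStream(new FileInputStream(args[0]), READ_BUFFER_SIZE)) {
      byte[] chunk = new byte[64 * 1024];
      int n;
      while ((n = is.read(chunk)) != -1) {
        total += n; // a real caller would hand `is` to TensorProto.parseFrom
      }
    }
    System.out.println("read " + total + " bytes");
  }
}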

* Temporarily disable windows static analysis CI job

* skip model coreml_Imputer-LogisticRegression_sklearn_load_breast_cancer

* Delete unused variable

Co-authored-by: Prasanth Pulavarthi <prasantp@microsoft.com>
Co-authored-by: Yulong Wang <yulongw@microsoft.com>
Co-authored-by: Adam Pocock <adam.pocock@oracle.com>
Co-authored-by: jji2019 <49252772+jji2019@users.noreply.github.com>
Co-authored-by: Dmitri Smirnov <yuslepukhin@users.noreply.github.com>
Co-authored-by: Dmitri Smirnov <dmitrism@microsoft.com>
Co-authored-by: George Wu <jywu@microsoft.com>
Co-authored-by: KeDengMS <kedeng@microsoft.com>
Co-authored-by: Changming Sun <me@sunchangming.com>
Co-authored-by: Changming Sun <chasun@microsoft.com>
11 people authored Jun 12, 2020
1 parent eb5da13 commit 814638c
Showing 57 changed files with 1,582 additions and 437 deletions.
2 changes: 1 addition & 1 deletion VERSION_NUMBER
@@ -1 +1 @@
1.3.0
1.3.1
9 changes: 8 additions & 1 deletion cmake/CMakeLists.txt
@@ -15,12 +15,14 @@ set(CMAKE_OSX_DEPLOYMENT_TARGET "10.12" CACHE STRING "Minimum OS X deployment ve

# Project
project(onnxruntime C CXX)
# Needed for Java
set (CMAKE_C_STANDARD 99)

include(CheckCXXCompilerFlag)
include(CheckLanguage)

# CentOS compiler is old but it does allow certain C++14 features
# such as lambda captures and they are convinient
# such as lambda captures and they are convenient
# On the other hand it does not allow some others.
# So we can't regulate simply with the standard.
set(CMAKE_CXX_STANDARD 14)
@@ -975,6 +977,11 @@ if (onnxruntime_BUILD_JAVA)
include(onnxruntime_java.cmake)
endif()

if (onnxruntime_BUILD_NODEJS)
message(STATUS "Node.js Build is enabled")
include(onnxruntime_nodejs.cmake)
endif()

# some of the tests rely on the shared libs to be
# built; hence the ordering
if (onnxruntime_BUILD_UNIT_TESTS)
6 changes: 6 additions & 0 deletions cmake/onnxruntime_java.cmake
@@ -42,6 +42,9 @@ set(GRADLE_ARGS clean jar)
if(WIN32)
set(GRADLE_ARGS ${GRADLE_ARGS} -Dorg.gradle.daemon=false)
endif()
if(onnxruntime_USE_CUDA)
set(GRADLE_ARGS ${GRADLE_ARGS} -DUSE_CUDA=1)
endif()
add_custom_command(OUTPUT ${JAVA_OUTPUT_JAR} COMMAND ${GRADLE_EXECUTABLE} ${GRADLE_ARGS} WORKING_DIRECTORY ${JAVA_ROOT} DEPENDS ${onnxruntime4j_gradle_files} ${onnxruntime4j_src})
add_custom_target(onnxruntime4j DEPENDS ${JAVA_OUTPUT_JAR})
set_source_files_properties(${JAVA_OUTPUT_JAR} PROPERTIES GENERATED TRUE)
@@ -157,6 +160,9 @@ set(GRADLE_ARGS cmakeBuild -DcmakeBuildDir=${CMAKE_CURRENT_BINARY_DIR})
if(WIN32)
set(GRADLE_ARGS ${GRADLE_ARGS} -Dorg.gradle.daemon=false)
endif()
if(onnxruntime_USE_CUDA)
set(GRADLE_ARGS ${GRADLE_ARGS} -DUSE_CUDA=1)
endif()
add_custom_command(TARGET onnxruntime4j_jni POST_BUILD COMMAND ${GRADLE_EXECUTABLE} ${GRADLE_ARGS} WORKING_DIRECTORY ${JAVA_ROOT})
if (CMAKE_SYSTEM_NAME STREQUAL "Android")
add_custom_command(TARGET onnxruntime4j_jni POST_BUILD COMMAND ${GRADLE_EXECUTABLE} -b build-android.gradle -c settings-android.gradle build -DjniLibsDir=${ANDROID_PACKAGE_JNILIBS_DIR} -DbuildDir=${ANDROID_PACKAGE_OUTPUT_DIR} WORKING_DIRECTORY ${JAVA_ROOT})
8 changes: 7 additions & 1 deletion cmake/onnxruntime_java_unittests.cmake
@@ -5,9 +5,15 @@
FILE(TO_NATIVE_PATH ${GRADLE_EXECUTABLE} GRADLE_NATIVE_PATH)
FILE(TO_NATIVE_PATH ${BIN_DIR} BINDIR_NATIVE_PATH)

execute_process(COMMAND cmd /C ${GRADLE_NATIVE_PATH} cmakeCheck -DcmakeBuildDir=${BINDIR_NATIVE_PATH} -Dorg.gradle.daemon=false
if (onnxruntime_USE_CUDA)
execute_process(COMMAND cmd /C ${GRADLE_NATIVE_PATH} cmakeCheck -DcmakeBuildDir=${BINDIR_NATIVE_PATH} -Dorg.gradle.daemon=false -DUSE_CUDA=1
WORKING_DIRECTORY ${REPO_ROOT}/java
RESULT_VARIABLE HAD_ERROR)
else()
execute_process(COMMAND cmd /C ${GRADLE_NATIVE_PATH} cmakeCheck -DcmakeBuildDir=${BINDIR_NATIVE_PATH} -Dorg.gradle.daemon=false
WORKING_DIRECTORY ${REPO_ROOT}/java
RESULT_VARIABLE HAD_ERROR)
endif()

if(HAD_ERROR)
message(FATAL_ERROR "Java Unitests failed")
36 changes: 36 additions & 0 deletions cmake/onnxruntime_nodejs.cmake
@@ -0,0 +1,36 @@
# Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
# Licensed under the MIT License.

set(NODEJS_BINDING_ROOT ${REPO_ROOT}/nodejs)
if (WIN32)
set(NPM_CLI cmd /c npm)
else()
set(NPM_CLI npm)
endif()

# verify Node.js and NPM
execute_process(COMMAND node --version
WORKING_DIRECTORY ${NODEJS_BINDING_ROOT}
OUTPUT_VARIABLE node_version
RESULT_VARIABLE had_error
OUTPUT_STRIP_TRAILING_WHITESPACE)
if(had_error)
message(FATAL_ERROR "Failed to find Node.js: " ${had_error})
endif()
execute_process(COMMAND ${NPM_CLI} --version
WORKING_DIRECTORY ${NODEJS_BINDING_ROOT}
OUTPUT_VARIABLE npm_version
RESULT_VARIABLE had_error
OUTPUT_STRIP_TRAILING_WHITESPACE)
if(had_error)
message(FATAL_ERROR "Failed to find NPM: " ${had_error})
endif()

# add custom target
add_custom_target(nodejs_binding_wrapper ALL
COMMAND ${NPM_CLI} ci --ort-skip-build
COMMAND ${NPM_CLI} run build -- --onnxruntime-build-dir=${CMAKE_CURRENT_BINARY_DIR} --config=${CMAKE_BUILD_TYPE}
COMMAND ${NPM_CLI} test -- --timeout=10000
WORKING_DIRECTORY ${NODEJS_BINDING_ROOT}
COMMENT "Using cmake-js to build OnnxRuntime Node.js binding")
add_dependencies(nodejs_binding_wrapper onnxruntime)
22 changes: 16 additions & 6 deletions cmake/onnxruntime_unittests.cmake
@@ -813,12 +813,15 @@ set_property(TARGET custom_op_library APPEND_STRING PROPERTY LINK_FLAGS ${ONNXRU

if (onnxruntime_BUILD_JAVA)
message(STATUS "Running Java tests")
# native-test is added to resources so custom_op_lib can be loaded
# and we want to symlink it there
set(JAVA_NATIVE_TEST_DIR ${JAVA_OUTPUT_DIR}/native-test)
file(MAKE_DIRECTORY ${JAVA_NATIVE_TEST_DIR})

# delegate to gradle's test runner
if(WIN32)
# If we're on windows, symlink the custom op test library somewhere we can see it
set(JAVA_NATIVE_TEST_DIR ${JAVA_OUTPUT_DIR}/native-test)
file(MAKE_DIRECTORY ${JAVA_NATIVE_TEST_DIR})
add_custom_command(TARGET custom_op_library POST_BUILD COMMAND ${CMAKE_COMMAND} -E create_symlink $<TARGET_FILE:custom_op_library> ${JAVA_NATIVE_TEST_DIR}/$<TARGET_FILE_NAME:custom_op_library>)
add_custom_command(TARGET custom_op_library POST_BUILD COMMAND ${CMAKE_COMMAND} -E create_symlink $<TARGET_FILE:custom_op_library>
${JAVA_NATIVE_TEST_DIR}/$<TARGET_FILE_NAME:custom_op_library>)
# On windows ctest requires a test to be an .exe(.com) file
# So there are two options 1) Install Chocolatey and its gradle package
# That package would install gradle.exe shim to its bin so ctest could run gradle.exe
@@ -830,8 +833,15 @@ if (onnxruntime_BUILD_JAVA)
-DREPO_ROOT=${REPO_ROOT}
-P ${CMAKE_CURRENT_SOURCE_DIR}/onnxruntime_java_unittests.cmake)
else()
add_test(NAME onnxruntime4j_test COMMAND ${GRADLE_EXECUTABLE} cmakeCheck -DcmakeBuildDir=${CMAKE_CURRENT_BINARY_DIR}
WORKING_DIRECTORY ${REPO_ROOT}/java)
add_custom_command(TARGET custom_op_library POST_BUILD COMMAND ${CMAKE_COMMAND} -E create_symlink $<TARGET_FILE:custom_op_library>
${JAVA_NATIVE_TEST_DIR}/$<TARGET_LINKER_FILE_NAME:custom_op_library>)
if (onnxruntime_USE_CUDA)
add_test(NAME onnxruntime4j_test COMMAND ${GRADLE_EXECUTABLE} cmakeCheck -DcmakeBuildDir=${CMAKE_CURRENT_BINARY_DIR} -DUSE_CUDA=1
WORKING_DIRECTORY ${REPO_ROOT}/java)
else()
add_test(NAME onnxruntime4j_test COMMAND ${GRADLE_EXECUTABLE} cmakeCheck -DcmakeBuildDir=${CMAKE_CURRENT_BINARY_DIR}
WORKING_DIRECTORY ${REPO_ROOT}/java)
endif()
endif()
set_property(TEST onnxruntime4j_test APPEND PROPERTY DEPENDS onnxruntime4j_jni)
endif()
12 changes: 9 additions & 3 deletions csharp/test/Microsoft.ML.OnnxRuntime.Tests/InferenceTest.cs
@@ -511,6 +511,7 @@ private static Dictionary<string, string> GetSkippedModels()
{ "tf_resnet_v1_50", "result mismatch when Conv BN Fusion is applied" },
{ "tf_resnet_v1_101", "result mismatch when Conv BN Fusion is applied" },
{ "tf_resnet_v1_152", "result mismatch when Conv BN Fusion is applied" },
{ "coreml_Imputer-LogisticRegression_sklearn_load_breast_cancer", "Can't determine model file name" },
{ "mask_rcnn_keras", "Model should be edited to remove the extra outputs" },
};

@@ -531,6 +532,7 @@ private static Dictionary<string, string> GetSkippedModels()
skipModels["tf_nasnet_large"] = "Get preallocated buffer for initializer ConvBnFusion_BN_B_cell_11/beginning_bn/beta:0_331 failed";
skipModels["test_zfnet512"] = "System out of memory";
skipModels["test_bvlc_reference_caffenet"] = "System out of memory";
skipModels["coreml_VGG16_ImageNet"] = "System out of memory";
}

return skipModels;
@@ -1639,9 +1641,13 @@ private static void GetTypeAndWidth(TensorElementType elemType, out Type type, o
}
static NamedOnnxValue LoadTensorFromFilePb(string filename, IReadOnlyDictionary<string, NodeMetadata> nodeMetaDict)
{
var file = File.OpenRead(filename);
var tensor = Onnx.TensorProto.Parser.ParseFrom(file);
file.Close();
//Set buffer size to 4MB
int readBufferSize = 4194304;
Onnx.TensorProto tensor = null;
using (var file = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.Read, readBufferSize))
{
tensor = Onnx.TensorProto.Parser.ParseFrom(file);
}

Type tensorElemType = null;
int width = 0;
3 changes: 2 additions & 1 deletion docs/Versioning.md
@@ -26,7 +26,8 @@ For more details on ONNX Release versions, see [this page](https://github.com/on

| ONNX Runtime release version | ONNX release version | ONNX opset version | ONNX ML opset version | Supported ONNX IR version | [WinML compatibility](https://docs.microsoft.com/en-us/windows/ai/windows-ml/)|
|------------------------------|--------------------|--------------------|----------------------|------------------|------------------|
| 1.3.0 | **1.6** down to 1.2 | 11 | 2 | 6 | -- |
| 1.3.1 | **1.7** down to 1.2 | 12 | 2 | 6 | -- |
| 1.3.0 | **1.7** down to 1.2 | 12 | 2 | 6 | -- |
| 1.2.0<br>1.1.2<br>1.1.1<br>1.1.0 | **1.6** down to 1.2 | 11 | 2 | 6 | -- |
| 1.0.0 | **1.6** down to 1.2 | 11 | 2 | 6 | -- |
| 0.5.0 | **1.5** down to 1.2 | 10 | 1 | 5 | -- |
5 changes: 5 additions & 0 deletions docs/python/README.rst
@@ -8,6 +8,11 @@ For more information on ONNX Runtime, please see `aka.ms/onnxruntime <https://ak
Changes
-------

1.3.1
^^^^^

Release Notes : https://github.com/Microsoft/onnxruntime/releases/tag/v1.3.1

1.3.0
^^^^^

41 changes: 25 additions & 16 deletions java/build.gradle
@@ -12,9 +12,23 @@ allprojects {
}
}

project.group = "ai.onnxruntime"
project.group = "com.microsoft.onnxruntime"
version = rootProject.file('../VERSION_NUMBER').text.trim()

// cmake runs will inform us of the build directory of the current run
def cmakeBuildDir = System.properties['cmakeBuildDir']
def useCUDA = System.properties['USE_CUDA']
def cmakeJavaDir = "${cmakeBuildDir}/java"
def cmakeNativeLibDir = "${cmakeJavaDir}/native-lib"
def cmakeNativeJniDir = "${cmakeJavaDir}/native-jni"
def cmakeNativeTestDir = "${cmakeJavaDir}/native-test"
def cmakeBuildOutputDir = "${cmakeJavaDir}/build"

def mavenUser = System.properties['mavenUser']
def mavenPwd = System.properties['mavenPwd']

def mavenArtifactId = useCUDA != null ? project.name + "_gpu" : project.name

java {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
@@ -29,7 +43,7 @@ jar {
task sourcesJar(type: Jar, dependsOn: classes) {
classifier = "sources"
from sourceSets.main.allSource
into("META-INF/maven/$project.group/$project.name") {
into("META-INF/maven/$project.group/$mavenArtifactId") {
from { generatePomFileForMavenPublication }
rename ".*", "pom.xml"
}
@@ -39,7 +53,7 @@ task sourcesJar(type: Jar, dependsOn: classes) {
task javadocJar(type: Jar, dependsOn: javadoc) {
classifier = "javadoc"
from javadoc.destinationDir
into("META-INF/maven/$project.group/$project.name") {
into("META-INF/maven/$project.group/$mavenArtifactId") {
from { generatePomFileForMavenPublication }
rename ".*", "pom.xml"
}
@@ -61,17 +75,6 @@ spotless {
}
}

// cmake runs will inform us of the build directory of the current run
def cmakeBuildDir = System.properties['cmakeBuildDir']
def cmakeJavaDir = "${cmakeBuildDir}/java"
def cmakeNativeLibDir = "${cmakeJavaDir}/native-lib"
def cmakeNativeJniDir = "${cmakeJavaDir}/native-jni"
def cmakeNativeTestDir = "${cmakeJavaDir}/native-test"
def cmakeBuildOutputDir = "${cmakeJavaDir}/build"

def mavenUser = System.properties['mavenUser']
def mavenPwd = System.properties['mavenPwd']

compileJava {
options.compilerArgs += ["-h", "${project.buildDir}/headers/"]
}
@@ -98,7 +101,12 @@ if (cmakeBuildDir != null) {

// Overwrite jar location
task allJar(type: Jar) {
into("META-INF/maven/$project.group/$project.name") {
manifest {
attributes('Automatic-Module-Name': project.group,
'Implementation-Title': 'onnxruntime',
'Implementation-Version': project.version)
}
into("META-INF/maven/$project.group/$mavenArtifactId") {
from { generatePomFileForMavenPublication }
rename ".*", "pom.xml"
}
@@ -138,6 +146,7 @@ test {
if (cmakeBuildDir != null) {
workingDir cmakeBuildDir
}
systemProperties System.getProperties().subMap(['USE_CUDA'])
testLogging {
events "passed", "skipped", "failed"
showStandardStreams = true
@@ -156,7 +165,7 @@ publishing {
publications {
maven(MavenPublication) {
groupId = project.group
artifactId = project.name
artifactId = mavenArtifactId

from components.java
pom {
2 changes: 1 addition & 1 deletion java/src/main/java/ai/onnxruntime/OnnxRuntime.java
@@ -57,7 +57,7 @@ private static String initOsArch() {
}
String detectedArch = null;
String arch = System.getProperty("os.arch", "generic").toLowerCase(Locale.ENGLISH);
if (arch.indexOf("amd64") == 0) {
if (arch.indexOf("amd64") == 0 || arch.indexOf("x86_64") == 0) {
detectedArch = "x64";
} else if (arch.indexOf("x86") == 0) {
detectedArch = "x86";
32 changes: 29 additions & 3 deletions java/src/test/java/ai/onnxruntime/InferenceTest.java
@@ -573,6 +573,32 @@ public void testMultiThreads() throws OrtException, InterruptedException {
}
}

@Test
public void testCUDA() throws OrtException {
if (System.getProperty("USE_CUDA") != null) {
SqueezeNetTuple tuple = openSessionSqueezeNet(0);
try (OrtEnvironment env = tuple.env;
OrtSession session = tuple.session) {
float[] inputData = tuple.inputData;
float[] expectedOutput = tuple.outputData;
NodeInfo inputMeta = session.getInputInfo().values().iterator().next();
Map<String, OnnxTensor> container = new HashMap<>();
long[] inputShape = ((TensorInfo) inputMeta.getInfo()).shape;
Object tensor = OrtUtil.reshape(inputData, inputShape);
container.put(inputMeta.getName(), OnnxTensor.createTensor(env, tensor));
try (OrtSession.Result result = session.run(container)) {
OnnxValue resultTensor = result.get(0);
float[] resultArray = TestHelpers.flattenFloat(resultTensor.getValue());
assertEquals(expectedOutput.length, resultArray.length);
assertArrayEquals(expectedOutput, resultArray, 1e-6f);
} catch (OrtException e) {
throw new IllegalStateException("Failed to execute a scoring operation", e);
}
OnnxValue.close(container.values());
}
}
}

private static File getTestModelsDir() throws IOException {
// get build directory, append downloaded models location
String cwd = System.getProperty("user.dir");
@@ -881,9 +907,9 @@ public void testLoadCustomLibrary() throws OrtException {
// So we look it up as a classpath resource and resolve it to a real path
customLibraryName = getResourcePath("/custom_op_library.dll").toString();
} else if (osName.contains("mac")) {
customLibraryName = "libcustom_op_library.dylib";
customLibraryName = getResourcePath("/libcustom_op_library.dylib").toString();
} else if (osName.contains("linux")) {
customLibraryName = "./libcustom_op_library.so";
customLibraryName = getResourcePath("/libcustom_op_library.so").toString();
} else {
fail("Unknown os/platform '" + osName + "'");
}
@@ -1399,7 +1425,7 @@ private static TypeWidth getTypeAndWidth(TensorProto.DataType elemType) {
private static StringTensorPair loadTensorFromFilePb(
OrtEnvironment env, File filename, Map<String, NodeInfo> nodeMetaDict)
throws IOException, OrtException {
InputStream is = new BufferedInputStream(new FileInputStream(filename));
InputStream is = new BufferedInputStream(new FileInputStream(filename), 1024 * 1024 * 4);
OnnxMl.TensorProto tensor = OnnxMl.TensorProto.parseFrom(is);
is.close();
