Commit 69ad6aed by Andrew Tulloch, committed by Tianqi Chen

[NNPACK] Add check for NNPACK being available (`nnp_initialize()` succeeding) (#2119)

This fixes issues with failing tests on PowerPC.
parent 572c36d2
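
For context, here is a minimal usage sketch (not part of this commit) of how a test or script might combine the existing extern-function check with the new `nnpack.is_available()` helper, mirroring the test changes below; the wrapper function name is illustrative.

```python
# Illustrative sketch only: guard NNPACK-backed code on both the TVM extern
# function being compiled in and nnp_initialize() succeeding at runtime.
import tvm
from tvm.contrib import nnpack

def run_nnpack_smoke_test():  # hypothetical helper name
    if not tvm.get_global_func("tvm.contrib.nnpack.fully_connected_inference", True):
        print("skip because extern function is not available")
        return
    if not nnpack.is_available():
        print("skip because nnp_initialize() did not succeed (e.g. on PowerPC)")
        return
    # ... build and run NNPACK-backed operators here ...
```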
@@ -5,16 +5,11 @@ from .. import api as _api
 from .. import intrin as _intrin
 from .._ffi.function import _init_api
 
-def config(nthreads):
-    """Configure the nnpack library.
-
-    Parameters
-    ----------
-    nthreads : int
-        The threads number of nnpack thread pool, must be a nonnegative.
-
+def is_available():
+    """Check whether NNPACK is available, that is, `nnp_initialize()`
+    returns `nnp_status_success`.
     """
-    _Config(nthreads)
+    return _initialize() == 0
 
 def fully_connected_inference(lhs, rhs, nthreads=1):
     """Create an extern op that compute fully connected of 1D tensor lhs and
@@ -38,9 +38,10 @@ bool NNPackConfig(uint64_t nthreads) {
 }
 
-TVM_REGISTER_GLOBAL("contrib.nnpack._Config")
+TVM_REGISTER_GLOBAL("contrib.nnpack._initialize")
 .set_body([](TVMArgs args, TVMRetValue *ret) {
-    CHECK(NNPackConfig(args[0]));
+    *ret = nnp_initialize();
   });
 
 }  // namespace contrib
 }  // namespace tvm
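
As a quick sanity check, a sketch (assuming TVM was built with NNPACK support, so the global above is registered) of resolving the packed function directly: `is_available()` simply compares its return value against 0, i.e. `nnp_status_success`.

```python
# Sketch: resolve the packed function registered in the C++ hunk above.
# Assumes a TVM build with NNPACK enabled; otherwise get_global_func raises.
import tvm

_initialize = tvm.get_global_func("contrib.nnpack._initialize")
status = _initialize()                    # raw nnp_status value from nnp_initialize()
print("NNPACK available:", status == 0)   # 0 == nnp_status_success
```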
@@ -21,7 +21,9 @@ def test_fully_connected_output():
     if not tvm.get_global_func("tvm.contrib.nnpack.fully_connected_output", True):
         print("skip because extern function is not available")
         return
+    if not nnpack.is_available():
+        return
     ctx = tvm.cpu(0)
     f = tvm.build(s, [A, B, D, bias], target)
     a = tvm.nd.array(np.random.uniform(size=(n, l)).astype(A.dtype), ctx)
@@ -52,7 +54,9 @@ def test_fully_connected_inference():
     if not tvm.get_global_func("tvm.contrib.nnpack.fully_connected_inference", True):
         print("skip because extern function is not available")
         return
+    if not nnpack.is_available():
+        return
     ctx = tvm.cpu(0)
     f = tvm.build(s, [A, B, D, bias], target)
     a = tvm.nd.array(np.random.uniform(size=(l)).astype(A.dtype), ctx)
@@ -130,7 +134,9 @@ def test_convolution_inference():
     if not tvm.get_global_func("tvm.contrib.nnpack.fully_connected_inference", True):
         print("skip because extern function is not available")
         return
+    if not nnpack.is_available():
+        return
     ctx = tvm.cpu(0)
     output = nnpack.convolution_inference(
         data, kernel, bias if with_bias else None,
@@ -192,7 +198,9 @@ def test_convolution_inference_without_weight_transform():
     if not tvm.get_global_func("tvm.contrib.nnpack.fully_connected_inference", True):
         print("skip because extern function is not available")
         return
+    if not nnpack.is_available():
+        return
     ctx = tvm.cpu(0)
     transformed_kernel = nnpack.convolution_inference_weight_transform(
         kernel, algorithm=algorithm)
@@ -249,7 +257,9 @@ def test_convolution_output():
     if not tvm.get_global_func("tvm.contrib.nnpack.fully_connected_inference", True):
         print("skip because extern function is not available")
         return
+    if not nnpack.is_available():
+        return
     ctx = tvm.cpu(0)
     f = tvm.build(s, [data, kernel, bias, output], target)