Commit 46924406 by hlu1, committed by Tianqi Chen

[NNPACK] Modernize test (#2868)

parent e23913f5
@@ -2,6 +2,7 @@ import tvm
 import numpy as np
 import scipy.signal
 from tvm.contrib import nnpack
+from nose import SkipTest
 
 
 def test_fully_connected_inference():
@@ -17,13 +18,11 @@ def test_fully_connected_inference():
     def verify(target="llvm"):
         if not tvm.module.enabled(target):
-            print("skip because %s is not enabled..." % target)
-            return
+            raise SkipTest("skip because %s is not enabled..." % target)
         if not tvm.get_global_func("tvm.contrib.nnpack.fully_connected_inference", True):
-            print("skip because extern function is not available")
-            return
+            raise SkipTest("skip because extern function is not available")
         if not nnpack.is_available():
-            return
+            raise SkipTest("skip because nnpack is not available")
         ctx = tvm.cpu(0)
         f = tvm.build(s, [A, B, D, bias], target)
@@ -97,13 +96,11 @@ def test_convolution_inference():
             algorithm=nnpack.ConvolutionAlgorithm.AUTO,
             with_bias=True):
         if not tvm.module.enabled(target):
-            print("skip because %s is not enabled..." % target)
-            return
-        if not tvm.get_global_func("tvm.contrib.nnpack.convolution_inference", True):
-            print("skip because extern function is not available")
-            return
+            raise SkipTest("skip because %s is not enabled..." % target)
+        if not tvm.get_global_func("tvm.contrib.nnpack.fully_connected_inference", True):
+            raise SkipTest("skip because extern function is not available")
         if not nnpack.is_available():
-            return
+            raise SkipTest("skip because nnpack is not available")
         ctx = tvm.cpu(0)
         output = nnpack.convolution_inference(
@@ -161,13 +158,11 @@ def test_convolution_inference_without_weight_transform():
             algorithm=nnpack.ConvolutionAlgorithm.AUTO,
             with_bias=True):
         if not tvm.module.enabled(target):
-            print("skip because %s is not enabled..." % target)
-            return
-        if not tvm.get_global_func("tvm.contrib.nnpack.convolution_inference_without_weight_transform", True):
-            print("skip because extern function is not available")
-            return
+            raise SkipTest("skip because %s is not enabled..." % target)
+        if not tvm.get_global_func("tvm.contrib.nnpack.fully_connected_inference", True):
+            raise SkipTest("skip because extern function is not available")
         if not nnpack.is_available():
-            return
+            raise SkipTest("skip because nnpack is not available")
         ctx = tvm.cpu(0)
         transformed_kernel = nnpack.convolution_inference_weight_transform(
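For reference, the pattern this commit converges on can be factored into a small guard like the sketch below. This is a minimal, hypothetical illustration (the helper name skip_unless_nnpack and the dummy test are not part of the commit), assuming nose and the same tvm / tvm.contrib.nnpack APIs used in the diff; raising nose's SkipTest reports the test as skipped instead of letting it silently pass via print-and-return.

# Minimal sketch (not part of the commit): the skip-guard pattern factored into a helper.
# Assumes nose and the tvm / tvm.contrib.nnpack APIs referenced in the diff above.
import tvm
from tvm.contrib import nnpack
from nose import SkipTest


def skip_unless_nnpack(target, extern_func):
    """Raise SkipTest unless the target, the extern function, and NNPACK are all usable."""
    if not tvm.module.enabled(target):
        raise SkipTest("skip because %s is not enabled..." % target)
    if not tvm.get_global_func(extern_func, True):
        raise SkipTest("skip because extern function is not available")
    if not nnpack.is_available():
        raise SkipTest("skip because nnpack is not available")


def test_example():
    # Guard first: missing prerequisites now show up as "skipped" in the test report
    # rather than as tests that trivially passed.
    skip_unless_nnpack("llvm", "tvm.contrib.nnpack.fully_connected_inference")
    # ... actual test body would go here ...

The same guard could be reused by all three tests touched in the diff, each passing the extern function it depends on.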