Commit fbeac5e2 authored by Yizhi Liu, committed by masahi

[tutorial][benchmark] nnvm -> relay (#4368)

* [tutorial] nnvm -> relay

* use relay workload

* delete mobilenet_v2 option
parent 331f6fd0
````diff
@@ -35,9 +35,9 @@ In general, the performance should also be good.
 It is recommended that you run tuning by yourself if you have your customized network or devices.
 Please follow the tutorial for
-[NVIDIA GPU](https://docs.tvm.ai/tutorials/autotvm/tune_nnvm_cuda.html),
-[ARM CPU](https://docs.tvm.ai/tutorials/autotvm/tune_nnvm_arm.html),
-[Mobile GPU](https://docs.tvm.ai/tutorials/autotvm/tune_nnvm_mobile_gpu.html).
+[NVIDIA GPU](https://docs.tvm.ai/tutorials/autotvm/tune_conv2d_cuda.html),
+[ARM CPU](https://docs.tvm.ai/tutorials/autotvm/tune_relay_arm.html),
+[Mobile GPU](https://docs.tvm.ai/tutorials/autotvm/tune_relay_mobile_gpu.html).
 
 ### NVIDIA GPU
@@ -67,7 +67,7 @@ python3 -m tvm.exec.rpc_tracker
 2. Register devices to the tracker
 * For Linux device
-  * Build tvm runtime on your device [Help](https://docs.tvm.ai/tutorials/nnvm/deploy_model_on_rasp.html#build-tvm-runtime-on-device)
+  * Build tvm runtime on your device [Help](https://docs.tvm.ai/tutorials/frontend/deploy_model_on_rasp.html#build-tvm-runtime-on-device)
   * Register your device to tracker by
   ```bash
   python3 -m tvm.exec.rpc_server --tracker=[HOST_IP]:9190 --key=[DEVICE_KEY]
   ```
````
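For orientation, the benchmark scripts reach a device registered this way by asking the RPC tracker for it. A minimal sketch of that connection (the host, port, and device key below are placeholders, not values taken from this diff):

```python
from tvm import rpc

# Connect to the tracker started with `python3 -m tvm.exec.rpc_tracker`
# and request a device registered under the given key.
tracker = rpc.connect_tracker('0.0.0.0', 9190)
remote = tracker.request('rk3399')
```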
```diff
@@ -24,8 +24,7 @@ import numpy as np
 import tvm
 from tvm.contrib.util import tempdir
 import tvm.contrib.graph_runtime as runtime
-import nnvm.compiler
-import nnvm.testing
+from tvm import relay
 
 from util import get_network, print_progress
@@ -39,10 +38,9 @@ def evaluate_network(network, target, target_host, repeat):
     net, params, input_shape, output_shape = get_network(network, batch_size=1)
 
     print_progress("%-20s building..." % network)
-    with nnvm.compiler.build_config(opt_level=3):
-        graph, lib, params = nnvm.compiler.build(
-            net, target=target, target_host=target_host,
-            shape={'data': input_shape}, params=params, dtype=dtype)
+    with relay.build_config(opt_level=3):
+        graph, lib, params = relay.build(
+            net, target=target, target_host=target_host, params=params)
 
     tmp = tempdir()
     if 'android' in str(target):
@@ -76,7 +74,7 @@ if __name__ == "__main__":
     parser.add_argument("--network", type=str, choices=
                         ['resnet-18', 'resnet-34', 'resnet-50',
                          'vgg-16', 'vgg-19', 'densenet-121', 'inception_v3',
-                         'mobilenet', 'mobilenet_v2', 'squeezenet_v1.0', 'squeezenet_v1.1'],
+                         'mobilenet', 'squeezenet_v1.0', 'squeezenet_v1.1'],
                         help='The name of neural network')
     parser.add_argument("--model", type=str, choices=
                         ['rk3399', 'mate10', 'mate10pro', 'p20', 'p20pro',
```
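The hunks above are truncated right after the build call. For context, the way this ARM benchmark script then consumes `graph`, `lib`, and `params` is untouched by the commit; a rough sketch of that deployment path follows (`network` and `remote` are assumed to be defined by the surrounding script, and the exported file name is illustrative):

```python
import tvm
from tvm.contrib.util import tempdir
import tvm.contrib.graph_runtime as runtime

# Export the compiled library, upload it over RPC, and load it on the device.
tmp = tempdir()
filename = "%s.tar" % network
lib.export_library(tmp.relpath(filename))
remote.upload(tmp.relpath(filename))
rlib = remote.load_module(filename)

# Instantiate the graph runtime on the remote device with the built graph.
ctx = remote.cpu(0)
module = runtime.create(graph, rlib, ctx)
```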
```diff
@@ -23,10 +23,8 @@ import threading
 import numpy as np
 
 import tvm
-from tvm.contrib.util import tempdir
 import tvm.contrib.graph_runtime as runtime
-import nnvm.compiler
-import nnvm.testing
+from tvm import relay
 
 from util import get_network
@@ -34,9 +32,8 @@ from util import get_network
 def benchmark(network, target):
     net, params, input_shape, output_shape = get_network(network, batch_size=1)
-    with nnvm.compiler.build_config(opt_level=3):
-        graph, lib, params = nnvm.compiler.build(
-            net, target=target, shape={'data': input_shape}, params=params, dtype=dtype)
+    with relay.build_config(opt_level=3):
+        graph, lib, params = relay.build(net, target=target, params=params)
 
     # create runtime
     ctx = tvm.context(str(target), 0)
@@ -56,7 +53,7 @@ if __name__ == "__main__":
     parser.add_argument("--network", type=str, choices=
                         ['resnet-18', 'resnet-34', 'resnet-50',
                          'vgg-16', 'vgg-19', 'densenet-121', 'inception_v3',
-                         'mobilenet', 'mobilenet_v2', 'squeezenet_v1.0', 'squeezenet_v1.1'],
+                         'mobilenet', 'squeezenet_v1.0', 'squeezenet_v1.1'],
                         help='The name of neural network')
     parser.add_argument("--model", type=str,
                         choices=['1080ti', 'titanx', 'tx2', 'gfx900'], default='1080ti',
```
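The GPU script, by contrast, runs everything locally: the lines following the "# create runtime" comment feed a random input and time the model with the graph runtime's `time_evaluator`. Roughly, under the assumption that `target`, `graph`, `lib`, `params`, `input_shape`, and `network` come from the surrounding script and that the repeat count and dtype are only illustrative:

```python
import numpy as np
import tvm
import tvm.contrib.graph_runtime as runtime

# Create the graph runtime on the local device and feed a random input tensor.
ctx = tvm.context(str(target), 0)
module = runtime.create(graph, lib, ctx)
data_tvm = tvm.nd.array((np.random.uniform(size=input_shape)).astype('float32'))
module.set_input('data', data_tvm)
module.set_input(**params)

# Time the "run" entry point; ftimer().results is a list of per-repeat seconds.
ftimer = module.module.time_evaluator("run", ctx, number=1, repeat=20)
prof_res = np.array(ftimer().results) * 1000  # convert to milliseconds
print("%-20s %-19s (%s)" % (network, "%.2f ms" % np.mean(prof_res),
                            "%.2f ms" % np.std(prof_res)))
```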
```diff
@@ -24,8 +24,7 @@ import numpy as np
 import tvm
 from tvm.contrib.util import tempdir
 import tvm.contrib.graph_runtime as runtime
-import nnvm.compiler
-import nnvm.testing
+from tvm import relay
 
 from util import get_network, print_progress
@@ -38,10 +37,9 @@ def evaluate_network(network, target, target_host, dtype, repeat):
     net, params, input_shape, output_shape = get_network(network, batch_size=1, dtype=dtype)
 
     print_progress("%-20s building..." % network)
-    with nnvm.compiler.build_config(opt_level=3):
-        graph, lib, params = nnvm.compiler.build(
-            net, target=target, target_host=target_host,
-            shape={'data': input_shape}, params=params, dtype=dtype)
+    with relay.build_config(opt_level=3):
+        graph, lib, params = relay.build(
+            net, target=target, target_host=target_host, params=params)
 
     tmp = tempdir()
     if 'android' in str(target) or 'android' in str(target_host):
@@ -75,7 +73,7 @@ if __name__ == "__main__":
     parser.add_argument("--network", type=str, choices=
                         ['resnet-18', 'resnet-34', 'resnet-50',
                          'vgg-16', 'vgg-19', 'densenet-121', 'inception_v3',
-                         'mobilenet', 'mobilenet_v2', 'squeezenet_v1.0', 'squeezenet_v1.1'],
+                         'mobilenet', 'squeezenet_v1.0', 'squeezenet_v1.1'],
                         help='The name of neural network')
     parser.add_argument("--model", type=str, choices=
                         ['rk3399'], default='rk3399',
```
```diff
@@ -17,7 +17,8 @@
 """Utility for benchmark"""
 
 import sys
-import nnvm
+from tvm import relay
+from tvm.relay import testing
 
 def get_network(name, batch_size, dtype='float32'):
     """Get the symbol definition and random weight of a network
@@ -46,38 +47,30 @@ def get_network(name, batch_size, dtype='float32'):
     output_shape = (batch_size, 1000)
 
     if name == 'mobilenet':
-        net, params = nnvm.testing.mobilenet.get_workload(batch_size=batch_size, dtype=dtype)
-    elif name == 'mobilenet_v2':
-        net, params = nnvm.testing.mobilenet_v2.get_workload(batch_size=batch_size, dtype=dtype)
+        net, params = testing.mobilenet.get_workload(batch_size=batch_size, dtype=dtype)
     elif name == 'inception_v3':
         input_shape = (batch_size, 3, 299, 299)
-        net, params = nnvm.testing.inception_v3.get_workload(batch_size=batch_size, dtype=dtype)
+        net, params = testing.inception_v3.get_workload(batch_size=batch_size, dtype=dtype)
     elif "resnet" in name:
         n_layer = int(name.split('-')[1])
-        net, params = nnvm.testing.resnet.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
+        net, params = testing.resnet.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
     elif "vgg" in name:
         n_layer = int(name.split('-')[1])
-        net, params = nnvm.testing.vgg.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
+        net, params = testing.vgg.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
     elif "densenet" in name:
         n_layer = int(name.split('-')[1])
-        net, params = nnvm.testing.densenet.get_workload(num_layers=n_layer, batch_size=batch_size, dtype=dtype)
+        net, params = testing.densenet.get_workload(densenet_size=n_layer, batch_size=batch_size, dtype=dtype)
     elif "squeezenet" in name:
         version = name.split("_v")[1]
-        net, params = nnvm.testing.squeezenet.get_workload(batch_size=batch_size, version=version, dtype=dtype)
-    elif name == 'custom':
-        # an example for custom network
-        from nnvm.testing import utils
-        net = nnvm.sym.Variable('data')
-        net = nnvm.sym.conv2d(net, channels=4, kernel_size=(3,3), padding=(1,1))
-        net = nnvm.sym.flatten(net)
-        net = nnvm.sym.dense(net, units=1000)
-        net, params = utils.create_workload(net, batch_size, (3, 224, 224), dtype=dtype)
+        net, params = testing.squeezenet.get_workload(batch_size=batch_size, version=version, dtype=dtype)
     elif name == 'mxnet':
         # an example for mxnet model
         from mxnet.gluon.model_zoo.vision import get_model
         block = get_model('resnet18_v1', pretrained=True)
-        net, params = nnvm.frontend.from_mxnet(block)
-        net = nnvm.sym.softmax(net)
+        net, params = relay.frontend.from_mxnet(block, shape={'data': input_shape}, dtype=dtype)
+        net = net["main"]
+        net = relay.Function(net.params, relay.nn.softmax(net.body), None, net.type_params, net.attrs)
+        net = relay.Module.from_expr(net)
     else:
         raise ValueError("Unsupported network: " + name)
```
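With this change `get_network` hands back a Relay workload plus its parameter dict, so callers build it directly instead of passing the separate shape/dtype mapping the NNVM API required. A hypothetical caller (the network name and target string are chosen only for illustration):

```python
from tvm import relay
from util import get_network

# Any of the names accepted above works here; 'resnet-18' is just an example.
net, params, input_shape, output_shape = get_network('resnet-18', batch_size=1)

# The Relay workload already carries its input shape and dtype, so the build
# call only needs the target and the parameters.
with relay.build_config(opt_level=3):
    graph, lib, params = relay.build(net, target='llvm', params=params)
```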