Commit 93843536 by Tianqi Chen, committed by Haichen Shen

Update legacy places from nnvm to relay. (#4535)

* Update legacy places from nnvm to relay.

This PR prepares the current mainline for removing the NNVM compiler dependency.

* remove legacy stage
parent c44b7bf1
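Most hunks in this diff follow one pattern: NNVM's frontend, compile, and serialize calls are swapped for their Relay equivalents. Below is a minimal sketch of the new flow, assembled from the same calls that appear in the hunks that follow (TVM 0.6-era API); the output file names are illustrative only.

```python
# Minimal sketch of the Relay flow this commit switches to (TVM 0.6-era API).
from tvm import relay
from tvm.relay import testing

# relay.testing provides small reference workloads; get_workload returns a
# Relay module plus randomly initialized parameters (it replaces nnvm.testing).
net, params = testing.resnet.get_workload(batch_size=1)

# relay.build replaces nnvm.compiler.build. The input shapes live in the
# module itself, so there is no separate shape= argument any more.
with relay.build_config(opt_level=3):
    graph, lib, params = relay.build(net, 'llvm --system-lib', params=params)

# graph is already a JSON string (nnvm returned a Graph object with .json()),
# and relay.save_param_dict replaces nnvm.compiler.save_param_dict.
lib.save('model.o')
with open('graph.json', 'w') as f_graph:
    f_graph.write(graph)
with open('params.bin', 'wb') as f_params:
    f_params.write(relay.save_param_dict(params))
```

The visible API differences are that `relay.build` takes a Relay module instead of an NNVM symbol plus a `shape=` dict, the returned graph is already a JSON string (no `.json()` call), and `relay.save_param_dict` replaces `nnvm.compiler.save_param_dict`.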
@@ -310,17 +310,6 @@ stage('Integration Test') {
       }
     }
   },
-  'legacy: GPU': {
-    node('GPU') {
-      ws(per_exec_ws("tvm/legacy-python-gpu")) {
-        init_git()
-        unpack_lib('gpu', tvm_multilib)
-        timeout(time: max_time, unit: 'MINUTES') {
-          sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_legacy.sh"
-        }
-      }
-    }
-  },
   'docs: GPU': {
     node('GPU') {
       ws(per_exec_ws("tvm/docs-python-gpu")) {
...
@@ -34,8 +34,8 @@ def get_network(name, batch_size, dtype='float32'):
     Returns
     -------
-    net: nnvm.symbol
-        The NNVM symbol of network definition
+    net: relay.Module
+        The relay function of network definition
     params: dict
         The random parameters for benchmark
     input_shape: tuple
...
@@ -16,15 +16,15 @@
 # under the License.
 # Makefile Example to bundle TVM modules.
 TVM_ROOT=$(shell cd ../..; pwd)
-NNVM_PATH=nnvm
 DMLC_CORE=${TVM_ROOT}/3rdparty/dmlc-core
-PKG_CFLAGS = -std=c++14 -Oz -fPIC\
+PKG_CFLAGS = -std=c++14 -O2 -fPIC\
 	-I${TVM_ROOT}/include\
 	-I${DMLC_CORE}/include\
-	-I${TVM_ROOT}/3rdparty/dlpack/include\
+	-I${TVM_ROOT}/3rdparty/dlpack/include
-PKG_LDFLAGS = -L${TVM_ROOT}/build
+PKG_LDFLAGS = -pthread
 build_dir := build
@@ -33,7 +33,7 @@ test: $(build_dir)/demo $(build_dir)/bundle.so
 $(build_dir)/demo: demo.cc
 	@mkdir -p $(@D)
-	$(CXX) $(PKG_CFLAGS) -o $@ $^
+	$(CXX) $(PKG_CFLAGS) -o $@ $^ -ldl
 # Serialize our graph.json file.
 $(build_dir)/graph.json.cc: $(build_dir)/graph.json
@@ -44,13 +44,13 @@ $(build_dir)/params.bin.cc: $(build_dir)/params.bin
 	xxd -i $^ > $@
 $(build_dir)/model.o $(build_dir)/graph.json $(build_dir)/params.bin: build_model.py
-	python $< -o $(build_dir)
+	python3 $< -o $(build_dir)
 # Build our bundle against the serialized bundle.cc API, the runtime.cc API, and
 # the serialized graph.json and params.bin
 $(build_dir)/bundle.so: bundle.cc runtime.cc $(build_dir)/model.o $(build_dir)/graph.json.cc $(build_dir)/params.bin.cc
 	@mkdir -p $(@D)
-	$(CXX) $(PKG_CFLAGS) -fvisibility=hidden -o $@ $^ $(PKG_LDFLAGS) -shared
+	$(CXX) -shared $(PKG_CFLAGS) -fvisibility=hidden -o $@ $^ $(PKG_LDFLAGS)
 clean:
 	rm -r $(build_dir)
...
@@ -18,8 +18,7 @@
 import argparse
 import os
-import nnvm.compiler
-import nnvm.testing
+from tvm import relay
 import tvm
 import logging
@@ -34,22 +33,24 @@ def main():
     dshape = (1, 3, 224, 224)
     from mxnet.gluon.model_zoo.vision import get_model
     block = get_model('mobilenet0.25', pretrained=True)
-    net, params = nnvm.frontend.from_mxnet(block)
-    net = nnvm.sym.softmax(net)
+    shape_dict = {'data': dshape}
+    mod, params = relay.frontend.from_mxnet(block, shape_dict)
+    func = mod["main"]
+    func = relay.Function(func.params, relay.nn.softmax(func.body), None, func.type_params, func.attrs)
-    with nnvm.compiler.build_config(opt_level=3):
-        graph, lib, params = nnvm.compiler.build(
-            net, 'llvm --system-lib', shape={'data': dshape}, params=params)
-    print(graph.symbol().debug_str())
+    with relay.build_config(opt_level=3):
+        graph, lib, params = relay.build(
+            func, 'llvm --system-lib', params=params)
     build_dir = os.path.abspath(opts.out_dir)
     if not os.path.isdir(build_dir):
         os.makedirs(build_dir)
     lib.save(os.path.join(build_dir, 'model.o'))
     with open(os.path.join(build_dir, 'graph.json'), 'w') as f_graph_json:
-        f_graph_json.write(graph.json())
+        f_graph_json.write(graph)
     with open(os.path.join(build_dir, 'params.bin'), 'wb') as f_params:
-        f_params.write(nnvm.compiler.save_param_dict(params))
+        f_params.write(relay.save_param_dict(params))
 if __name__ == '__main__':
...
@@ -6,9 +6,9 @@
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -26,7 +26,9 @@ extern unsigned int build_graph_json_len;
 extern unsigned char build_params_bin[];
 extern unsigned int build_params_bin_len;
-#define TVM_BUNDLE_FUNCTION __attribute__((visibility("default"))) extern "C"
+#define TVM_BUNDLE_FUNCTION __attribute__((visibility("default")))
+extern "C" {
 TVM_BUNDLE_FUNCTION void *tvm_runtime_create() {
   const std::string json_data(&build_graph_json[0],
@@ -64,3 +66,4 @@ TVM_BUNDLE_FUNCTION void tvm_runtime_get_output(void *handle, int index,
   reinterpret_cast<tvm::runtime::Module *>(handle)->GetFunction("get_output")(
       index, reinterpret_cast<DLTensor *>(tensor));
 }
+}
@@ -25,7 +25,7 @@
 #include "../../src/runtime/c_runtime_api.cc"
 #include "../../src/runtime/cpu_device_api.cc"
 #include "../../src/runtime/workspace_pool.cc"
-#include "../../src/runtime/module_util.cc"
+#include "../../src/runtime/library_module.cc"
 #include "../../src/runtime/module.cc"
 #include "../../src/runtime/registry.cc"
 #include "../../src/runtime/file_util.cc"
@@ -33,5 +33,5 @@
 #include "../../src/runtime/thread_pool.cc"
 #include "../../src/runtime/ndarray.cc"
 #include "../../src/runtime/object.cc"
-#include "../../src/runtime/system_lib_module.cc"
+#include "../../src/runtime/system_library.cc"
 #include "../../src/runtime/graph/graph_runtime.cc"
...
@@ -17,7 +17,6 @@
 # Makefile Example to deploy TVM modules.
 TVM_ROOT=$(shell cd ../..; pwd)
-NNVM_PATH=nnvm
 DMLC_CORE=${TVM_ROOT}/3rdparty/dmlc-core
 PKG_CFLAGS = -std=c++11 -O2 -fPIC\
@@ -25,7 +24,7 @@ PKG_CFLAGS = -std=c++11 -O2 -fPIC\
 	-I${DMLC_CORE}/include\
 	-I${TVM_ROOT}/3rdparty/dlpack/include\
-PKG_LDFLAGS = -L${TVM_ROOT}/build -ldl -lpthread
+PKG_LDFLAGS = -L${TVM_ROOT}/build -ldl -pthread
 .PHONY: clean all
@@ -39,7 +38,7 @@ lib/libtvm_runtime_pack.o: tvm_runtime_pack.cc
 # The code library built by TVM
 lib/test_addone_sys.o: prepare_test_libs.py
 	@mkdir -p $(@D)
-	python prepare_test_libs.py
+	python3 prepare_test_libs.py
 # Deploy using the all in one TVM package library
 lib/cpp_deploy_pack: cpp_deploy.cc lib/test_addone_sys.o lib/libtvm_runtime_pack.o
...
@@ -19,7 +19,6 @@
 ROCM_PATH=/opt/rocm
 TVM_ROOT=$(shell cd ../..; pwd)
-NNVM_PATH=nnvm
 DMLC_CORE=${TVM_ROOT}/3rdparty/dmlc-core
 PKG_CFLAGS = -std=c++11 -O2 -fPIC\
...
@@ -49,7 +49,7 @@ mkdir build && cd build
 cmake .. -DUSE_LLVM=ON -DUSE_SGX=/opt/sgxsdk -DRUST_SGX_SDK=/opt/rust-sgx-sdk
 make -j4
 cd ..
-pip install -e python -e topi/python -e nnvm/python
+pip install -e python -e topi/python
 cd apps/sgx
 ```
...
@@ -20,8 +20,8 @@ import argparse
 import os
 from os import path as osp
-import nnvm.compiler
-import nnvm.testing
+from tvm import relay
+from tvm.relay import testing
 import tvm
@@ -30,14 +30,13 @@ def main():
     parser.add_argument('-o', '--out-dir', default='.')
     opts = parser.parse_args()
-    # from tutorials/nnvm_quick_start.py
     dshape = (1, 3, 224, 224)
-    net, params = nnvm.testing.resnet.get_workload(
+    net, params = relay.testing.resnet.get_workload(
         layers=18, batch_size=dshape[0], image_shape=dshape[1:])
-    with nnvm.compiler.build_config(opt_level=3):
-        graph, lib, params = nnvm.compiler.build(
-            net, 'llvm --system-lib', shape={'data': dshape}, params=params)
+    with relay.build_config(opt_level=3):
+        graph, lib, params = relay.build(
+            net, 'llvm --system-lib', params=params)
     build_dir = osp.abspath(opts.out_dir)
     if not osp.isdir(build_dir):
@@ -45,9 +44,9 @@ def main():
     lib.save(osp.join(build_dir, 'model.bc'))
     with open(osp.join(build_dir, 'graph.json'), 'w') as f_graph_json:
-        f_graph_json.write(graph.json())
+        f_graph_json.write(graph)
     with open(osp.join(build_dir, 'params.bin'), 'wb') as f_params:
-        f_params.write(nnvm.compiler.save_param_dict(params))
+        f_params.write(relay.save_param_dict(params))
 if __name__ == '__main__':
...
@@ -22,7 +22,7 @@ members = [
     "runtime",
     "runtime/tests/test_tvm_basic",
     "runtime/tests/test_tvm_dso",
-    "runtime/tests/test_nnvm",
+    "runtime/tests/test_nn",
     "frontend",
     "frontend/tests/basics",
     "frontend/tests/callback",
...
@@ -23,7 +23,7 @@ description = "Rust frontend support for TVM"
 repository = "https://github.com/apache/incubator-tvm"
 homepage = "https://github.com/apache/incubator-tvm"
 readme = "README.md"
-keywords = ["rust", "tvm", "nnvm"]
+keywords = ["rust", "tvm"]
 categories = ["api-bindings", "science"]
 authors = ["TVM Contributors"]
 edition = "2018"
...
@@ -35,14 +35,12 @@ Here's a Python snippet for downloading and building a pretrained Resnet18 via A
 ```python
 block = get_model('resnet18_v1', pretrained=True)
-sym, params = nnvm.frontend.from_mxnet(block)
-# add the softmax layer for prediction
-net = nnvm.sym.softmax(sym)
+sym, params = relay.frontend.from_mxnet(block, shape_dict)
 # compile the model
-with nnvm.compiler.build_config(opt_level=opt_level):
-    graph, lib, params = nnvm.compiler.build(
-        net, target, shape={"data": data_shape}, params=params)
+with relay.build_config(opt_level=opt_level):
+    graph, lib, params = relay.build(
+        net, target, params=params)
 # same the model artifacts
 lib.save(os.path.join(target_dir, "deploy_lib.o"))
 cc.create_shared(os.path.join(target_dir, "deploy_lib.so"),
@@ -51,7 +49,7 @@ cc.create_shared(os.path.join(target_dir, "deploy_lib.so"),
 with open(os.path.join(target_dir, "deploy_graph.json"), "w") as fo:
     fo.write(graph.json())
 with open(os.path.join(target_dir,"deploy_param.params"), "wb") as fo:
-    fo.write(nnvm.compiler.save_param_dict(params))
+    fo.write(relay.save_param_dict(params))
 ```
 Now, we need to input the artifacts to create and run the *Graph Runtime* to detect our input cat image
@@ -113,7 +111,7 @@ and the model correctly predicts the input image as **tiger cat**.
 Please follow TVM [installations](https://docs.tvm.ai/install/index.html), `export TVM_HOME=/path/to/tvm` and add `libtvm_runtime` to your `LD_LIBRARY_PATH`.
-*Note:* To run the end-to-end examples and tests, `tvm`, `nnvm` and `topi` need to be added to your `PYTHONPATH` or it's automatic via an Anaconda environment when it is installed individually.
+*Note:* To run the end-to-end examples and tests, `tvm` and `topi` need to be added to your `PYTHONPATH` or it's automatic via an Anaconda environment when it is installed individually.
 ## Supported TVM Functionalities
...
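The README snippet above stops at "create and run the *Graph Runtime*" and continues in Rust, which is not shown in this diff. For orientation, here is a hedged Python sketch of that consumption step; the artifact file names are taken from the snippet above, and the cat-image preprocessing is replaced by random data.

```python
import numpy as np
import tvm
from tvm.contrib import graph_runtime

# Load the three artifacts saved by the snippet above (names assumed from it).
lib = tvm.module.load("deploy_lib.so")
graph_json = open("deploy_graph.json").read()
param_bytes = bytearray(open("deploy_param.params", "rb").read())

# Create the graph runtime on CPU and feed it the serialized parameters.
ctx = tvm.cpu(0)
module = graph_runtime.create(graph_json, lib, ctx)
module.load_params(param_bytes)

# A random tensor stands in for the preprocessed cat image.
data = np.random.uniform(size=(1, 3, 224, 224)).astype("float32")
module.set_input("data", data)
module.run()
print(module.get_output(0).asnumpy().argmax())
```

The Rust runtime crate reads the same `graph.json`/`params` format; its `load_param_dict` parser appears later in this diff.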
@@ -18,11 +18,11 @@
 ## Resnet example
 This end-to-end example shows how to:
-* build `Resnet 18` with `tvm` and `nnvm` from Python
+* build `Resnet 18` with `tvm` from Python
 * use the provided Rust frontend API to test for an input image
-To run the example with pretrained resnet weights, first `tvm`, `nnvm` and `mxnet` must be installed for the python build. To install mxnet for cpu, run `pip install mxnet`
-and to install `tvm` and `nnvm` with `llvm` follow the [TVM installation guide](https://docs.tvm.ai/install/index.html).
+To run the example with pretrained resnet weights, first `tvm` and `mxnet` must be installed for the python build. To install mxnet for cpu, run `pip install mxnet`
+and to install `tvm` with `llvm` follow the [TVM installation guide](https://docs.tvm.ai/install/index.html).
 * **Build the example**: `cargo build
...
@@ -22,7 +22,7 @@ license = "Apache-2.0"
 description = "A static TVM runtime"
 repository = "https://github.com/apache/incubator-tvm"
 readme = "README.md"
-keywords = ["tvm", "nnvm"]
+keywords = ["tvm"]
 categories = ["api-bindings", "science"]
 authors = ["TVM Contributors"]
 edition = "2018"
...
@@ -440,7 +440,7 @@ named!(
     )
 );
-/// Loads a param dict saved using `nnvm.compiler.save_param_dict`.
+/// Loads a param dict saved using `relay.save_param_dict`.
 pub fn load_param_dict(bytes: &[u8]) -> Result<HashMap<String, Tensor>, GraphFormatError> {
     if let Ok((remaining_bytes, param_dict)) = parse_param_dict(bytes) {
         if remaining_bytes.len() == 0 {
...
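The updated doc comment above ties this Rust parser to `relay.save_param_dict`. A small sketch of the producing side in Python, assuming `relay.load_param_dict` is available as the Python reader counterpart; the tensor name is illustrative.

```python
import numpy as np
import tvm
from tvm import relay

# Build a tiny param dict and serialize it in the byte format that the Rust
# load_param_dict above parses.
params = {"dense_weight": tvm.nd.array(np.zeros((32, 16), dtype="float32"))}
param_bytes = relay.save_param_dict(params)

with open("graph.params", "wb") as f_params:
    f_params.write(param_bytes)

# Round-trip on the Python side as a sanity check.
loaded = relay.load_param_dict(bytearray(param_bytes))
assert loaded["dense_weight"].shape == (32, 16)
```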
@@ -296,7 +296,7 @@ pub(crate) fn sgx_join_threads() {
     ocall_packed!("__sgx_thread_group_join__", 0);
 }
-// @see https://github.com/apache/incubator-tvm/issues/988 for information on why this function is used.
+// @see issue 988 for information on why this function is used.
 #[no_mangle]
 pub extern "C" fn TVMBackendParallelBarrier(_task_id: usize, penv: *const TVMParallelGroupEnv) {
     let barrier: &Arc<Barrier> = unsafe { &*((*penv).sync_handle as *const Arc<Barrier>) };
...
@@ -16,56 +16,37 @@
 # specific language governing permissions and limitations
 # under the License.
-"""Builds a simple NNVM graph for testing."""
+"""Builds a simple graph for testing."""
 from os import path as osp
-import nnvm
-from nnvm import sym
-from nnvm.compiler import graph_util
-from nnvm.testing import init
 import numpy as np
 import tvm
+from tvm import relay
+from tvm.relay import testing
 CWD = osp.dirname(osp.abspath(osp.expanduser(__file__)))
 def _get_model(dshape):
-    data = sym.Variable('data', shape=dshape)
-    fc1 = sym.dense(data, units=dshape[-1]*2, use_bias=True)
-    left, right = sym.split(fc1, indices_or_sections=2, axis=1)
-    return sym.Group(((left + 1), (right - 1)))
+    data = relay.var('data', shape=dshape)
+    fc = relay.nn.dense(data, relay.var("dense_weight"), units=dshape[-1]*2)
+    fc = relay.nn.bias_add(data, relay.var("dense_bias"))
+    left, right = relay.split(fc, indices_or_sections=2, axis=1)
+    one = relay.const(1, dtype="float32")
+    return relay.Tuple([(left + one), (right - one), fc])
-def _init_params(graph, input_shapes, initializer=init.Xavier(), seed=10):
-    if isinstance(graph, sym.Symbol):
-        graph = nnvm.graph.create(graph)
-    ishapes, _ = graph_util.infer_shape(graph, **input_shapes)
-    param_shapes = dict(zip(graph.index.input_names, ishapes))
-    np.random.seed(seed)
-    params = {}
-    for param, shape in param_shapes.items():
-        if param in {'data', 'label'} or not shape:
-            continue
-        init_value = np.empty(shape).astype('float32')
-        initializer(param, init_value)
-        params[param] = tvm.nd.array(init_value)
-    return params
 def main():
     dshape = (32, 16)
     net = _get_model(dshape)
-    ishape_dict = {'data': dshape}
-    params = _init_params(net, ishape_dict)
-    graph, lib, params = nnvm.compiler.build(net, 'llvm',
-                                             shape=ishape_dict,
-                                             params=params,
-                                             dtype='float32')
+    mod, params = testing.create_workload(net)
+    graph, lib, params = relay.build(
+        mod, 'llvm', params=params)
     with open(osp.join(CWD, 'graph.json'), 'w') as f_resnet:
-        f_resnet.write(graph.json())
+        f_resnet.write(graph)
     with open(osp.join(CWD, 'graph.params'), 'wb') as f_params:
-        f_params.write(nnvm.compiler.save_param_dict(params))
+        f_params.write(relay.save_param_dict(params))
 if __name__ == '__main__':
     main()
@@ -16,7 +16,7 @@
 # under the License.
 [package]
-name = "test-nnvm"
+name = "test-nn"
 version = "0.0.0"
 license = "Apache-2.0"
 authors = ["TVM Contributors"]
...
@@ -16,67 +16,39 @@
 # specific language governing permissions and limitations
 # under the License.
-"""Builds a simple NNVM graph for testing."""
+"""Builds a simple graph for testing."""
 from os import path as osp
 import sys
-import nnvm
-from nnvm import sym
-from nnvm.compiler import graph_util
-from nnvm.testing import init
 import numpy as np
 import tvm
+from tvm import relay
+from tvm.relay import testing
 def _get_model(dshape):
-    data = sym.Variable('data', shape=dshape)
-    fc = sym.dense(data, units=dshape[-1]*2, use_bias=True)
-    left, right = sym.split(fc, indices_or_sections=2, axis=1)
-    return sym.Group(((left + 1), (right - 1), fc))
+    data = relay.var('data', shape=dshape)
+    fc = relay.nn.dense(data, relay.var("dense_weight"), units=dshape[-1]*2)
+    fc = relay.nn.bias_add(data, relay.var("dense_bias"))
+    left, right = relay.split(fc, indices_or_sections=2, axis=1)
+    one = relay.const(1, dtype="float32")
+    return relay.Tuple([(left + one), (right - one), fc])
-def _init_params(graph, input_shapes, initializer=init.Xavier(), seed=10):
-    if isinstance(graph, sym.Symbol):
-        graph = nnvm.graph.create(graph)
-    ishapes, _ = graph_util.infer_shape(graph, **input_shapes)
-    param_shapes = dict(zip(graph.index.input_names, ishapes))
-    np.random.seed(seed)
-    params = {}
-    for param, shape in param_shapes.items():
-        if param in {'data', 'label'} or not shape:
-            continue
-        init_value = np.arange(np.product(shape), 0, -1).reshape(*shape).astype('float32')
-        if param.endswith('_bias'):
-            params[param] = tvm.nd.array(init_value)
-            continue
-        init_value = np.empty(shape).astype('float32')
-        initializer(param, init_value)
-        # init_value /= init_value.sum() + 1e-10
-        params[param] = tvm.nd.array(init_value)
-    return params
 def main():
     dshape = (4, 8)
     net = _get_model(dshape)
-    ishape_dict = {'data': dshape}
-    params = _init_params(net, ishape_dict)
-    graph, lib, params = nnvm.compiler.build(net, 'llvm --system-lib',
-                                             shape=ishape_dict,
-                                             params=params,
-                                             dtype='float32')
+    mod, params = testing.create_workload(net)
+    graph, lib, params = relay.build(
+        mod, 'llvm --system-lib', params=params)
     out_dir = sys.argv[1]
     lib.save(osp.join(sys.argv[1], 'graph.o'))
     with open(osp.join(out_dir, 'graph.json'), 'w') as f_resnet:
-        f_resnet.write(graph.json())
+        f_resnet.write(graph)
     with open(osp.join(out_dir, 'graph.params'), 'wb') as f_params:
-        f_params.write(nnvm.compiler.save_param_dict(params))
+        f_params.write(relay.save_param_dict(params))
 if __name__ == '__main__':
     main()
@@ -23,7 +23,7 @@ import topi.testing
 import tvm
 from tvm import relay
 from tvm.contrib import graph_runtime
-from nnvm.testing.config import ctx_list
+from tvm.relay.testing.config import ctx_list
 import onnx
 from onnx import helper, TensorProto, mapping
 import scipy
...
@@ -510,7 +510,6 @@ def test_op_stack():
 # test an op with a tuple output
 # adapted from test_split_infer_type in test_op_level3
-# and test_split in nnvm's test_top_level1
 def test_split():
     def verify_split(shape, indices_or_sections, axis=0):
         x = np.random.normal(size=shape).astype('float32')
@@ -529,7 +528,6 @@ def test_split():
 # ensure we can generate code for batch_norm, since it requires simplify_inference
-# adapted from test_batchnorm in nnvm's test_top_level1
 def test_batch_norm():
     def verify_batch_norm(shapes):
         data = [np.absolute(np.random.normal(size=shape).astype('float32'))
...
@@ -22,7 +22,7 @@ set -u
 export LD_LIBRARY_PATH="lib:${LD_LIBRARY_PATH:-}"
 tvm_root="$(git rev-parse --show-toplevel)"
-export PYTHONPATH="$tvm_root/python":"$tvm_root/nnvm/python":"$tvm_root/topi/python"
+export PYTHONPATH="$tvm_root/python":"$tvm_root/topi/python"
 # Golang tests
 make -C golang tests
@@ -42,9 +42,6 @@ python3 -m pytest -v tests/python/frontend/onnx
 echo "Running relay CoreML frontend test..."
 python3 -m pytest -v tests/python/frontend/coreml
-echo "Running nnvm to relay frontend test..."
-python3 -m pytest -v tests/python/frontend/nnvm_to_relay
 echo "Running relay Tensorflow frontend test..."
 python3 -m pytest -v tests/python/frontend/tensorflow
...
@@ -21,8 +21,8 @@ set -u
 export TVM_HOME="$(git rev-parse --show-toplevel)"
-export LD_LIBRARY_PATH="$TVM_HOME/lib:$TVM_HOME/build:$TVM_HOME/nnvm:${LD_LIBRARY_PATH:-}"
-export PYTHONPATH="$TVM_HOME/python":"$TVM_HOME/nnvm/python":"$TVM_HOME/topi/python"
+export LD_LIBRARY_PATH="$TVM_HOME/lib:$TVM_HOME/build:${LD_LIBRARY_PATH:-}"
+export PYTHONPATH="$TVM_HOME/python":"$TVM_HOME/topi/python"
 export RUST_DIR="$TVM_HOME/rust"
 cd $RUST_DIR
@@ -52,8 +52,8 @@ cd tests/test_tvm_dso
 cargo run
 cd -
-# run NNVM graph test
-cd tests/test_nnvm
+# run nn graph test
+cd tests/test_nn
 cargo run
 cd -
...