Commit 6cdc18e2 by Tianqi Chen, committed by GitHub

[DOCKER] Start docker infrastructure (#1402)

parent 05f4362e
@@ -11,7 +11,7 @@ tvm_multilib = "build/libtvm.so, " +
"build/libtvm_topi.so, build/libnnvm_compiler.so, " + tvm_runtime
// command to start a docker container
-docker_run = 'tests/ci_build/ci_build.sh'
+docker_run = 'docker/build.sh'
// timeout in minutes
max_time = 60
@@ -39,7 +39,7 @@ stage("Sanity Check") {
node('linux') {
ws('workspace/tvm/sanity') {
init_git()
sh "${docker_run} lint ./tests/scripts/task_lint.sh"
sh "${docker_run} ci_lint ./tests/scripts/task_lint.sh"
}
}
}
@@ -100,7 +100,7 @@ stage('Build') {
echo set\\(CMAKE_CXX_COMPILER g++\\) >> config.cmake
echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
"""
-make('gpu', 'build', '-j2')
+make('ci_gpu', 'build', '-j2')
pack_lib('gpu', tvm_multilib)
// compiler test
sh """
@@ -113,7 +113,7 @@ stage('Build') {
echo set\\(CMAKE_CXX_COMPILER clang-6.0\\) >> config.cmake
echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
"""
-make('gpu', 'build2', '-j2')
+make('ci_gpu', 'build2', '-j2')
}
}
},
@@ -130,10 +130,10 @@ stage('Build') {
echo set\\(CMAKE_CXX_COMPILER g++\\) >> config.cmake
echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
"""
-make('cpu', 'build', '-j2')
+make('ci_cpu', 'build', '-j2')
pack_lib('cpu', tvm_lib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} cpu ./tests/scripts/task_cpp_unittest.sh"
sh "${docker_run} ci_cpu ./tests/scripts/task_cpp_unittest.sh"
}
}
}
@@ -152,7 +152,7 @@ stage('Build') {
echo set\\(CMAKE_CXX_COMPILER g++\\) >> config.cmake
echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
"""
-make('i386', 'build', '-j2')
+make('ci_i386', 'build', '-j2')
pack_lib('i386', tvm_multilib)
}
}
@@ -166,7 +166,7 @@ stage('Unit Test') {
init_git()
unpack_lib('gpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} gpu ./tests/scripts/task_python_unittest.sh"
sh "${docker_run} ci_gpu ./tests/scripts/task_python_unittest.sh"
}
}
}
@@ -177,8 +177,8 @@ stage('Unit Test') {
init_git()
unpack_lib('i386', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} i386 ./tests/scripts/task_python_unittest.sh"
sh "${docker_run} i386 ./tests/scripts/task_python_integration.sh"
sh "${docker_run} ci_i386 ./tests/scripts/task_python_unittest.sh"
sh "${docker_run} ci_i386 ./tests/scripts/task_python_integration.sh"
}
}
}
@@ -189,7 +189,7 @@ stage('Unit Test') {
init_git()
unpack_lib('gpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} gpu ./tests/scripts/task_java_unittest.sh"
sh "${docker_run} ci_gpu ./tests/scripts/task_java_unittest.sh"
}
}
}
@@ -203,10 +203,10 @@ stage('Integration Test') {
init_git()
unpack_lib('gpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} gpu ./tests/scripts/task_python_integration.sh"
sh "${docker_run} gpu ./tests/scripts/task_python_topi.sh"
sh "${docker_run} gpu ./tests/scripts/task_cpp_topi.sh"
sh "${docker_run} gpu ./tests/scripts/task_python_nnvm.sh"
sh "${docker_run} ci_gpu ./tests/scripts/task_python_integration.sh"
sh "${docker_run} ci_gpu ./tests/scripts/task_python_topi.sh"
sh "${docker_run} ci_gpu ./tests/scripts/task_cpp_topi.sh"
sh "${docker_run} ci_gpu ./tests/scripts/task_python_nnvm.sh"
}
}
}
@@ -217,7 +217,7 @@ stage('Integration Test') {
init_git()
unpack_lib('gpu', tvm_multilib)
timeout(time: max_time, unit: 'MINUTES') {
sh "${docker_run} gpu ./tests/scripts/task_python_docs.sh"
sh "${docker_run} ci_gpu ./tests/scripts/task_python_docs.sh"
}
pack_lib('mydocs', 'docs.tgz')
}
# TVM Docker
This directory contains TVM's docker infrastructure.
We use docker to quickly provide environments that can be
used to build tvm in various settings.
To run locally, first install
[docker](https://docs.docker.com/engine/installation/) and
[nvidia-docker](https://github.com/NVIDIA/nvidia-docker/).
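A quick way to sanity-check the setup (assuming a local NVIDIA GPU; the
`nvidia/cuda` image below is only a generic smoke test and is not part of this repo):
```bash
# check that docker itself is installed
docker --version
# check that the GPU is visible from inside a container via nvidia-docker
nvidia-docker run --rm nvidia/cuda nvidia-smi
```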
## Use Local Build Script
Each Dockerfile defines a different environment.
We use [`build.sh`](./build.sh) to build the images and run commands inside them.
To use the docker images, we can run the following command
at the root of the project.
```bash
./docker/build.sh image_name [command]
```
Here `image_name` corresponds to the image defined in
`Dockerfile.image_name`.
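For example, the sanity-check stage in the Jenkinsfile above runs through the
`ci_lint` image (per the naming rule, defined in `Dockerfile.ci_lint`):
```bash
# build/reuse the ci_lint image and run the lint task inside it,
# i.e. the same command the CI sanity-check stage uses
./docker/build.sh ci_lint ./tests/scripts/task_lint.sh
```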
You can also start an interactive session by typing
```bash
./docker/build.sh image_name -it bash
```
The build command maps the tvm root to `/workspace/` inside the container
and runs the command as the same user as the one invoking docker.
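Under the hood this roughly corresponds to a `docker run` with a bind mount and the
caller's UID/GID forwarded in; a simplified sketch of what the script does (see the
`build.sh` diff further down for the actual invocation):
```bash
# simplified sketch, not the actual script: mount the tvm checkout at /workspace
# and forward the caller's uid/gid so that files created inside the container
# are owned by the invoking user; the image name follows ${BUILD_TAG}.${CONTAINER_TYPE}
docker run --rm --pid=host \
    -v "$(pwd)":/workspace \
    -e "CI_BUILD_UID=$(id -u)" \
    -e "CI_BUILD_GID=$(id -g)" \
    tvm.ci_gpu \
    bash docker/with_the_same_user ./tests/scripts/task_python_unittest.sh
```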
Here are some common examples of performing CI tasks.
- lint the python code
```bash
./docker/build.sh ci_lint make pylint
```
- build the code with CUDA support
```bash
./docker/build.sh ci_gpu make -j$(nproc)
```
- run the python unit tests
```bash
./docker/build.sh ci_gpu ./tests/scripts/task_python_unittest.sh
```
- build the documentation. The results will be available at `docs/_build/html`.
```bash
./docker/build.sh ci_gpu make -C docs html
```
@@ -71,8 +71,8 @@ function upsearch () {
# Set up WORKSPACE and BUILD_TAG. Jenkins will set them for you or we pick
# reasonable defaults if you run it outside of Jenkins.
WORKSPACE="${WORKSPACE:-${SCRIPT_DIR}/../../}"
BUILD_TAG="${BUILD_TAG:-tvm-ci}"
WORKSPACE="${WORKSPACE:-${SCRIPT_DIR}/../}"
BUILD_TAG="${BUILD_TAG:-tvm}"
# Determine the docker image name
DOCKER_IMG_NAME="${BUILD_TAG}.${CONTAINER_TYPE}"
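For example, with the new defaults, requesting the `ci_gpu` container resolves to the
following image name (an illustrative expansion, not part of the script; `CONTAINER_TYPE`
is assumed to be the image name passed to `build.sh`):
```bash
# BUILD_TAG defaults to "tvm"; CONTAINER_TYPE is the requested image name
BUILD_TAG="${BUILD_TAG:-tvm}"
CONTAINER_TYPE=ci_gpu
DOCKER_IMG_NAME="${BUILD_TAG}.${CONTAINER_TYPE}"
echo "${DOCKER_IMG_NAME}"   # prints: tvm.ci_gpu
```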
@@ -122,5 +122,5 @@ ${DOCKER_BINARY} run --rm --pid=host \
-e "CI_BUILD_GID=$(id -g)" \
${CI_DOCKER_EXTRA_PARAMS[@]} \
${DOCKER_IMG_NAME} \
-bash tests/ci_build/with_the_same_user \
+bash docker/with_the_same_user \
${COMMAND[@]}
# CI Build Scripts
This directory contains the files and setup instructions to run all tests.
## Run locally
To run locally, we need to first install
[docker](https://docs.docker.com/engine/installation/) and
[nvidia-docker](https://github.com/NVIDIA/nvidia-docker/wiki).
Then we can run the tasks defined in the [Jenkinsfile](../../Jenkinsfile) by
using [`ci_build.sh`](./ci_build.sh). For example:
- lint the python code
```bash
./ci_build.sh lint make pylint
```
- build the code with CUDA support
```bash
./ci_build.sh gpu make -j$(nproc)
```
- run the python unit tests
```bash
./ci_build.sh gpu PYTHONPATH=./python/ nosetests --with-timer --verbose tests/python/unittest
```
- build the documentation. The results will be available at `docs/_build/html`.
```bash
tests/ci_build/ci_build.sh gpu make -C docs html
```