#!groovy
// -*- mode: groovy -*-

// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

// Jenkins pipeline
// See documents at https://jenkins.io/doc/book/pipeline/jenkinsfile/

// Docker env used for testing
// Different images may carry different version tags
// because some of them are more stable than others.
//
// Docker images are maintained by the PMC, cached on Docker Hub
// and remain relatively stable over time.
// Flow for upgrading the docker env (requires committer access):
//
// - Send a PR to upgrade the build scripts in the repo
// - Build the new docker image
// - Tag the docker image with a new version and push it to tvmai (sketched below)
// - Update the version in the Jenkinsfile and send a PR
// - Fix any issues with the new image version in that PR
// - Merge the PR; CI now runs on the new version
// - Tag the new version as the latest
// - Periodically clean up the old versions on the local workers
//
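// A rough sketch of the tag-and-push step (the v0.52 tag and the Dockerfile
// path below are hypothetical, for illustration only):
//
//   docker build -t tvmai/ci-gpu:v0.52 -f docker/Dockerfile.ci_gpu docker/
//   docker push tvmai/ci-gpu:v0.52
//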
ci_lint = "tvmai/ci-lint:v0.51"
ci_gpu = "tvmai/ci-gpu:v0.51"
ci_cpu = "tvmai/ci-cpu:v0.50"
ci_i386 = "tvmai/ci-i386:v0.50"

// tvm libraries
tvm_runtime = "build/libtvm_runtime.so, build/config.cmake"
tvm_lib = "build/libtvm.so, " + tvm_runtime
// LLVM upstream lib
tvm_multilib = "build/libtvm.so, " +
               "build/libvta.so, build/libtvm_topi.so, build/libnnvm_compiler.so, " + tvm_runtime

// command to start a docker container
docker_run = 'docker/bash.sh'
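// used as "${docker_run} <image> <command>", which runs <command> inside the
// given container image, e.g.
//   sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_unittest.sh"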
// timeout in minutes
max_time = 60

// initialize the source code (checkout and submodules)
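// (the submodule update is retried up to 5 times with a 2-minute timeout per
// attempt, presumably to tolerate transient fetch failures on the CI workers)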
def init_git() {
  checkout scm
  retry(5) {
    timeout(time: 2, unit: 'MINUTES') {
      sh 'git submodule update --init'
    }
  }
}

def init_git_win() {
    checkout scm
    retry(5) {
        timeout(time: 2, unit: 'MINUTES') {
            bat 'git submodule update --init'
        }
    }
}

stage("Sanity Check") {
  timeout(time: max_time, unit: 'MINUTES') {
    node('CPU') {
      ws('workspace/tvm/sanity') {
        init_git()
        sh "${docker_run} ${ci_lint} ./tests/scripts/task_lint.sh"
      }
    }
  }
}

// Run make. First try an incremental build from the previous workspace in the
// hope of accelerating compilation. If anything goes wrong, clean the workspace
// and build from scratch.
def make(docker_type, path, make_flag) {
  timeout(time: max_time, unit: 'MINUTES') {
    try {
      sh "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}"
      // always run the cpp unit tests after a build
      sh "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh"
    } catch (exc) {
      echo 'Incremental compilation failed. Falling back to building from scratch'
      sh "${docker_run} ${docker_type} ./tests/scripts/task_clean.sh ${path}"
      sh "${docker_run} ${docker_type} ./tests/scripts/task_build.sh ${path} ${make_flag}"
      sh "${docker_run} ${docker_type} ./tests/scripts/task_cpp_unittest.sh"
    }
  }
}

// pack libraries for later use
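// (files are stashed with Jenkins' stash step so later stages, possibly running
// on other nodes, can unstash them; md5sums are echoed to make mismatches easy to spot)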
def pack_lib(name, libs) {
  sh """
     echo "Packing ${libs} into ${name}"
     echo ${libs} | sed -e 's/,/ /g' | xargs md5sum
     """
  stash includes: libs, name: name
}


// unpack libraries saved earlier by pack_lib
def unpack_lib(name, libs) {
  unstash name
  sh """
     echo "Unpacked ${libs} from ${name}"
     echo ${libs} | sed -e 's/,/ /g' | xargs md5sum
     """
}

stage('Build') {
  parallel 'BUILD: GPU': {
    node('GPUBUILD') {
      ws('workspace/tvm/build-gpu') {
        init_git()
        sh """
           mkdir -p build
           cd build
           cp ../cmake/config.cmake .
           echo set\\(USE_CUBLAS ON\\) >> config.cmake
           echo set\\(USE_CUDNN ON\\) >> config.cmake
           echo set\\(USE_CUDA ON\\) >> config.cmake
           echo set\\(USE_OPENGL ON\\) >> config.cmake
           echo set\\(USE_LLVM llvm-config-6.0\\) >> config.cmake
           echo set\\(USE_NNPACK ON\\) >> config.cmake
           echo set\\(NNPACK_PATH /NNPACK/build/\\) >> config.cmake
           echo set\\(USE_RPC ON\\) >> config.cmake
           echo set\\(USE_SORT ON\\) >> config.cmake
           echo set\\(USE_GRAPH_RUNTIME ON\\) >> config.cmake
           echo set\\(USE_STACKVM_RUNTIME ON\\) >> config.cmake
           echo set\\(USE_GRAPH_RUNTIME_DEBUG ON\\) >> config.cmake
           echo set\\(USE_ANTLR ON\\) >> config.cmake
           echo set\\(USE_BLAS openblas\\) >> config.cmake
           echo set\\(CMAKE_CXX_COMPILER g++\\) >> config.cmake
           echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
           """
        make(ci_gpu, 'build', '-j2')
        pack_lib('gpu', tvm_multilib)
        // compiler test
        sh """
           mkdir -p build2
           cd build2
           cp ../cmake/config.cmake .
           echo set\\(USE_OPENCL ON\\) >> config.cmake
           echo set\\(USE_ROCM ON\\) >> config.cmake
           echo set\\(USE_VULKAN ON\\) >> config.cmake
           echo set\\(USE_GRAPH_RUNTIME_DEBUG ON\\) >> config.cmake
           echo set\\(CMAKE_CXX_COMPILER clang-6.0\\) >> config.cmake
           echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
           """
        make(ci_gpu, 'build2', '-j2')
      }
    }
  },
  'BUILD: CPU': {
    node('CPU') {
      ws('workspace/tvm/build-cpu') {
        init_git()
        sh """
           mkdir -p build
           cd build
           cp ../cmake/config.cmake .
           echo set\\(USE_SORT ON\\) >> config.cmake
           echo set\\(USE_GRAPH_RUNTIME_DEBUG ON\\) >> config.cmake
           echo set\\(USE_LLVM llvm-config-4.0\\) >> config.cmake
           echo set\\(USE_NNPACK ON\\) >> config.cmake
           echo set\\(NNPACK_PATH /NNPACK/build/\\) >> config.cmake
           echo set\\(USE_ANTLR ON\\) >> config.cmake
           echo set\\(CMAKE_CXX_COMPILER g++\\) >> config.cmake
           echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
           """
        make(ci_cpu, 'build', '-j2')
        pack_lib('cpu', tvm_lib)
        timeout(time: max_time, unit: 'MINUTES') {
          sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_vta.sh"
          sh "${docker_run} ${ci_cpu} ./tests/scripts/task_rust.sh"
          sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh"
          sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_unittest.sh"
          sh "${docker_run} ${ci_cpu} ./tests/scripts/task_python_integration.sh"
        }
      }
    }
  },
  'BUILD: i386': {
    node('CPU') {
      ws('workspace/tvm/build-i386') {
        init_git()
        sh """
           mkdir -p build
           cd build
           cp ../cmake/config.cmake .
           echo set\\(USE_SORT ON\\) >> config.cmake
           echo set\\(USE_RPC ON\\) >> config.cmake
           echo set\\(USE_GRAPH_RUNTIME_DEBUG ON\\) >> config.cmake
           echo set\\(USE_LLVM llvm-config-5.0\\) >> config.cmake
           echo set\\(CMAKE_CXX_COMPILER g++\\) >> config.cmake
           echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
           """
        make(ci_i386, 'build', '-j2')
        pack_lib('i386', tvm_multilib)
      }
    }
  }
}

stage('Unit Test') {
  parallel 'python3: GPU': {
    node('GPU') {
      ws('workspace/tvm/ut-python-gpu') {
        init_git()
        unpack_lib('gpu', tvm_multilib)
        timeout(time: max_time, unit: 'MINUTES') {
          sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_unittest.sh"
          sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_integration.sh"
        }
      }
    }
  },
  'python3: i386': {
    node('CPU') {
      ws('workspace/tvm/ut-python-i386') {
        init_git()
        unpack_lib('i386', tvm_multilib)
        timeout(time: max_time, unit: 'MINUTES') {
          sh "${docker_run} ${ci_i386} ./tests/scripts/task_python_unittest.sh"
          sh "${docker_run} ${ci_i386} ./tests/scripts/task_python_integration.sh"
          sh "${docker_run} ${ci_i386} ./tests/scripts/task_python_vta.sh"
        }
      }
    }
  },
  'java: GPU': {
    node('GPU') {
      ws('workspace/tvm/ut-java') {
        init_git()
        unpack_lib('gpu', tvm_multilib)
        timeout(time: max_time, unit: 'MINUTES') {
          sh "${docker_run} ${ci_gpu} ./tests/scripts/task_java_unittest.sh"
        }
      }
    }
  }
}

stage('Integration Test') {
  parallel 'topi: GPU': {
    node('GPU') {
      ws('workspace/tvm/topi-python-gpu') {
        init_git()
        unpack_lib('gpu', tvm_multilib)
        timeout(time: max_time, unit: 'MINUTES') {
          sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_topi.sh"
        }
      }
    }
  },
  'frontend: GPU': {
    node('GPU') {
      ws('workspace/tvm/frontend-python-gpu') {
        init_git()
        unpack_lib('gpu', tvm_multilib)
        timeout(time: max_time, unit: 'MINUTES') {
          sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_frontend.sh"
        }
      }
    }
  },
  'docs: GPU': {
    node('GPU') {
      ws('workspace/tvm/docs-python-gpu') {
        init_git()
        unpack_lib('gpu', tvm_multilib)
        timeout(time: max_time, unit: 'MINUTES') {
          sh "${docker_run} ${ci_gpu} ./tests/scripts/task_python_docs.sh"
        }
        pack_lib('mydocs', 'docs.tgz')
      }
    }
  }
}

stage('Deploy') {
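  // publish the docs packed by the 'docs: GPU' branch above;
  // only the master branch is deployed (extracted into /var/docs)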
  node('doc') {
    ws('workspace/tvm/deploy-docs') {
      if (env.BRANCH_NAME == "master") {
        unpack_lib('mydocs', 'docs.tgz')
        sh "tar xf docs.tgz -C /var/docs"
      }
    }
  }
}