diff options
author | Ilya Biryukov <ibiryukov@google.com> | 2018-01-02 05:06:01 -0800 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2018-01-02 05:10:30 -0800 |
commit | e415e562d4010a465788228b4c9024e343427a75 (patch) | |
tree | 5f496514a049830f91d2fbdf37ad07194ee26b40 | |
parent | 7b700c515b132c0620e20f12eb032ea3dba397de (diff) |
Remove 'gpu_clang' CI Docker image, use 'gpu' image instead.
The clang is now downloaded using the new TF_DOWNLOAD_CLANG option at build
time.
Also removes GPU-specific env vars from 'tools/ci_build/builds/configured',
they are now passed directly to 'docker run' instead.
PiperOrigin-RevId: 180536813
-rw-r--r-- | tensorflow/tools/ci_build/Dockerfile.gpu_clang | 36 | ||||
-rwxr-xr-x | tensorflow/tools/ci_build/builds/configured | 9 | ||||
-rwxr-xr-x | tensorflow/tools/ci_build/ci_build.sh | 6 | ||||
-rwxr-xr-x | tensorflow/tools/ci_build/ci_parameterized_build.sh | 35 | ||||
-rwxr-xr-x | tensorflow/tools/ci_build/install/build_and_install_clang.sh | 49 | ||||
-rwxr-xr-x | tensorflow/tools/ci_build/install/install_cmake_for_clang.sh | 19 |
6 files changed, 31 insertions, 123 deletions
diff --git a/tensorflow/tools/ci_build/Dockerfile.gpu_clang b/tensorflow/tools/ci_build/Dockerfile.gpu_clang
deleted file mode 100644
index 438a7ec532..0000000000
--- a/tensorflow/tools/ci_build/Dockerfile.gpu_clang
+++ /dev/null
@@ -1,36 +0,0 @@
-FROM nvidia/cuda:9.0-cudnn7-devel-ubuntu16.04
-
-LABEL maintainer="Ilya Biryukov <ibiryukov@google.com>"
-
-# In the Ubuntu 16.04 images, cudnn is placed in system paths. Move them to
-# /usr/local/cuda
-RUN cp /usr/include/cudnn.h /usr/local/cuda/include
-RUN cp /usr/lib/x86_64-linux-gnu/libcudnn* /usr/local/cuda/lib64
-
-# Copy and run the install scripts.
-COPY install/*.sh /install/
-RUN /install/install_bootstrap_deb_packages.sh
-RUN add-apt-repository -y ppa:openjdk-r/ppa
-
-# LLVM requires cmake version 3.4.3, but ppa:george-edison55/cmake-3.x only
-# provides version 3.2.2.
-# So we skip it in `install_deb_packages.sh`, and later install it from
-# https://cmake.org in `install_cmake_for_clang.sh`.
-RUN /install/install_deb_packages.sh --without_cmake
-RUN /install/install_pip_packages.sh
-RUN /install/install_bazel.sh
-RUN /install/install_golang.sh
-
-# Install cmake and build clang
-RUN /install/install_cmake_for_clang.sh
-RUN /install/build_and_install_clang.sh
-
-# Set up the master bazelrc configuration file.
-COPY install/.bazelrc /etc/bazel.bazelrc
-ENV LD_LIBRARY_PATH /usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH
-
-# Configure the build for our CUDA configuration.
-ENV TF_NEED_CUDA 1
-ENV TF_CUDA_CLANG 1
-ENV CLANG_CUDA_COMPILER_PATH /usr/local/bin/clang
-ENV TF_CUDA_COMPUTE_CAPABILITIES 3.0
diff --git a/tensorflow/tools/ci_build/builds/configured b/tensorflow/tools/ci_build/builds/configured
index de1e354170..868a3beac5 100755
--- a/tensorflow/tools/ci_build/builds/configured
+++ b/tensorflow/tools/ci_build/builds/configured
@@ -32,15 +32,6 @@ COMMAND=("$@")
 export CI_BUILD_PYTHON="${CI_BUILD_PYTHON:-python}"
 export PYTHON_BIN_PATH="${PYTHON_BIN_PATH:-$(which ${CI_BUILD_PYTHON})}"
 
-if [ "${CONTAINER_TYPE}" == "gpu" ]; then
-  export TF_NEED_CUDA=1
-elif [ "${CONTAINER_TYPE}" == "gpu_clang" ]; then
-  export TF_NEED_CUDA=1
-  export TF_CUDA_CLANG=1
-  export CLANG_CUDA_COMPILER_PATH="/usr/local/bin/clang"
-else
-  export TF_NEED_CUDA=0
-fi
 
 pushd "${CI_TENSORFLOW_SUBMODULE_PATH:-.}"
 yes "" | $PYTHON_BIN_PATH configure.py
diff --git a/tensorflow/tools/ci_build/ci_build.sh b/tensorflow/tools/ci_build/ci_build.sh
index 5164a25012..072dd6ab99 100755
--- a/tensorflow/tools/ci_build/ci_build.sh
+++ b/tensorflow/tools/ci_build/ci_build.sh
@@ -18,7 +18,7 @@
 # <COMMAND>
 #
 # CONTAINER_TYPE: Type of the docker container used the run the build:
-#                 e.g., (cpu | gpu | gpu_clang | android | tensorboard)
+#                 e.g., (cpu | gpu | android | tensorboard)
 #
 # DOCKERFILE_PATH: (Optional) Path to the Dockerfile used for docker build.
 #                  If this optional value is not supplied (via the
@@ -79,7 +79,7 @@ if [[ "${CONTAINER_TYPE}" == "cmake" ]]; then
 fi
 
 # Use nvidia-docker if the container is GPU.
-if [[ "${CONTAINER_TYPE}" == "gpu" ]] || [[ "${CONTAINER_TYPE}" == "gpu_clang" ]]; then
+if [[ "${CONTAINER_TYPE}" == "gpu" ]]; then
   DOCKER_BINARY="nvidia-docker"
 else
   DOCKER_BINARY="docker"
@@ -99,7 +99,7 @@ BUILD_TAG="${BUILD_TAG:-tf_ci}"
 
 # Add extra params for cuda devices and libraries for GPU container.
 # And clear them if we are not building for GPU.
-if [[ "${CONTAINER_TYPE}" != "gpu" ]] && [[ "${CONTAINER_TYPE}" != "gpu_clang" ]]; then
+if [[ "${CONTAINER_TYPE}" != "gpu" ]]; then
   GPU_EXTRA_PARAMS=""
 fi
 
diff --git a/tensorflow/tools/ci_build/ci_parameterized_build.sh b/tensorflow/tools/ci_build/ci_parameterized_build.sh
index 2217b110e3..9d23b508aa 100755
--- a/tensorflow/tools/ci_build/ci_parameterized_build.sh
+++ b/tensorflow/tools/ci_build/ci_parameterized_build.sh
@@ -18,7 +18,7 @@
 # ci_parameterized_build.sh
 #
 # The script obeys the following required environment variables:
-#   TF_BUILD_CONTAINER_TYPE:  (CPU | GPU | GPU_CLANG | ANDROID | ANDROID_FULL)
+#   TF_BUILD_CONTAINER_TYPE:  (CPU | GPU | ANDROID | ANDROID_FULL)
 #   TF_BUILD_PYTHON_VERSION:  (PYTHON2 | PYTHON3 | PYTHON3.5)
 #   TF_BUILD_IS_PIP:          (NO_PIP | PIP | BOTH)
 #
@@ -88,6 +88,9 @@
 #   TF_NIGHTLY:
 #                      If this run is being used to build the tf_nightly pip
 #                      packages.
+#   TF_CUDA_CLANG:
+#                      If set to 1, builds and runs cuda_clang configuration.
+#                      Only available inside GPU containers.
 #
 # This script can be used by Jenkins parameterized / matrix builds.
 
@@ -246,16 +249,34 @@ if [[ "$(uname -s)" == "Darwin" ]]; then
   OPT_FLAG="${OPT_FLAG} ${NO_DOCKER_OPT_FLAG}"
 fi
 
+# In DO_DOCKER mode, appends environment variable to docker's run invocation.
+# Otherwise, exports the corresponding variable.
+function set_script_variable() {
+  local VAR="$1"
+  local VALUE="$2"
+  if [[ $DO_DOCKER == "1" ]]; then
+    TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS="${TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS} -e $VAR=$VALUE"
+  else
+    export $VAR="$VALUE"
+  fi
+}
+
+
 # Process container type
 if [[ ${CTYPE} == "cpu" ]] || [[ ${CTYPE} == "debian.jessie.cpu" ]]; then
   :
-elif [[ ${CTYPE} == "gpu" ]] || [[ ${CTYPE} == "gpu_clang" ]]; then
-  if [[ ${CTYPE} == "gpu" ]]; then
-    OPT_FLAG="${OPT_FLAG} --config=cuda"
-  else # ${CTYPE} == "gpu_clang"
+elif [[ ${CTYPE} == "gpu" ]]; then
+  set_script_variable TF_NEED_CUDA 1
+
+  if [[ $TF_CUDA_CLANG == "1" ]]; then
     OPT_FLAG="${OPT_FLAG} --config=cuda_clang"
-  fi
+    set_script_variable TF_CUDA_CLANG 1
+    # For cuda_clang we download `clang` while building.
+    set_script_variable TF_DOWNLOAD_CLANG 1
+  else
+    OPT_FLAG="${OPT_FLAG} --config=cuda"
+  fi
 
   # Attempt to determine CUDA capability version automatically and use it if
   # CUDA capability version is not specified by the environment variables.
@@ -407,7 +428,7 @@ if [[ ${TF_BUILD_IS_PIP} == "no_pip" ]] ||
     # CPU only command, fully parallel.
     NO_PIP_MAIN_CMD="${MAIN_CMD} ${BAZEL_CMD} ${OPT_FLAG} ${EXTRA_ARGS} -- "\
 "${BAZEL_TARGET}"
-  elif [[ ${CTYPE} == "gpu" ]] || [[ ${CTYPE} == "gpu_clang" ]]; then
+  elif [[ ${CTYPE} == "gpu" ]]; then
     # GPU only command, run as many jobs as the GPU count only.
     NO_PIP_MAIN_CMD="${BAZEL_CMD} ${OPT_FLAG} "\
 "--local_test_jobs=${TF_GPU_COUNT} "\
diff --git a/tensorflow/tools/ci_build/install/build_and_install_clang.sh b/tensorflow/tools/ci_build/install/build_and_install_clang.sh
deleted file mode 100755
index 9966434477..0000000000
--- a/tensorflow/tools/ci_build/install/build_and_install_clang.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env bash
-# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-set -ex
-
-LLVM_SVN_REVISION="314281"
-CLANG_TMP_DIR=/tmp/clang-build
-
-mkdir "$CLANG_TMP_DIR"
-
-pushd "$CLANG_TMP_DIR"
-
-# Checkout llvm+clang
-svn co -q -r$LLVM_SVN_REVISION http://llvm.org/svn/llvm-project/llvm/trunk "$CLANG_TMP_DIR/llvm"
-svn co -q -r$LLVM_SVN_REVISION http://llvm.org/svn/llvm-project/cfe/trunk "$CLANG_TMP_DIR/llvm/tools/clang"
-
-# Build 1st stage. Compile clang with system compiler
-mkdir "$CLANG_TMP_DIR/build-1"
-cd "$CLANG_TMP_DIR/build-1"
-cmake -G"Unix Makefiles" -DCMAKE_BUILD_TYPE=Release "$CLANG_TMP_DIR/llvm"
-make -j `nproc` clang clang-headers
-
-# Build 2nd stage. Compile clang with clang built in stage 1
-mkdir "$CLANG_TMP_DIR/build-2"
-cd "$CLANG_TMP_DIR/build-2"
-
-CC="$CLANG_TMP_DIR/build-1/bin/clang" \
-CXX="$CLANG_TMP_DIR/build-1/bin/clang++" \
-cmake -G"Unix Makefiles" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local "$CLANG_TMP_DIR/llvm"
-
-make -j `nproc` install-clang install-clang-headers
-
-popd
-
-# Cleanup
-rm -rf "$CLANG_TMP_DIR"
diff --git a/tensorflow/tools/ci_build/install/install_cmake_for_clang.sh b/tensorflow/tools/ci_build/install/install_cmake_for_clang.sh
deleted file mode 100755
index 3e626a69ab..0000000000
--- a/tensorflow/tools/ci_build/install/install_cmake_for_clang.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-
-CMAKE_URL="https://cmake.org/files/v3.7/cmake-3.7.2-Linux-x86_64.tar.gz"
-
-wget -O - "${CMAKE_URL}" | tar xzf - -C /usr/local --strip-components=1