#!/usr/bin/env bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Build TensorFlow Docker images for remote build
#
# Usage:
#   remote_docker_build.sh -c # docker image for cpu build
#   remote_docker_build.sh -g # docker image for gpu build
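#
# Example invocations (illustrative; the path to LLVM's build_docker_image.sh
# below is an assumption and depends on where the LLVM sources are checked out):
#   remote_docker_build.sh -c -n   # CPU image, skip the push to GCR
#   remote_docker_build.sh -g -f /path/to/llvm/utils/docker/build_docker_image.sh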


function main {
  publish=true
  cpu_build=false
  gpu_build=false

  script_dir=$(dirname "$(readlink -f "$0")")
  cd "$script_dir"

  trap cleanup_on_finish EXIT

  set_script_flags "$@"

  build_base_image

  build_tf_image

  if [ "$publish" = true ] ; then
    publish_tf_image
  fi
}


function set_script_flags {
  OPTIND=1 # Reset for getopts, just in case.
  while getopts "cf:ghn" opt; do
    case "$opt" in
      c)
        cpu_build=true
        ;;
      f)
        base_image_build_script=$OPTARG
        ;;
      g)
        gpu_build=true
        ;;
      h)
        print_usage
        ;;
      n)
        publish=false
        ;;
      *)
        print_usage "ERROR: unknown option"
        ;;
    esac
  done
  [[ "$cpu_build" = true ]] || [[ "$gpu_build" = true ]] || print_usage "ERROR: must specify at least one build type: cpu or gpu"
  # A GPU build drives the LLVM Docker build script passed via -f, so require it.
  [[ "$gpu_build" != true ]] || [[ -n "${base_image_build_script:-}" ]] || print_usage "ERROR: -f is required for a GPU build"
}


function print_usage {
  echo "Usage: $(basename "$0") -c | -g [options]"
  echo "  -c build image for the CPU build (base image debian8-clang)"
  echo "  -g build image for the GPU build (base image nvidia-cuda-clang)"
  echo "  -f the script that builds the nvidia-cuda-clang base image (required for -g)"
  echo "[option] is one of"
  echo "  -n do not publish the locally-built image to GCR;"
  echo "     by default the build process publishes the image to GCR"
  echo "  -h display this help message"
  if [[ -n $1 ]]; then
    echo "$1"
  fi
  exit 1
}


# Build the nvidia-cuda-clang base image for the GPU image.
# For the CPU image the `clang-debian8` image from Cloud Launcher is used directly:
# https://console.cloud.google.com/launcher/details/google/clang-debian8?filter=category:developer-tools&q=clang
function build_base_image {
  if [ "$gpu_build" = true ] ; then
    base_image="nvidia-cuda"
    # Run a 2-stage build for the clang base image, see
    # https://github.com/llvm-mirror/llvm/blob/master/docs/Docker.rst
    "$base_image_build_script" \
      --source $base_image \
      --branch branches/google/stable \
      --docker-repository ${base_image}-clang --docker-tag "latest" \
      -p clang -i stage2-install-clang -i stage2-install-clang-headers \
      -- \
      -DLLVM_TARGETS_TO_BUILD=Native -DCMAKE_BUILD_TYPE=Release \
      -DBOOTSTRAP_CMAKE_BUILD_TYPE=Release \
      -DCLANG_ENABLE_BOOTSTRAP=ON \
      -DCLANG_BOOTSTRAP_TARGETS="install-clang;install-clang-headers"
  fi
}
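
# A quick way to verify the base image after this step (a sketch; assumes the
# Docker CLI is available on the build machine):
#   docker images nvidia-cuda-clang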


function build_tf_image {
  if [ "$cpu_build" = true ] ; then
    dockerfile="Dockerfile.cpu"
    tf_image="tensorflow-remote"
  else
    dockerfile="Dockerfile.gpu"
    tf_image="tensorflow-remote-gpu"
  fi

  docker build -f "$dockerfile" -t "$tf_image" .
}
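
# Optional local smoke test before publishing (a sketch; it assumes clang is on
# the image's PATH and that no entrypoint overrides the command, which may not
# hold for every Dockerfile revision):
#   docker run --rm "$tf_image" clang --version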


function publish_tf_image {
  gcr_tf_image="gcr.io/tensorflow/${tf_image}"
  docker tag "$tf_image" "$gcr_tf_image"
  gcloud docker -- push "$gcr_tf_image"
}
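
# To confirm the push one could list the tags in GCR (a sketch; requires gcloud
# credentials with access to the gcr.io/tensorflow registry):
#   gcloud container images list-tags "$gcr_tf_image"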


function cleanup_on_finish {
  cd "$script_dir"
  [[ -z "${llvm_docker_src:-}" ]] || rm -rf "$llvm_docker_src"
  # Only remove the clang images when a GPU base image was actually built.
  [[ -z "${base_image:-}" ]] || docker rmi -f "${base_image}-clang" "${base_image}-clang-build"
}


main "$@"