Github user foxish commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20154#discussion_r159754082
  
    --- Diff: sbin/build-push-docker-images.sh ---
    @@ -19,51 +19,131 @@
     # This script builds and pushes docker images when run from a release of 
Spark
     # with Kubernetes support.
     
    -declare -A path=( [spark-driver]=kubernetes/dockerfiles/driver/Dockerfile \
    -                  
[spark-executor]=kubernetes/dockerfiles/executor/Dockerfile \
    -                  
[spark-init]=kubernetes/dockerfiles/init-container/Dockerfile )
    +function error {
    +  echo "$@" 1>&2
    +  exit 1
    +}
    +
    +# Detect whether this is a git clone or a Spark distribution and adjust 
paths
    +# accordingly.
    +if [ -z "${SPARK_HOME}" ]; then
    +  SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
    +fi
    +. "${SPARK_HOME}/bin/load-spark-env.sh"
    +
    +if [ -f "$SPARK_HOME/RELEASE" ]; then
    +  IMG_PATH="kubernetes/dockerfiles"
    +  SPARK_JARS="jars"
    +else
    +  IMG_PATH="resource-managers/kubernetes/docker/src/main/dockerfiles"
    +  SPARK_JARS="assembly/target/scala-$SPARK_SCALA_VERSION/jars"
    +fi
    +
    +if [ ! -d "$IMG_PATH" ]; then
    +  error "Cannot find docker images. This script must be run from a 
runnable distribution of Apache Spark."
    +fi
    +
    +declare -A path=( [spark-driver]="$IMG_PATH/driver/Dockerfile" \
    +                  [spark-executor]="$IMG_PATH/executor/Dockerfile" \
    +                  [spark-init]="$IMG_PATH/init-container/Dockerfile" )
    +
    +function image_ref {
    +  local image="$1"
    +  local add_repo="${2:-1}"
    +  if [ $add_repo = 1 ] && [ -n "$REPO" ]; then
    +    image="$REPO/$image"
    +  fi
    +  if [ -n "$TAG" ]; then
    +    image="$image:$TAG"
    +  fi
    +  echo "$image"
    +}
     
     function build {
    -  docker build -t spark-base -f 
kubernetes/dockerfiles/spark-base/Dockerfile .
    +  local base_image="$(image_ref spark-base 0)"
    +  docker build --build-arg "spark_jars=$SPARK_JARS" \
    +    --build-arg "img_path=$IMG_PATH" \
    +    -t "$base_image" \
    +    -f "$IMG_PATH/spark-base/Dockerfile" .
       for image in "${!path[@]}"; do
    -    docker build -t ${REPO}/$image:${TAG} -f ${path[$image]} .
    +    docker build --build-arg "base_image=$base_image" -t "$(image_ref 
$image)" -f ${path[$image]} .
       done
     }
     
    -
     function push {
       for image in "${!path[@]}"; do
    -    docker push ${REPO}/$image:${TAG}
    +    docker push "$(image_ref $image)"
       done
     }
     
     function usage {
    -  echo "This script must be run from a runnable distribution of Apache 
Spark."
    -  echo "Usage: ./sbin/build-push-docker-images.sh -r <repo> -t <tag> build"
    -  echo "       ./sbin/build-push-docker-images.sh -r <repo> -t <tag> push"
    -  echo "for example: ./sbin/build-push-docker-images.sh -r 
docker.io/myrepo -t v2.3.0 push"
    +  cat <<EOF
    +Usage: $0 [options] [command]
    +Builds or pushes the built-in Spark Docker images.
    +
    +Commands:
    +  build       Build images.
    +  push        Push images to a registry. Requires a repository address to 
be provided, both
    +              when building and when pushing the images.
    +
    +Options:
    +  -r repo     Repository address.
    +  -t tag      Tag to apply to built images, or to identify images to be 
pushed.
    +  -m          Use minikube's Docker daemon.
    +
    +Using minikube when building images will do so directly into minikube's 
Docker daemon.
    +There is no need to push the images into minikube int that case, they'll 
be automatically
    --- End diff --
    
    Typo: "int" should be "in" ("...push the images into minikube in that case...").


---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to