Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21515#discussion_r217901993
  
    --- Diff: dev/create-release/do-release-docker.sh ---
    @@ -0,0 +1,143 @@
    +#!/usr/bin/env bash
    +
    +#
    +# Licensed to the Apache Software Foundation (ASF) under one or more
    +# contributor license agreements.  See the NOTICE file distributed with
    +# this work for additional information regarding copyright ownership.
    +# The ASF licenses this file to You under the Apache License, Version 2.0
    +# (the "License"); you may not use this file except in compliance with
    +# the License.  You may obtain a copy of the License at
    +#
    +#    http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS,
    +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    +# See the License for the specific language governing permissions and
    +# limitations under the License.
    +#
    +
    +#
    +# Creates a Spark release candidate. The script will update versions, tag the branch,
    +# build Spark binary packages and documentation, and upload maven artifacts to a staging
    +# repository. There is also a dry run mode where only local builds are performed, and
    +# nothing is uploaded to the ASF repos.
    +#
    +# Run with "-h" for options.
    +#
    +
    +set -e
    +SELF=$(cd $(dirname $0) && pwd)
    +. "$SELF/release-util.sh"
    +
    +function usage {
    +  local NAME=$(basename $0)
    +  cat <<EOF
    +Usage: $NAME [options]
    +
    +This script runs the release scripts inside a docker image. The image is hardcoded to be called
    +"spark-rm" and will be re-generated (as needed) on every invocation of this script.
    +
    +Options are:
    +
    +  -d [path]   : required: working directory (output will be written to an "output" directory in
    +                the working directory).
    +  -n          : dry run mode. Performs checks and local builds, but does not upload anything.
    +  -t [tag]    : tag for the spark-rm docker image to use for building (default: "latest").
    +  -j [path]   : path to local JDK installation to use for building. By default the script will
    +                use openjdk8 installed in the docker image.
    +  -s [step]   : runs a single step of the process; valid steps are: tag, build, docs, publish
    +EOF
    +}
    +
    +WORKDIR=
    +IMGTAG=latest
    +JAVA=
    +RELEASE_STEP=
    +while getopts "d:hj:ns:t:" opt; do
    +  case $opt in
    +    d) WORKDIR="$OPTARG" ;;
    +    n) DRY_RUN=1 ;;
    +    t) IMGTAG="$OPTARG" ;;
    +    j) JAVA="$OPTARG" ;;
    +    s) RELEASE_STEP="$OPTARG" ;;
    +    h) usage ;;
    +    ?) error "Invalid option. Run with -h for help." ;;
    +  esac
    +done
    +
    +if [ -z "$WORKDIR" ] || [ ! -d "$WORKDIR" ]; then
    +  error "Work directory (-d) must be defined and exist. Run with -h for 
help."
    +fi
    +
    +if [ -d "$WORKDIR/output" ]; then
    +  read -p "Output directory already exists. Overwrite and continue? [y/n] 
" ANSWER
    +  if [ "$ANSWER" != "y" ]; then
    +    error "Exiting."
    +  fi
    +fi
    +
    +cd "$WORKDIR"
    +rm -rf "$WORKDIR/output"
    +mkdir "$WORKDIR/output"
    +
    +get_release_info
    +
    +# Place all RM scripts and necessary data in a local directory that must be defined in the command
    +# line. This directory is mounted into the image.
    +for f in "$SELF"/*; do
    +  if [ -f "$f" ]; then
    +    cp "$f" "$WORKDIR"
    +  fi
    +done
    +
    +GPG_KEY_FILE="$WORKDIR/gpg.key"
    +fcreate_secure "$GPG_KEY_FILE"
    +$GPG --export-secret-key --armor "$GPG_KEY" > "$GPG_KEY_FILE"
    +
    +run_silent "Building spark-rm image with tag $IMGTAG..." 
"docker-build.log" \
    +  docker build -t "spark-rm:$IMGTAG" --build-arg UID=$UID "$SELF/spark-rm"
    --- End diff ---
    
    Got it. This is a system variable, so we can't run this script as the root user...
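    
    For context, a minimal sketch (not part of the PR, just illustrating the shell behavior) of why `$UID` matters for the `docker build` line above. In bash, `UID` is a read-only variable that the shell itself sets to the invoking user's numeric id, so the value forwarded via `--build-arg UID=$UID` reflects whoever runs the script:
    
        #!/usr/bin/env bash
        # UID is set automatically by bash to the numeric id of the
        # invoking user; it is read-only, not an ordinary env variable.
        echo "bash says UID=$UID"    # e.g. 1000 for a regular user, 0 for root
        echo "id -u says $(id -u)"   # same value, from the id(1) utility
    
        # The release script forwards this value into the image build:
        #   docker build ... --build-arg UID=$UID "$SELF/spark-rm"
        # (the spark-rm Dockerfile presumably uses it to create a matching
        # build user). Run as root, the script would pass UID=0, so the
        # in-image build user would effectively be root.
        if [ "$UID" -eq 0 ]; then
          echo "running as root: the image build user would get uid 0"
        fi
    
    Running the same snippet under sudo prints 0, which matches the concern above about running the release script as root.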

