Github user nchammas commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3707#discussion_r22268444
  
    --- Diff: build/mvn ---
    @@ -0,0 +1,130 @@
    +#!/usr/bin/env bash
    +
    +# Determine the current working directory
    +_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
    +
    +# Installs any application tarball given a URL, the expected tarball name,
    +# and, optionally, a checkable binary path to determine if the binary has
    +# already been installed
    +## Arg1 - URL
    +## Arg2 - Tarball Name
    +## Arg3 - Checkable Binary
    +install_app() {
    +  local remote_tarball="$1/$2"
    +  local local_tarball="${_DIR}/$2"
    +  local binary="${_DIR}/$3"
    +
    +  # setup `curl` and `wget` silent options if we're running on Jenkins
    +  local curl_opts=""
    +  local wget_opts=""
    +  if [ -n "$AMPLAB_JENKINS" ]; then
    +    curl_opts="-s"
    +    wget_opts="--quiet"
    +  else
    +    curl_opts="--progress-bar"
    +    wget_opts="--progress=bar:force"
    +  fi
    +
    +  if [ -z "$3" -o ! -f "$binary" ]; then
    +    # check if we already have the tarball
    +    # check if we have curl installed
    +    # download application
    +    [ ! -f "${local_tarball}" ] && [ -n "`which curl 2>/dev/null`" ] && \
    +      echo "exec: curl ${curl_opts} ${remote_tarball}" && \
    +      curl ${curl_opts} "${remote_tarball}" > "${local_tarball}"
    +    # if the file still doesn't exist, let's try `wget` and cross our fingers
    +    [ ! -f "${local_tarball}" ] && [ -n "`which wget 2>/dev/null`" ] && \
    +      echo "exec: wget ${wget_opts} ${remote_tarball}" && \
    +      wget ${wget_opts} -O "${local_tarball}" "${remote_tarball}"
    +    # if both were unsuccessful, exit
    +    [ ! -f "${local_tarball}" ] && \
    +      echo -n "ERROR: Cannot download $2 with cURL or wget; " && \
    +      echo "please install manually and try again." && \
    +      exit 2
    +    cd "${_DIR}" && tar -xzf "$2"
    +    rm -rf "$local_tarball"
    +  fi
    +}
    +
    +# Install maven under the build/ folder
    +install_mvn() {
    +  install_app \
    +    "http://apache.claz.org/maven/maven-3/3.2.3/binaries"; \
    +    "apache-maven-3.2.3-bin.tar.gz" \
    +    "apache-maven-3.2.3/bin/mvn"
    +  MVN_BIN="${_DIR}/apache-maven-3.2.3/bin/mvn"
    +}
    +
    +# Install zinc under the build/ folder
    +install_zinc() {
    +  local zinc_path="zinc-0.3.5.3/bin/zinc"
    +  [ ! -f "${zinc_path}" ] && ZINC_INSTALL_FLAG=1
    +  install_app \
    +    "http://downloads.typesafe.com/zinc/0.3.5.3"; \
    +    "zinc-0.3.5.3.tgz" \
    +    "${zinc_path}"
    +  ZINC_BIN="${_DIR}/${zinc_path}"
    +}
    +
    +# Determine the Scala version from the root pom.xml file, set the Scala URL,
    +# and, with that, download the specific version of Scala necessary under
    +# the build/ folder
    +install_scala() {
    +  # determine the Scala version used in Spark
    +  local scala_version=`grep "scala.version" "${_DIR}/../pom.xml" | \
    --- End diff ---
    
    Hmm, I'd lean towards just leaving it as-is. It doesn't seem worth the hassle when weighed against the requirement to reduce complexity and dependencies.
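    
    For context, here is a rough sketch of how that version lookup could work. The pipeline after the `grep` is cut off in the diff above, so the `head`/`sed` stages below are only an illustration of the general approach, not necessarily what build/mvn actually does:
    
        #!/usr/bin/env bash
        # Hypothetical sketch: pull the <scala.version> property out of the root pom.xml.
        # The head/sed stages are assumptions; the real pipeline is truncated in the diff.
        # Assumes the first line matching "scala.version" is the property definition.
        _DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
        scala_version=$(grep "scala.version" "${_DIR}/../pom.xml" | \
          head -n1 | \
          sed -e 's|.*<scala.version>\(.*\)</scala.version>.*|\1|')
        echo "Scala version used in Spark: ${scala_version}"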

