HADOOP-11393. Revert HADOOP_PREFIX, go back to HADOOP_HOME (aw)

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0a74610d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0a74610d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0a74610d

Branch: refs/heads/HDFS-1312
Commit: 0a74610d1c7c7f183d2b2d0b7a775add53cf6c94
Parents: 0064cba
Author: Allen Wittenauer <a...@apache.org>
Authored: Thu Mar 24 08:47:00 2016 -0700
Committer: Allen Wittenauer <a...@apache.org>
Committed: Thu Mar 31 07:51:05 2016 -0700

----------------------------------------------------------------------
 .../hadoop-common/src/main/bin/hadoop           | 12 +++---
 .../hadoop-common/src/main/bin/hadoop-config.sh |  6 ++-
 .../hadoop-common/src/main/bin/hadoop-daemon.sh |  6 +--
 .../src/main/bin/hadoop-daemons.sh              |  6 +--
 .../src/main/bin/hadoop-functions.sh            | 37 +++++++++---------
 .../src/main/bin/hadoop-layout.sh.example       | 14 +++----
 .../hadoop-common/src/main/bin/slaves.sh        |  6 +--
 .../hadoop-common/src/main/bin/start-all.sh     |  4 +-
 .../hadoop-common/src/main/bin/stop-all.sh      |  4 +-
 .../hadoop-common/src/main/conf/hadoop-env.sh   | 10 ++---
 .../org/apache/hadoop/tracing/TraceUtils.java   |  4 +-
 .../src/site/markdown/ClusterSetup.md           | 40 ++++++++++----------
 .../src/site/markdown/CommandsManual.md         |  2 +-
 .../src/site/markdown/UnixShellGuide.md         |  2 +-
 .../scripts/hadoop-functions_test_helper.bash   |  3 +-
 .../src/test/scripts/hadoop_basic_init.bats     |  2 +-
 .../src/test/scripts/hadoop_bootstrap.bats      |  4 +-
 .../src/test/scripts/hadoop_confdir.bats        | 24 ++++++------
 .../src/test/scripts/hadoop_finalize.bats       |  2 +-
 .../hadoop-kms/src/main/conf/kms-env.sh         |  4 +-
 .../hadoop-kms/src/main/libexec/kms-config.sh   |  8 ++--
 .../hadoop-kms/src/main/sbin/kms.sh             |  4 +-
 .../src/main/conf/httpfs-env.sh                 |  4 +-
 .../src/main/libexec/httpfs-config.sh           |  8 ++--
 .../hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh  |  4 +-
 .../src/main/native/fuse-dfs/doc/README         |  6 +--
 .../main/native/fuse-dfs/fuse_dfs_wrapper.sh    | 12 +++---
 .../src/main/bin/distribute-exclude.sh          |  4 +-
 .../hadoop-hdfs/src/main/bin/hdfs               |  4 +-
 .../hadoop-hdfs/src/main/bin/hdfs-config.sh     |  6 +--
 .../src/main/bin/refresh-namenodes.sh           |  4 +-
 .../hadoop-hdfs/src/main/bin/start-balancer.sh  |  4 +-
 .../hadoop-hdfs/src/main/bin/start-dfs.sh       |  4 +-
 .../src/main/bin/start-secure-dns.sh            |  4 +-
 .../hadoop-hdfs/src/main/bin/stop-balancer.sh   |  4 +-
 .../hadoop-hdfs/src/main/bin/stop-dfs.sh        |  4 +-
 .../hadoop-hdfs/src/main/bin/stop-secure-dns.sh |  4 +-
 .../hadoop-hdfs/src/site/markdown/Federation.md | 18 ++++-----
 .../markdown/HDFSHighAvailabilityWithNFS.md     |  4 +-
 .../markdown/HDFSHighAvailabilityWithQJM.md     |  4 +-
 .../src/site/markdown/HdfsNfsGateway.md         |  8 ++--
 .../apache/hadoop/tracing/TestTraceAdmin.java   |  2 +-
 .../TestTracingShortCircuitLocalRead.java       |  4 +-
 hadoop-mapreduce-project/bin/mapred             |  4 +-
 hadoop-mapreduce-project/bin/mapred-config.sh   |  6 +--
 .../bin/mr-jobhistory-daemon.sh                 |  4 +-
 .../apache/hadoop/mapred/pipes/Submitter.java   |  2 +-
 .../java/org/apache/hadoop/fs/DFSCIOTest.java   |  2 +-
 .../apache/hadoop/mapred/ReliabilityTest.java   |  2 +-
 .../apache/hadoop/tools/HadoopArchiveLogs.java  |  4 +-
 .../hadoop/tools/TestHadoopArchiveLogs.java     |  4 +-
 .../apache/hadoop/contrib/utils/join/README.txt |  2 +-
 .../native/pipes/debug/pipes-default-script     |  5 ++-
 .../hadoop-sls/src/main/bin/rumen2sls.sh        |  4 +-
 hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh  |  8 ++--
 .../apache/hadoop/streaming/DumpTypedBytes.java |  2 +-
 .../hadoop/streaming/HadoopStreaming.java       |  2 +-
 .../apache/hadoop/streaming/LoadTypedBytes.java |  2 +-
 .../org/apache/hadoop/streaming/StreamJob.java  | 16 ++++----
 .../hadoop-yarn/bin/start-yarn.sh               |  4 +-
 .../hadoop-yarn/bin/stop-yarn.sh                |  4 +-
 hadoop-yarn-project/hadoop-yarn/bin/yarn        |  4 +-
 .../hadoop-yarn/bin/yarn-config.sh              |  6 +--
 .../hadoop-yarn/bin/yarn-daemon.sh              |  4 +-
 .../hadoop-yarn/bin/yarn-daemons.sh             |  4 +-
 .../TestDockerContainerExecutorWithMocks.java   |  2 +-
 .../site/markdown/DockerContainerExecutor.md.vm |  2 +-
 67 files changed, 211 insertions(+), 208 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/bin/hadoop
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 46eaf27..0756987 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -47,8 +47,8 @@ function hadoop_usage
 # This script runs the hadoop core commands.
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
@@ -84,9 +84,9 @@ case ${COMMAND} in
       # shellcheck disable=SC2086
       exec "${HADOOP_HDFS_HOME}/bin/hdfs" \
       --config "${HADOOP_CONF_DIR}" "${COMMAND}"  "$@"
-    elif [[ -f "${HADOOP_PREFIX}/bin/hdfs" ]]; then
+    elif [[ -f "${HADOOP_HOME}/bin/hdfs" ]]; then
       # shellcheck disable=SC2086
-      exec "${HADOOP_PREFIX}/bin/hdfs" \
+      exec "${HADOOP_HOME}/bin/hdfs" \
       --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
     else
       hadoop_error "HADOOP_HDFS_HOME not found!"
@@ -104,8 +104,8 @@ case ${COMMAND} in
     if [[ -f "${HADOOP_MAPRED_HOME}/bin/mapred" ]]; then
       exec "${HADOOP_MAPRED_HOME}/bin/mapred" \
       --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
-    elif [[ -f "${HADOOP_PREFIX}/bin/mapred" ]]; then
-      exec "${HADOOP_PREFIX}/bin/mapred" \
+    elif [[ -f "${HADOOP_HOME}/bin/mapred" ]]; then
+      exec "${HADOOP_HOME}/bin/mapred" \
       --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
     else
       hadoop_error "HADOOP_MAPRED_HOME not found!"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
index 0b52895..fd2c83e 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
@@ -63,6 +63,8 @@ else
   exit 1
 fi
 
+hadoop_deprecate_envvar HADOOP_PREFIX HADOOP_HOME
+
 # allow overrides of the above and pre-defines of the below
 if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
    [[ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-layout.sh" ]]; then
@@ -128,8 +130,8 @@ fi
 hadoop_shellprofiles_init
 
 # get the native libs in there pretty quick
-hadoop_add_javalibpath "${HADOOP_PREFIX}/build/native"
-hadoop_add_javalibpath "${HADOOP_PREFIX}/${HADOOP_COMMON_LIB_NATIVE_DIR}"
+hadoop_add_javalibpath "${HADOOP_HOME}/build/native"
+hadoop_add_javalibpath "${HADOOP_HOME}/${HADOOP_COMMON_LIB_NATIVE_DIR}"
 
 hadoop_shellprofiles_nativelib
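
The hadoop_deprecate_envvar call added above is what keeps existing HADOOP_PREFIX-based installs working. Its real definition lives in hadoop-functions.sh; behaviorally it amounts to something like this sketch (the warning text is illustrative):

    # if the legacy variable is set, warn and copy it into the new one
    function hadoop_deprecate_envvar
    {
      local oldvar=$1
      local newvar=$2
      if [[ -n "${!oldvar}" ]]; then
        hadoop_error "WARNING: ${oldvar} has been replaced by ${newvar}."
        eval "${newvar}=\"\${${oldvar}}\""
      fi
    }

So `HADOOP_PREFIX=/opt/hadoop bin/hadoop ...` still resolves correctly, just with a deprecation warning on stderr.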
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
index 5f094d6..8118f54 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
@@ -21,8 +21,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
@@ -47,7 +47,7 @@ daemonmode=$1
 shift
 
 if [[ -z "${HADOOP_HDFS_HOME}" ]]; then
-  hdfsscript="${HADOOP_PREFIX}/bin/hdfs"
+  hdfsscript="${HADOOP_HOME}/bin/hdfs"
 else
   hdfsscript="${HADOOP_HDFS_HOME}/bin/hdfs"
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh
index 604eb7e..ae1e324 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh
@@ -27,8 +27,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
@@ -51,7 +51,7 @@ daemonmode=$1
 shift
 
 if [[ -z "${HADOOP_HDFS_HOME}" ]]; then
-  hdfsscript="${HADOOP_PREFIX}/bin/hdfs"
+  hdfsscript="${HADOOP_HOME}/bin/hdfs"
 else
   hdfsscript="${HADOOP_HDFS_HOME}/bin/hdfs"
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index 7f293b6..6c4c345 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -278,7 +278,7 @@ function hadoop_bootstrap
   # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
   # We can piggyback off of that to figure out where the default
   # HADOOP_FREFIX should be.  This allows us to run without
-  # HADOOP_PREFIX ever being defined by a human! As a consequence
+  # HADOOP_HOME ever being defined by a human! As a consequence
   # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
   # env var within Hadoop.
   if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
@@ -286,8 +286,8 @@ function hadoop_bootstrap
     exit 1
   fi
   HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
-  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
-  export HADOOP_PREFIX
+  HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DEFAULT_PREFIX}
+  export HADOOP_HOME
 
   #
   # short-cuts. vendors may redefine these as well, preferably
@@ -302,7 +302,7 @@ function hadoop_bootstrap
   YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
   MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
   MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
-  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_PREFIX}}
+  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
   HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
   HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}
 
@@ -326,12 +326,12 @@ function hadoop_find_confdir
 
   # An attempt at compatibility with some Hadoop 1.x
   # installs.
-  if [[ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]]; then
+  if [[ -e "${HADOOP_HOME}/conf/hadoop-env.sh" ]]; then
     conf_dir="conf"
   else
     conf_dir="etc/hadoop"
   fi
-  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"
+  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/${conf_dir}}"
 
   hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
 }
@@ -524,8 +524,8 @@ function hadoop_basic_init
   hadoop_debug "Initialize CLASSPATH"
 
   if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
-  [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
-    export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
+  [[ -d "${HADOOP_HOME}/${HADOOP_COMMON_DIR}" ]]; then
+    export HADOOP_COMMON_HOME="${HADOOP_HOME}"
   fi
 
   # default policy file for service-level authorization
@@ -533,20 +533,20 @@ function hadoop_basic_init
 
   # define HADOOP_HDFS_HOME
   if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
-     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
-    export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
+     [[ -d "${HADOOP_HOME}/${HDFS_DIR}" ]]; then
+    export HADOOP_HDFS_HOME="${HADOOP_HOME}"
   fi
 
   # define HADOOP_YARN_HOME
   if [[ -z "${HADOOP_YARN_HOME}" ]] &&
-     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
-    export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
+     [[ -d "${HADOOP_HOME}/${YARN_DIR}" ]]; then
+    export HADOOP_YARN_HOME="${HADOOP_HOME}"
   fi
 
   # define HADOOP_MAPRED_HOME
   if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
-     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
-    export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
+     [[ -d "${HADOOP_HOME}/${MAPRED_DIR}" ]]; then
+    export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
   fi
 
   if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
@@ -573,7 +573,7 @@ function hadoop_basic_init
   # let's define it as 'hadoop'
   HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
   HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-hadoop}
-  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
+  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_HOME}/logs"}
   HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
   HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
   HADOOP_NICENESS=${HADOOP_NICENESS:-0}
@@ -1219,7 +1219,6 @@ function hadoop_finalize_hadoop_opts
   hadoop_translate_cygwin_path HADOOP_LOG_DIR
   hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
   hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
-  HADOOP_HOME=${HADOOP_PREFIX}
   hadoop_translate_cygwin_path HADOOP_HOME
   export HADOOP_HOME
   hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
@@ -1252,11 +1251,11 @@ function hadoop_finalize_catalina_opts
 
   local prefix=${HADOOP_CATALINA_PREFIX}
 
-  hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
+  hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
   if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
     hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
   fi
-  hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_PREFIX}"
+  hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_HOME}"
   hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
   hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
   hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
@@ -1282,7 +1281,7 @@ function hadoop_finalize
   hadoop_finalize_hadoop_heap
   hadoop_finalize_hadoop_opts
 
-  hadoop_translate_cygwin_path HADOOP_PREFIX
+  hadoop_translate_cygwin_path HADOOP_HOME
   hadoop_translate_cygwin_path HADOOP_CONF_DIR
   hadoop_translate_cygwin_path HADOOP_COMMON_HOME
   hadoop_translate_cygwin_path HADOOP_HDFS_HOME
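
Taken together with the hadoop_deprecate_envvar call in hadoop-config.sh, the bootstrap hunks above give the following effective resolution order (a summary, not verbatim from the scripts):

    # 1. HADOOP_HOME exported by the user   -> used as-is
    # 2. HADOOP_PREFIX exported (legacy)    -> copied into HADOOP_HOME, with a warning
    # 3. neither set                        -> $(cd "${HADOOP_LIBEXEC_DIR}/.." && pwd) becomes HADOOP_HOME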

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/bin/hadoop-layout.sh.example
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-layout.sh.example b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-layout.sh.example
index faa4317..efba10c 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-layout.sh.example
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-layout.sh.example
@@ -26,8 +26,8 @@
 ##
 ## If you move HADOOP_LIBEXEC_DIR from some location that
 ## isn't bin/../libexec, you MUST define either HADOOP_LIBEXEC_DIR
-## or have HADOOP_PREFIX/libexec/hadoop-config.sh and
-## HADOOP_PREFIX/libexec/hadoop-layout.sh (this file) exist.
+## or have HADOOP_HOME/libexec/hadoop-config.sh and
+## HADOOP_HOME/libexec/hadoop-layout.sh (this file) exist.
 
 ## NOTE:
 ##
@@ -44,7 +44,7 @@
 ####
 
 # Default location for the common/core Hadoop project
-# export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
+# export HADOOP_COMMON_HOME=${HADOOP_HOME}
 
 # Relative locations where components under HADOOP_COMMON_HOME are located
 # export HADOOP_COMMON_DIR="share/hadoop/common"
@@ -56,7 +56,7 @@
 ####
 
 # Default location for the HDFS subproject
-# export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
+# export HADOOP_HDFS_HOME=${HADOOP_HOME}
 
 # Relative locations where components under HADOOP_HDFS_HOME are located
 # export HDFS_DIR="share/hadoop/hdfs"
@@ -67,7 +67,7 @@
 ####
 
 # Default location for the YARN subproject
-# export HADOOP_YARN_HOME=${HADOOP_PREFIX}
+# export HADOOP_YARN_HOME=${HADOOP_HOME}
 
 # Relative locations where components under HADOOP_YARN_HOME are located
 # export YARN_DIR="share/hadoop/yarn"
@@ -78,7 +78,7 @@
 ####
 
 # Default location for the MapReduce subproject
-# export HADOOP_MAPRED_HOME=${HADOOP_PREFIX}
+# export HADOOP_MAPRED_HOME=${HADOOP_HOME}
 
 # Relative locations where components under HADOOP_MAPRED_HOME are located
 # export MAPRED_DIR="share/hadoop/mapreduce"
@@ -92,6 +92,6 @@
 # note that this path only gets added for certain commands and not
 # part of the general classpath unless HADOOP_OPTIONAL_TOOLS is used
 # to configure them in
-# export HADOOP_TOOLS_HOME=${HADOOP_PREFIX}
+# export HADOOP_TOOLS_HOME=${HADOOP_HOME}
 # export HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
 # export HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh b/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
index 5859da0..34bf0eb 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
@@ -22,7 +22,7 @@
 #
 #   HADOOP_SLAVES    File naming remote hosts.
 #     Default is ${HADOOP_CONF_DIR}/slaves.
-#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_PREFIX}/conf.
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 #   HADOOP_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
 #   HADOOP_SSH_OPTS Options passed to ssh when running remote commands.
 ##
@@ -33,8 +33,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh b/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
index 845ca37..1420642 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
@@ -21,8 +21,8 @@ exit 1
 
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh b/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh
index df7ae8d..ee1f6eb 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh
@@ -22,8 +22,8 @@ echo "This script is deprecated. Use stop-dfs.sh and stop-yarn.sh instead."
 exit 1
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
index 3c554aa..3f19e45 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
@@ -55,14 +55,14 @@
 
 # Location of Hadoop.  By default, Hadoop will attempt to determine
 # this location based upon its execution path.
-# export HADOOP_PREFIX=
+# export HADOOP_HOME=
 
 # Location of Hadoop's configuration information.  i.e., where this
 # file is probably living. Many sites will also set this in the
 # same location where JAVA_HOME is defined.  If this is not defined
 # Hadoop will attempt to locate it based upon its execution
 # path.
-# export HADOOP_CONF_DIR=$HADOOP_PREFIX/etc/hadoop
+# export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop
 
 # The maximum amount of heap to use (Java -Xmx).  If no unit
 # is provided, it will be converted to MB.  Daemons will
@@ -186,10 +186,10 @@ esac
 # non-secure)
 #
 
-# Where (primarily) daemon log files are stored.  # $HADOOP_PREFIX/logs
-# by default.
+# Where (primarily) daemon log files are stored.
+# ${HADOOP_HOME}/logs by default.
 # Java property: hadoop.log.dir
-# export HADOOP_LOG_DIR=${HADOOP_PREFIX}/logs
+# export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
 
 # A string representing this instance of hadoop. $USER by default.
 # This is used in writing log and pid files, so keep that in mind!
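
Under the new naming, a minimal set of hadoop-env.sh overrides would look like this (paths are examples; every line is optional):

    # hadoop-env.sh -- example overrides
    export HADOOP_HOME=/opt/hadoop
    export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop
    export HADOOP_LOG_DIR=/var/log/hadoop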

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
index 09acb35..0ae6d03 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
@@ -32,7 +32,7 @@ import org.apache.htrace.core.HTraceConfiguration;
 @InterfaceAudience.Private
 public class TraceUtils {
   private static List<ConfigurationPair> EMPTY = Collections.emptyList();
-  static final String DEFAULT_HADOOP_PREFIX = "hadoop.htrace.";
+  static final String DEFAULT_HADOOP_TRACE_PREFIX = "hadoop.htrace.";
 
   public static HTraceConfiguration wrapHadoopConf(final String prefix,
         final Configuration conf) {
@@ -52,7 +52,7 @@ public class TraceUtils {
         if (ret != null) {
           return ret;
         }
-        return getInternal(DEFAULT_HADOOP_PREFIX  + key);
+        return getInternal(DEFAULT_HADOOP_TRACE_PREFIX  + key);
       }
 
       @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/site/markdown/ClusterSetup.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/ClusterSetup.md b/hadoop-common-project/hadoop-common/src/site/markdown/ClusterSetup.md
index bf9419a..d2479e7 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/ClusterSetup.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/ClusterSetup.md
@@ -86,10 +86,10 @@ Other useful configuration parameters that you can customize include:
 
 In most cases, you should specify the `HADOOP_PID_DIR` and `HADOOP_LOG_DIR` directories such that they can only be written to by the users that are going to run the hadoop daemons. Otherwise there is the potential for a symlink attack.
 
-It is also traditional to configure `HADOOP_PREFIX` in the system-wide shell environment configuration. For example, a simple script inside `/etc/profile.d`:
+It is also traditional to configure `HADOOP_HOME` in the system-wide shell environment configuration. For example, a simple script inside `/etc/profile.d`:
 
-      HADOOP_PREFIX=/path/to/hadoop
-      export HADOOP_PREFIX
+      HADOOP_HOME=/path/to/hadoop
+      export HADOOP_HOME
 
 | Daemon | Environment Variable |
 |:---- |:---- |
@@ -243,73 +243,73 @@ To start a Hadoop cluster you will need to start both the HDFS and YARN cluster.
 
 The first time you bring up HDFS, it must be formatted. Format a new distributed filesystem as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs namenode -format <cluster_name>
+    [hdfs]$ $HADOOP_HOME/bin/hdfs namenode -format <cluster_name>
 
 Start the HDFS NameNode with the following command on the designated node as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start namenode
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start namenode
 
 Start a HDFS DataNode with the following command on each designated node as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start datanode
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start datanode
 
 If `etc/hadoop/slaves` and ssh trusted access is configured (see [Single Node Setup](./SingleCluster.html)), all of the HDFS processes can be started with a utility script. As *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/start-dfs.sh
+    [hdfs]$ $HADOOP_HOME/sbin/start-dfs.sh
 
 Start the YARN with the following command, run on the designated ResourceManager as *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon start resourcemanager
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon start resourcemanager
 
 Run a script to start a NodeManager on each designated host as *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon start nodemanager
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon start nodemanager
 
 Start a standalone WebAppProxy server. Run on the WebAppProxy server as *yarn*. If multiple servers are used with load balancing it should be run on each of them:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon start proxyserver
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon start proxyserver
 
 If `etc/hadoop/slaves` and ssh trusted access is configured (see [Single Node Setup](./SingleCluster.html)), all of the YARN processes can be started with a utility script. As *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/sbin/start-yarn.sh
+    [yarn]$ $HADOOP_HOME/sbin/start-yarn.sh
 
 Start the MapReduce JobHistory Server with the following command, run on the designated server as *mapred*:
 
-    [mapred]$ $HADOOP_PREFIX/bin/mapred --daemon start historyserver
+    [mapred]$ $HADOOP_HOME/bin/mapred --daemon start historyserver
 
 ### Hadoop Shutdown
 
 Stop the NameNode with the following command, run on the designated NameNode as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon stop namenode
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon stop namenode
 
 Run a script to stop a DataNode as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon stop datanode
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon stop datanode
 
 If `etc/hadoop/slaves` and ssh trusted access is configured (see [Single Node Setup](./SingleCluster.html)), all of the HDFS processes may be stopped with a utility script. As *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/stop-dfs.sh
+    [hdfs]$ $HADOOP_HOME/sbin/stop-dfs.sh
 
 Stop the ResourceManager with the following command, run on the designated ResourceManager as *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon stop resourcemanager
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon stop resourcemanager
 
 Run a script to stop a NodeManager on a slave as *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon stop nodemanager
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon stop nodemanager
 
 If `etc/hadoop/slaves` and ssh trusted access is configured (see [Single Node Setup](./SingleCluster.html)), all of the YARN processes can be stopped with a utility script. As *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/sbin/stop-yarn.sh
+    [yarn]$ $HADOOP_HOME/sbin/stop-yarn.sh
 
 Stop the WebAppProxy server. Run on the WebAppProxy server as *yarn*. If multiple servers are used with load balancing it should be run on each of them:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn stop proxyserver
+    [yarn]$ $HADOOP_HOME/bin/yarn stop proxyserver
 
 Stop the MapReduce JobHistory Server with the following command, run on the designated server as *mapred*:
 
-    [mapred]$ $HADOOP_PREFIX/bin/mapred --daemon stop historyserver
+    [mapred]$ $HADOOP_HOME/bin/mapred --daemon stop historyserver
 
 Web Interfaces
 --------------

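For the `/etc/profile.d` convention shown near the top of this file, a complete script might look like the following (the PATH handling is an example, not part of the commit):

    # /etc/profile.d/hadoop.sh
    HADOOP_HOME=/path/to/hadoop
    export HADOOP_HOME
    PATH=${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:${PATH}
    export PATH
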
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
index 59ea198..365a844 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
@@ -39,7 +39,7 @@ All of the shell commands will accept a common set of options. For some commands
 | SHELL\_OPTION | Description |
 |:---- |:---- |
 | `--buildpaths` | Enables developer versions of jars. |
-| `--config confdir` | Overwrites the default Configuration directory. Default is `$HADOOP_PREFIX/etc/hadoop`. |
+| `--config confdir` | Overwrites the default Configuration directory. Default is `$HADOOP_HOME/etc/hadoop`. |
 | `--daemon mode` | If the command supports daemonization (e.g., `hdfs namenode`), execute in the appropriate mode. Supported modes are `start` to start the process in daemon mode, `stop` to stop the process, and `status` to determine the active status of the process. `status` will return an [LSB-compliant](http://refspecs.linuxbase.org/LSB_3.0.0/LSB-generic/LSB-generic/iniscrptact.html) result code. If no option is provided, commands that support daemonization will run in the foreground. For commands that do not support daemonization, this option is ignored. |
 | `--debug` | Enables shell level configuration debugging information |
 | `--help` | Shell script usage information. |
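
Concretely, these generic options combine with any subcommand; for example (paths are illustrative):

    # run the NameNode as a daemon against an alternate conf dir
    $HADOOP_HOME/bin/hdfs --config /etc/hadoop-test --daemon start namenode
    # query it, then stop it
    $HADOOP_HOME/bin/hdfs --daemon status namenode
    $HADOOP_HOME/bin/hdfs --daemon stop namenode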

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
index ffab5a0..caa3aa7 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
@@ -83,7 +83,7 @@ Apache Hadoop allows for third parties to easily add new features through a vari
 
 Core to this functionality is the concept of a shell profile.  Shell profiles are shell snippets that can do things such as add jars to the classpath, configure Java system properties and more.
 
-Shell profiles may be installed in either `${HADOOP_CONF_DIR}/shellprofile.d` or `${HADOOP_PREFIX}/libexec/shellprofile.d`.  Shell profiles in the `libexec` directory are part of the base installation and cannot be overriden by the user.  Shell profiles in the configuration directory may be ignored if the end user changes the configuration directory at runtime.
+Shell profiles may be installed in either `${HADOOP_CONF_DIR}/shellprofile.d` or `${HADOOP_HOME}/libexec/shellprofile.d`.  Shell profiles in the `libexec` directory are part of the base installation and cannot be overriden by the user.  Shell profiles in the configuration directory may be ignored if the end user changes the configuration directory at runtime.
 
 An example of a shell profile is in the libexec directory.
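
A minimal shell profile, assuming the hadoop_add_profile and hadoop_add_classpath helpers from hadoop-functions.sh and the callback-naming convention used by the bundled example:

    # ${HADOOP_CONF_DIR}/shellprofile.d/extrajars.sh
    hadoop_add_profile extrajars

    function _extrajars_hadoop_classpath
    {
      # hypothetical jar path, for illustration only
      hadoop_add_classpath "/opt/extrajars/extra.jar"
    }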
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
index f718345..be2d7f5 100755
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
@@ -27,6 +27,7 @@ setup() {
   # shellcheck disable=SC2034
   HADOOP_SHELL_SCRIPT_DEBUG=true
   unset HADOOP_CONF_DIR
+  # we unset both of these for bw compat
   unset HADOOP_HOME
   unset HADOOP_PREFIX
 
@@ -53,4 +54,4 @@ strstr() {
   else
     echo false
   fi
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
index ae20248..79ede42 100644
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
@@ -45,7 +45,7 @@ basicinitsetup () {
     unset ${j}
   done
 
-  HADOOP_PREFIX=${TMP}
+  HADOOP_HOME=${TMP}
 }
 
 check_var_values () {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
index 9114c70..de4edd4 100644
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
@@ -22,7 +22,7 @@ load hadoop-functions_test_helper
 }
 
 @test "hadoop_bootstrap (libexec)" {
-  unset   HADOOP_PREFIX
+  unset   HADOOP_HOME
   unset   HADOOP_COMMON_DIR
   unset   HADOOP_COMMON_LIB_JARS_DIR
   unset   HDFS_DIR
@@ -39,7 +39,7 @@ load hadoop-functions_test_helper
   hadoop_bootstrap
 
   # all of these should be set
-  [ -n ${HADOOP_PREFIX} ]
+  [ -n ${HADOOP_HOME} ]
   [ -n ${HADOOP_COMMON_DIR} ]
   [ -n ${HADOOP_COMMON_LIB_JARS_DIR} ]
   [ -n ${HDFS_DIR} ]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
index 3e42da9..1f0c706 100644
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
@@ -16,10 +16,10 @@
 load hadoop-functions_test_helper
 
 create_fake_dirs () {
-  HADOOP_PREFIX=${TMP}
+  HADOOP_HOME=${TMP}
   for j in conf etc/hadoop; do
-    mkdir -p "${HADOOP_PREFIX}/${j}"
-    echo "unittest=${j}" > "${HADOOP_PREFIX}/${j}/hadoop-env.sh"
+    mkdir -p "${HADOOP_HOME}/${j}"
+    echo "unittest=${j}" > "${HADOOP_HOME}/${j}/hadoop-env.sh"
   done
 }
 
@@ -32,27 +32,27 @@ create_fake_dirs () {
 @test "hadoop_find_confdir (bw compat: conf)" {
   create_fake_dirs
   hadoop_find_confdir
-  echo ">${HADOOP_CONF_DIR}< >${HADOOP_PREFIX}/conf<"
-  [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/conf ]
+  echo ">${HADOOP_CONF_DIR}< >${HADOOP_HOME}/conf<"
+  [ "${HADOOP_CONF_DIR}" = ${HADOOP_HOME}/conf ]
 }
 
 @test "hadoop_find_confdir (etc/hadoop)" {
   create_fake_dirs
-  rm -rf "${HADOOP_PREFIX}/conf"
+  rm -rf "${HADOOP_HOME}/conf"
   hadoop_find_confdir
-  [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/etc/hadoop ]
+  [ "${HADOOP_CONF_DIR}" = ${HADOOP_HOME}/etc/hadoop ]
 }
 
 @test "hadoop_verify_confdir (negative) " {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   run hadoop_verify_confdir
   [ -n "${output}" ]
 }
 
 @test "hadoop_verify_confdir (positive) " {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   touch "${HADOOP_CONF_DIR}/log4j.properties"
   run hadoop_verify_confdir
   [ -z "${output}" ]
@@ -60,7 +60,7 @@ create_fake_dirs () {
 
 @test "hadoop_exec_hadoopenv (positive) " {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   hadoop_exec_hadoopenv
   [ -n "${HADOOP_ENV_PROCESSED}" ]
   [ "${unittest}" = conf ]
@@ -68,7 +68,7 @@ create_fake_dirs () {
 
 @test "hadoop_exec_hadoopenv (negative) " {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   HADOOP_ENV_PROCESSED=true
   hadoop_exec_hadoopenv
   [ -z "${unittest}" ]
@@ -76,7 +76,7 @@ create_fake_dirs () {
 
 @test "hadoop_exec_userfuncs" {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   echo "unittest=userfunc" > "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
   hadoop_exec_userfuncs
   [ "${unittest}" = "userfunc" ]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
index 668c115..b9339f3 100644
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
@@ -100,7 +100,7 @@ load hadoop-functions_test_helper
   hadoop_finalize_hadoop_heap () { true; }
   hadoop_finalize_hadoop_opts () { true; }
   hadoop_translate_cygwin_path () {
-    if [ $1 = HADOOP_PREFIX ]; then
+    if [ $1 = HADOOP_HOME ]; then
       testvar=prefix;
     fi
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
index c3bc772..7044fa8 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
@@ -20,7 +20,7 @@
 
 # KMS temporary directory
 #
-# export KMS_TEMP=${HADOOP_PREFIX}/temp
+# export KMS_TEMP=${HADOOP_HOME}/temp
 
 # The HTTP port used by KMS
 #
@@ -59,7 +59,7 @@
 #
 # Location of tomcat
 #
-# export KMS_CATALINA_HOME=${HADOOP_PREFIX}/share/hadoop/kms/tomcat
+# export KMS_CATALINA_HOME=${HADOOP_HOME}/share/hadoop/kms/tomcat
 
 # Java System properties for KMS should be specified in this variable.
 # The java.library.path and hadoop.home.dir properties are automatically

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
index c88aa87..5e1ffa4 100644
--- a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
@@ -28,7 +28,7 @@ function hadoop_subproject_init
 
   export HADOOP_CATALINA_PREFIX=kms
 
-  export HADOOP_CATALINA_TEMP="${KMS_TEMP:-${HADOOP_PREFIX}/temp}"
+  export HADOOP_CATALINA_TEMP="${KMS_TEMP:-${HADOOP_HOME}/temp}"
 
   hadoop_deprecate_envvar KMS_CONFIG HADOOP_CONF_DIR
 
@@ -49,7 +49,7 @@ function hadoop_subproject_init
   # shellcheck disable=SC2086
   export KMS_SSL_TRUSTSTORE_PASS=${KMS_SSL_TRUSTSTORE_PASS:-"$(echo ${CATALINA_OPTS} | grep -o 'trustStorePassword=[^ ]*' | cut -f2 -d= )"}

-  export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_PREFIX}/share/hadoop/kms/tomcat}"
+  export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_HOME}/share/hadoop/kms/tomcat}"
   export HADOOP_CATALINA_HOME="${KMS_CATALINA_HOME:-${CATALINA_BASE}}"

   export CATALINA_OUT="${CATALINA_OUT:-${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms-${HOSTNAME}.out}"
@@ -69,8 +69,8 @@ if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
   . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
 elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
   . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
-elif [[ -e "${HADOOP_PREFIX}/libexec/hadoop-config.sh" ]]; then
-  . "${HADOOP_PREFIX}/libexec/hadoop-config.sh"
+elif [[ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]]; then
+  . "${HADOOP_HOME}/libexec/hadoop-config.sh"
 else
   echo "ERROR: Hadoop common not found." 2>&1
   exit 1

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh
index 4ed4725..6708cd9 100755
--- a/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh
@@ -30,8 +30,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
index a4edef6..f012453 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
@@ -20,7 +20,7 @@
 
 # HTTPFS temporary directory
 #
-# export HTTPFS_TEMP=${HADOOP_PREFIX}/temp
+# export HTTPFS_TEMP=${HADOOP_HOME}/temp
 
 # The HTTP port used by HTTPFS
 #
@@ -53,7 +53,7 @@
 #
 # Location of tomcat
 #
-# export HTTPFS_CATALINA_HOME=${HADOOP_PREFIX}/share/hadoop/httpfs/tomcat
+# export HTTPFS_CATALINA_HOME=${HADOOP_HOME}/share/hadoop/httpfs/tomcat
 
 # Java System properties for HTTPFS should be specified in this variable.
 # The java.library.path and hadoop.home.dir properties are automatically

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
index 767bd6e..ba4b406 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
@@ -28,7 +28,7 @@ function hadoop_subproject_init
 
   export HADOOP_CATALINA_PREFIX=httpfs
 
-  export HADOOP_CATALINA_TEMP="${HTTPFS_TEMP:-${HADOOP_PREFIX}/temp}"
+  export HADOOP_CATALINA_TEMP="${HTTPFS_TEMP:-${HADOOP_HOME}/temp}"
 
   hadoop_deprecate_envvar HTTPFS_CONFIG HADOOP_CONF_DIR
 
@@ -47,7 +47,7 @@ function hadoop_subproject_init
 
   export HADOOP_CATALINA_SSL_KEYSTORE_FILE="${HTTPFS_SSL_KEYSTORE_FILE:-${HOME}/.keystore}"

-  export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_PREFIX}/share/hadoop/httpfs/tomcat}"
+  export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_HOME}/share/hadoop/httpfs/tomcat}"
   export HADOOP_CATALINA_HOME="${HTTPFS_CATALINA_HOME:-${CATALINA_BASE}}"

   export CATALINA_OUT="${CATALINA_OUT:-${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-httpfs-${HOSTNAME}.out}"
@@ -67,8 +67,8 @@ if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
   . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
 elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
   . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
-elif [[ -e "${HADOOP_PREFIX}/libexec/hadoop-config.sh" ]]; then
-  . "${HADOOP_PREFIX}/libexec/hadoop-config.sh"
+elif [[ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]]; then
+  . "${HADOOP_HOME}/libexec/hadoop-config.sh"
 else
   echo "ERROR: Hadoop common not found." 2>&1
   exit 1

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
index 18c1af0..3e7cdf8 100755
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
@@ -30,8 +30,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/doc/README
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/doc/README b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/doc/README
index 672265e..e8cc0e5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/doc/README
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/doc/README
@@ -35,9 +35,9 @@ Requirements
 BUILDING
 
    fuse-dfs executable can be built by setting `require.fuse` option to true using Maven. For example:
-   in HADOOP_PREFIX: `mvn package -Pnative -Drequire.fuse=true -DskipTests -Dmaven.javadoc.skip=true`
+   in HADOOP_HOME: `mvn package -Pnative -Drequire.fuse=true -DskipTests -Dmaven.javadoc.skip=true`

-   The executable `fuse_dfs` will be located at HADOOP_PREFIX/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/
+   The executable `fuse_dfs` will be located at HADOOP_HOME/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/

 Common build problems include not finding the libjvm.so in JAVA_HOME/jre/lib/OS_ARCH/server or not finding fuse in FUSE_HOME or /usr/local.
 
@@ -109,7 +109,7 @@ NOTE - you cannot export this with a FUSE module built into the kernel
 
 RECOMMENDATIONS
 
-1. From /bin, `ln -s HADOOP_PREFIX/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/fuse_dfs* .`
+1. From /bin, `ln -s HADOOP_HOME/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/fuse_dfs* .`

 2. Always start with debug on so you can see if you are missing a classpath or something like that.
 
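Putting the README steps together (host, port, and mountpoint below are examples; -d enables the debug output recommended above):

    # build from the source root (HADOOP_HOME here)
    cd $HADOOP_HOME
    mvn package -Pnative -Drequire.fuse=true -DskipTests -Dmaven.javadoc.skip=true

    # mount HDFS via the wrapper with debugging on
    ./fuse_dfs_wrapper.sh dfs://namenode.example.com:8020 /mnt/hdfs -d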

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/fuse_dfs_wrapper.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/fuse_dfs_wrapper.sh b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/fuse_dfs_wrapper.sh
index 26dfd19..c52c5f9 100755
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/fuse_dfs_wrapper.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/fuse_dfs_wrapper.sh
@@ -16,12 +16,12 @@
 # limitations under the License.
 #
 
-if [ "$HADOOP_PREFIX" = "" ]; then
-  echo "HADOOP_PREFIX is empty. Set it to the root directory of Hadoop source 
code"
+if [ "$HADOOP_HOME" = "" ]; then
+  echo "HADOOP_HOME is empty. Set it to the root directory of Hadoop source 
code"
   exit 1
 fi
-export 
FUSEDFS_PATH="$HADOOP_PREFIX/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs"
-export 
LIBHDFS_PATH="$HADOOP_PREFIX/hadoop-hdfs-project/hadoop-hdfs-native-client/target/usr/local/lib"
+export 
FUSEDFS_PATH="$HADOOP_HOME/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs"
+export 
LIBHDFS_PATH="$HADOOP_HOME/hadoop-hdfs-project/hadoop-hdfs-native-client/target/usr/local/lib"
 
 if [ "$OS_ARCH" = "" ]; then
 export OS_ARCH=amd64
@@ -38,12 +38,12 @@ fi
 while IFS= read -r -d '' file
 do
   export CLASSPATH=$CLASSPATH:$file
-done < <(find "$HADOOP_PREFIX/hadoop-client" -name "*.jar" -print0)
+done < <(find "$HADOOP_HOME/hadoop-client" -name "*.jar" -print0)
 
 while IFS= read -r -d '' file
 do
   export CLASSPATH=$CLASSPATH:$file
-done < <(find "$HADOOP_PREFIX/hhadoop-hdfs-project" -name "*.jar" -print0)
+done < <(find "$HADOOP_HOME/hhadoop-hdfs-project" -name "*.jar" -print0)
 
 export CLASSPATH=$HADOOP_CONF_DIR:$CLASSPATH
 export PATH=$FUSEDFS_PATH:$PATH

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/distribute-exclude.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/distribute-exclude.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/distribute-exclude.sh
index cfd44e3..97f04f7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/distribute-exclude.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/distribute-exclude.sh
@@ -52,8 +52,8 @@ if [ ! -f "$excludeFilenameLocal" ] ; then
   exit 1
 fi
 
-namenodes=$("$HADOOP_PREFIX/bin/hdfs" getconf -namenodes)
-excludeFilenameRemote=$("$HADOOP_PREFIX/bin/hdfs" getconf -excludeFile)
+namenodes=$("$HADOOP_HOME/bin/hdfs" getconf -namenodes)
+excludeFilenameRemote=$("$HADOOP_HOME/bin/hdfs" getconf -excludeFile)
 
 if [ "$excludeFilenameRemote" = '' ] ; then
   echo \

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index bcd04d1..c365250 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -60,8 +60,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
index 244e5a9..d440210 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
@@ -49,7 +49,7 @@ function hadoop_subproject_init
 
   hadoop_deprecate_envvar HADOOP_HDFS_IDENT_STRING HADOOP_IDENT_STRING
   
-  HADOOP_HDFS_HOME="${HADOOP_HDFS_HOME:-$HADOOP_PREFIX}"
+  HADOOP_HDFS_HOME="${HADOOP_HDFS_HOME:-$HADOOP_HOME}"
   
   # turn on the defaults
   export HDFS_AUDIT_LOGGER=${HDFS_AUDIT_LOGGER:-INFO,NullAppender}
@@ -71,8 +71,8 @@ if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
   . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
 elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
   . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
-elif [ -e "${HADOOP_PREFIX}/libexec/hadoop-config.sh" ]; then
-  . "${HADOOP_PREFIX}/libexec/hadoop-config.sh"
+elif [ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]; then
+  . "${HADOOP_HOME}/libexec/hadoop-config.sh"
 else
   echo "ERROR: Hadoop common not found." 2>&1
   exit 1

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/refresh-namenodes.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/refresh-namenodes.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/refresh-namenodes.sh
index 318a282..f51dd0f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/refresh-namenodes.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/refresh-namenodes.sh
@@ -21,8 +21,8 @@
 # for dfsadmin to support multiple namenodes.
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
index 32ca2b2..df044fe 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
@@ -30,8 +30,8 @@ function hadoop_usage
 bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh
index 9c5a172..1e35e7d 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh
@@ -29,8 +29,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh
index f904640..3fce345 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh
@@ -26,8 +26,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
index bb51a8a..ec94080 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
@@ -28,8 +28,8 @@ function hadoop_usage
 bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh
index cc0d11d..e693374 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh
@@ -28,8 +28,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh
index 816a3e3..2a973b1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh
@@ -26,8 +26,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/Federation.md
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/Federation.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/Federation.md
index 38c1070..99a41a2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/Federation.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/Federation.md
@@ -150,13 +150,13 @@ Here is an example configuration with two Namenodes:
 
 **Step 1**: Format a Namenode using the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs namenode -format [-clusterId <cluster_id>]
+    [hdfs]$ $HADOOP_HOME/bin/hdfs namenode -format [-clusterId <cluster_id>]
 
 Choose a unique cluster\_id that will not conflict with other clusters in your environment. If a cluster\_id is not provided, then a unique one is auto generated.
 
 **Step 2**: Format additional Namenodes using the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs namenode -format -clusterId <cluster_id>
+    [hdfs]$ $HADOOP_HOME/bin/hdfs namenode -format -clusterId <cluster_id>
 
 Note that the cluster\_id in step 2 must be the same as that of the cluster\_id in step 1. If they are different, the additional Namenodes will not be part of the federated cluster.
 
@@ -164,7 +164,7 @@ Note that the cluster\_id in step 2 must be same as that of the cluster\_id in s
 
 Older releases only support a single Namenode. Upgrade the cluster to a newer release in order to enable federation. During upgrade you can provide a ClusterID as follows:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start namenode -upgrade -clusterId <cluster_ID>
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start namenode -upgrade -clusterId <cluster_ID>
 
 If cluster\_id is not provided, it is auto generated.
 
@@ -187,7 +187,7 @@ Perform the following steps:
 * Refresh the Datanodes to pick up the newly added Namenode by running
   the following command against all the Datanodes in the cluster:
 
-        [hdfs]$ $HADOOP_PREFIX/bin/hdfs dfsadmin -refreshNamenodes <datanode_host_name>:<datanode_rpc_port>
+        [hdfs]$ $HADOOP_HOME/bin/hdfs dfsadmin -refreshNamenodes <datanode_host_name>:<datanode_rpc_port>
 
 Managing the cluster
 --------------------
@@ -196,11 +196,11 @@ Managing the cluster
 
 To start the cluster run the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/start-dfs.sh
+    [hdfs]$ $HADOOP_HOME/sbin/start-dfs.sh
 
 To stop the cluster run the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/stop-dfs.sh
+    [hdfs]$ $HADOOP_HOME/sbin/stop-dfs.sh
 
 These commands can be run from any node where the HDFS configuration is available. The command uses the configuration to determine the Namenodes in the cluster and then starts the Namenode process on those nodes. The Datanodes are started on the nodes specified in the `slaves` file. The script can be used as a reference for building your own scripts to start and stop the cluster.
 
@@ -208,7 +208,7 @@ These commands can be run from any node where the HDFS configuration is availabl
 
 The Balancer has been changed to work with multiple Namenodes. The Balancer can be run using the command:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start balancer [-policy <policy>]
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start balancer [-policy <policy>]
 
 The policy parameter can be any of the following:
 
@@ -227,11 +227,11 @@ Decommissioning is similar to prior releases. The nodes that need to be decomiss
 
 **Step 1**: To distribute an exclude file to all the Namenodes, use the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/distribute-exclude.sh <exclude_file>
+    [hdfs]$ $HADOOP_HOME/sbin/distribute-exclude.sh <exclude_file>
 
 **Step 2**: Refresh all the Namenodes to pick up the new exclude file:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/refresh-namenodes.sh
+    [hdfs]$ $HADOOP_HOME/sbin/refresh-namenodes.sh
 
 The above command uses HDFS configuration to determine the configured Namenodes in the cluster and refreshes them to pick up the new exclude file.
 

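Strung together, the federation steps above form a short bootstrap sequence. A hedged sketch; the cluster id, host name, and port below are invented for illustration:

    # format both namenodes with the SAME cluster id so they join one federation
    [hdfs]$ $HADOOP_HOME/bin/hdfs namenode -format -clusterId demo-cluster
    [hdfs]$ $HADOOP_HOME/bin/hdfs namenode -format -clusterId demo-cluster  # on the second host
    # start the cluster, then point each datanode at the updated namenode set
    [hdfs]$ $HADOOP_HOME/sbin/start-dfs.sh
    [hdfs]$ $HADOOP_HOME/bin/hdfs dfsadmin -refreshNamenodes dn1.example.com:9867
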
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithNFS.md
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithNFS.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithNFS.md
index 51a88c9..f888966 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithNFS.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithNFS.md
@@ -475,7 +475,7 @@ There are also several other configuration parameters which may be set to contro
 
 After the configuration keys have been added, the next step is to initialize required state in ZooKeeper. You can do so by running the following command from one of the NameNode hosts.
 
-    [hdfs]$ $HADOOP_PREFIX/bin/zkfc -formatZK
+    [hdfs]$ $HADOOP_HOME/bin/zkfc -formatZK
 
 This will create a znode in ZooKeeper inside of which the automatic failover system stores its data.
 
@@ -487,7 +487,7 @@ Since automatic failover has been enabled in the configuration, the `start-dfs.s
 
 If you manually manage the services on your cluster, you will need to manually start the `zkfc` daemon on each of the machines that runs a NameNode. You can start the daemon by running:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start zkfc
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start zkfc
 
 ### Securing access to ZooKeeper
 

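Condensed, enabling automatic failover comes down to the two commands this hunk touches. A sketch, assuming the configuration keys are already in place (the spelling `hdfs zkfc -formatZK` follows the QJM document; the NFS document's line reads `bin/zkfc`):

    # one-time: create the failover znode in ZooKeeper, from a NameNode host
    [hdfs]$ $HADOOP_HOME/bin/hdfs zkfc -formatZK
    # on each NameNode host, when daemons are managed by hand:
    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start zkfc
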
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithQJM.md
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithQJM.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithQJM.md
index 8b42386..9a97add 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithQJM.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithQJM.md
@@ -523,7 +523,7 @@ There are also several other configuration parameters which may be set to contro
 
 After the configuration keys have been added, the next step is to initialize required state in ZooKeeper. You can do so by running the following command from one of the NameNode hosts.
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs zkfc -formatZK
+    [hdfs]$ $HADOOP_HOME/bin/hdfs zkfc -formatZK
 
 This will create a znode in ZooKeeper inside of which the automatic failover system stores its data.
 
@@ -535,7 +535,7 @@ Since automatic failover has been enabled in the configuration, the `start-dfs.s
 
 If you manually manage the services on your cluster, you will need to manually start the `zkfc` daemon on each of the machines that runs a NameNode. You can start the daemon by running:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start zkfc
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start zkfc
 
 ### Securing access to ZooKeeper
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsNfsGateway.md
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsNfsGateway.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsNfsGateway.md
index 7dc2fe4..6731189 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsNfsGateway.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsNfsGateway.md
@@ -215,7 +215,7 @@ Three daemons are required to provide NFS service: rpcbind (or portmap), mountd
 
 2.  Start Hadoop's portmap (needs root privileges):
 
-        [root]> $HADOOP_PREFIX/bin/hdfs --daemon start portmap
+        [root]> $HADOOP_HOME/bin/hdfs --daemon start portmap
 
 3.  Start mountd and nfsd.
 
@@ -224,12 +224,12 @@ Three daemons are required to provide NFS service: rpcbind (or portmap), mountd
     While in secure mode, any user can start NFS gateway
     as long as the user has read access to the Kerberos keytab defined in "nfs.keytab.file".
 
-        [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start nfs3
+        [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start nfs3
 
 4.  Stop NFS gateway services.
 
-        [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon stop nfs3
-        [root]> $HADOOP_PREFIX/bin/hdfs --daemon stop portmap
+        [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon stop nfs3
+        [root]> $HADOOP_HOME/bin/hdfs --daemon stop portmap
 
 Optionally, you can forgo running the Hadoop-provided portmap daemon and instead use the system portmap daemon on all operating systems if you start the NFS Gateway as root. This will allow the HDFS NFS Gateway to work around the aforementioned bug and still register using the system portmap daemon. To do so, just start the NFS gateway daemon as you normally would, but make sure to do so as the "root" user, and also set the "HADOOP\_PRIVILEGED\_NFS\_USER" environment variable to an unprivileged user. In this mode the NFS Gateway will start as root to perform its initial registration with the system portmap, and then will drop privileges back to the user specified by the HADOOP\_PRIVILEGED\_NFS\_USER afterward and for the rest of the duration of the lifetime of the NFS Gateway process. Note that if you choose this route, you should skip steps 1 and 2 above.
 
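The gateway lifecycle in this section condenses to four commands; the bracketed user names follow the document's convention and assume the usual split between root and the hdfs service user:

    # start: portmap needs root, nfs3 runs as the hdfs user
    [root]> $HADOOP_HOME/bin/hdfs --daemon start portmap
    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start nfs3
    # stop in reverse order
    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon stop nfs3
    [root]> $HADOOP_HOME/bin/hdfs --daemon stop portmap
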

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
index 198dafb..71c9c56 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
@@ -62,7 +62,7 @@ public class TestTraceAdmin {
   public void testCreateAndDestroySpanReceiver() throws Exception {
     Configuration conf = new Configuration();
     conf = new Configuration();
-    conf.set(TraceUtils.DEFAULT_HADOOP_PREFIX +
+    conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX +
         Tracer.SPAN_RECEIVER_CLASSES_KEY, "");
     MiniDFSCluster cluster =
         new MiniDFSCluster.Builder(conf).numDataNodes(3).build();

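Only the constant's name changes here, presumably so the tracing key prefix no longer collides with greps for the retired HADOOP_PREFIX variable; the configuration key it expands to should be unchanged. If your build ships the trace admin tool, the receivers this test exercises can be inspected on a live daemon (host and port invented):

    # list span receivers attached to a running daemon; illustrative endpoint
    [hdfs]$ $HADOOP_HOME/bin/hadoop trace -list -host nn.example.com:9000
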
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
index 37c09d1..b3cf402 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
@@ -65,10 +65,10 @@ public class TestTracingShortCircuitLocalRead {
   public void testShortCircuitTraceHooks() throws IOException {
     assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS);
     conf = new Configuration();
-    conf.set(TraceUtils.DEFAULT_HADOOP_PREFIX +
+    conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX +
             Tracer.SPAN_RECEIVER_CLASSES_KEY,
         SetSpanReceiver.class.getName());
-    conf.set(TraceUtils.DEFAULT_HADOOP_PREFIX +
+    conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX +
             Tracer.SAMPLER_CLASSES_KEY,
         "AlwaysSampler");
     conf.setLong("dfs.blocksize", 100 * 1024);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-mapreduce-project/bin/mapred
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/bin/mapred b/hadoop-mapreduce-project/bin/mapred
index fab5b87..f280f31 100755
--- a/hadoop-mapreduce-project/bin/mapred
+++ b/hadoop-mapreduce-project/bin/mapred
@@ -37,8 +37,8 @@ function hadoop_usage
 bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-mapreduce-project/bin/mapred-config.sh
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/bin/mapred-config.sh b/hadoop-mapreduce-project/bin/mapred-config.sh
index fe6e9e6..a989792 100644
--- a/hadoop-mapreduce-project/bin/mapred-config.sh
+++ b/hadoop-mapreduce-project/bin/mapred-config.sh
@@ -47,7 +47,7 @@ function hadoop_subproject_init
 
   hadoop_deprecate_envvar HADOOP_MAPRED_ROOT_LOGGER HADOOP_ROOT_LOGGER
 
-  HADOOP_MAPRED_HOME="${HADOOP_MAPRED_HOME:-$HADOOP_PREFIX}"
+  HADOOP_MAPRED_HOME="${HADOOP_MAPRED_HOME:-$HADOOP_HOME}"
 
   hadoop_deprecate_envvar HADOOP_MAPRED_IDENT_STRING HADOOP_IDENT_STRING
 }
@@ -62,8 +62,8 @@ if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
   . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
 elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
   . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
-elif [ -e "${HADOOP_PREFIX}/libexec/hadoop-config.sh" ]; then
-  . "${HADOOP_PREFIX}/libexec/hadoop-config.sh"
+elif [ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]; then
+  . "${HADOOP_HOME}/libexec/hadoop-config.sh"
 else
   echo "ERROR: Hadoop common not found." 2>&1
   exit 1

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh b/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh
index 57b1ebd..998ca90 100644
--- a/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh
+++ b/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh
@@ -21,8 +21,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
index 4f5b6a1..ae45782 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
@@ -315,7 +315,7 @@ public class Submitter extends Configured implements Tool {
     // <path>#<executable>
     if (exec.contains("#")) {
       // set default gdb commands for map and reduce task 
-      String defScript = "$HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-script";
+      String defScript = "$HADOOP_HOME/src/c++/pipes/debug/pipes-default-script";
       setIfUnset(conf, MRJobConfig.MAP_DEBUG_SCRIPT,defScript);
       setIfUnset(conf, MRJobConfig.REDUCE_DEBUG_SCRIPT,defScript);
     }

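The default debug script only comes into play when a pipes executable is submitted with a `#` fragment (cache file plus symlink name), which is what the `exec.contains("#")` check above detects. A hedged example of such a submission; all paths are invented:

    # the part after '#' names the symlink; its presence is what triggers
    # the default gdb debug scripts resolved under $HADOOP_HOME above
    [user]$ $HADOOP_HOME/bin/mapred pipes \
        -input in -output out \
        -program hdfs:///apps/pipes/wordcount#wordcount
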
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
index 12bec08..b01954e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
@@ -450,7 +450,7 @@ public class DFSCIOTest {
         }
 
         //Copy the executables over to the remote filesystem
-        String hadoopHome = System.getenv("HADOOP_PREFIX");
+        String hadoopHome = System.getenv("HADOOP_HOME");
        fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/libhdfs.so." + HDFS_LIB_VERSION),
                              HDFS_SHLIB);
        fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/hdfs_read"), HDFS_READ);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
index e6e12eb..ecac83a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
@@ -179,7 +179,7 @@ public class ReliabilityTest extends Configured implements Tool {
   
   private String normalizeCommandPath(String command) {
     final String hadoopHome;
-    if ((hadoopHome = System.getenv("HADOOP_PREFIX")) != null) {
+    if ((hadoopHome = System.getenv("HADOOP_HOME")) != null) {
       command = hadoopHome + "/" + command;
     }
     return command;

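The helper simply prefixes a relative command with the install root when the variable is set. The same logic as a shell function, for readers tracing the rename (function name invented):

    # echo $1 prefixed with HADOOP_HOME when HADOOP_HOME is set
    normalize_command_path() {
      if [[ -n "${HADOOP_HOME}" ]]; then
        echo "${HADOOP_HOME}/$1"
      else
        echo "$1"
      fi
    }
    # with HADOOP_HOME=/opt/hadoop this prints /opt/hadoop/bin/hadoop
    normalize_command_path bin/hadoop
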
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
index 6b8af97..c502ffd 100644
--- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
+++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
@@ -450,7 +450,7 @@ public class HadoopArchiveLogs implements Tool {
   fi
   export HADOOP_CLIENT_OPTS="-Xmx1024m"
  export HADOOP_CLASSPATH=/dist/share/hadoop/tools/lib/hadoop-archive-logs-2.8.0-SNAPSHOT.jar:/dist/share/hadoop/tools/lib/hadoop-archives-2.8.0-SNAPSHOT.jar
-  "$HADOOP_PREFIX"/bin/hadoop org.apache.hadoop.tools.HadoopArchiveLogsRunner -appId "$appId" -user "$user" -workingDir /tmp/logs/archive-logs-work -remoteRootLogDir /tmp/logs -suffix logs
+  "$HADOOP_HOME"/bin/hadoop org.apache.hadoop.tools.HadoopArchiveLogsRunner -appId "$appId" -user "$user" -workingDir /tmp/logs/archive-logs-work -remoteRootLogDir /tmp/logs -suffix logs
    */
   @VisibleForTesting
   void generateScript(File localScript, Path workingDir,
@@ -484,7 +484,7 @@ public class HadoopArchiveLogs implements Tool {
       fw.write("m\"\n");
       fw.write("export HADOOP_CLASSPATH=");
       fw.write(classpath);
-      fw.write("\n\"$HADOOP_PREFIX\"/bin/hadoop ");
+      fw.write("\n\"$HADOOP_HOME\"/bin/hadoop ");
       fw.write(HadoopArchiveLogsRunner.class.getName());
       fw.write(" -appId \"$appId\" -user \"$user\" -workingDir ");
       fw.write(workingDir.toString());

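Per the javadoc excerpt in the first hunk, the script that generateScript() writes should now end with an invocation like the following; the classpath and directories are the placeholder values from that excerpt:

    export HADOOP_CLIENT_OPTS="-Xmx1024m"
    export HADOOP_CLASSPATH=/dist/share/hadoop/tools/lib/hadoop-archive-logs-2.8.0-SNAPSHOT.jar:/dist/share/hadoop/tools/lib/hadoop-archives-2.8.0-SNAPSHOT.jar
    "$HADOOP_HOME"/bin/hadoop org.apache.hadoop.tools.HadoopArchiveLogsRunner -appId "$appId" -user "$user" -workingDir /tmp/logs/archive-logs-work -remoteRootLogDir /tmp/logs -suffix logs
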
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
index 7fcb0bf..d2d7801 100644
--- a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
+++ b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
@@ -294,14 +294,14 @@ public class TestHadoopArchiveLogs {
     Assert.assertTrue(lines[14].startsWith("export HADOOP_CLASSPATH="));
     if (proxy) {
       Assert.assertEquals(
-          "\"$HADOOP_PREFIX\"/bin/hadoop org.apache.hadoop.tools." +
+          "\"$HADOOP_HOME\"/bin/hadoop org.apache.hadoop.tools." +
               "HadoopArchiveLogsRunner -appId \"$appId\" -user \"$user\" " +
               "-workingDir " + workingDir.toString() + " -remoteRootLogDir " +
               remoteRootLogDir.toString() + " -suffix " + suffix,
           lines[15]);
     } else {
       Assert.assertEquals(
-          "\"$HADOOP_PREFIX\"/bin/hadoop org.apache.hadoop.tools." +
+          "\"$HADOOP_HOME\"/bin/hadoop org.apache.hadoop.tools." +
               "HadoopArchiveLogsRunner -appId \"$appId\" -user \"$user\" " +
               "-workingDir " + workingDir.toString() + " -remoteRootLogDir " +
               remoteRootLogDir.toString() + " -suffix " + suffix + " -noProxy",

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt b/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt
index 73fd6ef..47ef31c 100644
--- a/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt
+++ b/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt
@@ -20,7 +20,7 @@ B.a31   B.a32
 *****************************
 *** Invoke SampleDataJoin ***
 *****************************
-[:~]$ $HADOOP_PREFIX/bin/hadoop jar hadoop-datajoin-examples.jar org.apache.hadoop.contrib.utils.join.DataJoinJob datajoin/input datajoin/output Text 1 org.apache.hadoop.contrib.utils.join.SampleDataJoinMapper org.apache.hadoop.contrib.utils.join.SampleDataJoinReducer org.apache.hadoop.contrib.utils.join.SampleTaggedMapOutput Text
+[:~]$ $HADOOP_HOME/bin/hadoop jar hadoop-datajoin-examples.jar org.apache.hadoop.contrib.utils.join.DataJoinJob datajoin/input datajoin/output Text 1 org.apache.hadoop.contrib.utils.join.SampleDataJoinMapper org.apache.hadoop.contrib.utils.join.SampleDataJoinReducer org.apache.hadoop.contrib.utils.join.SampleTaggedMapOutput Text
 Using TextInputFormat: Text
 Using TextOutputFormat: Text
 07/06/01 19:58:23 INFO mapred.FileInputFormat: Total input paths to process : 2

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script b/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script
index 6bacc43..7b74fb6 100644
--- a/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script
+++ b/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script
@@ -1,3 +1,4 @@
+#!/usr/bin/env bash
 #   Licensed under the Apache License, Version 2.0 (the "License");
 #   you may not use this file except in compliance with the License.
 #   You may obtain a copy of the License at
@@ -9,6 +10,6 @@
 #   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 #   See the License for the specific language governing permissions and
 #   limitations under the License.
-core=`find . -name 'core*'`
+core=$(find . -name 'core*')
 #Only pipes programs have 5th argument as program name.
-gdb -quiet $5 -c $core -x $HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-gdb-commands.txt 
+gdb -quiet "${5}" -c "${core}" -x "${HADOOP_HOME}/src/c++/pipes/debug/pipes-default-gdb-commands.txt"

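Besides the variable rename, this hunk modernizes the script: $( ) replaces backticks and the expansions are quoted. A side-by-side sketch of why that matters:

    # old: backticks don't nest, and an unquoted $core would word-split
    core=`find . -name 'core*'`
    # new: $( ) nests cleanly, and quoting survives paths with spaces
    core=$(find . -name 'core*')
    gdb -quiet "${5}" -c "${core}" -x "${HADOOP_HOME}/src/c++/pipes/debug/pipes-default-gdb-commands.txt"
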
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh b/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh
index f9bfaef..0bd291b 100644
--- a/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh
+++ b/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh
@@ -77,8 +77,8 @@ function run_sls_generator()
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh b/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
index 30fd60a..403c4bb 100644
--- a/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
+++ b/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
@@ -71,8 +71,8 @@ function parse_args()
 function calculate_classpath
 {
   hadoop_add_to_classpath_tools hadoop-sls
-  hadoop_debug "Injecting ${HADOOP_PREFIX}/share/hadoop/tools/sls/html into CLASSPATH"
-  hadoop_add_classpath "${HADOOP_PREFIX}/share/hadoop/tools/sls/html"
+  hadoop_debug "Injecting ${HADOOP_TOOLS_DIR}/sls/html into CLASSPATH"
+  hadoop_add_classpath "${HADOOP_TOOLS_DIR}/sls/html"
 }
 
 function run_simulation() {
@@ -105,8 +105,8 @@ function run_simulation() {
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java
index 42007a07..5a07cc3 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java
@@ -91,7 +91,7 @@ public class DumpTypedBytes implements Tool {
   }
 
   private void printUsage() {
-    System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar"
+    System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar"
         + " dumptb <glob-pattern>");
     System.out.println("  Dumps all files that match the given pattern to " +
         "standard output as typed bytes.");

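As the corrected usage string says, dumptb writes every file matching a glob to standard output as typed bytes. An illustrative invocation (glob and redirect invented):

    [user]$ $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar dumptb 'out/part-*' > dump.tb
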
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a74610d/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java
index 5d01124..eabf46c 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java
@@ -56,7 +56,7 @@ public class HadoopStreaming {
   }
   
   private static void printUsage() {
-    System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar"
+    System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar"
         + " [options]");
     System.out.println("Options:");
    System.out.println("  dumptb <glob-pattern> Dumps all files that match the" 
