Author: bmahe
Date: Mon Apr 2 23:50:54 2012
New Revision: 1308627
URL: http://svn.apache.org/viewvc?rev=1308627&view=rev
Log:
BIGTOP-422. Need to start packaging fuse for Hadoop 0.23
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0001-fix-fuse_dfs-compilation-issues.patch
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0002-fix-fuse_dfs-compilation.patch
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0003-fix-dfs-fuse-compile.patch
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/HDFS-2696-plus.patch
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0001-fix-fuse_dfs-compilation-issues.patch
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0001-fix-fuse_dfs-compilation-issues.patch?rev=1308627&view=auto
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0001-fix-fuse_dfs-compilation-issues.patch
(added)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0001-fix-fuse_dfs-compilation-issues.patch
Mon Apr 2 23:50:54 2012
@@ -0,0 +1,386 @@
+From d4b99de36e439ac61c923f036e85472702752edf Mon Sep 17 00:00:00 2001
+From: Petru Dimulescu <[email protected]>
+Date: Fri, 16 Dec 2011 14:45:37 +0100
+Subject: [PATCH 1/3] fix fuse_dfs compilation issues
+
+---
+ .../src/contrib/fuse-dfs/m4/apfunctions.m4 | 41 +++++
+ .../hadoop-hdfs/src/contrib/fuse-dfs/m4/apjava.m4 | 142 +++++++++++++++++
+ .../src/contrib/fuse-dfs/m4/apsupport.m4 | 168 ++++++++++++++++++++
+ 3 files changed, 351 insertions(+), 0 deletions(-)
+ create mode 100644
hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apfunctions.m4
+ create mode 100644
hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apjava.m4
+ create mode 100644
hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apsupport.m4
+
+diff --git
a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apfunctions.m4
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apfunctions.m4
+new file mode 100644
+index 0000000..cb5938f
+--- /dev/null
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apfunctions.m4
+@@ -0,0 +1,41 @@
++dnl
++dnl Licensed to the Apache Software Foundation (ASF) under one or more
++dnl contributor license agreements. See the NOTICE file distributed with
++dnl this work for additional information regarding copyright ownership.
++dnl The ASF licenses this file to You under the Apache License, Version 2.0
++dnl (the "License"); you may not use this file except in compliance with
++dnl the License. You may obtain a copy of the License at
++dnl
++dnl http://www.apache.org/licenses/LICENSE-2.0
++dnl
++dnl Unless required by applicable law or agreed to in writing, software
++dnl distributed under the License is distributed on an "AS IS" BASIS,
++dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++dnl See the License for the specific language governing permissions and
++dnl limitations under the License.
++dnl
++
++dnl -------------------------------------------------------------------------
++dnl Author Pier Fumagalli <mailto:[email protected]>
++dnl Version $Id$
++dnl -------------------------------------------------------------------------
++
++AC_DEFUN([AP_MSG_HEADER],[
++ printf "*** %s ***\n" "$1" 1>&2
++ AC_PROVIDE([$0])
++])
++
++AC_DEFUN([AP_CANONICAL_HOST_CHECK],[
++ AC_MSG_CHECKING([cached host system type])
++ if { test x"${ac_cv_host_system_type+set}" = x"set" &&
++ test x"$ac_cv_host_system_type" != x"$host" ; }
++ then
++ AC_MSG_RESULT([$ac_cv_host_system_type])
++ AC_MSG_ERROR([remove the \"$cache_file\" file and re-run configure])
++ else
++ AC_MSG_RESULT(ok)
++ ac_cv_host_system_type="$host"
++ fi
++ AC_PROVIDE([$0])
++])
++
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apjava.m4
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apjava.m4
+new file mode 100644
+index 0000000..993fc5b
+--- /dev/null
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apjava.m4
+@@ -0,0 +1,142 @@
++dnl
++dnl Licensed to the Apache Software Foundation (ASF) under one or more
++dnl contributor license agreements. See the NOTICE file distributed with
++dnl this work for additional information regarding copyright ownership.
++dnl The ASF licenses this file to You under the Apache License, Version 2.0
++dnl (the "License"); you may not use this file except in compliance with
++dnl the License. You may obtain a copy of the License at
++dnl
++dnl http://www.apache.org/licenses/LICENSE-2.0
++dnl
++dnl Unless required by applicable law or agreed to in writing, software
++dnl distributed under the License is distributed on an "AS IS" BASIS,
++dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++dnl See the License for the specific language governing permissions and
++dnl limitations under the License.
++dnl
++
++dnl -------------------------------------------------------------------------
++dnl Author Pier Fumagalli <mailto:[email protected]>
++dnl Version $Id$
++dnl -------------------------------------------------------------------------
++
++AC_DEFUN([AP_PROG_JAVAC_WORKS],[
++ AC_CACHE_CHECK([wether the Java compiler ($JAVAC)
works],ap_cv_prog_javac_works,[
++ echo "public class Test {}" > Test.java
++ $JAVAC $JAVACFLAGS Test.java > /dev/null 2>&1
++ if test $? -eq 0
++ then
++ rm -f Test.java Test.class
++ ap_cv_prog_javac_works=yes
++ else
++ rm -f Test.java Test.class
++ AC_MSG_RESULT(no)
++ AC_MSG_ERROR([installation or configuration problem: javac cannot
compile])
++ fi
++ ])
++])
++
++dnl Check for JAVA compilers.
++AC_DEFUN([AP_PROG_JAVAC],[
++ if test "$SABLEVM" != "NONE"
++ then
++ AC_PATH_PROG(JAVACSABLE,javac-sablevm,NONE,$JAVA_HOME/bin)
++ else
++ JAVACSABLE="NONE"
++ fi
++ if test "$JAVACSABLE" = "NONE"
++ then
++ XPATH="$JAVA_HOME/bin:$JAVA_HOME/Commands:$PATH"
++ AC_PATH_PROG(JAVAC,javac,NONE,$XPATH)
++ else
++ AC_PATH_PROG(JAVAC,javac-sablevm,NONE,$JAVA_HOME/bin)
++ fi
++ AC_MSG_RESULT([$JAVAC])
++ if test "$JAVAC" = "NONE"
++ then
++ AC_MSG_ERROR([javac not found])
++ fi
++ AP_PROG_JAVAC_WORKS()
++ AC_PROVIDE([$0])
++ AC_SUBST(JAVAC)
++ AC_SUBST(JAVACFLAGS)
++])
++
++dnl Check for jar archivers.
++AC_DEFUN([AP_PROG_JAR],[
++ if test "$SABLEVM" != "NONE"
++ then
++ AC_PATH_PROG(JARSABLE,jar-sablevm,NONE,$JAVA_HOME/bin)
++ else
++ JARSABLE="NONE"
++ fi
++ if test "$JARSABLE" = "NONE"
++ then
++ XPATH="$JAVA_HOME/bin:$JAVA_HOME/Commands:$PATH"
++ AC_PATH_PROG(JAR,jar,NONE,$XPATH)
++ else
++ AC_PATH_PROG(JAR,jar-sablevm,NONE,$JAVA_HOME/bin)
++ fi
++ if test "$JAR" = "NONE"
++ then
++ AC_MSG_ERROR([jar not found])
++ fi
++ AC_PROVIDE([$0])
++ AC_SUBST(JAR)
++])
++
++AC_DEFUN([AP_JAVA],[
++ AC_ARG_WITH(java,[ --with-java=DIR Specify the location of your
JDK installation],[
++ AC_MSG_CHECKING([JAVA_HOME])
++ if test -d "$withval"
++ then
++ JAVA_HOME="$withval"
++ AC_MSG_RESULT([$JAVA_HOME])
++ else
++ AC_MSG_RESULT([failed])
++ AC_MSG_ERROR([$withval is not a directory])
++ fi
++ AC_SUBST(JAVA_HOME)
++ ])
++ if test x"$JAVA_HOME" = x
++ then
++ AC_MSG_ERROR([Java Home not defined. Rerun with --with-java=[...]
parameter])
++ fi
++])
++
++dnl check if the JVM in JAVA_HOME is sableVM
++dnl $JAVA_HOME/bin/sablevm and /opt/java/lib/sablevm/bin are tested.
++AC_DEFUN([AP_SABLEVM],[
++ if test x"$JAVA_HOME" != x
++ then
++ AC_PATH_PROG(SABLEVM,sablevm,NONE,$JAVA_HOME/bin)
++ if test "$SABLEVM" = "NONE"
++ then
++ dnl java may be SableVM.
++ if $JAVA_HOME/bin/java -version 2> /dev/null | grep SableVM > /dev/null
++ then
++ SABLEVM=$JAVA_HOME/bin/java
++ fi
++ fi
++ if test "$SABLEVM" != "NONE"
++ then
++ AC_MSG_RESULT([Using sableVM: $SABLEVM])
++ CFLAGS="$CFLAGS -DHAVE_SABLEVM"
++ fi
++ fi
++])
++
++dnl check if the JVM in JAVA_HOME is kaffe
++dnl $JAVA_HOME/bin/kaffe is tested.
++AC_DEFUN([AP_KAFFE],[
++ if test x"$JAVA_HOME" != x
++ then
++ AC_PATH_PROG(KAFFEVM,kaffe,NONE,$JAVA_HOME/bin)
++ if test "$KAFFEVM" != "NONE"
++ then
++ AC_MSG_RESULT([Using kaffe: $KAFFEVM])
++ CFLAGS="$CFLAGS -DHAVE_KAFFEVM"
++ LDFLAGS="$LDFLAGS -Wl,-rpath $JAVA_HOME/jre/lib/$HOST_CPU -L
$JAVA_HOME/jre/lib/$HOST_CPU -lkaffevm"
++ fi
++ fi
++])
+diff --git
a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apsupport.m4
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apsupport.m4
+new file mode 100644
+index 0000000..c3fb0e2
+--- /dev/null
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/m4/apsupport.m4
+@@ -0,0 +1,168 @@
++dnl
++dnl Licensed to the Apache Software Foundation (ASF) under one or more
++dnl contributor license agreements. See the NOTICE file distributed with
++dnl this work for additional information regarding copyright ownership.
++dnl The ASF licenses this file to You under the Apache License, Version 2.0
++dnl (the "License"); you may not use this file except in compliance with
++dnl the License. You may obtain a copy of the License at
++dnl
++dnl http://www.apache.org/licenses/LICENSE-2.0
++dnl
++dnl Unless required by applicable law or agreed to in writing, software
++dnl distributed under the License is distributed on an "AS IS" BASIS,
++dnl WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++dnl See the License for the specific language governing permissions and
++dnl limitations under the License.
++dnl
++
++dnl -------------------------------------------------------------------------
++dnl Author Pier Fumagalli <mailto:[email protected]>
++dnl Version $Id$
++dnl -------------------------------------------------------------------------
++
++AC_DEFUN([AP_SUPPORTED_HOST],[
++ AC_MSG_CHECKING([C flags dependant on host system type])
++
++ case $host_os in
++ darwin*)
++ CFLAGS="$CFLAGS -DOS_DARWIN -DDSO_DYLD"
++ supported_os="darwin"
++ ;;
++ solaris*)
++ CFLAGS="$CFLAGS -DOS_SOLARIS -DDSO_DLFCN"
++ supported_os="solaris"
++ LIBS="$LIBS -ldl -lthread"
++ ;;
++ linux*)
++ CFLAGS="$CFLAGS -DOS_LINUX -DDSO_DLFCN"
++ supported_os="linux"
++ LIBS="$LIBS -ldl -lpthread"
++ ;;
++ cygwin)
++ CFLAGS="$CFLAGS -DOS_CYGWIN -DDSO_DLFCN -DNO_SETSID"
++ supported_os="win32"
++ ;;
++ sysv)
++ CFLAGS="$CFLAGS -DOS_SYSV -DDSO_DLFCN"
++ LIBS="$LIBS -ldl"
++ ;;
++ sysv4)
++ CFLAGS="$CFLAGS -DOS_SYSV -DDSO_DLFCN -Kthread"
++ LDFLAGS="-Kthread $LDFLAGS"
++ LIBS="$LIBS -ldl"
++ ;;
++ freebsd*)
++ CFLAGS="$CFLAGS -DOS_FREEBSD -DDSO_DLFCN -D_THREAD_SAFE -pthread"
++ LDFLAGS="-pthread $LDFLAGS"
++ supported_os="freebsd"
++ ;;
++ osf5*)
++ CFLAGS="$CFLAGS -pthread -DOS_TRU64 -DDSO_DLFCN -D_XOPEN_SOURCE_EXTENDED"
++ LDFLAGS="$LDFLAGS -pthread"
++ ;;
++ hpux11*)
++ CFLAGS="$CFLAGS -pthread -DOS_HPUX -DDSO_DLFCN"
++ LDFLAGS="$LDFLAGS -pthread"
++ LIBS="$LIBS -lpthread"
++ ;;
++ *)
++ AC_MSG_RESULT([failed])
++ AC_MSG_ERROR([Unsupported operating system "$host_os"]);;
++ esac
++
++ case $host_cpu in
++ powerpc)
++ CFLAGS="$CFLAGS -DCPU=\\\"$host_cpu\\\""
++ HOST_CPU=$host_cpu;;
++ sparc*)
++ CFLAGS="$CFLAGS -DCPU=\\\"$host_cpu\\\""
++ HOST_CPU=$host_cpu;;
++ i?86)
++ CFLAGS="$CFLAGS -DCPU=\\\"i386\\\""
++ HOST_CPU=i386;;
++ x86_64)
++ CFLAGS="$CFLAGS -DCPU=\\\"amd64\\\""
++ HOST_CPU=amd64;;
++ bs2000)
++ CFLAGS="$CFLAGS -DCPU=\\\"osd\\\" -DCHARSET_EBCDIC -DOSD_POSIX"
++ supported_os="osd"
++ LDFLAGS="-Kno_link_stdlibs -B llm4"
++ LIBS="$LIBS -lBLSLIB"
++ LDCMD="/opt/C/bin/cc"
++ HOST_CPU=osd;;
++ mips)
++ CFLAGS="$CFLAGS -DCPU=\\\"mips\\\""
++ supported_os="mips"
++ HOST_CPU=mips;;
++ alpha*)
++ CFLAGS="$CFLAGS -DCPU=\\\"alpha\\\""
++ supported_os="alpha"
++ HOST_CPU=alpha;;
++ hppa2.0w)
++ CFLAGS="$CFLAGS -DCPU=\\\"PA_RISC2.0W\\\""
++ supported_os="hp-ux"
++ HOST_CPU=PA_RISC2.0W;;
++ hppa2.0)
++ CFLAGS="$CFLAGS -DCPU=\\\"PA_RISC2.0\\\""
++ supported_os="hp-ux"
++ HOST_CPU=PA_RISC2.0;;
++ mipsel)
++ CFLAGS="$CFLAGS -DCPU=\\\"mipsel\\\""
++ supported_os="mipsel"
++ HOST_CPU=mipsel;;
++ ia64)
++ CFLAGS="$CFLAGS -DCPU=\\\"ia64\\\""
++ supported_os="ia64"
++ HOST_CPU=ia64;;
++ s390)
++ CFLAGS="$CFLAGS -DCPU=\\\"s390\\\""
++ supported_os="s390"
++ HOST_CPU=s390;;
++ arm*)
++ CFLAGS="$CFLAGS -DCPU=\\\"arm\\\""
++ supported_os="arm"
++ HOST_CPU=arm;;
++ *)
++ AC_MSG_RESULT([failed])
++ AC_MSG_ERROR([Unsupported CPU architecture "$host_cpu"]);;
++ esac
++
++ AC_MSG_RESULT([ok])
++ AC_SUBST(CFLAGS)
++ AC_SUBST(LDFLAGS)
++])
++
++AC_DEFUN([AP_JVM_LIBDIR],[
++ AC_MSG_CHECKING([where on earth this jvm library is..])
++ javabasedir=$JAVA_HOME
++ case $host_os in
++ cygwin* | mingw* | pw23* )
++ lib_jvm_dir=`find $javabasedir -follow \( \
++ \( -name client -type d -prune \) -o \
++ \( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" "
"`
++ ;;
++ aix*)
++ lib_jvm_dir=`find $javabasedir \( \
++ \( -name client -type d -prune \) -o \
++ \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" "
"`
++ if test -z "$lib_jvm_dir"; then
++ lib_jvm_dir=`find $javabasedir \( \
++ \( -name client -type d -prune \) -o \
++ \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr
"\n" " "`
++ fi
++ ;;
++ *)
++ lib_jvm_dir=`find $javabasedir -follow \( \
++ \( -name client -type d -prune \) -o \
++ \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" "
"`
++ if test -z "$lib_jvm_dir"; then
++ lib_jvm_dir=`find $javabasedir -follow \( \
++ \( -name client -type d -prune \) -o \
++ \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr
"\n" " "`
++ fi
++ ;;
++ esac
++ LIB_JVM_DIR=$lib_jvm_dir
++ AC_MSG_RESULT([ohh u there ... $LIB_JVM_DIR])
++ AC_SUBST(LIB_JVM_DIR)
++])
+--
+1.7.4.1
+
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0002-fix-fuse_dfs-compilation.patch
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0002-fix-fuse_dfs-compilation.patch?rev=1308627&view=auto
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0002-fix-fuse_dfs-compilation.patch
(added)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0002-fix-fuse_dfs-compilation.patch
Mon Apr 2 23:50:54 2012
@@ -0,0 +1,206 @@
+From b0d6ef692636fd65a6fe7c2fef6afd3d339a03f3 Mon Sep 17 00:00:00 2001
+From: Petru Dimulescu <[email protected]>
+Date: Fri, 16 Dec 2011 14:51:51 +0100
+Subject: [PATCH 2/3] fix fuse_dfs compilation
+
+---
+ .../hadoop-hdfs/src/contrib/build-contrib.xml | 2 +-
+ .../hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am | 1 +
+ .../hadoop-hdfs/src/contrib/fuse-dfs/build.xml | 4 +-
+ .../hadoop-hdfs/src/contrib/fuse-dfs/configure.ac | 57 +++++++++++++++++++-
+ .../src/contrib/fuse-dfs/global_header.mk | 1 +
+ .../src/contrib/fuse-dfs/src/Makefile.am | 13 ++++-
+ .../hadoop-hdfs/src/main/native/Makefile.am | 1 +
+ .../hadoop-hdfs/src/main/native/configure.ac | 2 +-
+ 8 files changed, 73 insertions(+), 8 deletions(-)
+
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/build-contrib.xml
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/build-contrib.xml
+index 0c57fb9..b4ad24a 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/build-contrib.xml
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/build-contrib.xml
+@@ -70,7 +70,7 @@
+ <property name="ivy.dir" location="ivy" />
+ <property name="ivysettings.xml"
location="${hadoop.root}/ivy/ivysettings.xml"/>
+ <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
+- <loadproperties srcfile="${hadoop.root}/ivy/libraries.properties"/>
++ <!--loadproperties srcfile="${hadoop.root}/ivy/libraries.properties"/-->
+ <property name="ivy.jar"
location="${hadoop.root}/ivy/ivy-${ivy.version}.jar"/>
+ <property name="ivy_repo_url"
+
value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"
/>
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am
+index f4d6c57..b86c6af 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am
+@@ -17,6 +17,7 @@
+ @GLOBAL_HEADER_MK@
+
+ @PRODUCT_MK@
++ACLOCAL_AMFLAGS = -I m4
+
+ SUBDIRS = . src
+
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/build.xml
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/build.xml
+index c8795c5..eb69039 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/build.xml
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/build.xml
+@@ -17,7 +17,7 @@
+ limitations under the License.
+ -->
+
+-<project name="fuse-dfs" default="jar" xmlns:ivy="antlib:org.apache.ivy.ant">
++<project name="fuse-dfs" default="compile"
xmlns:ivy="antlib:org.apache.ivy.ant">
+
+ <import file="../build-contrib.xml"/>
+
+@@ -29,7 +29,7 @@
+ </fail>
+ </target>
+
+- <target name="compile" if="fusedfs">
++ <target name="compile">
+ <exec executable="autoreconf" dir="${basedir}"
+ searchpath="yes" failonerror="yes">
+ <arg value="-if"/>
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
+index 1062ec3..f17c704 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
+@@ -15,7 +15,7 @@
+ # limitations under the License.
+ #
+
+-# Autoconf input file
++# Autoconf input file for fuse_dfs
+ # $Id$
+ # AC - autoconf
+ #########################################################################
+@@ -30,6 +30,7 @@ AC_CANONICAL_TARGET()
+ FUSE_DFS_INITIALIZE([localinstall])
+ AC_PREFIX_DEFAULT([`pwd`])
+
++AC_CONFIG_MACRO_DIR([m4])
+
+
+ #case $target in
+@@ -51,6 +52,60 @@ AC_TYPE_GETGROUPS
+ AC_PROG_CC
+ AC_SYS_LARGEFILE
+
++### java env detection
++dnl -------------------------------------------------------------------------
++dnl Check if this host is supported
++dnl -------------------------------------------------------------------------
++AP_MSG_HEADER([Host support])
++AP_SUPPORTED_HOST()
++if test "$supported_os" = "darwin"
++then
++ if test -z "$JAVA_HOME" -a -d
/System/Library/Frameworks/JavaVM.framework/Home; then
++ JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Home
++ fi
++
++ _prevdir=`/bin/pwd`
++ if test -n "$JAVA_HOME" -a -d "$JAVA_HOME/include"; then
++ cd "$JAVA_HOME/include"
++ elif test -n "$JAVA_HOME" -a -d "$JAVA_HOME/../Headers"; then
++ cd "$JAVA_HOME/../Headers"
++ else
++ cd /System/Library/Frameworks/JavaVM.framework/Headers
++ fi
++ # JNI_CFLAGS="-m${JVM_ARCH} -I`/bin/pwd -P`"
++ JNI_CFLAGS="-I`/bin/pwd -P`"
++ cd $_prevdir
++ unset _prevdir
++ AC_SUBST(JNI_CFLAGS)
++fi
++
++dnl -------------------------------------------------------------------------
++dnl Check JAVA environment
++dnl -------------------------------------------------------------------------
++AP_MSG_HEADER([Java compilation tools])
++AP_JAVA()
++AP_SABLEVM()
++AP_KAFFE()
++AP_PROG_JAVAC()
++AP_PROG_JAR()
++AP_JVM_LIBDIR()
++if test "$supported_os" != "darwin"
++then
++ case $host_cpu in
++ arm*) ;;
++ *)
++ CFLAGS="$CFLAGS -m${JVM_ARCH}"
++ LDFLAGS="$LDFLAGS -m${JVM_ARCH}"
++ ;;
++ esac
++ AC_MSG_RESULT([VALUE OF JVM_ARCH IS :$JVM_ARCH])
++ CFLAGS="$CFLAGS -I$JAVA_HOME/include -I$JAVA_HOME/include/$supported_os"
++ LDFLAGS="$LDFLAGS -L$LIB_JVM_DIR -ljvm -Wl,-x"
++fi
++
++
++
++
+ ############################################################################
+ # Section 2:
+ # User Configurable system defaults. Change With CAUTION!
+diff --git
a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/global_header.mk
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/global_header.mk
+index f67fa8b..7c4d2b8 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/global_header.mk
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/global_header.mk
+@@ -24,6 +24,7 @@ showvars:
+ @echo BUILD_SOURCES = $(BUILT_SOURCES)
+ @echo XBUILTSOURCES = $(XBUILT_SOURCES)
+ @echo DEFS = $(DEFS)
++ @echo CFLAGS = $(CFLAGS)
+ @echo CXXFLAGS = $(CXXFLAGS)
+ @echo AM_CXXFLAGS = $(AM_CXXFLAGS)
+ @echo CPPFLAGS = $(CPPFLAGS)
+diff --git
a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
+index d62384d..5d78ede 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
+@@ -16,6 +16,13 @@
+ #
+ bin_PROGRAMS = fuse_dfs
+ fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c fuse_stat_struct.c
fuse_users.c fuse_init.c fuse_connect.c fuse_impls_access.c fuse_impls_chmod.c
fuse_impls_chown.c fuse_impls_create.c fuse_impls_flush.c
fuse_impls_getattr.c fuse_impls_mkdir.c fuse_impls_mknod.c fuse_impls_open.c
fuse_impls_read.c fuse_impls_release.c fuse_impls_readdir.c fuse_impls_rename.c
fuse_impls_rmdir.c fuse_impls_statfs.c fuse_impls_symlink.c
fuse_impls_truncate.c fuse_impls_utimens.c fuse_impls_unlink.c
fuse_impls_write.c
+-AM_CFLAGS= -Wall -g
+-AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include
-I$(HADOOP_PREFIX)/src/c++/libhdfs -I$(JAVA_HOME)/include/linux
-D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\"
-DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
+-AM_LDFLAGS= -L$(HADOOP_PREFIX)/build/c++/$(BUILD_PLATFORM)/lib -lhdfs
-L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
++
++#AM_CFLAGS= -Wall -g
++
++#AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include
-I$(HADOOP_PREFIX)/src/c++/libhdfs -I$(JAVA_HOME)/include/linux
-D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\"
-DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
++
++fuse_dfs_CFLAGS=-Wall -g -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64
-I$(JAVA_HOME)/include -I$(HADOOP_PREFIX)/src/c++/libhdfs
-I$(JAVA_HOME)/include/linux -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\"
-DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include $(JNI_CFLAGS)
++
++fuse_dfs_LDADD=-L$(HADOOP_PREFIX)/build/c++/$(BUILD_PLATFORM)/lib -lhdfs
-L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
++
++#AM_LDFLAGS= -L$(HADOOP_PREFIX)/build/c++/$(BUILD_PLATFORM)/lib -lhdfs
-L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am
+index 8bbd627..2507885 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am
+@@ -22,6 +22,7 @@ ACLOCAL_AMFLAGS = -I m4
+
+ lib_LTLIBRARIES = libhdfs.la
+ libhdfs_la_SOURCES = hdfs.c hdfsJniHelper.c hdfs.h
++include_HEADERS = hdfs.h
+
+ #check_PROGRAMS = hdfs_test hdfs_read hdfs_write
+ check_PROGRAMS = hdfs_test hdfs_read hdfs_write
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac
+index d801fc4..f8ecd43 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac
+@@ -15,7 +15,7 @@
+ # limitations under the License.
+ #
+
+-# Autoconf input file
++# Autoconf input file for libhdfs
+ # $Id$
+
+ AC_INIT([libhdfs], [0.1.0], [email protected])
+--
+1.7.4.1
+
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0003-fix-dfs-fuse-compile.patch
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0003-fix-dfs-fuse-compile.patch?rev=1308627&view=auto
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0003-fix-dfs-fuse-compile.patch
(added)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/0003-fix-dfs-fuse-compile.patch
Mon Apr 2 23:50:54 2012
@@ -0,0 +1,45 @@
+From ad2b5d5ca8622b38aecda3d631750d9659839310 Mon Sep 17 00:00:00 2001
+From: Petru Dimulescu <[email protected]>
+Date: Fri, 16 Dec 2011 18:31:18 +0100
+Subject: [PATCH 3/3] fix dfs-fuse compile
+
+---
+ .../hadoop-hdfs/src/contrib/build-contrib.xml | 2 +-
+ .../hadoop-hdfs/src/contrib/fuse-dfs/configure.ac | 8 --------
+ 2 files changed, 1 insertions(+), 9 deletions(-)
+
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/build-contrib.xml
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/build-contrib.xml
+index b4ad24a..bde5e4e 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/build-contrib.xml
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/build-contrib.xml
+@@ -70,7 +70,7 @@
+ <property name="ivy.dir" location="ivy" />
+ <property name="ivysettings.xml"
location="${hadoop.root}/ivy/ivysettings.xml"/>
+ <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
+- <!--loadproperties srcfile="${hadoop.root}/ivy/libraries.properties"/-->
++ <loadproperties srcfile="ivy/libraries.properties"/>
+ <property name="ivy.jar"
location="${hadoop.root}/ivy/ivy-${ivy.version}.jar"/>
+ <property name="ivy_repo_url"
+
value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"
/>
+diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
+index f17c704..de663d6 100644
+--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
++++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
+@@ -91,14 +91,6 @@ AP_PROG_JAR()
+ AP_JVM_LIBDIR()
+ if test "$supported_os" != "darwin"
+ then
+- case $host_cpu in
+- arm*) ;;
+- *)
+- CFLAGS="$CFLAGS -m${JVM_ARCH}"
+- LDFLAGS="$LDFLAGS -m${JVM_ARCH}"
+- ;;
+- esac
+- AC_MSG_RESULT([VALUE OF JVM_ARCH IS :$JVM_ARCH])
+ CFLAGS="$CFLAGS -I$JAVA_HOME/include -I$JAVA_HOME/include/$supported_os"
+ LDFLAGS="$LDFLAGS -L$LIB_JVM_DIR -ljvm -Wl,-x"
+ fi
+--
+1.7.4.1
+
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/HDFS-2696-plus.patch
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/HDFS-2696-plus.patch?rev=1308627&view=auto
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/HDFS-2696-plus.patch
(added)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/HDFS-2696-plus.patch
Mon Apr 2 23:50:54 2012
@@ -0,0 +1,15 @@
+diff --git
hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
+index 5d78ede..3fbdfa7 100644
+--- hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
++++ hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
+@@ -21,8 +21,8 @@ fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c
fuse_stat_struct.c fus
+
+ #AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include
-I$(HADOOP_PREFIX)/src/c++/libhdfs -I$(JAVA_HOME)/include/linux
-D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\"
-DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
+
+-fuse_dfs_CFLAGS=-Wall -g -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64
-I$(JAVA_HOME)/include -I$(HADOOP_PREFIX)/src/c++/libhdfs
-I$(JAVA_HOME)/include/linux -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\"
-DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include $(JNI_CFLAGS)
++fuse_dfs_CFLAGS=-Wall -g -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64
-I$(JAVA_HOME)/include -I$(HADOOP_PREFIX)/src/main/native
-I$(JAVA_HOME)/include/linux -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\"
-DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include $(JNI_CFLAGS)
+
+-fuse_dfs_LDADD=-L$(HADOOP_PREFIX)/build/c++/$(BUILD_PLATFORM)/lib -lhdfs
-L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
++fuse_dfs_LDADD=-L$(HADOOP_PREFIX)/target/native/target/usr/local/lib/ -lhdfs
-L$(FUSE_HOME)/lib -lm -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
+
+ #AM_LDFLAGS= -L$(HADOOP_PREFIX)/build/c++/$(BUILD_PLATFORM)/lib -lhdfs
-L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build?rev=1308627&r1=1308626&r2=1308627&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build
Mon Apr 2 23:50:54 2012
@@ -29,6 +29,11 @@ MAVEN_SKIP_TESTS="-DskipTests -DskipTest
mvn $BUNDLE_SNAPPY -Dcontainer-executor.conf.dir=/etc/hadoop/conf -Pdist
-Pnative -Psrc -Dtar ${MAVEN_SKIP_TESTS} install "$@"
mvn site site:stage ${MAVEN_SKIP_TESTS} $@
+# Build fuse
+pushd hadoop-hdfs-project/hadoop-hdfs/src/contrib/
+ ant
+popd
+
(cd build ; tar --strip-components=1 -xzvf
../hadoop-dist/target/hadoop-${HADOOP_VERSION}.tar.gz)
(cd build/src ; tar --strip-components=1 -xzvf
../../hadoop-dist/target/hadoop-${HADOOP_VERSION}-src.tar.gz)
@@ -38,3 +43,6 @@ cp -r target/site/* build/share/doc/
# Create a manifest for hadoop client package
(cd hadoop-client/target/hadoop-client-*/share/hadoop/client/lib ; ls) >
build/hadoop-client.list
+
+# Copy fuse output to the build directory
+cp
hadoop-hdfs-project/hadoop-hdfs/build/contrib/fuse-dfs/{fuse_dfs,fuse_dfs_wrapper.sh}
build/bin/
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh?rev=1308627&r1=1308626&r2=1308627&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh
Mon Apr 2 23:50:54 2012
@@ -219,7 +219,7 @@ cp -ra ${BUILD_DIR}/share/hadoop/hdfs/we
# bin
install -d -m 0755 ${HADOOP_DIR}/bin
-cp -a ${BUILD_DIR}/bin/{hadoop,rcc} ${HADOOP_DIR}/bin
+cp -a ${BUILD_DIR}/bin/{hadoop,rcc,fuse_dfs,fuse_dfs_wrapper.sh}
${HADOOP_DIR}/bin
install -d -m 0755 ${HDFS_DIR}/bin
cp -a ${BUILD_DIR}/bin/hdfs ${HDFS_DIR}/bin
install -d -m 0755 ${YARN_DIR}/bin
@@ -231,9 +231,9 @@ cp -a ${BUILD_DIR}/bin/mapred ${YARN_DIR
# sbin
install -d -m 0755 ${HADOOP_DIR}/sbin
-cp -a
${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,slaves,start-all,stop-all}.sh
${HADOOP_DIR}/sbin
+cp -a ${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,slaves}.sh
${HADOOP_DIR}/sbin
install -d -m 0755 ${HDFS_DIR}/sbin
-cp -a
${BUILD_DIR}/sbin/{distribute-exclude,refresh-namenodes,start-balancer,start-dfs,start-secure-dns,stop-balancer,stop-dfs,stop-secure-dns}.sh
${HDFS_DIR}/sbin
+cp -a ${BUILD_DIR}/sbin/{distribute-exclude,refresh-namenodes}.sh
${HDFS_DIR}/sbin
install -d -m 0755 ${YARN_DIR}/sbin
cp -a ${BUILD_DIR}/sbin/*yarn* ${BUILD_DIR}/sbin/slaves.sh ${YARN_DIR}/sbin
install -d -m 0755 ${MAPREDUCE_DIR}/sbin
@@ -321,3 +321,31 @@ for file in `cat ${BUILD_DIR}/hadoop-cli
done
exit 1
done
+
+# Install fuse wrapper
+
+fuse_wrapper=${BIN_DIR}/hadoop-fuse-dfs
+cat > $fuse_wrapper << EOF
+#!/bin/bash
+
+/sbin/modprobe fuse
+
+export HADOOP_HOME=\${HADOOP_HOME:-${HADOOP_DIR#${PREFIX}}}
+
+if [ -f /etc/default/hadoop-fuse ]
+then . /etc/default/hadoop-fuse
+fi
+
+export HADOOP_LIBEXEC_DIR=/${SYSTEM_LIBEXEC_DIR#${PREFIX}}
+
+if [ "\${LD_LIBRARY_PATH}" = "" ]; then
+export LD_LIBRARY_PATH=/usr/lib
+for f in \`find \${JAVA_HOME}/jre/lib -name client -prune -o -name libjvm.so
-exec dirname {} \;\`; do
+export LD_LIBRARY_PATH=\$f:\${LD_LIBRARY_PATH}
+done
+fi
+
+env \${HADOOP_HOME}/bin/fuse_dfs \$@
+EOF
+
+chmod 755 $fuse_wrapper
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec?rev=1308627&r1=1308626&r2=1308627&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
Mon Apr 2 23:50:54 2012
@@ -160,6 +160,10 @@ Source19: mapreduce.default
Source20: hdfs.default
Source21: yarn.default
Source22: hadoop-layout.sh
+Patch0: 0001-fix-fuse_dfs-compilation-issues.patch
+Patch1: 0002-fix-fuse_dfs-compilation.patch
+Patch2: 0003-fix-dfs-fuse-compile.patch
+Patch3: HDFS-2696-plus.patch
Buildroot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id} -u -n)
BuildRequires: python >= 2.4, git, fuse-devel,fuse, automake, autoconf
Requires: coreutils, /usr/sbin/useradd, /usr/sbin/usermod, /sbin/chkconfig,
/sbin/service, bigtop-utils, zookeeper >= 3.4.0
@@ -337,8 +341,10 @@ The History server keeps records of the
%package client
Summary: Hadoop client side dependencies
Group: System/Daemons
-Requires: %{name} = %{version}-%{release}, %{name}-hdfs = %{version}-%{release}
-Requires: %{name}-yarn = %{version}-%{release}, %{name}-mapreduce =
%{version}-%{release}
+Requires: %{name} = %{version}-%{release}
+Requires: %{name}-hdfs = %{version}-%{release}
+Requires: %{name}-yarn = %{version}-%{release}
+Requires: %{name}-mapreduce = %{version}-%{release}
%description client
Installation of this package will provide you with all the dependencies for
Hadoop clients.
@@ -346,9 +352,12 @@ Installation of this package will provid
%package conf-pseudo
Summary: Hadoop installation in pseudo-distributed mode
Group: System/Daemons
-Requires: %{name} = %{version}-%{release}, %{name}-hdfs-namenode =
%{version}-%{release}
-Requires: %{name}-hdfs-datanode = %{version}-%{release},
%{name}-hdfs-secondarynamenode = %{version}-%{release}
-Requires: %{name}-yarn-resourcemanager = %{version}-%{release},
%{name}-yarn-nodemanager = %{version}-%{release}
+Requires: %{name} = %{version}-%{release}
+Requires: %{name}-hdfs-namenode = %{version}-%{release}
+Requires: %{name}-hdfs-datanode = %{version}-%{release}
+Requires: %{name}-hdfs-secondarynamenode = %{version}-%{release}
+Requires: %{name}-yarn-resourcemanager = %{version}-%{release}
+Requires: %{name}-yarn-nodemanager = %{version}-%{release}
Requires: %{name}-mapreduce-historyserver = %{version}-%{release}
%description conf-pseudo
@@ -371,10 +380,37 @@ AutoReq: no
%description libhdfs
Hadoop Filesystem Library
+
+%package fuse
+Summary: Mountable HDFS
+Group: Development/Libraries
+Requires: %{name} = %{version}-%{release}
+Requires: %{name}-libhdfs = %{version}-%{release}
+Requires: fuse
+AutoReq: no
+
+%if %{?suse_version:1}0
+Requires: libfuse2
+%else
+Requires: fuse-libs
+%endif
+
+
+%description fuse
+This package allows HDFS to be mounted (on most flavors of
Unix) as a standard file system using the mount command. Once mounted, the user
can operate on an instance of HDFS using standard Unix utilities such as 'ls',
'cd', 'cp', 'mkdir', 'find', 'grep', or use standard POSIX libraries like open,
write, read, and close from C, C++, Python, Ruby, Perl, Java, bash, etc.
+
+
+
%prep
# %setup -n %{name}-%{hadoop_base_version}-src
%setup -n apache-hadoop-common-f616c85
+%patch0 -p1
+%patch1 -p1
+%patch2 -p1
+%patch3
+
+
%build
# This assumes that you installed Java JDK 6 and set JAVA_HOME
# This assumes that you installed Java JDK 5 and set JAVA5_HOME
@@ -626,3 +662,9 @@ fi
%{_includedir}/hdfs.h
# -devel should be its own package
#%doc %{_docdir}/libhdfs-%{hadoop_version}
+
+%files fuse
+%defattr(-,root,root)
+%attr(0755,root,root) %{lib_hadoop}/bin/fuse_dfs
+%attr(0755,root,root) %{lib_hadoop}/bin/fuse_dfs_wrapper.sh
+%attr(0755,root,root) %{bin_hadoop}/hadoop-fuse-dfs