svn commit: r1383254 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/http/HttpServer.java src/test/java/org/apache/hadoop/http/TestServletFil

2012-09-11 Thread todd
Author: todd
Date: Tue Sep 11 06:37:17 2012
New Revision: 1383254

URL: http://svn.apache.org/viewvc?rev=1383254&view=rev
Log:
HADOOP-8786. HttpServer continues to start even if AuthenticationFilter fails 
to init. Contributed by Todd Lipcon.

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1383254&r1=1383253&r2=1383254&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Sep 
11 06:37:17 2012
@@ -202,6 +202,9 @@ Trunk (Unreleased)
 HADOOP-8684. Deadlock between WritableComparator and WritableComparable.
 (Jing Zhao via suresh)
 
+HADOOP-8786. HttpServer continues to start even if AuthenticationFilter
+fails to init (todd)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1383254&r1=1383253&r2=1383254&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
 Tue Sep 11 06:37:17 2012
@@ -677,6 +677,15 @@ public class HttpServer implements Filte
  "Problem in starting http server. Server handlers failed");
 }
   }
+  // Make sure there are no errors initializing the context.
+  Throwable unavailableException = webAppContext.getUnavailableException();
+  if (unavailableException != null) {
+// Have to stop the webserver, or else its non-daemon threads
+// will hang forever.
+webServer.stop();
+throw new IOException("Unable to initialize WebAppContext",
+unavailableException);
+  }
 } catch (IOException e) {
   throw e;
 } catch (InterruptedException e) {

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java?rev=1383254&r1=1383253&r2=1383254&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
 Tue Sep 11 06:37:17 2012
@@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletReq
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 
 public class TestServletFilter extends HttpServerFunctionalTest {
@@ -163,7 +164,7 @@ public class TestServletFilter extends H
   @Test
   public void testServletFilterWhenInitThrowsException() throws Exception {
 Configuration conf = new Configuration();
-// start a http server with CountingFilter
+// start a http server with ErrorFilter
 conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
 ErrorFilter.Initializer.class.getName());
 HttpServer http = createTestServer(conf);
@@ -174,4 +175,25 @@ public class TestServletFilter extends H
  assertTrue( e.getMessage().contains("Problem in starting http server. Server handlers failed"));
 }
   }
+  
+  /**
+   * Similar to the above test case, except that it uses a different API to 
add the
+   * filter. Regression test for HADOOP-8786.
+   */
+  @Test
+  public void testContextSpecificServletFilterWhenInitThrowsException()
+  throws Exception {
+Configuration conf = new Configuration();
+HttpServer http = createTestServer(conf);
+http.defineFilter(http.webAppContext,
+"ErrorFilter", ErrorFilter.class.getName(),
+null, null);
+try {
+  http.start();
+  fail("expecting exception");
+} catch (IOException e) {
+  GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e);
+}

[Hadoop Wiki] Trivial Update of QwertyManiac/BuildingHadoopTrunk by QwertyManiac

2012-09-11 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on Hadoop Wiki for change 
notification.

The QwertyManiac/BuildingHadoopTrunk page has been changed by QwertyManiac:
http://wiki.apache.org/hadoop/QwertyManiac/BuildingHadoopTrunk?action=diff&rev1=10&rev2=11

  
  4. Enter the top level checkout directory ({{{hadoop}}}) and issue {{{mvn 
install -DskipTests}}} to kick off the compile.
  
- 5. If you want to generate eclipse project files, run: {{{mvn 
eclipse:eclipse}}}.
+ 5. If you want to generate eclipse project files, run: {{{mvn 
eclipse:eclipse}}}. Or alternatively, use the m2e plugin in Eclipse and import 
these maven projects directly.
+ 
+ 6. Post step (4), if you want to run any tests, just head down into the 
sub-module or the parent-module and issue an {{{mvn test}}}. For specific test 
classes, do {{{mvn -Dtest=TestClassNameHint test}}}.
  
  = Building branch-0.23 =
  
- This is same as building trunk, but checkout the '''branch-0.23''' branch 
before you run the commands.
+ This is similar to building trunk, but checkout the '''branch-0.23''' 
branch before you run the commands.
  
  1. Checkout the sources (Use any method below):
* Using GitHub mirror: {{{git clone 
g...@github.com:apache/hadoop-common.git hadoop}}}


svn commit: r1383430 - in /hadoop/common/branches/branch-1: CHANGES.txt src/mapred/org/apache/hadoop/filecache/TrackerDistributedCacheManager.java

2012-09-11 Thread tgraves
Author: tgraves
Date: Tue Sep 11 14:34:27 2012
New Revision: 1383430

URL: http://svn.apache.org/viewvc?rev=1383430&view=rev
Log:
MAPREDUCE-4576. Large dist cache can block tasktracker heartbeat (Robert Evans 
via tgraves).

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/filecache/TrackerDistributedCacheManager.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1383430&r1=1383429&r2=1383430&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Tue Sep 11 14:34:27 2012
@@ -233,6 +233,9 @@ Release 1.2.0 - unreleased
 
 HADOOP-8781. hadoop-config.sh should add JAVA_LIBRARY_PATH to 
LD_LIBRARY_PATH. (tucu)
 
+MAPREDUCE-4576. Large dist cache can block tasktracker heartbeat
+(Robert Evans via tgraves)
+
 Release 1.1.0 - unreleased
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/filecache/TrackerDistributedCacheManager.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/filecache/TrackerDistributedCacheManager.java?rev=1383430&r1=1383429&r2=1383430&view=diff
==
--- 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/filecache/TrackerDistributedCacheManager.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/filecache/TrackerDistributedCacheManager.java
 Tue Sep 11 14:34:27 2012
@@ -35,6 +35,7 @@ import java.util.Random;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -566,7 +567,8 @@ public class TrackerDistributedCacheMana
 //
 long size;  //the size of this cache.
 boolean inited = false; // is it initialized ?
-
+private final ReentrantLock lock = new ReentrantLock();
+ 
 //
 // The following five fields are Immutable.
 //
@@ -598,14 +600,20 @@ public class TrackerDistributedCacheMana
   this.key = key;
 }
 
-public synchronized void incRefCount() {
-  refcount.incrementAndGet() ;
-  LOG.debug(localizedLoadPath + ": refcount=" + refcount.get());
+public void incRefCount() {
+  lock.lock();
+  try {
+refcount.incrementAndGet() ;
+LOG.debug(localizedLoadPath + ": refcount=" + refcount.get());
+  } finally {
+lock.unlock();
+  }
 }
 
 public void decRefCount() {
   synchronized (cachedArchives) {
-synchronized (this) {
+lock.lock();
+try {
   refcount.decrementAndGet() ;
   LOG.debug(localizedLoadPath + ": refcount=" + refcount.get());
   if(refcount.get() <= 0) {
@@ -613,6 +621,8 @@ public class TrackerDistributedCacheMana
 cachedArchives.remove(key);
 cachedArchives.put(key, this);
   }
+} finally {
+  lock.unlock();
 }
   }
 }
@@ -621,9 +631,14 @@ public class TrackerDistributedCacheMana
   return refcount.get();
 }
 
-public synchronized boolean isUsed() {
-  LOG.debug(localizedLoadPath + ": refcount=" + refcount.get());
-  return refcount.get() > 0;
+public boolean isUsed() {
+  lock.lock();
+  try { 
+LOG.debug(localizedLoadPath + ": refcount=" + refcount.get());
+return refcount.get() > 0;
+  } finally {
+lock.unlock();
+  }
 }
 
 Path getBaseDir(){
@@ -1027,19 +1042,24 @@ public class TrackerDistributedCacheMana
   CacheDir leftToClean = 
toBeCleanedBaseDir.get(cacheStatus.getBaseDir());
 
   if (leftToClean != null && (leftToClean.size > 0 || 
leftToClean.subdirs > 0)) {
-synchronized (cacheStatus) {
-  // if reference count is zero mark the cache for deletion
-  boolean isUsed = cacheStatus.isUsed();
-  long cacheSize = cacheStatus.size; 
-  LOG.debug(cacheStatus.getLocalizedUniqueDir() + ": isUsed=" + 
isUsed + 
-   " size=" + cacheSize + " leftToClean.size=" + 
leftToClean.size);
-  if (!isUsed) {
-leftToClean.size -= cacheSize;
-leftToClean.subdirs--;
-// delete this cache entry from the global list 
-// and mark the localized file for deletion
-toBeDeletedCache.add(cacheStatus);
-it.remove();
+boolean gotLock = cacheStatus.lock.tryLock();
+if (gotLock) {
+  try {
+// if reference count is zero mark the cache for deletion
+boolean isUsed = 

svn commit: r1383459 - /hadoop/common/site/main/publish/.htaccess

2012-09-11 Thread cutting
Author: cutting
Date: Tue Sep 11 15:56:22 2012
New Revision: 1383459

URL: http://svn.apache.org/viewvc?rev=1383459&view=rev
Log:
HADOOP-8662.  Fix broken credits link for hdfs and mapreduce.

Modified:
hadoop/common/site/main/publish/.htaccess

Modified: hadoop/common/site/main/publish/.htaccess
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/.htaccess?rev=1383459&r1=1383458&r2=1383459&view=diff
==
--- hadoop/common/site/main/publish/.htaccess (original)
+++ hadoop/common/site/main/publish/.htaccess Tue Sep 11 15:56:22 2012
@@ -5,12 +5,12 @@ RedirectMatch Permanent ^/core/index.htm
 RedirectMatch Permanent ^/core/(.+)$ http://hadoop.apache.org/common/$1
 
 # Redirect former subprojects to the main site.
-RedirectMatch Permanent ^/common/credits.html$ 
http://hadoop.apache.org/who.html$1
 RedirectMatch Permanent ^/common/(.+)$ http://hadoop.apache.org/$1
 RedirectMatch Permanent ^/hdfs/docs/(.+)$ http://hadoop.apache.org/docs/hdfs/$1
 RedirectMatch Permanent ^/hdfs/(.+)$ http://hadoop.apache.org/$1
 RedirectMatch Permanent ^/mapreduce/docs/(.+)$ 
http://hadoop.apache.org/docs/mapreduce/$1
 RedirectMatch Permanent ^/mapreduce/(.+)$ http://hadoop.apache.org/$1
+RedirectMatch Permanent ^/credits.html$ http://hadoop.apache.org/who.html$1
 
 # promote Avro to TLP
 RedirectMatch Permanent ^/avro/(.*)$ http://avro.apache.org/$1




svn commit: r1383471 - /hadoop/common/site/main/publish/doap.rdf

2012-09-11 Thread cutting
Author: cutting
Date: Tue Sep 11 16:32:12 2012
New Revision: 1383471

URL: http://svn.apache.org/viewvc?rev=1383471&view=rev
Log:
Add missing doap file.

Added:
hadoop/common/site/main/publish/doap.rdf
  - copied, changed from r1382979, 
hadoop/common/site/common/publish/doap.rdf

Copied: hadoop/common/site/main/publish/doap.rdf (from r1382979, 
hadoop/common/site/common/publish/doap.rdf)
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/doap.rdf?p2=hadoop/common/site/main/publish/doap.rdf&p1=hadoop/common/site/common/publish/doap.rdf&r1=1382979&r2=1383471&rev=1383471&view=diff
==
--- hadoop/common/site/common/publish/doap.rdf (original)
+++ hadoop/common/site/main/publish/doap.rdf Tue Sep 11 16:32:12 2012
@@ -13,23 +13,23 @@
 
   ===
 --
-  <Project rdf:about="http://hadoop.apache.org/core/">
+  <Project rdf:about="http://hadoop.apache.org/">
 created2006-02-28/created
 license rdf:resource=http://usefulinc.com/doap/licenses/asl20; /
 nameApache Hadoop/name
-    <homepage rdf:resource="http://hadoop.apache.org/core/" />
+    <homepage rdf:resource="http://hadoop.apache.org/" />
 asfext:pmc rdf:resource=http://hadoop.apache.org; /
 shortdescA distributed computing platform./shortdesc
-descriptionHadoop Core contains a distributed computing platform. This 
includes the Hadoop Distributed Filesystem (HDFS) and an implementation of 
MapReduce./description
+descriptionHadoop is a distributed computing platform. This includes the 
Hadoop Distributed Filesystem (HDFS) and an implementation of 
MapReduce./description
 bug-database rdf:resource=http://issues.apache.org/jira/browse/HADOOP; /
-mailing-list 
rdf:resource=http://hadoop.apache.org/core/mailing_lists.html; /
-download-page 
rdf:resource=http://www.apache.org/dyn/closer.cgi/hadoop/core/; /
+mailing-list rdf:resource=http://hadoop.apache.org/mailing_lists.html; /
+download-page rdf:resource=http://www.apache.org/dyn/closer.cgi/hadoop/; 
/
 programming-languageJava/programming-language
 category rdf:resource=http://projects.apache.org/category/database; /
 repository
   SVNRepository
-location rdf:resource=http://svn.apache.org/repos/asf/hadoop/core//
-browse rdf:resource=http://svn.apache.org/viewcvs.cgi/hadoop/core//
+location rdf:resource=http://svn.apache.org/repos/asf/hadoop//
+browse rdf:resource=http://svn.apache.org/viewcvs.cgi/hadoop//
   /SVNRepository
 /repository
   /Project




svn commit: r1383494 - in /hadoop/common/trunk/hadoop-tools: hadoop-archives/src/test/java/org/apache/hadoop/tools/ hadoop-extras/src/test/java/org/apache/hadoop/tools/

2012-09-11 Thread eli
Author: eli
Date: Tue Sep 11 17:56:19 2012
New Revision: 1383494

URL: http://svn.apache.org/viewvc?rev=1383494&view=rev
Log:
HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR. Contributed by 
Andy Isaacson

Modified:

hadoop/common/trunk/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java

hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java

hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java

Modified: 
hadoop/common/trunk/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java?rev=1383494&r1=1383493&r2=1383494&view=diff
==
--- 
hadoop/common/trunk/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
 (original)
+++ 
hadoop/common/trunk/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
 Tue Sep 11 17:56:19 2012
@@ -52,11 +52,11 @@ public class TestHadoopArchives extends 
 
   {
 ((Log4JLogger)LogFactory.getLog(org.apache.hadoop.security.Groups.class)
-).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
 ((Log4JLogger)org.apache.hadoop.ipc.Server.LOG
-).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
 ((Log4JLogger)org.apache.hadoop.util.AsyncDiskService.LOG
-).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
   }
 
  private static final String inputDir = "input";

Modified: 
hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java?rev=1383494&r1=1383493&r2=1383494&view=diff
==
--- 
hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
 (original)
+++ 
hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
 Tue Sep 11 17:56:19 2012
@@ -61,9 +61,9 @@ import org.junit.Ignore;
 public class TestCopyFiles extends TestCase {
   {
 ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange")
-).getLogger().setLevel(Level.OFF);
-((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.OFF);
-
((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
+((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ERROR);
+
((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ERROR);
 ((Log4JLogger)DistCpV1.LOG).getLogger().setLevel(Level.ALL);
   }
   

Modified: 
hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java?rev=1383494&r1=1383493&r2=1383494&view=diff
==
--- 
hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
 (original)
+++ 
hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
 Tue Sep 11 17:56:19 2012
@@ -46,9 +46,9 @@ import org.junit.Ignore;
 public class TestDistCh extends junit.framework.TestCase {
   {
 ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange")
-).getLogger().setLevel(Level.OFF);
-((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.OFF);
-
((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
+((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ERROR);
+
((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ERROR);
   }
 
   static final Long RANDOM_NUMBER_GENERATOR_SEED = null;




svn commit: r1383494 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2012-09-11 Thread eli
Author: eli
Date: Tue Sep 11 17:56:19 2012
New Revision: 1383494

URL: http://svn.apache.org/viewvc?rev=1383494&view=rev
Log:
HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR. Contributed by 
Andy Isaacson

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1383494&r1=1383493&r2=1383494&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Sep 
11 17:56:19 2012
@@ -217,6 +217,9 @@ Release 2.0.3-alpha - Unreleased 
 
   IMPROVEMENTS
 
+HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR.
+(Andy Isaacson via eli)
+
   OPTIMIZATIONS
 
   BUG FIXES




svn commit: r1383497 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2012-09-11 Thread eli
Author: eli
Date: Tue Sep 11 17:58:07 2012
New Revision: 1383497

URL: http://svn.apache.org/viewvc?rev=1383497&view=rev
Log:
HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR. Contributed by 
Andy Isaacson

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1383497&r1=1383496&r2=1383497&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Sep 11 17:58:07 2012
@@ -8,6 +8,9 @@ Release 2.0.3-alpha - Unreleased
 
   IMPROVEMENTS
 
+HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR.
+(Andy Isaacson via eli)
+
   OPTIMIZATIONS
 
   BUG FIXES 




svn commit: r1383497 - in /hadoop/common/branches/branch-2/hadoop-tools: hadoop-archives/src/test/java/org/apache/hadoop/tools/ hadoop-extras/src/test/java/org/apache/hadoop/tools/

2012-09-11 Thread eli
Author: eli
Date: Tue Sep 11 17:58:07 2012
New Revision: 1383497

URL: http://svn.apache.org/viewvc?rev=1383497&view=rev
Log:
HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR. Contributed by 
Andy Isaacson

Modified:

hadoop/common/branches/branch-2/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java

hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java

hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java?rev=1383497&r1=1383496&r2=1383497&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
 Tue Sep 11 17:58:07 2012
@@ -52,11 +52,11 @@ public class TestHadoopArchives extends 
 
   {
 ((Log4JLogger)LogFactory.getLog(org.apache.hadoop.security.Groups.class)
-).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
 ((Log4JLogger)org.apache.hadoop.ipc.Server.LOG
-).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
 ((Log4JLogger)org.apache.hadoop.util.AsyncDiskService.LOG
-).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
   }
 
  private static final String inputDir = "input";

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java?rev=1383497&r1=1383496&r2=1383497&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
 Tue Sep 11 17:58:07 2012
@@ -61,9 +61,9 @@ import org.junit.Ignore;
 public class TestCopyFiles extends TestCase {
   {
 ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange")
-).getLogger().setLevel(Level.OFF);
-((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.OFF);
-
((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
+((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ERROR);
+
((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ERROR);
 ((Log4JLogger)DistCpV1.LOG).getLogger().setLevel(Level.ALL);
   }
   

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java?rev=1383497&r1=1383496&r2=1383497&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
 Tue Sep 11 17:58:07 2012
@@ -46,9 +46,9 @@ import org.junit.Ignore;
 public class TestDistCh extends junit.framework.TestCase {
   {
 ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange")
-).getLogger().setLevel(Level.OFF);
-((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.OFF);
-
((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.OFF);
+).getLogger().setLevel(Level.ERROR);
+((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ERROR);
+
((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ERROR);
   }
 
   static final Long RANDOM_NUMBER_GENERATOR_SEED = null;




svn commit: r1383560 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/bin/hadoop-config.sh src/main/bin/slaves.sh

2012-09-11 Thread suresh
Author: suresh
Date: Tue Sep 11 19:10:01 2012
New Revision: 1383560

URL: http://svn.apache.org/viewvc?rev=1383560&view=rev
Log:
HADOOP-8767. Secondary namenode is started on slave nodes instead of master 
nodes. Contributed by Giovanni Delussu.

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1383560&r1=1383559&r2=1383560&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Sep 
11 19:10:01 2012
@@ -205,6 +205,9 @@ Trunk (Unreleased)
 HADOOP-8786. HttpServer continues to start even if AuthenticationFilter
 fails to init (todd)
 
+HADOOP-8767. Secondary namenode is started on slave nodes instead of
+master nodes. (Giovanni Delussu via suresh)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh?rev=1383560&r1=1383559&r2=1383560&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
 Tue Sep 11 19:10:01 2012
@@ -74,6 +74,10 @@ fi
 
 export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$HADOOP_PREFIX/$DEFAULT_CONF_DIR}
 
+if [ -f ${HADOOP_CONF_DIR}/hadoop-env.sh ]; then
+  . ${HADOOP_CONF_DIR}/hadoop-env.sh
+fi
+
 # User can specify hostnames or a file where the hostnames are (not both)
if [[ ( "$HADOOP_SLAVES" != '' ) && ( "$HADOOP_SLAVE_NAMES" != '' ) ]] ; then
   echo \
@@ -113,9 +117,6 @@ case `uname` in
 CYGWIN*) cygwin=true;;
 esac
 
-if [ -f ${HADOOP_CONF_DIR}/hadoop-env.sh ]; then
-  . ${HADOOP_CONF_DIR}/hadoop-env.sh
-fi
 
 # check if net.ipv6.bindv6only is set to 1
bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh?rev=1383560&r1=1383559&r2=1383560&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh 
(original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh 
Tue Sep 11 19:10:01 2012
@@ -42,9 +42,6 @@ DEFAULT_LIBEXEC_DIR=$bin/../libexec
 HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
 . $HADOOP_LIBEXEC_DIR/hadoop-config.sh
 
-if [ -f ${HADOOP_CONF_DIR}/hadoop-env.sh ]; then
-  . ${HADOOP_CONF_DIR}/hadoop-env.sh
-fi
 
 # Where to start the script, see hadoop-config.sh
 # (it set up the variables based on command line options)




svn commit: r1383562 - in /hadoop/common/branches/branch-1: CHANGES.txt bin/hadoop-config.sh bin/slaves.sh

2012-09-11 Thread suresh
Author: suresh
Date: Tue Sep 11 19:11:35 2012
New Revision: 1383562

URL: http://svn.apache.org/viewvc?rev=1383562&view=rev
Log:
HADOOP-8767. Secondary namenode is started on slave nodes instead of master 
nodes. Contributed by Giovanni Delussu.

Modified:
hadoop/common/branches/branch-1/CHANGES.txt
hadoop/common/branches/branch-1/bin/hadoop-config.sh
hadoop/common/branches/branch-1/bin/slaves.sh

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1383562&r1=1383561&r2=1383562&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Tue Sep 11 19:11:35 2012
@@ -236,6 +236,9 @@ Release 1.2.0 - unreleased
 MAPREDUCE-4576. Large dist cache can block tasktracker heartbeat
 (Robert Evans via tgraves)
 
+HADOOP-8767. Secondary namenode is started on slave nodes instead of
+master nodes. (Giovanni Delussu via suresh)
+
 Release 1.1.0 - unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-1/bin/hadoop-config.sh
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/bin/hadoop-config.sh?rev=1383562&r1=1383561&r2=1383562&view=diff
==
--- hadoop/common/branches/branch-1/bin/hadoop-config.sh (original)
+++ hadoop/common/branches/branch-1/bin/hadoop-config.sh Tue Sep 11 19:11:35 
2012
@@ -53,6 +53,10 @@ else
 fi
 HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$HADOOP_PREFIX/$DEFAULT_CONF_DIR}
 
+if [ -f ${HADOOP_CONF_DIR}/hadoop-env.sh ]; then
+  . ${HADOOP_CONF_DIR}/hadoop-env.sh
+fi
+
 #check to see it is specified whether to use the slaves or the
 # masters file
 if [ $# -gt 1 ]
@@ -66,9 +70,6 @@ then
 fi
 fi
 
-if [ -f ${HADOOP_CONF_DIR}/hadoop-env.sh ]; then
-  . ${HADOOP_CONF_DIR}/hadoop-env.sh
-fi
 
if [ "$HADOOP_HOME_WARN_SUPPRESS" = "" ] && [ "$HADOOP_HOME" != "" ]; then
  echo "Warning: \$HADOOP_HOME is deprecated." 1>&2

Modified: hadoop/common/branches/branch-1/bin/slaves.sh
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/bin/slaves.sh?rev=1383562&r1=1383561&r2=1383562&view=diff
==
--- hadoop/common/branches/branch-1/bin/slaves.sh (original)
+++ hadoop/common/branches/branch-1/bin/slaves.sh Tue Sep 11 19:11:35 2012
@@ -38,21 +38,18 @@ fi
 bin=`dirname $0`
 bin=`cd $bin; pwd`
 
-if [ -e $bin/../libexec/hadoop-config.sh ]; then
-  . $bin/../libexec/hadoop-config.sh
-else
-  . $bin/hadoop-config.sh
-fi
-
 # If the slaves file is specified in the command line,
 # then it takes precedence over the definition in 
 # hadoop-env.sh. Save it here.
 HOSTLIST=$HADOOP_SLAVES
 
-if [ -f ${HADOOP_CONF_DIR}/hadoop-env.sh ]; then
-  . ${HADOOP_CONF_DIR}/hadoop-env.sh
+if [ -e $bin/../libexec/hadoop-config.sh ]; then
+  . $bin/../libexec/hadoop-config.sh
+else
+  . $bin/hadoop-config.sh
 fi
 
+
if [ "$HOSTLIST" = "" ]; then
  if [ "$HADOOP_SLAVES" = "" ]; then
    export HOSTLIST="${HADOOP_CONF_DIR}/slaves"




svn commit: r1383607 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/shell/Display.java src/test/java/org/apache/hadoop/fs/shell/TestTextC

2012-09-11 Thread cutting
Author: cutting
Date: Tue Sep 11 20:38:45 2012
New Revision: 1383607

URL: http://svn.apache.org/viewvc?rev=1383607&view=rev
Log:
HADOOP-8597. Permit FsShell's text command to read Avro files.  Contributed by 
Ivan Vladimirov.

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
   (with props)
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1383607&r1=1383606&r2=1383607&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Sep 
11 20:38:45 2012
@@ -218,6 +218,9 @@ Release 2.0.3-alpha - Unreleased 
 
   NEW FEATURES
 
+HADOOP-8597. Permit FsShell's text command to read Avro files.
+(Ivan Vladimirov Ivanov via cutting)
+
   IMPROVEMENTS
 
 HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR.

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1383607&r1=1383606&r2=1383607&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
 Tue Sep 11 20:38:45 2012
@@ -17,11 +17,21 @@
  */
 package org.apache.hadoop.fs.shell;
 
-import java.io.IOException;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.InputStream;
+import java.io.IOException;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.JsonEncoder;
+import org.apache.avro.Schema;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -37,6 +47,10 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.codehaus.jackson.JsonEncoding;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.util.MinimalPrettyPrinter;
 
 /**
  * Display contents of files 
@@ -95,14 +109,14 @@ class Display extends FsCommand {
   
   /**
* Same behavior as -cat, but handles zip and TextRecordInputStream
-   * encodings. 
+   * and Avro encodings. 
*/ 
   public static class Text extends Cat {
 public static final String NAME = "text";
 public static final String USAGE = Cat.USAGE;
 public static final String DESCRIPTION =
   "Takes a source file and outputs the file in text format.\n" +
-  "The allowed formats are zip and TextRecordInputStream.";
+  "The allowed formats are zip and TextRecordInputStream and Avro.";
 
 @Override
 protected InputStream getInputStream(PathData item) throws IOException {
@@ -132,6 +146,13 @@ class Display extends FsCommand {
   }
   break;
 }
+case 0x4f62: { // 'O' 'b'
+  if (i.readByte() == 'j') {
+i.close();
+return new AvroFileInputStream(item.stat);
+  }
+  break;
+}
   }
 
   // File is non-compressed, or not a file container we know.
@@ -187,4 +208,68 @@ class Display extends FsCommand {
   super.close();
 }
   }
+
+  /**
+   * This class transforms a binary Avro data file into an InputStream
+   * with data that is in a human readable JSON format.
+   */
+  protected static class AvroFileInputStream extends InputStream {
+private int pos;
+private byte[] buffer;
+private ByteArrayOutputStream output;
+private FileReader fileReader;
+private DatumWriter<Object> writer;
+private JsonEncoder encoder;
+
+public AvroFileInputStream(FileStatus status) throws IOException {
+  pos = 0;
+  buffer = new byte[0];
+  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
+  fileReader =
+

svn commit: r1383608 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/shell/Display.java src/test/java/org/apache/hadoop/fs/she

2012-09-11 Thread cutting
Author: cutting
Date: Tue Sep 11 20:47:27 2012
New Revision: 1383608

URL: http://svn.apache.org/viewvc?rev=1383608&view=rev
Log:
Merge -c 1383607 from trunk to branch-2. Fixes: HADOOP-8597 (add Avro support 
to FsShell text command).

Added:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
   (with props)
Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1383608&r1=1383607&r2=1383608&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Sep 11 20:47:27 2012
@@ -6,6 +6,9 @@ Release 2.0.3-alpha - Unreleased
 
   NEW FEATURES
 
+HADOOP-8597. Permit FsShell's text command to read Avro files.
+(Ivan Vladimirov Ivanov via cutting)
+
   IMPROVEMENTS
 
 HADOOP-8789. Tests setLevel(Level.OFF) should be Level.ERROR.

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1383608&r1=1383607&r2=1383608&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
 Tue Sep 11 20:47:27 2012
@@ -17,11 +17,21 @@
  */
 package org.apache.hadoop.fs.shell;
 
-import java.io.IOException;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.InputStream;
+import java.io.IOException;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.JsonEncoder;
+import org.apache.avro.Schema;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -38,6 +48,10 @@ import org.apache.hadoop.io.WritableComp
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.codehaus.jackson.JsonEncoding;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.util.MinimalPrettyPrinter;
 
 /**
  * Display contents of files 
@@ -96,14 +110,14 @@ class Display extends FsCommand {
   
   /**
* Same behavior as -cat, but handles zip and TextRecordInputStream
-   * encodings. 
+   * and Avro encodings. 
*/ 
   public static class Text extends Cat {
 public static final String NAME = "text";
 public static final String USAGE = Cat.USAGE;
 public static final String DESCRIPTION =
   "Takes a source file and outputs the file in text format.\n" +
-  "The allowed formats are zip and TextRecordInputStream.";
+  "The allowed formats are zip and TextRecordInputStream and Avro.";
 
 @Override
 protected InputStream getInputStream(PathData item) throws IOException {
@@ -133,6 +147,13 @@ class Display extends FsCommand {
   }
   break;
 }
+case 0x4f62: { // 'O' 'b'
+  if (i.readByte() == 'j') {
+i.close();
+return new AvroFileInputStream(item.stat);
+  }
+  break;
+}
   }
 
   // File is non-compressed, or not a file container we know.
@@ -188,4 +209,68 @@ class Display extends FsCommand {
   super.close();
 }
   }
+
+  /**
+   * This class transforms a binary Avro data file into an InputStream
+   * with data that is in a human readable JSON format.
+   */
+  protected static class AvroFileInputStream extends InputStream {
+private int pos;
+private byte[] buffer;
+private ByteArrayOutputStream output;
+private FileReader fileReader;
+private DatumWriter<Object> writer;
+private JsonEncoder encoder;
+
+public AvroFileInputStream(FileStatus status) throws IOException {
+  pos 

svn commit: r1383678 - /hadoop/common/branches/branch-1/CHANGES.txt

2012-09-11 Thread suresh
Author: suresh
Date: Tue Sep 11 23:50:16 2012
New Revision: 1383678

URL: http://svn.apache.org/viewvc?rev=1383678view=rev
Log:
Moved some of the jiras that have been merged into 1.1.0 to the relevant 
section from 1.2.0 section in CHANGES.txt

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1383678&r1=1383677&r2=1383678&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Tue Sep 11 23:50:16 2012
@@ -4,19 +4,12 @@ Release 1.2.0 - unreleased
 
   INCOMPATIBLE CHANGES
 
-HDFS-2617. Replaced Kerberized SSL for image transfer and fsck with
-SPNEGO-based solution. (Jakob Homan, Owen O'Malley, Alejandro Abdelnur and
-Aaron T. Myers via atm)
-
   NEW FEATURES
 
 HADOOP-8023. Add unset() method to Configuration (tucu)
 
 MAPREDUCE-4355. Add RunningJob.getJobStatus() (kkambatl via tucu)
 
-HADOOP-7823. Port HADOOP-4012 providing split support for bzip2 compressed
-files to branch-1. (Andrew Purtell via cdouglas)
-
 MAPREDUCE-987. Exposing MiniDFS and MiniMR clusters as a single process 
 command-line (philip and ahmed via tucu)
 
@@ -83,9 +76,6 @@ Release 1.2.0 - unreleased
 MAPREDUCE-3289. Make use of fadvise in the NM's shuffle handler.
 (Todd Lipcon and Brandon Li via sseth)
 
-HADOOP-8656 Backport forced daemon shutdown of HADOOP-8353 into branch-1
-(Roman Shaposhnik via stevel)
-
 MAPREDUCE-4511. Add IFile readahead (ahmed via tucu)
 
 MAPREDUCE-4565. Backport MR-2855 to branch-1: ResourceBundle lookup during
@@ -109,6 +99,8 @@ Release 1.2.0 - unreleased
 HDFS-3871. Change DFSClient to use RetryUtils.  (Arun C Murthy
 via szetszwo)
 
+HDFS-1108 Log newly allocated blocks (hdfs-1108-hadoop-1-v5.patch) 
(sanjay) 
+
   OPTIMIZATIONS
 
 HDFS-2533. Backport: Remove needless synchronization on some FSDataSet
@@ -184,9 +176,6 @@ Release 1.2.0 - unreleased
 HDFS-3727. When using SPNEGO, NN should not try to log in using KSSL
 principal. (atm)
 
-HDFS-3696. Set chunked streaming mode in WebHdfsFileSystem write operations
-to get around a Java library bug causing OutOfMemoryError.  (szetszwo)
-
 HADOOP-8613. AbstractDelegationTokenIdentifier#getUser() should set token
 auth type. (daryn)
 
@@ -243,6 +232,10 @@ Release 1.1.0 - unreleased
 
   INCOMPATIBLE CHANGES
 
+HDFS-2617. Replaced Kerberized SSL for image transfer and fsck with
+SPNEGO-based solution. (Jakob Homan, Owen O'Malley, Alejandro Abdelnur and
+Aaron T. Myers via atm)
+
 HDFS-3044. fsck move should be non-destructive by default.
 (Colin Patrick McCabe via eli)
 
@@ -272,8 +265,14 @@ Release 1.1.0 - unreleased
 HDFS-3518. Add a utility method DistributedFileSystem.isHealthy(uri) for
 checking if the given HDFS is healthy. (szetszwo)
 
+HADOOP-7823. Port HADOOP-4012 providing split support for bzip2 compressed
+files to branch-1. (Andrew Purtell via cdouglas)
+
   IMPROVEMENTS
 
+HADOOP-8656 Backport forced daemon shutdown of HADOOP-8353 into branch-1
+(Roman Shaposhnik via stevel)
+
 MAPREDUCE-3597. [Rumen] Provide a way to access other info of history file
 from Rumen tool. (ravigummadi)
 
@@ -356,8 +355,6 @@ Release 1.1.0 - unreleased
 
 HDFS-3516. Check content-type in WebHdfsFileSystem.  (szetszwo)
 
-HDFS-1108 Log newly allocated blocks (hdfs-1108-hadoop-1-v5.patch) 
(sanjay) 
-
 HADOOP-7753. Support fadvise and sync_file_range in NativeIO. Add
 ReadaheadPool infrastructure for use in HDFS and MR.
 (Brandon Li and todd via suresh)
@@ -379,6 +376,9 @@ Release 1.1.0 - unreleased
 
   BUG FIXES
 
+HDFS-3696. Set chunked streaming mode in WebHdfsFileSystem write operations
+to get around a Java library bug causing OutOfMemoryError.  (szetszwo)
+
 MAPREDUCE-4087. [Gridmix] GenerateDistCacheData job of Gridmix can
 become slow in some cases (ravigummadi)