Author: jitendra
Date: Mon Nov 28 21:41:05 2011
New Revision: 1207636

URL: http://svn.apache.org/viewvc?rev=1207636&view=rev
Log:
Merged r1207617 from branch-1 for HADOOP-7865.

Modified:
    hadoop/common/branches/branch-1.0/CHANGES.txt
    hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java
    hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java

Modified: hadoop/common/branches/branch-1.0/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/CHANGES.txt?rev=1207636&r1=1207635&r2=1207636&view=diff
==============================================================================
--- hadoop/common/branches/branch-1.0/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.0/CHANGES.txt Mon Nov 28 21:41:05 2011
@@ -139,6 +139,8 @@ Release 1.0.0 - 2011.11.27
 
     HADOOP-7854. UGI getCurrentUser is not synchronized. (Daryn Sharp via jitendra)
 
+    HADOOP-7865. Test Failures in 1.0 hdfs/common. (jitendra)
+
 Release 0.20.205.0 - 2011.10.06
 
   NEW FEATURES

Modified: hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/security/UserGroupInformation.java?rev=1207636&r1=1207635&r2=1207636&view=diff
==============================================================================
--- hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/branch-1.0/src/core/org/apache/hadoop/security/UserGroupInformation.java Mon Nov 28 21:41:05 2011
@@ -998,10 +998,11 @@ public class UserGroupInformation {
    */
   @Override
   public String toString() {
-    String me = (getRealUser() != null)
-      ? getUserName() + " via " +  getRealUser().toString()
-      : getUserName();
-    return me + " (auth:"+getAuthenticationMethod()+")";
+    if (getRealUser() != null) {
+      return getUserName() + " via " + getRealUser().toString();
+    } else {
+      return getUserName();
+    }
   }
 
   /**

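[Editor's note, not part of the commit: a minimal sketch of how the UserGroupInformation.toString() change in the hunk above shows up to callers. The demo class name is hypothetical, the example output strings are illustrative rather than taken from the patch, and a branch-1 Hadoop core jar is assumed on the classpath.]

    import org.apache.hadoop.security.UserGroupInformation;

    /**
     * Illustration only -- not part of this commit. Shows the effect of the
     * toString() change above; class name and printed values are assumptions.
     */
    public class UgiToStringDemo {
      public static void main(String[] args) {
        // Build a simple (non-proxy) UGI for a fixed user name.
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("alice");

        // Before this patch toString() appended the authentication method,
        // e.g. something like "alice (auth:SIMPLE)" depending on how the UGI
        // was created; after the patch it prints just "alice" (or
        // "alice via <realUser>" for proxy users).
        System.out.println(ugi.toString());
      }
    }
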
Modified: hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java?rev=1207636&r1=1207635&r2=1207636&view=diff
==============================================================================
--- hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java (original)
+++ hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java Mon Nov 28 21:41:05 2011
@@ -29,32 +29,17 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.log4j.Level;
-
 /**
  * This class tests the building blocks that are needed to
  * support HDFS appends.
  */
 public class TestFileAppend2 extends TestCase {
 
-  {
-    ((Log4JLogger)NameNode.stateChangeLog).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
-  }
-
   static final int blockSize = 1024;
   static final int numBlocks = 5;
   static final int fileSize = numBlocks * blockSize + 1;

Modified: hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java?rev=1207636&r1=1207635&r2=1207636&view=diff
==============================================================================
--- hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java (original)
+++ hadoop/common/branches/branch-1.0/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java Mon Nov 28 21:41:05 2011
@@ -17,7 +17,10 @@
  */
 package org.apache.hadoop.hdfs;
 
-import org.apache.commons.logging.impl.Log4JLogger;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.concurrent.atomic.AtomicBoolean;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.ChecksumException;
@@ -25,17 +28,10 @@ import org.apache.hadoop.fs.FSDataInputS
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.junit.Before;
 
-import java.io.IOException;
-import java.util.*;
-import java.util.concurrent.atomic.*;
-
 
 /**
  * This class tests the cases of a concurrent reads/writes to a file;
@@ -52,12 +48,6 @@ public class TestFileConcurrentReader ex
   
   private static final Logger LOG = 
     Logger.getLogger(TestFileConcurrentReader.class);
-  
-  {
-    ((Log4JLogger) LeaseManager.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
-  }
 
   static final long seed = 0xDEADBEEFL;
   static final int blockSize = 8192;
@@ -67,16 +57,6 @@ public class TestFileConcurrentReader ex
   private MiniDFSCluster cluster;
   private FileSystem fileSystem;
 
-  // creates a file but does not close it
-  private FSDataOutputStream createFile(FileSystem fileSys, Path name, int repl)
-    throws IOException {
-    System.out.println("createFile: Created " + name + " with " + repl + " replica.");
-    FSDataOutputStream stm = fileSys.create(name, true,
-      fileSys.getConf().getInt("io.file.buffer.size", 4096),
-      (short) repl, (long) blockSize);
-    return stm;
-  }
-  
   @Before
   protected void setUp() throws IOException {
     conf = new Configuration();

