svn commit: r1453711 - /hadoop/common/branches/branch-1/CHANGES.txt
Author: szetszwo Date: Thu Mar 7 06:44:10 2013 New Revision: 1453711 URL: http://svn.apache.org/r1453711 Log: Move HDFS-4252 to 1.2.0 in CHANGES.txt. Modified: hadoop/common/branches/branch-1/CHANGES.txt Modified: hadoop/common/branches/branch-1/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1453711&r1=1453710&r2=1453711&view=diff == --- hadoop/common/branches/branch-1/CHANGES.txt (original) +++ hadoop/common/branches/branch-1/CHANGES.txt Thu Mar 7 06:44:10 2013 @@ -179,7 +179,9 @@ Release 1.2.0 - unreleased HDFS-4519. Support overriding jsvc binary and log file locations when launching secure datanode. (Chris Nauroth via suresh) - + +HDFS-4551. When copying bytes in DatanodeWebHdfsMethods OPEN, use 4096 +buffer size to improve performance. (Mark Wagner via szetszwo) OPTIMIZATIONS @@ -526,7 +528,7 @@ Release 1.2.0 - unreleased HADOOP-9375. Port HADOOP-7290 to branch-1 to fix TestUserGroupInformation failure. (Xiaobo Peng via suresh) -Release 1.1.2 - Unreleased +Release 1.1.2 - 2013.01.30 INCOMPATIBLE CHANGES @@ -540,9 +542,6 @@ Release 1.1.2 - Unreleased HDFS-4252. Improve confusing log message that prints exception when editlog read is completed. (Jing Zhao via suresh) -HDFS-4551. When copying bytes in DatanodeWebHdfsMethods OPEN, use 4096 buffer size -to improve performance. (Mark Wagner via szetszwo) - BUG FIXES MAPREDUCE-4798. Updated TestJobHistoryServer test case for startup
svn commit: r1453710 - in /hadoop/common/branches/branch-1.1: ./ CHANGES.txt src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
Author: szetszwo Date: Thu Mar 7 06:40:20 2013 New Revision: 1453710 URL: http://svn.apache.org/r1453710 Log: svn merge -c -1453709 for reverting HDFS-4551. Modified: hadoop/common/branches/branch-1.1/ (props changed) hadoop/common/branches/branch-1.1/CHANGES.txt (contents, props changed) hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Propchange: hadoop/common/branches/branch-1.1/ -- Reverse-merged /hadoop/common/branches/branch-1:r1453706 Modified: hadoop/common/branches/branch-1.1/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/CHANGES.txt?rev=1453710&r1=1453709&r2=1453710&view=diff == --- hadoop/common/branches/branch-1.1/CHANGES.txt (original) +++ hadoop/common/branches/branch-1.1/CHANGES.txt Thu Mar 7 06:40:20 2013 @@ -20,9 +20,6 @@ Release 1.1.2 - 2013.01.30 HADOOP-8567. Port conf servlet to dump running configuration to branch 1.x. (Jing Zhao via suresh) -HDFS-4551. When copying bytes in DatanodeWebHdfsMethods OPEN, use 4096 buffer size -to improve performance. (Mark Wagner via szetszwo) - BUG FIXES HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. 
(Yu Li via eyang) Propchange: hadoop/common/branches/branch-1.1/CHANGES.txt -- Reverse-merged /hadoop/common/branches/branch-1/CHANGES.txt:r1453706 Modified: hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java?rev=1453710&r1=1453709&r2=1453710&view=diff == --- hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java (original) +++ hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Thu Mar 7 06:40:20 2013 @@ -350,9 +350,9 @@ public class DatanodeWebHdfsMethods { DFSClient client = dfsclient; try { if (n == null) { - IOUtils.copyBytes(dfsin, out, 4096); + IOUtils.copyBytes(dfsin, out, b); } else { - IOUtils.copyBytes(dfsin, out, n, 4096, false); + IOUtils.copyBytes(dfsin, out, n, b, false); } dfsin.close(); dfsin = null;
svn commit: r1453709 - in /hadoop/common/branches/branch-1.1: ./ CHANGES.txt src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
Author: szetszwo Date: Thu Mar 7 06:36:49 2013 New Revision: 1453709 URL: http://svn.apache.org/r1453709 Log: svn merge -c 1453706 from branch-1 for HDFS-4551. When copying bytes in DatanodeWebHdfsMethods OPEN, use 4096 buffer size to improve performance. Modified: hadoop/common/branches/branch-1.1/ (props changed) hadoop/common/branches/branch-1.1/CHANGES.txt (contents, props changed) hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Propchange: hadoop/common/branches/branch-1.1/ -- Merged /hadoop/common/branches/branch-1:r1453706 Modified: hadoop/common/branches/branch-1.1/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/CHANGES.txt?rev=1453709&r1=1453708&r2=1453709&view=diff == --- hadoop/common/branches/branch-1.1/CHANGES.txt (original) +++ hadoop/common/branches/branch-1.1/CHANGES.txt Thu Mar 7 06:36:49 2013 @@ -20,6 +20,9 @@ Release 1.1.2 - 2013.01.30 HADOOP-8567. Port conf servlet to dump running configuration to branch 1.x. (Jing Zhao via suresh) +HDFS-4551. When copying bytes in DatanodeWebHdfsMethods OPEN, use 4096 buffer size +to improve performance. (Mark Wagner via szetszwo) + BUG FIXES HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. 
(Yu Li via eyang) Propchange: hadoop/common/branches/branch-1.1/CHANGES.txt -- Merged /hadoop/common/branches/branch-1/CHANGES.txt:r1453706 Modified: hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java?rev=1453709&r1=1453708&r2=1453709&view=diff == --- hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java (original) +++ hadoop/common/branches/branch-1.1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Thu Mar 7 06:36:49 2013 @@ -350,9 +350,9 @@ public class DatanodeWebHdfsMethods { DFSClient client = dfsclient; try { if (n == null) { - IOUtils.copyBytes(dfsin, out, b); + IOUtils.copyBytes(dfsin, out, 4096); } else { - IOUtils.copyBytes(dfsin, out, n, b, false); + IOUtils.copyBytes(dfsin, out, n, 4096, false); } dfsin.close(); dfsin = null;
svn commit: r1453706 - in /hadoop/common/branches/branch-1: CHANGES.txt src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
Author: szetszwo Date: Thu Mar 7 06:33:31 2013 New Revision: 1453706 URL: http://svn.apache.org/r1453706 Log: HDFS-4551. When copying bytes in DatanodeWebHdfsMethods OPEN, use 4096 buffer size to improve performance. Contributed by Mark Wagner Modified: hadoop/common/branches/branch-1/CHANGES.txt hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Modified: hadoop/common/branches/branch-1/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1453706&r1=1453705&r2=1453706&view=diff == --- hadoop/common/branches/branch-1/CHANGES.txt (original) +++ hadoop/common/branches/branch-1/CHANGES.txt Thu Mar 7 06:33:31 2013 @@ -540,6 +540,9 @@ Release 1.1.2 - Unreleased HDFS-4252. Improve confusing log message that prints exception when editlog read is completed. (Jing Zhao via suresh) +HDFS-4551. When copying bytes in DatanodeWebHdfsMethods OPEN, use 4096 buffer size +to improve performance. (Mark Wagner via szetszwo) + BUG FIXES MAPREDUCE-4798. 
Updated TestJobHistoryServer test case for startup Modified: hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java?rev=1453706&r1=1453705&r2=1453706&view=diff == --- hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java (original) +++ hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Thu Mar 7 06:33:31 2013 @@ -350,9 +350,9 @@ public class DatanodeWebHdfsMethods { DFSClient client = dfsclient; try { if (n == null) { - IOUtils.copyBytes(dfsin, out, b); + IOUtils.copyBytes(dfsin, out, 4096); } else { - IOUtils.copyBytes(dfsin, out, n, b, false); + IOUtils.copyBytes(dfsin, out, n, 4096, false); } dfsin.close(); dfsin = null;
svn commit: r1453676 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/test/java/org/apache/hadoop/ha/TestHAZKUtil.java
Author: suresh Date: Thu Mar 7 03:42:18 2013 New Revision: 1453676 URL: http://svn.apache.org/r1453676 Log: HADOOP-9365. TestHAZKUtil fails on Windows. Contributed by Ivan Mitic. Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAZKUtil.java Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1453676&r1=1453675&r2=1453676&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Mar 7 03:42:18 2013 @@ -459,6 +459,8 @@ Trunk (Unreleased) HADOOP-9376. TestProxyUserFromEnv fails on a Windows domain joined machine. (Ivan Mitic via suresh) +HADOOP-9365. TestHAZKUtil fails on Windows. (Ivan Mitic via suresh) + Release 2.0.4-beta - UNRELEASED INCOMPATIBLE CHANGES Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAZKUtil.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAZKUtil.java?rev=1453676&r1=1453675&r2=1453676&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAZKUtil.java (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAZKUtil.java Thu Mar 7 03:42:18 2013 @@ -40,7 +40,8 @@ public class TestHAZKUtil { "test-file"); /** A path which is expected not to exist */ - private static final String BOGUS_FILE = "/-this-does-not-exist"; + private static final String BOGUS_FILE = + new File("/-this-does-not-exist").getPath(); @Test public void testEmptyACL() {
svn commit: r1453675 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java
Author: suresh Date: Thu Mar 7 03:34:44 2013 New Revision: 1453675 URL: http://svn.apache.org/r1453675 Log: HADOOP-9376. TestProxyUserFromEnv fails on a Windows domain joined machine. Contributed by Ivan Mitic. Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1453675&r1=1453674&r2=1453675&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Mar 7 03:34:44 2013 @@ -456,6 +456,9 @@ Trunk (Unreleased) HADOOP-9372. Fix bad timeout annotations on tests. (Arpit Agarwal via suresh) +HADOOP-9376. TestProxyUserFromEnv fails on a Windows domain joined machine. 
+(Ivan Mitic via suresh) + Release 2.0.4-beta - UNRELEASED INCOMPATIBLE CHANGES Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java?rev=1453675&r1=1453674&r2=1453675&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestProxyUserFromEnv.java Thu Mar 7 03:34:44 2013 @@ -42,6 +42,15 @@ public class TestProxyUserFromEnv { BufferedReader br = new BufferedReader (new InputStreamReader(pp.getInputStream())); String realUser = br.readLine().trim(); + +// On Windows domain joined machine, whoami returns the username +// in the DOMAIN\\username format, so we trim the domain part before +// the comparison. We don't have to special case for Windows +// given that Unix systems do not allow slashes in usernames. +int backslashIndex = realUser.indexOf('\\'); +if (backslashIndex != -1) { + realUser = realUser.substring(backslashIndex + 1); +} assertEquals(realUser, realUgi.getUserName()); } }
svn commit: r1453673 - in /hadoop/common/branches/branch-1: CHANGES.txt src/test/org/apache/hadoop/security/TestUserGroupInformation.java
Author: suresh Date: Thu Mar 7 03:13:37 2013 New Revision: 1453673 URL: http://svn.apache.org/r1453673 Log: HADOOP-9375. Port HADOOP-7290 to branch-1 to fix TestUserGroupInformation failure. Contributed by Xiaobo Peng. Modified: hadoop/common/branches/branch-1/CHANGES.txt hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestUserGroupInformation.java Modified: hadoop/common/branches/branch-1/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1453673&r1=1453672&r2=1453673&view=diff == --- hadoop/common/branches/branch-1/CHANGES.txt (original) +++ hadoop/common/branches/branch-1/CHANGES.txt Thu Mar 7 03:13:37 2013 @@ -523,6 +523,9 @@ Release 1.2.0 - unreleased HDFS-4544. Error in deleting blocks should not do check disk, for all types of errors. (Arpit Agarwal via suresh) +HADOOP-9375. Port HADOOP-7290 to branch-1 to fix TestUserGroupInformation +failure. (Xiaobo Peng via suresh) + Release 1.1.2 - Unreleased INCOMPATIBLE CHANGES Modified: hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestUserGroupInformation.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestUserGroupInformation.java?rev=1453673&r1=1453672&r2=1453673&view=diff == --- hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestUserGroupInformation.java (original) +++ hadoop/common/branches/branch-1/src/test/org/apache/hadoop/security/TestUserGroupInformation.java Thu Mar 7 03:13:37 2013 @@ -28,9 +28,9 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.security.PrivilegedExceptionAction; -import java.util.ArrayList; import java.util.Collection; -import java.util.List; +import java.util.LinkedHashSet; +import java.util.Set; import javax.security.auth.login.AppConfigurationEntry; @@ -117,7 +117,7 @@ public class TestUserGroupInformation { String line = br.readLine(); System.out.println(userName + 
":" + line); -List groups = new ArrayList (); +Set groups = new LinkedHashSet (); for(String s: line.split("[\\s]")) { groups.add(s); } @@ -127,7 +127,7 @@ public class TestUserGroupInformation { String[] gi = login.getGroupNames(); assertEquals(groups.size(), gi.length); for(int i=0; i < gi.length; i++) { - assertEquals(groups.get(i), gi[i]); + assertTrue(groups.contains(gi[i])); } final UserGroupInformation fakeUser =
svn commit: r1453670 - in /hadoop/common/branches/branch-1: CHANGES.txt src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java
Author: suresh Date: Thu Mar 7 03:06:03 2013 New Revision: 1453670 URL: http://svn.apache.org/r1453670 Log: HDFS-4544. Error in deleting blocks should not do check disk, for all types of errors. Contributed by Arpit Agarwal. Modified: hadoop/common/branches/branch-1/CHANGES.txt hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java Modified: hadoop/common/branches/branch-1/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1453670&r1=1453669&r2=1453670&view=diff == --- hadoop/common/branches/branch-1/CHANGES.txt (original) +++ hadoop/common/branches/branch-1/CHANGES.txt Thu Mar 7 03:06:03 2013 @@ -520,6 +520,9 @@ Release 1.2.0 - unreleased MAPREDUCE-5028. Maps fail when io.sort.mb is set to high value. (kkambatl via tucu) +HDFS-4544. Error in deleting blocks should not do check disk, for +all types of errors. (Arpit Agarwal via suresh) + Release 1.1.2 - Unreleased INCOMPATIBLE CHANGES Modified: hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1453670&r1=1453669&r2=1453670&view=diff == --- hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original) +++ hadoop/common/branches/branch-1/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java Thu Mar 7 03:06:03 2013 @@ -1227,7 +1227,7 @@ public class DataNode extends Configured } data.invalidate(toDelete); } catch(IOException e) { -checkDiskError(); +// Exceptions caught here are not expected to be disk-related. throw e; } myMetrics.incrBlocksRemoved(toDelete.length);
svn commit: r1453669 [5/5] - in /hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common: ./ src/ src/main/bin/ src/main/conf/ src/main/docs/ src/main/docs/src/documentation/content/xdocs
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1453669&r1=1453668&r2=1453669&view=diff == --- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original) +++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Thu Mar 7 02:57:40 2013 @@ -21,6 +21,8 @@ import java.io.File; import java.io.FileDescriptor; import java.io.FileInputStream; import java.io.FileOutputStream; +import java.io.FileReader; +import java.io.FileWriter; import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; import java.util.ArrayList; @@ -60,11 +62,15 @@ public class TestNativeIO { TEST_DIR.mkdirs(); } - @Test + @Test (timeout = 3) public void testFstat() throws Exception { +if (Path.WINDOWS) { + return; +} + FileOutputStream fos = new FileOutputStream( new File(TEST_DIR, "testfstat")); -NativeIO.Stat stat = NativeIO.getFstat(fos.getFD()); +NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD()); fos.close(); LOG.info("Stat: " + String.valueOf(stat)); @@ -72,7 +78,8 @@ public class TestNativeIO { assertNotNull(stat.getGroup()); assertTrue(!stat.getGroup().isEmpty()); assertEquals("Stat mode field should indicate a regular file", - NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT); + NativeIO.POSIX.Stat.S_IFREG, + stat.getMode() & NativeIO.POSIX.Stat.S_IFMT); } /** @@ -81,8 +88,12 @@ public class TestNativeIO { * NOTE: this test is likely to fail on RHEL 6.0 which has a non-threadsafe * implementation of getpwuid_r. 
*/ - @Test + @Test (timeout = 3) public void testMultiThreadedFstat() throws Exception { +if (Path.WINDOWS) { + return; +} + final FileOutputStream fos = new FileOutputStream( new File(TEST_DIR, "testfstat")); @@ -96,12 +107,13 @@ public class TestNativeIO { long et = Time.now() + 5000; while (Time.now() < et) { try { - NativeIO.Stat stat = NativeIO.getFstat(fos.getFD()); + NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD()); assertEquals(System.getProperty("user.name"), stat.getOwner()); assertNotNull(stat.getGroup()); assertTrue(!stat.getGroup().isEmpty()); assertEquals("Stat mode field should indicate a regular file", -NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT); +NativeIO.POSIX.Stat.S_IFREG, +stat.getMode() & NativeIO.POSIX.Stat.S_IFMT); } catch (Throwable t) { thrown.set(t); } @@ -122,26 +134,123 @@ public class TestNativeIO { } } - @Test + @Test (timeout = 3) public void testFstatClosedFd() throws Exception { +if (Path.WINDOWS) { + return; +} + FileOutputStream fos = new FileOutputStream( new File(TEST_DIR, "testfstat2")); fos.close(); try { - NativeIO.Stat stat = NativeIO.getFstat(fos.getFD()); + NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD()); } catch (NativeIOException nioe) { LOG.info("Got expected exception", nioe); assertEquals(Errno.EBADF, nioe.getErrno()); } } - @Test + @Test (timeout = 3) + public void testSetFilePointer() throws Exception { +if (!Path.WINDOWS) { + return; +} + +LOG.info("Set a file pointer on Windows"); +try { + File testfile = new File(TEST_DIR, "testSetFilePointer"); + assertTrue("Create test subject", + testfile.exists() || testfile.createNewFile()); + FileWriter writer = new FileWriter(testfile); + try { +for (int i = 0; i < 200; i++) + if (i < 100) +writer.write('a'); + else +writer.write('b'); +writer.flush(); + } catch (Exception writerException) { +fail("Got unexpected exception: " + writerException.getMessage()); + } finally { +writer.close(); + } + + FileDescriptor fd = 
NativeIO.Windows.createFile( + testfile.getCanonicalPath(), + NativeIO.Windows.GENERIC_READ, + NativeIO.Windows.FILE_SHARE_READ | + NativeIO.Windows.FILE_SHARE_WRITE | + NativeIO.Windows.FILE_SHARE_DELETE, + NativeIO.Windows.OPEN_EXISTING); + NativeIO.Windows.setFilePointer(fd, 120, NativeIO.Windows.FILE_BEGIN); + FileReader reader = new FileReader(fd); + try { +
svn commit: r1453669 - in /hadoop/common/branches/HDFS-2802: ./ hadoop-assemblies/src/main/resources/assemblies/ hadoop-dist/ hadoop-project-dist/ hadoop-project/ hadoop-tools/hadoop-distcp/src/test/j
Author: szetszwo Date: Thu Mar 7 02:57:40 2013 New Revision: 1453669 URL: http://svn.apache.org/r1453669 Log: Merge r1449958 through r1453659 from trunk. Modified: hadoop/common/branches/HDFS-2802/ (props changed) hadoop/common/branches/HDFS-2802/BUILDING.txt hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml hadoop/common/branches/HDFS-2802/hadoop-project-dist/pom.xml hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java Propchange: hadoop/common/branches/HDFS-2802/ -- Merged /hadoop/common/trunk:r1449958-1453659 Modified: hadoop/common/branches/HDFS-2802/BUILDING.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/BUILDING.txt?rev=1453669&r1=1453668&r2=1453669&view=diff == --- hadoop/common/branches/HDFS-2802/BUILDING.txt (original) +++ hadoop/common/branches/HDFS-2802/BUILDING.txt Thu Mar 7 02:57:40 2013 @@ -138,3 +138,70 @@ Create a local staging version of the we $ mvn clean site; mvn site:stage -DstagingDirectory=/tmp/hadoop-site -- + +Building on Windows + +-- +Requirements: + +* Windows System +* JDK 1.6 +* Maven 3.0 +* Findbugs 1.3.9 (if running findbugs) +* ProtocolBuffer 2.4.1+ (for MapReduce and HDFS) +* Unix command-line tools from GnuWin32 or Cygwin: sh, mkdir, rm, cp, tar, gzip +* Windows SDK or Visual Studio 2010 Professional +* 
Internet connection for first build (to fetch all Maven and Hadoop dependencies) + +If using Visual Studio, it must be Visual Studio 2010 Professional (not 2012). +Do not use Visual Studio Express. It does not support compiling for 64-bit, +which is problematic if running a 64-bit system. The Windows SDK is free to +download here: + +http://www.microsoft.com/en-us/download/details.aspx?id=8279 + +-- +Building: + +Keep the source code tree in a short path to avoid running into problems related +to Windows maximum path length limitation. (For example, C:\hdc). + +Run builds from a Windows SDK Command Prompt. (Start, All Programs, +Microsoft Windows SDK v7.1, Windows SDK 7.1 Command Prompt.) + +JAVA_HOME must be set, and the path must not contain spaces. If the full path +would contain spaces, then use the Windows short path instead. + +You must set the Platform environment variable to either x64 or Win32 depending +on whether you're running a 64-bit or 32-bit system. Note that this is +case-sensitive. It must be "Platform", not "PLATFORM" or "platform". +Environment variables on Windows are usually case-insensitive, but Maven treats +them as case-sensitive. Failure to set this environment variable correctly will +cause msbuild to fail while building the native code in hadoop-common. + +set Platform=x64 (when building on a 64-bit system) +set Platform=Win32 (when building on a 32-bit system) + +Several tests require that the user must have the Create Symbolic Links +privilege. + +All Maven goals are the same as described above, with the addition of profile +-Pnative-win to trigger building Windows native components. The native +components are required (not optional) on Windows. 
For example: + + * Run tests : mvn -Pnative-win test + +-- +Building distributions: + +Create binary distribution with native code and with documentation: + + $ mvn package -Pdist,native-win,docs -DskipTests -Dtar + +Create source distribution: + + $ mvn package -Pnative-win,src -DskipTests + +Create source and binary distributions with native code and documentation: + + $ mvn package -Pdist,native-win,docs,src -DskipTests -Dtar Modified: hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1453669&r1=1453668&r2=1453669&view=diff =
svn commit: r1453637 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/test/java/org/apache/hadoop/util/TestWinUtils.java
Author: suresh Date: Thu Mar 7 01:09:53 2013 New Revision: 1453637 URL: http://svn.apache.org/r1453637 Log: HADOOP-9372. Fix bad timeout annotations on tests. Contributed by Arpit Agarwal. Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1453637&r1=1453636&r2=1453637&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Mar 7 01:09:53 2013 @@ -453,6 +453,9 @@ Trunk (Unreleased) HADOOP-9373. Merge CHANGES.branch-trunk-win.txt to CHANGES.txt. (suresh) +HADOOP-9372. Fix bad timeout annotations on tests. +(Arpit Agarwal via suresh) + Release 2.0.4-beta - UNRELEASED INCOMPATIBLE CHANGES Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java?rev=1453637&r1=1453636&r2=1453637&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java Thu Mar 7 01:09:53 2013 @@ -152,7 +152,6 @@ public class TestWinUtils { assertEquals(expected, output); } - @Test (timeout = 3) private void testChmodInternal(String mode, String expectedPerm) throws IOException { File a = new File(TEST_DIR, "file1"); @@ -171,7 +170,6 @@ public class TestWinUtils { assertFalse(a.exists()); } - @Test (timeout = 3) private void testNewFileChmodInternal(String expectedPerm) throws 
IOException { // Create a new directory File dir = new File(TEST_DIR, "dir1"); @@ -193,7 +191,6 @@ public class TestWinUtils { assertFalse(dir.exists()); } - @Test (timeout = 3) private void testChmodInternalR(String mode, String expectedPerm, String expectedPermx) throws IOException { // Setup test folder hierarchy
svn commit: r1453625 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/shell/Display.java src/test/resources/testConf.xml
Author: jeagles Date: Wed Mar 6 23:58:56 2013 New Revision: 1453625 URL: http://svn.apache.org/r1453625 Log: HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via jeagles) Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1453625&r1=1453624&r2=1453625&view=diff == --- hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt Wed Mar 6 23:58:56 2013 @@ -28,6 +28,9 @@ Release 0.23.7 - UNRELEASED HADOOP-9352. Expose UGI.setLoginUser for tests (daryn) +HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via +jeagles) + OPTIMIZATIONS HADOOP-9147. Add missing fields to FIleStatus.toString. Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1453625&r1=1453624&r2=1453625&view=diff == --- hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java (original) +++ hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java Wed Mar 6 23:58:56 2013 @@ -26,6 +26,7 @@ import org.apache.hadoop.classification. 
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException; @@ -38,9 +39,10 @@ import org.apache.hadoop.io.WritableComp import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionCodecFactory; import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.util.StringUtils; /** - * Display contents of files + * Display contents or checksums of files */ @InterfaceAudience.Private @InterfaceStability.Evolving @@ -49,6 +51,7 @@ class Display extends FsCommand { public static void registerCommands(CommandFactory factory) { factory.addClass(Cat.class, "-cat"); factory.addClass(Text.class, "-text"); +factory.addClass(Checksum.class, "-checksum"); } /** @@ -133,6 +136,36 @@ class Display extends FsCommand { return i; } } + + public static class Checksum extends Display { +public static final String NAME = "checksum"; +public static final String USAGE = " ..."; +public static final String DESCRIPTION = + "Dump checksum information for files that match the file\n" + + "pattern to stdout. Note that this requires a round-trip\n" + + "to a datanode storing each block of the file, and thus is not\n" + + "efficient to run on a large number of files. 
The checksum of a\n" + + "file depends on its content, block size and the checksum\n" + + "algorithm and parameters used for creating the file."; + +@Override +protected void processPath(PathData item) throws IOException { + if (item.stat.isDirectory()) { +throw new PathIsDirectoryException(item.toString()); + } + + FileChecksum checksum = item.fs.getFileChecksum(item.path); + if (checksum == null) { +out.printf("%s\tNONE\t\n", item.toString()); + } else { +String checksumString = StringUtils.byteToHexString( +checksum.getBytes(), 0, checksum.getLength()); +out.printf("%s\t%s\t%s\n", +item.toString(), checksum.getAlgorithmName(), +checksumString); + } +} + } protected class TextRecordInputStream extends InputStream { SequenceFile.Reader r; Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml?rev=1453625&r1=1453624&r2=1453625&view=diff == --- hadoop/comm
svn commit: r1453620 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/shell/Display.java src/test/resources/testConf.xml
Author: jeagles Date: Wed Mar 6 23:33:09 2013 New Revision: 1453620 URL: http://svn.apache.org/r1453620 Log: HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via jeagles) Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1453620&r1=1453619&r2=1453620&view=diff == --- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt Wed Mar 6 23:33:09 2013 @@ -1072,6 +1072,9 @@ Release 0.23.7 - UNRELEASED HADOOP-9352. Expose UGI.setLoginUser for tests (daryn) +HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via +jeagles) + OPTIMIZATIONS HADOOP-8462. Native-code implementation of bzip2 codec. (Govind Kamat via Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1453620&r1=1453619&r2=1453620&view=diff == --- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java (original) +++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java Wed Mar 6 23:33:09 2013 @@ -36,6 +36,7 @@ import org.apache.hadoop.classification. 
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathIsDirectoryException; @@ -48,13 +49,14 @@ import org.apache.hadoop.io.WritableComp import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionCodecFactory; import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.util.StringUtils; import org.codehaus.jackson.JsonEncoding; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonGenerator; import org.codehaus.jackson.util.MinimalPrettyPrinter; /** - * Display contents of files + * Display contents or checksums of files */ @InterfaceAudience.Private @InterfaceStability.Evolving @@ -63,6 +65,7 @@ class Display extends FsCommand { public static void registerCommands(CommandFactory factory) { factory.addClass(Cat.class, "-cat"); factory.addClass(Text.class, "-text"); +factory.addClass(Checksum.class, "-checksum"); } /** @@ -162,6 +165,36 @@ class Display extends FsCommand { return i; } } + + public static class Checksum extends Display { +public static final String NAME = "checksum"; +public static final String USAGE = " ..."; +public static final String DESCRIPTION = + "Dump checksum information for files that match the file\n" + + "pattern to stdout. Note that this requires a round-trip\n" + + "to a datanode storing each block of the file, and thus is not\n" + + "efficient to run on a large number of files. 
The checksum of a\n" + + "file depends on its content, block size and the checksum\n" + + "algorithm and parameters used for creating the file."; + +@Override +protected void processPath(PathData item) throws IOException { + if (item.stat.isDirectory()) { +throw new PathIsDirectoryException(item.toString()); + } + + FileChecksum checksum = item.fs.getFileChecksum(item.path); + if (checksum == null) { +out.printf("%s\tNONE\t\n", item.toString()); + } else { +String checksumString = StringUtils.byteToHexString( +checksum.getBytes(), 0, checksum.getLength()); +out.printf("%s\t%s\t%s\n", +item.toString(), checksum.getAlgorithmName(), +checksumString); + } +} + } protected class TextRecordInputStream extends InputStream { SequenceFile.Reader r; Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/resources/testConf
svn commit: r1453613 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/shell/Display.java src/test/resources/testConf.xml
Author: jeagles Date: Wed Mar 6 23:17:14 2013 New Revision: 1453613 URL: http://svn.apache.org/r1453613 Log: HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via jeagles) Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1453613&r1=1453612&r2=1453613&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Mar 6 23:17:14 2013 @@ -1506,6 +1506,9 @@ Release 0.23.7 - UNRELEASED HADOOP-9352. Expose UGI.setLoginUser for tests (daryn) +HADOOP-9209. Add shell command to dump file checksums (Todd Lipcon via +jeagles) + OPTIMIZATIONS HADOOP-8462. Native-code implementation of bzip2 codec. (Govind Kamat via Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1453613&r1=1453612&r2=1453613&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java Wed Mar 6 23:17:14 2013 @@ -36,6 +36,7 @@ import org.apache.hadoop.classification. 
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathIsDirectoryException; @@ -47,13 +48,14 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionCodecFactory; import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.util.StringUtils; import org.codehaus.jackson.JsonEncoding; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonGenerator; import org.codehaus.jackson.util.MinimalPrettyPrinter; /** - * Display contents of files + * Display contents or checksums of files */ @InterfaceAudience.Private @InterfaceStability.Evolving @@ -62,6 +64,7 @@ class Display extends FsCommand { public static void registerCommands(CommandFactory factory) { factory.addClass(Cat.class, "-cat"); factory.addClass(Text.class, "-text"); +factory.addClass(Checksum.class, "-checksum"); } /** @@ -161,6 +164,36 @@ class Display extends FsCommand { return i; } } + + public static class Checksum extends Display { +public static final String NAME = "checksum"; +public static final String USAGE = " ..."; +public static final String DESCRIPTION = + "Dump checksum information for files that match the file\n" + + "pattern to stdout. Note that this requires a round-trip\n" + + "to a datanode storing each block of the file, and thus is not\n" + + "efficient to run on a large number of files. 
The checksum of a\n" + + "file depends on its content, block size and the checksum\n" + + "algorithm and parameters used for creating the file."; + +@Override +protected void processPath(PathData item) throws IOException { + if (item.stat.isDirectory()) { +throw new PathIsDirectoryException(item.toString()); + } + + FileChecksum checksum = item.fs.getFileChecksum(item.path); + if (checksum == null) { +out.printf("%s\tNONE\t\n", item.toString()); + } else { +String checksumString = StringUtils.byteToHexString( +checksum.getBytes(), 0, checksum.getLength()); +out.printf("%s\t%s\t%s\n", +item.toString(), checksum.getAlgorithmName(), +checksumString); + } +} + } protected class TextRecordInputStream extends InputStream { SequenceFile.Reader r; Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml?rev=1453613&r1=1453612&r2=1453613&view=diff == --- hadoop/common/trunk/hadoop-com
svn commit: r1453610 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/ src/main/java/org/apache/hadoop/io/compress/ src/main/java/org/apache/hadoop/io/compress/bzip2/
Author: jlowe Date: Wed Mar 6 23:16:33 2013 New Revision: 1453610 URL: http://svn.apache.org/r1453610 Log: svn merge -c 1453608 FIXES: HADOOP-8462. Native-code implementation of bzip2 codec. Contributed by Govind Kamat Added: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java - copied unchanged from r1453608, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java - copied unchanged from r1453608, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java - copied unchanged from r1453608, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/ - copied from r1453608, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.c - copied unchanged from r1453608, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.c hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.c - copied unchanged from r1453608, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.c hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/org_apache_hadoop_io_compress_bzip2.h - copied unchanged from r1453608, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/bzip2/org_apache_hadoop_io_compress_bzip2.h Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/CMakeLists.txt hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/config.h.cmake hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1453610&r1=1453609&r2=1453610&view=diff == --- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt Wed Mar 6 23:16:33 2013 @@ -1074,6 +1074,9 @@ Release 0.23.7 - UNRELEASED OPTIMIZATIONS +HADOOP-8462. Native-code implementation of bzip2 codec. (Govind Kamat via +jlowe) + BUG FIXES HADOOP-9302. 
HDFS docs not linked from top level (Andy Isaacson via Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml?rev=1453610&r1=1453609&r2=1453610&view=diff == --- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml (original) +++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml Wed Mar 6 23:16:33 2013 @@ -452,6 +452,7 @@ false +false @@ -473,6 +474,8 @@ org.apache.hadoop.io.compress.zlib.ZlibCompressor org.apache.hadoop.io.compress.zlib.ZlibDecompressor + org.apache.hadoop.io.compress.bzip2.Bzip2Compressor + org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor org.apache.hadoop.security.JniBasedUnixGroupsMapping
svn commit: r1453599 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.branch-trunk-win.txt CHANGES.txt pom.xml
Author: suresh Date: Wed Mar 6 22:37:42 2013 New Revision: 1453599 URL: http://svn.apache.org/r1453599 Log: HADOOP-9373. Merge CHANGES.branch-trunk-win.txt to CHANGES.txt. Contributed by Suresh Srinivas. Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/trunk/hadoop-common-project/hadoop-common/pom.xml Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt?rev=1453599&r1=1453598&r2=1453599&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt Wed Mar 6 22:37:42 2013 @@ -1,111 +0,0 @@ -branch-trunk-win changes - unreleased - - HADOOP-8924. Hadoop Common creating package-info.java must not depend on sh. - (Chris Nauroth via suresh) - - HADOOP-8945. Merge winutils from branch-1-win to branch-trunk-win. - (Bikas Saha, Chuan Liu, Giridharan Kesavan, Ivan Mitic, and Steve Maine - ported by Chris Nauroth via suresh) - - HADOOP-8946. winutils: compile codebase during Maven build on - branch-trunk-win. (Chris Nauroth via suresh) - - HADOOP-8947. Merge FileUtil and Shell changes from branch-1-win to - branch-trunk-win to enable initial test pass. (Raja Aluri, Davio Lao, - Sumadhur Reddy Bolli, Ahmed El Baz, Kanna Karanam, Chuan Liu, - Ivan Mitic, Chris Nauroth, and Bikas Saha via suresh) - - HADOOP-8954. "stat" executable not found on Windows. (Bikas Saha, Ivan Mitic - ported by Chris Narouth via suresh) - - HADOOP-8959. TestUserGroupInformation fails on Windows due to "id" executable - not found. (Bikas Saha, Ivan Mitic, ported by Chris Narouth via suresh) - - HADOOP-8955. "chmod" executable not found on Windows. 
- (Chris Nauroth via suresh) - - HADOOP-8960. TestMetricsServlet fails on Windows. (Ivan Mitic via suresh) - - HADOOP-8961. GenericOptionsParser URI parsing failure on Windows. - (Ivan Mitic via suresh) - - HADOOP-8949. Remove FileUtil.CygPathCommand dead code. (Chris Nauroth via - suresh) - - HADOOP-8956. FileSystem.primitiveMkdir failures on Windows cause multiple - test suites to fail. (Chris Nauroth via suresh) - - HADOOP-8978. TestTrash fails on Windows. (Chris Nauroth via suresh) - - HADOOP-8979. TestHttpServer fails on Windows. (Chris Nauroth via suresh) - - HADOOP-8953. Shell PathData parsing failures on Windows. (Arpit Agarwal via - suresh) - - HADOOP-8975. TestFileContextResolveAfs fails on Windows. (Chris Nauroth via - suresh) - - HADOOP-8977. Multiple FsShell test failures on Windows. (Chris Nauroth via - suresh) - - HADOOP-9005. Merge hadoop cmd line scripts from branch-1-win. (David Lao, - Bikas Saha, Lauren Yang, Chuan Liu, Thejas M Nair and Ivan Mitic via suresh) - - HADOOP-9008. Building hadoop tarball fails on Windows. (Chris Nauroth via - suresh) - - HADOOP-9011. saveVersion.py does not include branch in version annotation. - (Chris Nauroth via suresh) - - HADOOP-9110. winutils ls off-by-one error indexing MONTHS array can cause - access violation. (Chris Nauroth via suresh) - - HADOOP-9056. Build native library on Windows. (Chuan Liu, Arpit Agarwal via - suresh) - - HADOOP-9144. Fix findbugs warnings. (Chris Nauroth via suresh) - - HADOOP-9081. Add TestWinUtils. (Chuan Liu, Ivan Mitic, Chris Nauroth, - and Bikas Saha via suresh) - - HADOOP-9146. Fix sticky bit regression on branch-trunk-win. - (Chris Nauroth via suresh) - - HADOOP-9266. Fix javac, findbugs, and release audit warnings on - branch-trunk-win. (Chris Nauroth via suresh) - - HADOOP-9270. Remove a stale java comment from FileUtil. (Chris Nauroth via - szetszwo) - - HADOOP-9271. Revert Python build scripts from branch-trunk-win. - (Chris Nauroth via suresh) - - HADOOP-9313. 
Remove spurious mkdir from hadoop-config.cmd. - (Ivan Mitic via suresh) - - HADOOP-9309. Test failures on Windows due to UnsatisfiedLinkError - in NativeCodeLoader#buildSupportsSnappy. (Arpit Agarwal via suresh) - - HADOOP-9347. Add instructions to BUILDING.txt describing how to - build on Windows. (Chris Nauroth via suresh) - - HADOOP-9348. Address TODO in winutils to add more command line usage - and examples. (Chris Nauroth via suresh) - - HADOOP-9354. Windows native project files missing license headers. - (Chris Nauroth via suresh) - - HADOOP-9356. Remove remaining references to cygwin/cygpath from scripts. - (Chris Nauroth via suresh) - - HADOOP-9232. JniBasedUnixGroupsMappingWithFallback fails on Windows - with UnsatisfiedLinkError. (Ivan Mitic via suresh) - - HADOOP-9368. Add timeouts to new tests in branch-trunk-win. - (Arpit Agarwal via suresh) -
svn commit: r1453502 - /hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
Author: hitesh Date: Wed Mar 6 19:29:12 2013 New Revision: 1453502 URL: http://svn.apache.org/r1453502 Log: merge -c 1453501 from trunk to branch-2 to fix YARN-429. capacity-scheduler config missing from yarn-test artifact. Contributed by Siddharth Seth. Modified: hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml Modified: hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml?rev=1453502&r1=1453501&r2=1453502&view=diff == --- hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml (original) +++ hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml Wed Mar 6 19:29:12 2013 @@ -121,7 +121,7 @@ tests -share/hadoop/${hadoop.component} + share/hadoop/${hadoop.component}/test false false
svn commit: r1453501 - /hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
Author: hitesh Date: Wed Mar 6 19:26:57 2013 New Revision: 1453501 URL: http://svn.apache.org/r1453501 Log: YARN-429. capacity-scheduler config missing from yarn-test artifact. Contributed by Siddharth Seth. Modified: hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml Modified: hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml?rev=1453501&r1=1453500&r2=1453501&view=diff == --- hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml (original) +++ hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml Wed Mar 6 19:26:57 2013 @@ -125,7 +125,7 @@ tests -share/hadoop/${hadoop.component} + share/hadoop/${hadoop.component}/test false false
svn commit: r1453486 [7/7] - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/bin/ src/main/conf/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/java/or
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java?rev=1453486&r1=1453485&r2=1453486&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java Wed Mar 6 19:15:18 2013 @@ -42,6 +42,7 @@ import org.apache.hadoop.security.token. import org.apache.hadoop.security.token.TokenIdentifier; import static org.apache.hadoop.test.MetricsAsserts.*; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.util.Shell; public class TestUserGroupInformation { final private static String USER_NAME = "us...@hadoop.apache.org"; @@ -90,17 +91,17 @@ public class TestUserGroupInformation { UserGroupInformation.setLoginUser(null); } - @Test + @Test (timeout = 3) public void testSimpleLogin() throws IOException { tryLoginAuthenticationMethod(AuthenticationMethod.SIMPLE, true); } - @Test + @Test (timeout = 3) public void testTokenLogin() throws IOException { tryLoginAuthenticationMethod(AuthenticationMethod.TOKEN, false); } - @Test + @Test (timeout = 3) public void testProxyLogin() throws IOException { tryLoginAuthenticationMethod(AuthenticationMethod.PROXY, false); } @@ -129,7 +130,7 @@ public class TestUserGroupInformation { } } - @Test + @Test (timeout = 3) public void testGetRealAuthenticationMethod() { UserGroupInformation ugi = UserGroupInformation.createRemoteUser("user1"); ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE); @@ -140,7 +141,7 @@ public class TestUserGroupInformation { assertEquals(AuthenticationMethod.SIMPLE, ugi.getRealAuthenticationMethod()); } /** Test login method 
*/ - @Test + @Test (timeout = 3) public void testLogin() throws Exception { // login from unix UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); @@ -167,7 +168,7 @@ public class TestUserGroupInformation { * given user name - get all the groups. * Needs to happen before creating the test users */ - @Test + @Test (timeout = 3) public void testGetServerSideGroups() throws IOException, InterruptedException { // get the user name @@ -175,19 +176,38 @@ public class TestUserGroupInformation { BufferedReader br = new BufferedReader (new InputStreamReader(pp.getInputStream())); String userName = br.readLine().trim(); +// If on windows domain, token format is DOMAIN\\user and we want to +// extract only the user name +if(Shell.WINDOWS) { + int sp = userName.lastIndexOf('\\'); + if (sp != -1) { +userName = userName.substring(sp + 1); + } + // user names are case insensitive on Windows. Make consistent + userName = userName.toLowerCase(); +} // get the groups -pp = Runtime.getRuntime().exec("id -Gn " + userName); +pp = Runtime.getRuntime().exec(Shell.WINDOWS ? + Shell.WINUTILS + " groups -F" : "id -Gn"); br = new BufferedReader(new InputStreamReader(pp.getInputStream())); String line = br.readLine(); + System.out.println(userName + ":" + line); Set groups = new LinkedHashSet (); -for(String s: line.split("[\\s]")) { +String[] tokens = line.split(Shell.TOKEN_SEPARATOR_REGEX); +for(String s: tokens) { groups.add(s); } final UserGroupInformation login = UserGroupInformation.getCurrentUser(); -assertEquals(userName, login.getShortUserName()); +String loginUserName = login.getShortUserName(); +if(Shell.WINDOWS) { + // user names are case insensitive on Windows. 
Make consistent + loginUserName = loginUserName.toLowerCase(); +} +assertEquals(userName, loginUserName); + String[] gi = login.getGroupNames(); assertEquals(groups.size(), gi.length); for(int i=0; i < gi.length; i++) { @@ -208,7 +228,7 @@ public class TestUserGroupInformation { } /** test constructor */ - @Test + @Test (timeout = 3) public void testConstructor() throws Exception { UserGroupInformation ugi = UserGroupInformation.createUserForTesting("user2/c...@hadoop.apache.org", @@ -234,7 +254,7 @@ public class TestUserGroupInformation { assertTrue(gotException); } - @Test + @Test (timeout = 3) public void testEquals() throws Exception { UserGroupInformation uugi = UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAME
svn commit: r1453486 [4/7] - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/bin/ src/main/conf/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/java/or
Added: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/chmod.c URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/chmod.c?rev=1453486&view=auto == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/chmod.c (added) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/chmod.c Wed Mar 6 19:15:18 2013 @@ -0,0 +1,893 @@ +/** +* Licensed to the Apache Software Foundation (ASF) under one or more +* contributor license agreements. See the NOTICE file distributed with this +* work for additional information regarding copyright ownership. The ASF +* licenses this file to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +* License for the specific language governing permissions and limitations under +* the License. +*/ + +#include "winutils.h" +#include + +enum CHMOD_WHO +{ + CHMOD_WHO_NONE =0, + CHMOD_WHO_OTHER = 07, + CHMOD_WHO_GROUP = 070, + CHMOD_WHO_USER = 0700, + CHMOD_WHO_ALL = CHMOD_WHO_OTHER | CHMOD_WHO_GROUP | CHMOD_WHO_USER +}; + +enum CHMOD_OP +{ + CHMOD_OP_INVALID, + CHMOD_OP_PLUS, + CHMOD_OP_MINUS, + CHMOD_OP_EQUAL, +}; + +enum CHMOD_PERM +{ + CHMOD_PERM_NA = 00, + CHMOD_PERM_R = 01, + CHMOD_PERM_W = 02, + CHMOD_PERM_X = 04, + CHMOD_PERM_LX = 010, +}; + +/* + * We use the following struct to build a linked list of mode change actions. + * The mode is described by the following grammar: + * mode ::= clause [, clause ...] + * clause ::= [who ...] [action ...] + * action ::= op [perm ...] 
| op [ref] + * who ::= a | u | g | o + * op ::= + | - | = + * perm ::= r | w | x | X + * ref ::= u | g | o + */ +typedef struct _MODE_CHANGE_ACTION +{ + USHORT who; + USHORT op; + USHORT perm; + USHORT ref; + struct _MODE_CHANGE_ACTION *next_action; +} MODE_CHANGE_ACTION, *PMODE_CHANGE_ACTION; + +const MODE_CHANGE_ACTION INIT_MODE_CHANGE_ACTION = { + CHMOD_WHO_NONE, CHMOD_OP_INVALID, CHMOD_PERM_NA, CHMOD_WHO_NONE, NULL +}; + +static BOOL ParseOctalMode(LPCWSTR tsMask, INT *uMask); + +static BOOL ParseMode(LPCWSTR modeString, PMODE_CHANGE_ACTION *actions); + +static BOOL FreeActions(PMODE_CHANGE_ACTION actions); + +static BOOL ParseCommandLineArguments(__in int argc, __in wchar_t *argv[], + __out BOOL *rec, __out_opt INT *mask, + __out_opt PMODE_CHANGE_ACTION *actions, __out LPCWSTR *path); + +static BOOL ChangeFileModeByActions(__in LPCWSTR path, + PMODE_CHANGE_ACTION actions); + +static BOOL ChangeFileMode(__in LPCWSTR path, __in_opt INT mode, + __in_opt PMODE_CHANGE_ACTION actions); + +static BOOL ChangeFileModeRecursively(__in LPCWSTR path, __in_opt INT mode, + __in_opt PMODE_CHANGE_ACTION actions); + + +// +// Function: Chmod +// +// Description: +// The main method for chmod command +// +// Returns: +// 0: on success +// +// Notes: +// +int Chmod(int argc, wchar_t *argv[]) +{ + LPWSTR pathName = NULL; + LPWSTR longPathName = NULL; + + BOOL recursive = FALSE; + + PMODE_CHANGE_ACTION actions = NULL; + + INT unixAccessMask = 0; + + DWORD dwRtnCode = 0; + + int ret = EXIT_FAILURE; + + // Parsing chmod arguments + // + if (!ParseCommandLineArguments(argc, argv, +&recursive, &unixAccessMask, &actions, &pathName)) + { +fwprintf(stderr, L"Incorrect command line arguments.\n\n"); +ChmodUsage(argv[0]); +return EXIT_FAILURE; + } + + // Convert the path the the long path + // + dwRtnCode = ConvertToLongPath(pathName, &longPathName); + if (dwRtnCode != ERROR_SUCCESS) + { +ReportErrorCode(L"ConvertToLongPath", dwRtnCode); +goto ChmodEnd; + } + + if (!recursive) + { +if 
(ChangeFileMode(longPathName, unixAccessMask, actions)) +{ + ret = EXIT_SUCCESS; +} + } + else + { +if (ChangeFileModeRecursively(longPathName, unixAccessMask, actions)) +{ + ret = EXIT_SUCCESS; +} + } + +ChmodEnd: + FreeActions(actions); + LocalFree(longPathName); + + return ret; +} + +// +// Function: ChangeFileMode +// +// Description: +// Wrapper function for change file mode. Choose either change by action or by +// access mask. +// +// Returns: +// TRUE: on success +// FALSE: otherwise +// +// Notes: +// +static BOOL ChangeFileMode(__in LPCWSTR path, __in_opt INT unixAccessMask, + __in_opt PMODE_CHANGE_ACTION a
svn commit: r1453486 - in /hadoop/common/trunk: ./ hadoop-assemblies/src/main/resources/assemblies/ hadoop-dist/ hadoop-project-dist/ hadoop-project/ hadoop-tools/hadoop-streaming/src/test/java/org/ap
Author: suresh Date: Wed Mar 6 19:15:18 2013 New Revision: 1453486 URL: http://svn.apache.org/r1453486 Log: HADOOP-8952. Enhancements to support Hadoop on Windows Server and Windows Azure environments. Contributed by Ivan Mitic, Chuan Liu, Ramya Sunil, Bikas Saha, Kanna Karanam, John Gordon, Brandon Li, Chris Nauroth, David Lao, Sumadhur Reddy Bolli, Arpit Agarwal, Ahmed El Baz, Mike Liddell, Jing Zhao, Thejas Nair, Steve Maine, Ganeshan Iyer, Raja Aluri, Giridharan Kesavan, Ramya Bharathi Nimmagadda. Modified: hadoop/common/trunk/BUILDING.txt hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml hadoop/common/trunk/hadoop-dist/pom.xml hadoop/common/trunk/hadoop-project-dist/pom.xml hadoop/common/trunk/hadoop-project/pom.xml hadoop/common/trunk/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java hadoop/common/trunk/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java Modified: hadoop/common/trunk/BUILDING.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/BUILDING.txt?rev=1453486&r1=1453485&r2=1453486&view=diff == --- hadoop/common/trunk/BUILDING.txt (original) +++ hadoop/common/trunk/BUILDING.txt Wed Mar 6 19:15:18 2013 @@ -138,3 +138,70 @@ Create a local staging version of the we $ mvn clean site; mvn site:stage -DstagingDirectory=/tmp/hadoop-site -- + +Building on Windows + +-- +Requirements: + +* Windows System +* JDK 1.6 +* Maven 3.0 +* Findbugs 1.3.9 (if running findbugs) +* ProtocolBuffer 2.4.1+ (for MapReduce and HDFS) +* Unix command-line tools from GnuWin32 or Cygwin: sh, mkdir, rm, cp, tar, gzip +* Windows SDK or Visual Studio 2010 Professional +* Internet connection for first build (to fetch all Maven and Hadoop dependencies) + +If using Visual Studio, it must be Visual Studio 2010 Professional (not 2012). +Do not use Visual Studio Express. 
It does not support compiling for 64-bit, +which is problematic if running a 64-bit system. The Windows SDK is free to +download here: + +http://www.microsoft.com/en-us/download/details.aspx?id=8279 + +-- +Building: + +Keep the source code tree in a short path to avoid running into problems related +to Windows maximum path length limitation. (For example, C:\hdc). + +Run builds from a Windows SDK Command Prompt. (Start, All Programs, +Microsoft Windows SDK v7.1, Windows SDK 7.1 Command Prompt.) + +JAVA_HOME must be set, and the path must not contain spaces. If the full path +would contain spaces, then use the Windows short path instead. + +You must set the Platform environment variable to either x64 or Win32 depending +on whether you're running a 64-bit or 32-bit system. Note that this is +case-sensitive. It must be "Platform", not "PLATFORM" or "platform". +Environment variables on Windows are usually case-insensitive, but Maven treats +them as case-sensitive. Failure to set this environment variable correctly will +cause msbuild to fail while building the native code in hadoop-common. + +set Platform=x64 (when building on a 64-bit system) +set Platform=Win32 (when building on a 32-bit system) + +Several tests require that the user must have the Create Symbolic Links +privilege. + +All Maven goals are the same as described above, with the addition of profile +-Pnative-win to trigger building Windows native components. The native +components are required (not optional) on Windows. 
For example: + + * Run tests : mvn -Pnative-win test + +-- +Building distributions: + +Create binary distribution with native code and with documentation: + + $ mvn package -Pdist,native-win,docs -DskipTests -Dtar + +Create source distribution: + + $ mvn package -Pnative-win,src -DskipTests + +Create source and binary distributions with native code and documentation: + + $ mvn package -Pdist,native-win,docs,src -DskipTests -Dtar Modified: hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1453486&r1=1453485&r2=1453486&view=diff == --- hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml (original) +++ hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml Wed Mar 6 19:15:18 2013 @@ -26,6 +26,9 @@ /bin
svn commit: r1453456 - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: CHANGES.txt src/main/docs/ src/main/java/ src/test/core/
Author: suresh Date: Wed Mar 6 17:51:32 2013 New Revision: 1453456 URL: http://svn.apache.org/r1453456 Log: Merging trunk to branch-trunk-win Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt (props changed) hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/ (props changed) hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/ (props changed) hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/ (props changed) Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt -- Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1453120-1453453 Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/ -- Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1453120-1453453 Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/ -- Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1453120-1453453 Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/ -- Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1453120-1453453
svn commit: r1453456 - /hadoop/common/branches/branch-trunk-win/
Author: suresh Date: Wed Mar 6 17:51:32 2013 New Revision: 1453456 URL: http://svn.apache.org/r1453456 Log: Merging trunk to branch-trunk-win Modified: hadoop/common/branches/branch-trunk-win/ (props changed) Propchange: hadoop/common/branches/branch-trunk-win/ -- Merged /hadoop/common/trunk:r1453120-1453453
svn commit: r1453410 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/ publish/
Author: jlowe Date: Wed Mar 6 16:37:32 2013 New Revision: 1453410 URL: http://svn.apache.org/r1453410 Log: Add jlowe to PMC list Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml hadoop/common/site/main/publish/bylaws.pdf hadoop/common/site/main/publish/index.pdf hadoop/common/site/main/publish/issue_tracking.pdf hadoop/common/site/main/publish/linkmap.pdf hadoop/common/site/main/publish/mailing_lists.pdf hadoop/common/site/main/publish/privacy_policy.pdf hadoop/common/site/main/publish/releases.pdf hadoop/common/site/main/publish/version_control.pdf hadoop/common/site/main/publish/who.html hadoop/common/site/main/publish/who.pdf Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml URL: http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1453410&r1=1453409&r2=1453410&view=diff == --- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml (original) +++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Wed Mar 6 16:37:32 2013 @@ -159,6 +159,14 @@ -8 + + jlowe + Jason Lowe + Yahoo! + + -6 + + mahadev Mahadev Konar Modified: hadoop/common/site/main/publish/bylaws.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/bylaws.pdf?rev=1453410&r1=1453409&r2=1453410&view=diff == Binary files - no diff available. Modified: hadoop/common/site/main/publish/index.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/index.pdf?rev=1453410&r1=1453409&r2=1453410&view=diff == Binary files - no diff available. Modified: hadoop/common/site/main/publish/issue_tracking.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/issue_tracking.pdf?rev=1453410&r1=1453409&r2=1453410&view=diff == Binary files - no diff available. 
Modified: hadoop/common/site/main/publish/linkmap.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/linkmap.pdf?rev=1453410&r1=1453409&r2=1453410&view=diff == Binary files - no diff available. Modified: hadoop/common/site/main/publish/mailing_lists.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/mailing_lists.pdf?rev=1453410&r1=1453409&r2=1453410&view=diff == Binary files - no diff available. Modified: hadoop/common/site/main/publish/privacy_policy.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/privacy_policy.pdf?rev=1453410&r1=1453409&r2=1453410&view=diff == Binary files - no diff available. Modified: hadoop/common/site/main/publish/releases.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/releases.pdf?rev=1453410&r1=1453409&r2=1453410&view=diff == Binary files - no diff available. Modified: hadoop/common/site/main/publish/version_control.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/version_control.pdf?rev=1453410&r1=1453409&r2=1453410&view=diff == Binary files - no diff available. Modified: hadoop/common/site/main/publish/who.html URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.html?rev=1453410&r1=1453409&r2=1453410&view=diff == --- hadoop/common/site/main/publish/who.html (original) +++ hadoop/common/site/main/publish/who.html Wed Mar 6 16:37:32 2013 @@ -436,6 +436,17 @@ document.write("Last Published: " + docu + + + +jlowe + Jason Lowe + Yahoo! 
+ + -6 + + + @@ -662,7 +673,7 @@ document.write("Last Published: " + docu - + Emeritus Hadoop PMC Members @@ -677,7 +688,7 @@ document.write("Last Published: " + docu - + Hadoop Committers Hadoop's active committers include: @@ -1337,7 +1348,7 @@ document.write("Last Published: " + docu - + Emeritus Hadoop Committers Hadoop committers who are no longer active include: Modified: hadoop/common/site/main/publish/who.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.pdf?rev=1453410&r1=1453409&r2=1453410&view=diff
[Hadoop Wiki] Update of "Books" by Jitesh Gawali
Dear Wiki user, You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification. The "Books" page has been changed by Jitesh Gawali: http://wiki.apache.org/hadoop/Books?action=diff&rev1=8&rev2=9 == Books in Print == Here are the books that are currently in print -in order of publishing-, along with the Hadoop version they were written against. One problem anyone writing a book will encounter is that Hadoop is a very fast-moving target, and that things can change fast. Usually this is for the better: when a book says "Hadoop can't" they really mean "the version of Hadoop we worked with couldn't", and that the situation may have improved since then. If you have any query about Hadoop, don't be afraid to ask on the relevant user mailing lists. + + === Hadoop Beginner's Guide === + + '''Name:''' [[http://www.packtpub.com/hadoop-beginners-guide/book|Hadoop Beginner's Guide]] + + '''Author:''' Garry Turkington + + '''Hadoop Version:''' 1.0.x + + '''Publisher:''' Packt Publishing + + '''Date of Publishing:''' February 22, 2013 + + '''Sample Chapter:''' [[http://www.packtpub.com/sites/default/files/9781849517300_Chapter_04.pdf|Chapter 4: Developing MapReduce Programs]] + + Written for complete beginners to Hadoop, this book will get you up and running through practical tutorials using real data. Learn how to install and run Hadoop on a local Ubuntu host or create an on-demand Hadoop cluster on Amazon Web Services (EC2), before getting to grips with !MapReduce and learning to exploit Hadoop to its full potential. No matter how large or small your operation, this book is your indispensable guide to help you make sense of the data avalanche. === Hadoop Real World Solutions Cookbook ===
svn commit: r1453232 [1/2] - in /hadoop/common/branches/branch-1: build.xml lib/jdiff/hadoop_1.1.2.xml
Author: mattf Date: Wed Mar 6 09:22:25 2013 New Revision: 1453232 URL: http://svn.apache.org/r1453232 Log: JDiff output for release 1.1.2 Added: hadoop/common/branches/branch-1/lib/jdiff/hadoop_1.1.2.xml Modified: hadoop/common/branches/branch-1/build.xml Modified: hadoop/common/branches/branch-1/build.xml URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/build.xml?rev=1453232&r1=1453231&r2=1453232&view=diff == --- hadoop/common/branches/branch-1/build.xml (original) +++ hadoop/common/branches/branch-1/build.xml Wed Mar 6 09:22:25 2013 @@ -160,7 +160,7 @@ - + http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
svn commit: r1453231 [1/2] - /hadoop/common/branches/branch-1.1/lib/jdiff/hadoop_1.1.2.xml
Author: mattf Date: Wed Mar 6 09:15:14 2013 New Revision: 1453231 URL: http://svn.apache.org/r1453231 Log: JDiff output for release 1.1.2 Added: hadoop/common/branches/branch-1.1/lib/jdiff/hadoop_1.1.2.xml
svn commit: r1453210 - in /hadoop/common/site/main/publish/docs/r1.1.2: ./ api/ api/org/ api/org/apache/ api/org/apache/hadoop/ api/org/apache/hadoop/class-use/ api/org/apache/hadoop/classification/ a
Author: mattf Date: Wed Mar 6 08:41:57 2013 New Revision: 1453210 URL: http://svn.apache.org/r1453210 Log: Publishing docs for release 1.1.2 [This commit notification would consist of 569 parts, which exceeds the limit of 50, so it was shortened to a summary.]
[Hadoop Wiki] Update of "HowToRelease" by MattFoley
Dear Wiki user, You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification. The "HowToRelease" page has been changed by MattFoley: http://wiki.apache.org/hadoop/HowToRelease?action=diff&rev1=84&rev2=85 ssh people.apache.org svn co --depth immediates https://svn.apache.org/repos/asf/hadoop/common/site/main/publish/docs/ cd docs - tar xzf /www/www.apache.org/dist/hadoop/core/hadoop-X.Y.Z/hadoop-X.Y.Z.tar.gz --wildcards '*/doc' + tar xzf /www/www.apache.org/dist/hadoop/core/hadoop-X.Y.Z/hadoop-X.Y.Z.tar.gz --wildcards '*/docs' - mv hadoop-X.Y.Z/share/doc/hadoop rX.Y.Z + mv hadoop-X.Y.Z/docs rX.Y.Z svn add rX.Y.Z svn commit -m "Publishing docs for release X.Y.Z." rm -r hadoop-X.Y.Z
[Hadoop Wiki] Trivial Update of "HowToRelease" by MattFoley
Dear Wiki user, You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification. The "HowToRelease" page has been changed by MattFoley: http://wiki.apache.org/hadoop/HowToRelease?action=diff&rev1=83&rev2=84 }}} 1. Publish the new release docs, by doing the following: {{{ ssh people.apache.org - svn co --depth immediates https://svn.apache.org:/repos/asf/hadoop/common/site/main/publish/docs/ + svn co --depth immediates https://svn.apache.org/repos/asf/hadoop/common/site/main/publish/docs/ cd docs tar xzf /www/www.apache.org/dist/hadoop/core/hadoop-X.Y.Z/hadoop-X.Y.Z.tar.gz --wildcards '*/doc' mv hadoop-X.Y.Z/share/doc/hadoop rX.Y.Z
svn commit: r1453195 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/ publish/
Author: mattf Date: Wed Mar 6 08:12:17 2013 New Revision: 1453195 URL: http://svn.apache.org/r1453195 Log: Updated docs site for release 1.1.2 Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml hadoop/common/site/main/publish/bylaws.html hadoop/common/site/main/publish/bylaws.pdf hadoop/common/site/main/publish/index.html hadoop/common/site/main/publish/index.pdf hadoop/common/site/main/publish/issue_tracking.html hadoop/common/site/main/publish/issue_tracking.pdf hadoop/common/site/main/publish/linkmap.html hadoop/common/site/main/publish/linkmap.pdf hadoop/common/site/main/publish/mailing_lists.html hadoop/common/site/main/publish/mailing_lists.pdf hadoop/common/site/main/publish/privacy_policy.html hadoop/common/site/main/publish/privacy_policy.pdf hadoop/common/site/main/publish/releases.html hadoop/common/site/main/publish/releases.pdf hadoop/common/site/main/publish/version_control.html hadoop/common/site/main/publish/version_control.pdf hadoop/common/site/main/publish/who.html hadoop/common/site/main/publish/who.pdf Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml URL: http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml?rev=1453195&r1=1453194&r2=1453195&view=diff == --- hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml (original) +++ hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml Wed Mar 6 08:12:17 2013 @@ -41,6 +41,14 @@ News +15 February, 2013: Release 1.1.2 available + Point release for the 1.1.X line. Bug fixes and improvements, as documented in the http://hadoop.apache.org/docs/r1.1.2/releasenotes.html";> +Hadoop 1.1.2 Release Notes. + + + + 14 February, 2013: Release 2.0.3-alpha available This is the latest (alpha) version in the hadoop-2.x series. 
Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml URL: http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml?rev=1453195&r1=1453194&r2=1453195&view=diff == --- hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml (original) +++ hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml Wed Mar 6 08:12:17 2013 @@ -35,7 +35,7 @@ - + @@ -76,7 +76,7 @@ - + Modified: hadoop/common/site/main/publish/bylaws.html URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/bylaws.html?rev=1453195&r1=1453194&r2=1453195&view=diff == --- hadoop/common/site/main/publish/bylaws.html (original) +++ hadoop/common/site/main/publish/bylaws.html Wed Mar 6 08:12:17 2013 @@ -161,7 +161,7 @@ document.write("Last Published: " + docu http://hadoop.apache.org/docs/r1.0.4/";>Release 1.0.4 -http://hadoop.apache.org/docs/r1.1.1/";>Release 1.1.1 +http://hadoop.apache.org/docs/r1.1.2/";>Release 1.1.2 http://hadoop.apache.org/docs/r2.0.3-alpha/";>Release 2.0.3-alpha Modified: hadoop/common/site/main/publish/bylaws.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/bylaws.pdf?rev=1453195&r1=1453194&r2=1453195&view=diff == Binary files - no diff available. 
Modified: hadoop/common/site/main/publish/index.html URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/index.html?rev=1453195&r1=1453194&r2=1453195&view=diff == --- hadoop/common/site/main/publish/index.html (original) +++ hadoop/common/site/main/publish/index.html Wed Mar 6 08:12:17 2013 @@ -161,7 +161,7 @@ document.write("Last Published: " + docu http://hadoop.apache.org/docs/r1.0.4/";>Release 1.0.4 -http://hadoop.apache.org/docs/r1.1.1/";>Release 1.1.1 +http://hadoop.apache.org/docs/r1.1.2/";>Release 1.1.2 http://hadoop.apache.org/docs/r2.0.3-alpha/";>Release 2.0.3-alpha Modified: hadoop/common/site/main/publish/index.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/index.pdf?rev=1453195&r1=1453194&r2=1453195&view=diff == Binary files - no diff available. Modified: hadoop/common/site/main/publish/issue_tracking.html URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/issue_tracking.html?rev=1453195&r1=1453194&r2=1453