svn commit: r1397709 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/FileSystem.java
Author: jlowe Date: Fri Oct 12 20:00:39 2012 New Revision: 1397709 URL: http://svn.apache.org/viewvc?rev=1397709&view=rev Log: HADOOP-8906. paths with multiple globs are unreliable. Contributed by Daryn Sharp. Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1397709&r1=1397708&r2=1397709&view=diff == --- hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt Fri Oct 12 20:00:39 2012 @@ -36,6 +36,9 @@ Release 0.23.5 - UNRELEASED HADOOP-8310. FileContext#checkPath should handle URIs with no port. (atm) +HADOOP-8906. paths with multiple globs are unreliable. (Daryn Sharp via +jlowe) + Release 0.23.4 - UNRELEASED INCOMPATIBLE CHANGES Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1397709&r1=1397708&r2=1397709&view=diff == --- hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original) +++ hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Fri Oct 12 20:00:39 2012 @@ -24,6 +24,7 @@ import java.net.URI; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; @@ -1512,120 +1513,113 @@ public abstract class FileSystem extends public FileStatus[] globStatus(Path pathPattern, PathFilter filter) throws IOException { String filename = pathPattern.toUri().getPath(); +List allMatches = null; + List filePatterns = GlobExpander.expand(filename); -if (filePatterns.size() == 1) { - return globStatusInternal(pathPattern, filter); -} else { - List results = new ArrayList(); - for (String filePattern : filePatterns) { -FileStatus[] files = globStatusInternal(new Path(filePattern), filter); -for (FileStatus file : files) { - results.add(file); +for (String filePattern : filePatterns) { + Path path = new Path(filePattern.isEmpty() ? Path.CUR_DIR : filePattern); + List matches = globStatusInternal(path, filter); + if (matches != null) { +if (allMatches == null) { + allMatches = matches; +} else { + allMatches.addAll(matches); } } - return results.toArray(new FileStatus[results.size()]); } + +FileStatus[] results = null; +if (allMatches != null) { + results = allMatches.toArray(new FileStatus[allMatches.size()]); +} else if (filePatterns.size() > 1) { + // no matches with multiple expansions is a non-matching glob + results = new FileStatus[0]; +} +return results; } + + // sort gripes because FileStatus Comparable isn't parameterized... 
+ @SuppressWarnings("unchecked") + private List globStatusInternal(Path pathPattern, + PathFilter filter) throws IOException { +boolean patternHasGlob = false; // pathPattern has any globs +List matches = new ArrayList(); - private FileStatus[] globStatusInternal(Path pathPattern, PathFilter filter) - throws IOException { -Path[] parents = new Path[1]; +// determine starting point int level = 0; -String filename = pathPattern.toUri().getPath(); +String baseDir = Path.CUR_DIR; +if (pathPattern.isAbsolute()) { + level = 1; // need to skip empty item at beginning of split list + baseDir = Path.SEPARATOR; +} -// path has only zero component -if ("".equals(filename) || Path.SEPARATOR.equals(filename)) { - return getFileStatus(new Path[]{pathPattern}); +// parse components and determine if it's a glob +String[] components = null; +GlobFilter[] filters = null; +String filename = pathPattern.toUri().getPath(); +if (!filename.isEmpty() && !Path.SEPARATOR.equals(filename)) { + components = filename.split(Path.SEPARATOR); + filters = new GlobFilter[components.length]; + for (int i=level; i < components.length; i++) { +filters[i] = new GlobFilter(compo
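The core of this fix is how results from the brace-expanded sub-patterns are merged while preserving the documented contract: null means "no glob and the path does not exist", an empty array means "a glob matched nothing". Below is a minimal, self-contained sketch of that merge strategy, not the patched FileSystem code itself; matchOne() is a hypothetical stand-in for the private globStatusInternal() and plain Strings stand in for FileStatus objects.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class GlobMergeSketch {
      // Stand-in for globStatusInternal(): a literal (non-glob) pattern that does
      // not exist returns null, while a glob that matches nothing returns an
      // empty list.
      static List<String> matchOne(String pattern) {
        return pattern.contains("*") ? new ArrayList<String>() : null;
      }

      static String[] globStatusLike(List<String> expandedPatterns) {
        List<String> allMatches = null;
        for (String p : expandedPatterns) {
          List<String> matches = matchOne(p);
          if (matches != null) {
            if (allMatches == null) {
              allMatches = matches;        // adopt the first non-null result list
            } else {
              allMatches.addAll(matches);  // append matches from later expansions
            }
          }
        }
        if (allMatches != null) {
          return allMatches.toArray(new String[allMatches.size()]);
        }
        // No expansion produced a result list. If there were multiple expansions,
        // the original pattern necessarily contained a glob, so report an empty
        // match set rather than "does not exist".
        return expandedPatterns.size() > 1 ? new String[0] : null;
      }

      public static void main(String[] args) {
        // single literal pattern that does not exist -> prints "null"
        System.out.println(globStatusLike(Arrays.asList("/no/such/file")));
        // two expansions (the input had a brace glob), nothing matched -> prints 0
        System.out.println(globStatusLike(Arrays.asList("/a/x", "/a/y")).length);
      }
    }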
svn commit: r1397708 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/FileSystem.java
Author: jlowe Date: Fri Oct 12 20:00:31 2012 New Revision: 1397708 URL: http://svn.apache.org/viewvc?rev=1397708&view=rev Log: svn merge -c 1397704 FIXES: HADOOP-8906. paths with multiple globs are unreliable. Contributed by Daryn Sharp. Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1397708&r1=1397707&r2=1397708&view=diff == --- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt Fri Oct 12 20:00:31 2012 @@ -781,6 +781,9 @@ Release 0.23.5 - UNRELEASED BUG FIXES +HADOOP-8906. paths with multiple globs are unreliable. (Daryn Sharp via +jlowe) + Release 0.23.4 - UNRELEASED INCOMPATIBLE CHANGES Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1397708&r1=1397707&r2=1397708&view=diff == --- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original) +++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Fri Oct 12 20:00:31 2012 @@ -24,6 +24,7 @@ import java.net.URI; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; @@ -1575,120 +1576,113 @@ public abstract class FileSystem extends public FileStatus[] globStatus(Path pathPattern, PathFilter filter) throws IOException { String filename = pathPattern.toUri().getPath(); +List allMatches = null; + List filePatterns = GlobExpander.expand(filename); -if (filePatterns.size() == 1) { - return globStatusInternal(pathPattern, filter); -} else { - List results = new ArrayList(); - for (String filePattern : filePatterns) { -FileStatus[] files = globStatusInternal(new Path(filePattern), filter); -for (FileStatus file : files) { - results.add(file); +for (String filePattern : filePatterns) { + Path path = new Path(filePattern.isEmpty() ? Path.CUR_DIR : filePattern); + List matches = globStatusInternal(path, filter); + if (matches != null) { +if (allMatches == null) { + allMatches = matches; +} else { + allMatches.addAll(matches); } } - return results.toArray(new FileStatus[results.size()]); } + +FileStatus[] results = null; +if (allMatches != null) { + results = allMatches.toArray(new FileStatus[allMatches.size()]); +} else if (filePatterns.size() > 1) { + // no matches with multiple expansions is a non-matching glob + results = new FileStatus[0]; +} +return results; } - private FileStatus[] globStatusInternal(Path pathPattern, PathFilter filter) - throws IOException { -Path[] parents = new Path[1]; + // sort gripes because FileStatus Comparable isn't parameterized... 
+ @SuppressWarnings("unchecked") + private List globStatusInternal(Path pathPattern, + PathFilter filter) throws IOException { +boolean patternHasGlob = false; // pathPattern has any globs +List matches = new ArrayList(); + +// determine starting point int level = 0; -String filename = pathPattern.toUri().getPath(); +String baseDir = Path.CUR_DIR; +if (pathPattern.isAbsolute()) { + level = 1; // need to skip empty item at beginning of split list + baseDir = Path.SEPARATOR; +} -// path has only zero component -if ("".equals(filename) || Path.SEPARATOR.equals(filename)) { - return getFileStatus(new Path[]{pathPattern}); +// parse components and determine if it's a glob +String[] components = null; +GlobFilter[] filters = null; +String filename = pathPattern.toUri().getPath(); +if (!filename.isEmpty() && !Path.SEPARATOR.equals(filename)) { + components = filename.split(Path.SEPARATOR); + filters = new GlobFilter[components.length]; + for (int i=level; i < components.length; i++) { +filters[i] = new GlobFilter(components[i]); +patternHasGlob |= filters[i].hasPattern(); +
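The same patch also rewrites how globStatusInternal walks the pattern: the path is split on the separator, the empty leading component of an absolute path is skipped, and a GlobFilter per component records whether any component actually contains glob metacharacters. The sketch below illustrates that parsing step in isolation; containsGlob() is a simplified, hypothetical stand-in for GlobFilter#hasPattern(), not the real implementation.

    public class GlobComponentsSketch {
      // Simplified stand-in for GlobFilter#hasPattern(): does the component use
      // any glob metacharacters?
      static boolean containsGlob(String component) {
        return component.matches(".*[*?\\[\\]{}\\\\].*");
      }

      public static void main(String[] args) {
        String filename = "/user/*/logs/2012-10-1[0-2]";

        // determine the starting point, as in the patch: an absolute path yields
        // an empty first component when split, so start at index 1 and anchor
        // the walk at the root
        int level = filename.startsWith("/") ? 1 : 0;
        String baseDir = (level == 1) ? "/" : ".";

        String[] components = filename.split("/");
        boolean patternHasGlob = false;
        for (int i = level; i < components.length; i++) {
          boolean glob = containsGlob(components[i]);
          patternHasGlob |= glob;
          System.out.println("component[" + i + "] = " + components[i]
              + (glob ? "   <-- glob" : ""));
        }
        System.out.println("baseDir=" + baseDir + " patternHasGlob=" + patternHasGlob);
      }
    }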
svn commit: r1397704 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/FileSystem.java
Author: jlowe Date: Fri Oct 12 19:49:40 2012 New Revision: 1397704 URL: http://svn.apache.org/viewvc?rev=1397704&view=rev Log: HADOOP-8906. paths with multiple globs are unreliable. Contributed by Daryn Sharp. Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1397704&r1=1397703&r2=1397704&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Oct 12 19:49:40 2012 @@ -1054,6 +1054,9 @@ Release 0.23.5 - UNRELEASED BUG FIXES +HADOOP-8906. paths with multiple globs are unreliable. (Daryn Sharp via +jlowe) + Release 0.23.4 - UNRELEASED INCOMPATIBLE CHANGES Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1397704&r1=1397703&r2=1397704&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Fri Oct 12 19:49:40 2012 @@ -24,6 +24,7 @@ import java.net.URI; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; @@ -1574,120 +1575,113 @@ public abstract class FileSystem extends public FileStatus[] globStatus(Path pathPattern, PathFilter filter) throws IOException { String filename = pathPattern.toUri().getPath(); +List allMatches = null; + List filePatterns = GlobExpander.expand(filename); -if (filePatterns.size() == 1) { - return globStatusInternal(pathPattern, filter); -} else { - List results = new ArrayList(); - for (String filePattern : filePatterns) { -FileStatus[] files = globStatusInternal(new Path(filePattern), filter); -for (FileStatus file : files) { - results.add(file); +for (String filePattern : filePatterns) { + Path path = new Path(filePattern.isEmpty() ? Path.CUR_DIR : filePattern); + List matches = globStatusInternal(path, filter); + if (matches != null) { +if (allMatches == null) { + allMatches = matches; +} else { + allMatches.addAll(matches); } } - return results.toArray(new FileStatus[results.size()]); } + +FileStatus[] results = null; +if (allMatches != null) { + results = allMatches.toArray(new FileStatus[allMatches.size()]); +} else if (filePatterns.size() > 1) { + // no matches with multiple expansions is a non-matching glob + results = new FileStatus[0]; +} +return results; } - private FileStatus[] globStatusInternal(Path pathPattern, PathFilter filter) - throws IOException { -Path[] parents = new Path[1]; + // sort gripes because FileStatus Comparable isn't parameterized... 
+ @SuppressWarnings("unchecked") + private List globStatusInternal(Path pathPattern, + PathFilter filter) throws IOException { +boolean patternHasGlob = false; // pathPattern has any globs +List matches = new ArrayList(); + +// determine starting point int level = 0; -String filename = pathPattern.toUri().getPath(); +String baseDir = Path.CUR_DIR; +if (pathPattern.isAbsolute()) { + level = 1; // need to skip empty item at beginning of split list + baseDir = Path.SEPARATOR; +} -// path has only zero component -if (filename.isEmpty() || Path.SEPARATOR.equals(filename)) { - return getFileStatus(new Path[]{pathPattern}); +// parse components and determine if it's a glob +String[] components = null; +GlobFilter[] filters = null; +String filename = pathPattern.toUri().getPath(); +if (!filename.isEmpty() && !Path.SEPARATOR.equals(filename)) { + components = filename.split(Path.SEPARATOR); + filters = new GlobFilter[components.length]; + for (int i=level; i < components.length; i++) { +filters[i] = new GlobFilter(components[i]); +patternHasGlob |= filters[i].hasPattern(); + } + if (!patternHasGlob) { +baseDir = unquotePathComponent(filename); +components = null; // short through to filter check +
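The new globStatusInternal also explains the added java.util.Collections import and the @SuppressWarnings("unchecked"): matches are collected in a list and sorted, and FileStatus only implements the raw Comparable. A caller who wants the same "sorted by path name" ordering that the globStatus javadoc promises, over an existing FileStatus[], can get it without the unchecked warning by supplying an explicit comparator; the following is a hedged, caller-side sketch, not code from the patch.

    import java.util.Arrays;
    import java.util.Comparator;

    import org.apache.hadoop.fs.FileStatus;

    public class SortStatusesByPath {
      // Sort FileStatus entries by path name, mirroring the ordering globStatus()
      // documents, with a parameterized comparator so no unchecked cast is needed.
      public static void sortByPath(FileStatus[] statuses) {
        Arrays.sort(statuses, new Comparator<FileStatus>() {
          @Override
          public int compare(FileStatus a, FileStatus b) {
            // org.apache.hadoop.fs.Path compares by its underlying URI
            return a.getPath().compareTo(b.getPath());
          }
        });
      }
    }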
svn commit: r1397701 - in /hadoop/common/tags: release-1.0.4-rc0/ release-1.0.4/
Author: omalley Date: Fri Oct 12 19:43:26 2012 New Revision: 1397701 URL: http://svn.apache.org/viewvc?rev=1397701&view=rev Log: Moving the 1.0.4rc0 to final tag. Added: hadoop/common/tags/release-1.0.4/ (props changed) - copied from r1397700, hadoop/common/tags/release-1.0.4-rc0/ Removed: hadoop/common/tags/release-1.0.4-rc0/ Propchange: hadoop/common/tags/release-1.0.4/ -- --- svn:ignore (added) +++ svn:ignore Fri Oct 12 19:43:26 2012 @@ -0,0 +1,9 @@ +build +build.properties +logs +.classpath +.git +.project +.settings +.launches +.externalToolBuilders Propchange: hadoop/common/tags/release-1.0.4/ -- --- svn:mergeinfo (added) +++ svn:mergeinfo Fri Oct 12 19:43:26 2012 @@ -0,0 +1,8 @@ +/hadoop/common/branches/branch-0.20:826138,826568,829987,831184,833001,880632,898713,909245,909723,932042,960946,990003,1044225 +/hadoop/common/branches/branch-0.20-append:955380,955398,955448,956329 +/hadoop/common/branches/branch-0.20-security:1170042,1170087,1170997,1171137,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470,1174471,1175114,1176179,1176720,1177907,1179036,1179171,1179519,1179857,1183086,1183101,1183176,1183556,1186509,1187141,1189029,1189361,1190079,1190089,1190100,1195737,1197330,1198906,1199401,1199571,1200733,1205148,1205655,1206180 +/hadoop/common/branches/branch-0.20-security-203:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1102071,1128115 +/hadoop/common/branches/branch-0.20-security-204:1128390,1147228,1148069,1149316,1154413 +/hadoop/common/branches/branch-1:1207579,1208143,1235107,1293012,1293490,1293716,1301291,1301804,1302058,1302720,1303017,1303027,1304158,1310039,1325636,1331064,1333564 +/hadoop/core/branches/branch-0.19:713112 +/hadoop/core/trunk:727001,727117,727191,727212,727217,727228,727255,727869,728187,729052,729987,732385,732572,732613,732777,732838,732869,733887,734870,734916,736426,738328,738697,740077,740157,741703,741762,743745,743816,743892,744894,745180,746010,746206,746227,746233,746274,746338,746902-746903,746925,746944,746968,746970,747279,747289,747802,748084,748090,748783,749262,749318,749863,750533,752073,752609,752834,752836,752913,752932,753112-753113,753346,754645,754847,754927,755035,755226,755348,755370,755418,755426,755790,755905,755938,755960,755986,755998,756352,757448,757624,757849,758156,758180,759398,759932,760502,760783,761046,761482,761632,762216,762879,763107,763502,764967,765016,765809,765951,771607,771661,772844,772876,772884,772920,773889,776638,778962,778966,779893,781720,784661,785046,785569
svn commit: r1397691 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/fs/FileSystem.java src/main/java/org/apache/hadoop/fs/GlobExpander.java
Author: suresh Date: Fri Oct 12 19:03:10 2012 New Revision: 1397691 URL: http://svn.apache.org/viewvc?rev=1397691&view=rev Log: HADOOP-8910. Add examples to GlobExpander#expand method. Contributed by Suresh Srinivas. Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1397691&r1=1397690&r2=1397691&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Oct 12 19:03:10 2012 @@ -122,6 +122,8 @@ Trunk (Unreleased) HADOOP-8864. Addendum to HADOOP-8840: Add a coloring case for +0 results too. (harsh) +HADOOP-8910. Add examples to GlobExpander#expand method. (suresh) + BUG FIXES HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName. Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1397691&r1=1397690&r2=1397691&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Fri Oct 12 19:03:10 2012 @@ -1391,11 +1391,11 @@ public abstract class FileSystem extends } final private static PathFilter DEFAULT_FILTER = new PathFilter() { - @Override - public boolean accept(Path file) { -return true; - } -}; +@Override +public boolean accept(Path file) { + return true; +} + }; /** * List the statuses of the files/directories in the given path if the path is @@ -1559,17 +1559,16 @@ public abstract class FileSystem extends } /** - * Return an array of FileStatus objects whose path names match pathPattern - * and is accepted by the user-supplied path filter. Results are sorted by - * their path names. - * Return null if pathPattern has no glob and the path does not exist. - * Return an empty array if pathPattern has a glob and no path matches it. + * Return an array of FileStatus objects whose path names match + * {@code pathPattern} and is accepted by the user-supplied path filter. + * Results are sorted by their path names. 
* - * @param pathPattern - * a regular expression specifying the path pattern - * @param filter - * a user-supplied path filter - * @return an array of FileStatus objects + * @param pathPattern a regular expression specifying the path pattern + * @param filter a user-supplied path filter + * @return null if {@code pathPattern} has no glob and the path does not exist + * an empty array if {@code pathPattern} has a glob and no path + * matches it else an array of {@link FileStatus} objects matching the + * pattern * @throws IOException if any I/O error occurs when fetching file status */ public FileStatus[] globStatus(Path pathPattern, PathFilter filter) Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java?rev=1397691&r1=1397690&r2=1397691&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java Fri Oct 12 19:03:10 2012 @@ -39,12 +39,26 @@ class GlobExpander { } /** - * Expand globs in the given filePattern into a collection of - * file patterns so that in the expanded set no file pattern has a - * slash character ("/") in a curly bracket pair. + * Expand globs in the given filePattern into a collection of + * file patterns so that in the expanded set no file pattern has a slash + * character ("/") in a curly bracket pair. + * + * Some examples of how the filePattern is expanded: + * + * + * filePattern - Expanded file pattern + * {a/b} - a/b +
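GlobExpander itself is package-private, so its expansion of slash-containing curly-brace groups is only visible through the public FileSystem#globStatus entry point whose javadoc is being clarified here. The following is a hedged usage sketch of that contract; the paths and the pattern string are illustrative only.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class GlobStatusExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());

        // A curly-brace alternation whose branches contain slashes; globStatus
        // expands it internally before matching.
        FileStatus[] matches =
            fs.globStatus(new Path("/data/{2012/10,2012/11}/*.log"));

        if (matches == null) {
          // pattern had no glob and the literal path does not exist
          System.out.println("path does not exist");
        } else if (matches.length == 0) {
          // pattern had a glob but nothing matched it
          System.out.println("glob matched nothing");
        } else {
          for (FileStatus status : matches) {   // results are sorted by path name
            System.out.println(status.getPath());
          }
        }
      }
    }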
[Hadoop Wiki] Update of "Support" by learncomputer
Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification.

The "Support" page has been changed by learncomputer:
http://wiki.apache.org/hadoop/Support?action=diff&rev1=45&rev2=46

Comment:
Added LearnComputer as a provider of Hadoop training courses

  * [[http://www.doculibre.com/|Doculibre Inc.]] Open source and information management consulting. (Lucene, Nutch, Hadoop, Solr, Lius etc.)
  * [[http://hortonworks.com/|Hortonworks]] - our [[http://hortonworks.com/download/HDP|HDP]] product is an always free-to-use Hadoop system suitable for production use. We provide training for [[http://hortonworks.com/hadoop-training/hadoop-training-for-developers/|developers]] and [[http://hortonworks.com/hadoop-training/hadoop-training-for-administrators/|administrators]], [[http://hortonworks.com/hadoop-support/|support]] and consultancy services.
  * [[http://www.impetus.com/|Impetus]] - With a strong focus, established thought leadership and open source contributions in the area of Big Data analytics and consulting services, Impetus uses its Global Delivery Model to help technology businesses and enterprises evaluate and implement solutions tailored to their specific context, without being biased towards a particular solution. [[http://bigdata.impetus.com/#|More info about Big Data @Impetus]]
+ * [[http://www.learncomputer.com/training/hadoop/|LearnComputer Hadoop Training]] - offers training courses in Hadoop Development and Administration as well as a non-technical [[http://www.learncomputer.com/training/hadoop/hadoop-overview/|Hadoop Overview]] course designed for managers and key decision makers.
  * [[http://marakana.com/training/java/hadoop.html|Marakana Hadoop Training]]: 3-day training program in San Francisco with [[http://marakana.com/expert/srisatish_ambati,10809.html|Srisatish Ambati]] Program is geared to give developers hands-on working knowledge for harnessing the power of Hadoop in their organizations.
  * [[http://metrixcloud.com/|MetrixCloud]] - provides commercial support, installation, and hosting of Hadoop Clusters. [[http://metrixcloud.com/contact.php|Contact Us.]]
  * [[http://www.nubetech.co/|Nube Technologies]] - Big Data Integration, deduplication, mining and analysis. Creators of [[https://github.com/sonalgoyal/hiho|HIHO Hadoop Data Integration]]. Provide services for Map Reduce design and implementation using Hadoop and NoSQL databases, cluster setup, content analysis, information extraction and retrieval and cloud computing.
svn commit: r1397635 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Client.java src/test/java/org/apache/hadoop/ipc/TestSasl
Author: daryn Date: Fri Oct 12 16:27:54 2012 New Revision: 1397635 URL: http://svn.apache.org/viewvc?rev=1397635&view=rev Log: HADOOP-8784. Improve IPC.Client's token use (daryn) Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1397635&r1=1397634&r2=1397635&view=diff == --- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt Fri Oct 12 16:27:54 2012 @@ -47,6 +47,8 @@ Release 2.0.3-alpha - Unreleased HADOOP-8912. Add .gitattributes file to prevent CRLF and LF mismatches for source and text files. (Raja Aluri via suresh) +HADOOP-8784. Improve IPC.Client's token use (daryn) + OPTIMIZATIONS HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1397635&r1=1397634&r2=1397635&view=diff == --- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original) +++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Fri Oct 12 16:27:54 2012 @@ -223,7 +223,6 @@ public class Client { private IpcConnectionContextProto connectionContext; // connection context private final ConnectionId remoteId;// connection id private AuthMethod authMethod; // authentication method -private boolean useSasl; private Token token; private SaslRpcClient saslRpcClient; @@ -268,8 +267,7 @@ public class Client { UserGroupInformation ticket = remoteId.getTicket(); Class protocol = remoteId.getProtocol(); - this.useSasl = UserGroupInformation.isSecurityEnabled(); - if (useSasl && protocol != null) { + if (protocol != null) { TokenInfo tokenInfo = SecurityUtil.getTokenInfo(protocol, conf); if (tokenInfo != null) { TokenSelector tokenSelector = null; @@ -294,12 +292,12 @@ public class Client { } } - if (!useSasl) { -authMethod = AuthMethod.SIMPLE; - } else if (token != null) { + if (token != null) { authMethod = AuthMethod.DIGEST; - } else { + } else if (UserGroupInformation.isSecurityEnabled()) { authMethod = AuthMethod.KERBEROS; + } else { +authMethod = AuthMethod.SIMPLE; } connectionContext = ProtoUtil.makeIpcConnectionContext( @@ -574,14 +572,12 @@ public class Client { InputStream inStream = NetUtils.getInputStream(socket); OutputStream outStream = NetUtils.getOutputStream(socket); writeConnectionHeader(outStream); - if (useSasl) { + if (authMethod != AuthMethod.SIMPLE) { final InputStream in2 = inStream; final OutputStream out2 = outStream; UserGroupInformation ticket = remoteId.getTicket(); -if (authMethod == AuthMethod.KERBEROS) { - if (ticket.getRealUser() != null) { -ticket = ticket.getRealUser(); - } +if (ticket.getRealUser() != null) { + ticket = ticket.getRealUser(); } boolean continueSasl = false; try 
{
@@ -612,7 +608,6 @@ public class Client {
 connectionContext.getProtocol(),
 ProtoUtil.getUgi(connectionContext.getUserInfo()),
 authMethod);
- useSasl = false;
 }
 }

@@ -1172,7 +1167,7 @@ public class Client {
 call.error);
 }
 } else {
-return call.rpcResponse;
+return call.getRpcResult();
 }
 }
 }

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1397635&r1=1397634&r2=1397635&view=diff
==
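With the useSasl flag gone, the connection decides whether to negotiate SASL purely from the chosen auth method, and for proxy users the handshake is now always performed as the real (authenticating) user rather than only in the Kerberos case. Below is a simplified sketch of that post-patch decision, lifted out of its Connection context; the helper method and class are hypothetical, and the import paths are assumed from the Hadoop security package.

    import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
    import org.apache.hadoop.security.UserGroupInformation;

    class SaslTicketSketch {
      // Which UGI should drive the SASL handshake for this connection?
      static UserGroupInformation saslUser(UserGroupInformation ticket,
                                           AuthMethod authMethod) {
        if (authMethod != AuthMethod.SIMPLE) {   // DIGEST or KERBEROS => SASL needed
          if (ticket.getRealUser() != null) {    // proxy user: negotiate as the real user
            ticket = ticket.getRealUser();
          }
        }
        return ticket;
      }
    }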
svn commit: r1397634 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/ipc/Client.java src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
Author: daryn Date: Fri Oct 12 16:27:26 2012 New Revision: 1397634 URL: http://svn.apache.org/viewvc?rev=1397634&view=rev Log: HADOOP-8784. Improve IPC.Client's token use (daryn) Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1397634&r1=1397633&r2=1397634&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Oct 12 16:27:26 2012 @@ -312,6 +312,8 @@ Release 2.0.3-alpha - Unreleased HADOOP-8912. Add .gitattributes file to prevent CRLF and LF mismatches for source and text files. (Raja Aluri via suresh) +HADOOP-8784. Improve IPC.Client's token use (daryn) + OPTIMIZATIONS HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1397634&r1=1397633&r2=1397634&view=diff == --- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original) +++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Fri Oct 12 16:27:26 2012 @@ -225,7 +225,6 @@ public class Client { private IpcConnectionContextProto connectionContext; // connection context private final ConnectionId remoteId;// connection id private AuthMethod authMethod; // authentication method -private boolean useSasl; private Token token; private SaslRpcClient saslRpcClient; @@ -270,8 +269,7 @@ public class Client { UserGroupInformation ticket = remoteId.getTicket(); Class protocol = remoteId.getProtocol(); - this.useSasl = UserGroupInformation.isSecurityEnabled(); - if (useSasl && protocol != null) { + if (protocol != null) { TokenInfo tokenInfo = SecurityUtil.getTokenInfo(protocol, conf); if (tokenInfo != null) { TokenSelector tokenSelector = null; @@ -296,12 +294,12 @@ public class Client { } } - if (!useSasl) { -authMethod = AuthMethod.SIMPLE; - } else if (token != null) { + if (token != null) { authMethod = AuthMethod.DIGEST; - } else { + } else if (UserGroupInformation.isSecurityEnabled()) { authMethod = AuthMethod.KERBEROS; + } else { +authMethod = AuthMethod.SIMPLE; } connectionContext = ProtoUtil.makeIpcConnectionContext( @@ -576,14 +574,12 @@ public class Client { InputStream inStream = NetUtils.getInputStream(socket); OutputStream outStream = NetUtils.getOutputStream(socket); writeConnectionHeader(outStream); - if (useSasl) { + if (authMethod != AuthMethod.SIMPLE) { final InputStream in2 = inStream; final OutputStream out2 = outStream; UserGroupInformation ticket = remoteId.getTicket(); -if (authMethod == AuthMethod.KERBEROS) { - if (ticket.getRealUser() != null) { -ticket = ticket.getRealUser(); - } +if (ticket.getRealUser() != null) { + ticket = ticket.getRealUser(); } boolean continueSasl = false; try { @@ -614,7 +610,6 @@ public class Client { connectionContext.getProtocol(), ProtoUtil.getUgi(connectionContext.getUserInfo()), 
 authMethod);
- useSasl = false;
 }
 }

@@ -1174,7 +1169,7 @@ public class Client {
 call.error);
 }
 } else {
-return call.rpcResponse;
+return call.getRpcResult();
 }
 }
 }

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1397634&r1=1397633&r2=1397634&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache
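The heart of the change is the reordered auth-method selection: a service token now always wins (DIGEST), Kerberos is chosen only when security is enabled and no token is available, and everything else falls back to SIMPLE, with no separate useSasl flag to keep in sync. A standalone sketch of that selection follows; the wrapper class and method are hypothetical, while the imported classes are assumed to be the standard Hadoop security types used by Client.java.

    import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;

    class AuthMethodSelectionSketch {
      static AuthMethod chooseAuthMethod(Token<?> token) {
        if (token != null) {
          return AuthMethod.DIGEST;                    // token-based SASL
        } else if (UserGroupInformation.isSecurityEnabled()) {
          return AuthMethod.KERBEROS;                  // secure cluster, no token
        } else {
          return AuthMethod.SIMPLE;                    // insecure cluster
        }
      }
    }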
[Hadoop Wiki] Trivial Update of "ContributorsGroup" by QwertyManiac
Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification.

The "ContributorsGroup" page has been changed by QwertyManiac:
http://wiki.apache.org/hadoop/ContributorsGroup?action=diff&rev1=39&rev2=40

Comment:
Add learncomputer.com's account for the Support page

  * kzhang
  * LarsGeorge
  * LarsFrancke
+ * learncomputer
  * LeeSchlesinger
  * linebeeLabs
  * LohitVijayarenu
svn commit: r1397547 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/ publish/
Author: acmurthy Date: Fri Oct 12 12:55:25 2012 New Revision: 1397547 URL: http://svn.apache.org/viewvc?rev=1397547&view=rev Log: Updated site for hadoop-2.0.2-alpha release. Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml hadoop/common/site/main/publish/bylaws.html hadoop/common/site/main/publish/bylaws.pdf hadoop/common/site/main/publish/index.html hadoop/common/site/main/publish/index.pdf hadoop/common/site/main/publish/issue_tracking.html hadoop/common/site/main/publish/issue_tracking.pdf hadoop/common/site/main/publish/linkmap.html hadoop/common/site/main/publish/linkmap.pdf hadoop/common/site/main/publish/mailing_lists.html hadoop/common/site/main/publish/mailing_lists.pdf hadoop/common/site/main/publish/privacy_policy.html hadoop/common/site/main/publish/privacy_policy.pdf hadoop/common/site/main/publish/releases.html hadoop/common/site/main/publish/releases.pdf hadoop/common/site/main/publish/version_control.html hadoop/common/site/main/publish/version_control.pdf hadoop/common/site/main/publish/who.html hadoop/common/site/main/publish/who.pdf Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml URL: http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml?rev=1397547&r1=1397546&r2=1397547&view=diff == --- hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml (original) +++ hadoop/common/site/main/author/src/documentation/content/xdocs/releases.xml Fri Oct 12 12:55:25 2012 @@ -41,6 +41,20 @@ News +9 October, 2012: Release 2.0.2-alpha available + This is the second (alpha) version in the hadoop-2.x series. + + This delivers significant enhancements to HDFS HA. Also it has a +significantly more stable version of YARN which, at the time of +release, has already been deployed on a 2000 node cluster. + + Please see the http://hadoop.apache.org/docs/r2.0.2-alpha/hadoop-project-dist/hadoop-common/releasenotes.html";> +Hadoop 2.0.2-alpha Release Notes for details. + + + + 17 September, 2012: Release 0.23.3 available This release contains YARN and MRv2 but does not have Name Node High Avalability Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml URL: http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml?rev=1397547&r1=1397546&r2=1397547&view=diff == --- hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml (original) +++ hadoop/common/site/main/author/src/documentation/content/xdocs/site.xml Fri Oct 12 12:55:25 2012 @@ -38,7 +38,7 @@ - + @@ -78,7 +78,7 @@ - + Modified: hadoop/common/site/main/publish/bylaws.html URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/bylaws.html?rev=1397547&r1=1397546&r2=1397547&view=diff == --- hadoop/common/site/main/publish/bylaws.html (original) +++ hadoop/common/site/main/publish/bylaws.html Fri Oct 12 12:55:25 2012 @@ -170,7 +170,7 @@ document.write("Last Published: " + docu http://hadoop.apache.org/docs/r0.23.3/";>Release 0.23.3 -http://hadoop.apache.org/docs/r2.0.0-alpha/";>Release 2.0.0-alpha +http://hadoop.apache.org/docs/r2.0.2-alpha/";>Release 2.0.2-alpha Related Projects Modified: hadoop/common/site/main/publish/bylaws.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/bylaws.pdf?rev=1397547&r1=1397546&r2=1397547&view=diff == Binary files - no diff available. 
Modified: hadoop/common/site/main/publish/index.html URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/index.html?rev=1397547&r1=1397546&r2=1397547&view=diff == --- hadoop/common/site/main/publish/index.html (original) +++ hadoop/common/site/main/publish/index.html Fri Oct 12 12:55:25 2012 @@ -170,7 +170,7 @@ document.write("Last Published: " + docu http://hadoop.apache.org/docs/r0.23.3/";>Release 0.23.3 -http://hadoop.apache.org/docs/r2.0.0-alpha/";>Release 2.0.0-alpha +http://hadoop.apache.org/docs/r2.0.2-alpha/";>Release 2.0.2-alpha Related Projects Modified: hadoop/common/site/main/publish/index.pdf URL: http://svn.apache.org/viewvc/hadoop/common/site/main/publish/index.pdf?rev=1397547&r1=13975