svn commit: r1442609 - /hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-pipes/src/main/native/pipes/impl/HadoopPipes.cc

2013-02-05 Thread tgraves
Author: tgraves
Date: Tue Feb  5 14:33:10 2013
New Revision: 1442609

URL: http://svn.apache.org/viewvc?rev=1442609&view=rev
Log:
 MAPREDUCE-4953. HadoopPipes misuses fprintf. (Andy Isaacson via tgraves)

Modified:

hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-pipes/src/main/native/pipes/impl/HadoopPipes.cc

Modified: 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-pipes/src/main/native/pipes/impl/HadoopPipes.cc
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-pipes/src/main/native/pipes/impl/HadoopPipes.cc?rev=1442609&r1=1442608&r2=1442609&view=diff
==
--- 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-pipes/src/main/native/pipes/impl/HadoopPipes.cc
 (original)
+++ 
hadoop/common/branches/branch-0.23/hadoop-tools/hadoop-pipes/src/main/native/pipes/impl/HadoopPipes.cc
 Tue Feb  5 14:33:10 2013
@@ -126,7 +126,7 @@ namespace HadoopPipes {
 static const char lineSeparator = '\n';
 
 void writeBuffer(const string buffer) {
-  fprintf(stream, quoteString(buffer, "\t\n").c_str());
+  fputs(quoteString(buffer, "\t\n").c_str(), stream);
 }
 
   public:




svn commit: r1442639 - /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

2013-02-05 Thread jlowe
Author: jlowe
Date: Tue Feb  5 16:02:52 2013
New Revision: 1442639

URL: http://svn.apache.org/viewvc?rev=1442639&view=rev
Log:
HADOOP-9260. Hadoop version may be not correct when starting name node or data 
node. Contributed by Chris Nauroth

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1442639&r1=1442638&r2=1442639&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Feb 
 5 16:02:52 2013
@@ -602,6 +602,9 @@ Release 2.0.3-alpha - Unreleased 
 HADOOP-9252. In StringUtils, humanReadableInt(..) has a race condition and
 the synchronization of limitDecimalTo2(double) can be avoided.  (szetszwo)
 
+HADOOP-9260. Hadoop version may be not correct when starting name node or
+data node. (Chris Nauroth via jlowe)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES




svn commit: r1442639 - /hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml

2013-02-05 Thread jlowe
Author: jlowe
Date: Tue Feb  5 16:02:52 2013
New Revision: 1442639

URL: http://svn.apache.org/viewvc?rev=1442639&view=rev
Log:
HADOOP-9260. Hadoop version may be not correct when starting name node or data 
node. Contributed by Chris Nauroth

Modified:

hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml

Modified: 
hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1442639&r1=1442638&r2=1442639&view=diff
==
--- 
hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
 (original)
+++ 
hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
 Tue Feb  5 16:02:52 2013
@@ -91,14 +91,20 @@
       <includes>
         <include>${project.artifactId}-${project.version}.jar</include>
         <include>${project.artifactId}-${project.version}-tests.jar</include>
-        <include>${project.artifactId}-${project.version}-sources.jar</include>
-        <include>${project.artifactId}-${project.version}-test-sources.jar</include>
       </includes>
       <excludes>
         <exclude>hadoop-tools-dist-*.jar</exclude>
       </excludes>
     </fileSet>
     <fileSet>
+      <directory>${project.build.directory}</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}-sources.jar</include>
+        <include>${project.artifactId}-${project.version}-test-sources.jar</include>
+      </includes>
+    </fileSet>
+    <fileSet>
       <directory>${basedir}/dev-support/jdiff</directory>
       <outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
     </fileSet>




svn commit: r1442641 - /hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml

2013-02-05 Thread jlowe
Author: jlowe
Date: Tue Feb  5 16:06:58 2013
New Revision: 1442641

URL: http://svn.apache.org/viewvc?rev=1442641&view=rev
Log:
svn merge -c 1442639 FIXES: HADOOP-9260. Hadoop version may be not correct when 
starting name node or data node. Contributed by Chris Nauroth

Modified:

hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml

Modified: 
hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1442641&r1=1442640&r2=1442641&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
 Tue Feb  5 16:06:58 2013
@@ -91,14 +91,20 @@
       <includes>
         <include>${project.artifactId}-${project.version}.jar</include>
         <include>${project.artifactId}-${project.version}-tests.jar</include>
-        <include>${project.artifactId}-${project.version}-sources.jar</include>
-        <include>${project.artifactId}-${project.version}-test-sources.jar</include>
       </includes>
       <excludes>
         <exclude>hadoop-tools-dist-*.jar</exclude>
       </excludes>
     </fileSet>
     <fileSet>
+      <directory>${project.build.directory}</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}-sources.jar</include>
+        <include>${project.artifactId}-${project.version}-test-sources.jar</include>
+      </includes>
+    </fileSet>
+    <fileSet>
       <directory>${basedir}/dev-support/jdiff</directory>
       <outputDirectory>/share/hadoop/${hadoop.component}/jdiff</outputDirectory>
     </fileSet>




svn commit: r1442641 - /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

2013-02-05 Thread jlowe
Author: jlowe
Date: Tue Feb  5 16:06:58 2013
New Revision: 1442641

URL: http://svn.apache.org/viewvc?rev=1442641&view=rev
Log:
svn merge -c 1442639 FIXES: HADOOP-9260. Hadoop version may be not correct when 
starting name node or data node. Contributed by Chris Nauroth

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1442641&r1=1442640&r2=1442641&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Feb  5 16:06:58 2013
@@ -283,6 +283,9 @@ Release 2.0.3-alpha - Unreleased 
 HADOOP-9252. In StringUtils, humanReadableInt(..) has a race condition and
 the synchronization of limitDecimalTo2(double) can be avoided.  (szetszwo)
 
+HADOOP-9260. Hadoop version may be not correct when starting name node or
+data node. (Chris Nauroth via jlowe)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES




svn commit: r1442755 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/util/ src/test/java/org/apache/hadoop/fs/

2013-02-05 Thread szetszwo
Author: szetszwo
Date: Tue Feb  5 21:23:29 2013
New Revision: 1442755

URL: http://svn.apache.org/viewvc?rev=1442755&view=rev
Log:
HADOOP-9278. Fix the file handle leak in HarMetaData.parseMetaData() in 
HarFileSystem. Contributed by Chris Nauroth

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1442755&r1=1442754&r2=1442755&view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Feb 
 5 21:23:29 2013
@@ -605,6 +605,9 @@ Release 2.0.3-alpha - Unreleased 
 HADOOP-9260. Hadoop version may be not correct when starting name node or
 data node. (Chris Nauroth via jlowe)
 
+HADOOP-9278. Fix the file handle leak in HarMetaData.parseMetaData() in
+HarFileSystem. (Chris Nauroth via szetszwo)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1442755&r1=1442754&r2=1442755&view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 Tue Feb  5 21:23:29 2013
@@ -30,8 +30,11 @@ import java.util.TreeMap;
 import java.util.HashMap;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.Progressable;
@@ -50,6 +53,9 @@ import org.apache.hadoop.util.Progressab
  */
 
 public class HarFileSystem extends FilterFileSystem {
+
+  private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
+
   public static final int VERSION = 3;
 
   private static final Map<URI, HarMetaData> harMetaCache =
@@ -1025,68 +1031,69 @@ public class HarFileSystem extends Filte
 }
 
 private void parseMetaData() throws IOException {
-  FSDataInputStream in = fs.open(masterIndexPath);
-  FileStatus masterStat = fs.getFileStatus(masterIndexPath);
-  masterIndexTimestamp = masterStat.getModificationTime();
-  LineReader lin = new LineReader(in, getConf());
-  Text line = new Text();
-  long read = lin.readLine(line);
-
- // the first line contains the version of the index file
-  String versionLine = line.toString();
-  String[] arr = versionLine.split(" ");
-  version = Integer.parseInt(arr[0]);
-  // make it always backwards-compatible
-  if (this.version > HarFileSystem.VERSION) {
-throw new IOException("Invalid version " + 
-this.version + " expected " + HarFileSystem.VERSION);
-  }
-
-  // each line contains a hashcode range and the index file name
-  String[] readStr = null;
-  while(read < masterStat.getLen()) {
-int b = lin.readLine(line);
-read += b;
-readStr = line.toString().split(" ");
-int startHash = Integer.parseInt(readStr[0]);
-int endHash  = Integer.parseInt(readStr[1]);
-stores.add(new Store(Long.parseLong(readStr[2]), 
-Long.parseLong(readStr[3]), startHash,
-endHash));
-line.clear();
-  }
+  Text line;
+  long read;
+  FSDataInputStream in = null;
+  LineReader lin = null;
+
   try {
-// close the master index
-lin.close();
-  } catch(IOException io){
-// do nothing just a read.
-  }
+in = fs.open(masterIndexPath);
+FileStatus masterStat = fs.getFileStatus(masterIndexPath);
+masterIndexTimestamp = masterStat.getModificationTime();
+lin = new LineReader(in, getConf());
+line = new Text();
+read = lin.readLine(line);
+
+// the first line contains the version of the index file
+String versionLine = line.toString();
+   

svn commit: r1442756 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ src/main/java/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/util/ src/test/cor

2013-02-05 Thread szetszwo
Author: szetszwo
Date: Tue Feb  5 21:24:05 2013
New Revision: 1442756

URL: http://svn.apache.org/viewvc?rev=1442756&view=rev
Log:
svn merge -c 1442755 from trunk for HADOOP-9278. Fix the file handle leak in 
HarMetaData.parseMetaData() in HarFileSystem.

Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/   
(props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
  (contents, props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/
   (props changed)

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/
--
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1442755

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1442756&r1=1442755&r2=1442756&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Feb  5 21:24:05 2013
@@ -286,6 +286,9 @@ Release 2.0.3-alpha - Unreleased 
 HADOOP-9260. Hadoop version may be not correct when starting name node or
 data node. (Chris Nauroth via jlowe)
 
+HADOOP-9278. Fix the file handle leak in HarMetaData.parseMetaData() in
+HarFileSystem. (Chris Nauroth via szetszwo)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1442755

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1442755

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1442756&r1=1442755&r2=1442756&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 Tue Feb  5 21:24:05 2013
@@ -30,8 +30,11 @@ import java.util.TreeMap;
 import java.util.HashMap;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.Progressable;
@@ -50,6 +53,9 @@ import org.apache.hadoop.util.Progressab
  */
 
 public class HarFileSystem extends FilterFileSystem {
+
+  private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
+
   public static final int VERSION = 3;
 
   private static final Map<URI, HarMetaData> harMetaCache =
@@ -1025,68 +1031,69 @@ public class HarFileSystem extends Filte
 }
 
 private void parseMetaData() throws IOException {
-  FSDataInputStream in = fs.open(masterIndexPath);
-  FileStatus masterStat = fs.getFileStatus(masterIndexPath);
-  masterIndexTimestamp = masterStat.getModificationTime();
-  LineReader lin = new LineReader(in, getConf());
-  Text line = new Text();
-  long read = lin.readLine(line);
-
- // the first line contains the version of the index file
-  String versionLine = line.toString();
-  String[] arr = versionLine.split(" ");
-  version = Integer.parseInt(arr[0]);
-  // make it always backwards-compatible
-  if (this.version > HarFileSystem.VERSION) {
-throw new IOException("Invalid version " + 
-  

svn commit: r1442757 - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: ./ src/main/java/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/util/ src/

2013-02-05 Thread szetszwo
Author: szetszwo
Date: Tue Feb  5 21:25:54 2013
New Revision: 1442757

URL: http://svn.apache.org/viewvc?rev=1442757&view=rev
Log:
svn merge -c 1442755 from trunk for HADOOP-9278. Fix the file handle leak in 
HarMetaData.parseMetaData() in HarFileSystem.

Modified:

hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/   
(props changed)

hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
   (contents, props changed)

hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)

hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java

hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/
   (props changed)

hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

Propchange: 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/
--
--- svn:mergeinfo (added)
+++ svn:mergeinfo Tue Feb  5 21:25:54 2013
@@ -0,0 +1,4 @@
+/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common:1152502-1296519
+/hadoop/common/branches/HDFS-3042/hadoop-common-project/hadoop-common:1306184-1342109
+/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common:1363593-1396941
+/hadoop/common/trunk/hadoop-common-project/hadoop-common:1379224-1441213,1442755

Modified: 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1442757&r1=1442756&r2=1442757&view=diff
==
--- 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
 (original)
+++ 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
 Tue Feb  5 21:25:54 2013
@@ -593,6 +593,9 @@ Release 2.0.3-alpha - Unreleased 
 
 HADOOP-8981. TestMetricsSystemImpl fails on Windows. (Xuan Gong via suresh)
 
+HADOOP-9278. Fix the file handle leak in HarMetaData.parseMetaData() in
+HarFileSystem. (Chris Nauroth via szetszwo)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES

Propchange: 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1442755

Propchange: 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
--
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1442755

Modified: 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1442757&r1=1442756&r2=1442757&view=diff
==
--- 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
 Tue Feb  5 21:25:54 2013
@@ -30,8 +30,11 @@ import java.util.TreeMap;
 import java.util.HashMap;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.Progressable;
@@ -50,6 +53,9 @@ import org.apache.hadoop.util.Progressab
  */
 
 public class HarFileSystem extends FilterFileSystem {
+
+  private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
+
   public static final int VERSION = 3;
 
   private static final Map<URI, HarMetaData> harMetaCache =
@@ -1025,68 +1031,69 @@ public class HarFileSystem extends Filte
 }
 
 private void parseMetaData() throws IOException {
-  FSDataInputStream in = fs.open(masterIndexPath);
-  FileStatus masterStat = fs.getFileStatus(masterIndexPath);
-  masterIndexTimestamp = masterStat.getModificationTime();
-  

svn commit: r1442828 - in /hadoop/common/branches/branch-1: CHANGES.txt src/test/org/apache/hadoop/mapred/TestJvmReuse.java

2013-02-05 Thread tucu
Author: tucu
Date: Wed Feb  6 01:37:41 2013
New Revision: 1442828

URL: http://svn.apache.org/viewvc?rev=1442828&view=rev
Log:
MAPREDUCE-4967. TestJvmReuse fails on assertion. (kkambatl via tucu)

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/test/org/apache/hadoop/mapred/TestJvmReuse.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1442828&r1=1442827&r2=1442828&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Wed Feb  6 01:37:41 2013
@@ -471,6 +471,8 @@ Release 1.2.0 - unreleased
 MAPREDUCE-4843. When using DefaultTaskController, JobLocalizer not thread 
 safe. (kkambatl via tucu)
 
+MAPREDUCE-4967. TestJvmReuse fails on assertion. (kkambatl via tucu)
+
 Release 1.1.2 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/mapred/TestJvmReuse.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/test/org/apache/hadoop/mapred/TestJvmReuse.java?rev=1442828&r1=1442827&r2=1442828&view=diff
==
--- 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/mapred/TestJvmReuse.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/test/org/apache/hadoop/mapred/TestJvmReuse.java
 Wed Feb  6 01:37:41 2013
@@ -17,11 +17,12 @@
  */
 package org.apache.hadoop.mapred;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -29,9 +30,9 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.junit.Ignore;
+import org.junit.Test;
 
-@Ignore
-public class TestJvmReuse extends TestCase {
+public class TestJvmReuse {
   private static Path rootDir = new Path(System.getProperty(test.build.data,
   /tmp), TestJvmReuse.class.getName());
   private int numMappers = 5;
@@ -141,6 +142,8 @@ public class TestJvmReuse extends TestCa
 }
   }
 
+  @Ignore
+  @Test
   public void testTaskLogs() throws IOException {
 MiniMRCluster mr = null;
 try {




svn commit: r1442830 - in /hadoop/common/branches/branch-1: CHANGES.txt src/mapred/org/apache/hadoop/mapreduce/split/JobSplitWriter.java

2013-02-05 Thread tucu
Author: tucu
Date: Wed Feb  6 01:42:54 2013
New Revision: 1442830

URL: http://svn.apache.org/viewvc?rev=1442830&view=rev
Log:
MAPREDUCE-4434. Backport MR-2779 (JobSplitWriter.java can't handle large 
job.split file) to branch-1. (kkambatl via tucu)

Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapreduce/split/JobSplitWriter.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1442830&r1=1442829&r2=1442830&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Wed Feb  6 01:42:54 2013
@@ -473,6 +473,9 @@ Release 1.2.0 - unreleased
 
 MAPREDUCE-4967. TestJvmReuse fails on assertion. (kkambatl via tucu)
 
+MAPREDUCE-4434. Backport MR-2779 (JobSplitWriter.java can't handle large 
+job.split file) to branch-1. (kkambatl via tucu)
+
 Release 1.1.2 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapreduce/split/JobSplitWriter.java?rev=1442830&r1=1442829&r2=1442830&view=diff
==
--- 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
 (original)
+++ 
hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
 Wed Feb  6 01:42:54 2013
@@ -116,15 +116,15 @@ public class JobSplitWriter {
 if (array.length != 0) {
   SerializationFactory factory = new SerializationFactory(conf);
   int i = 0;
-  long offset = out.size();
+  long offset = out.getPos();
   for(T split: array) {
-int prevCount = out.size();
+long prevCount = out.getPos();
 Text.writeString(out, split.getClass().getName());
 Serializer<T> serializer = 
   factory.getSerializer((Class<T>) split.getClass());
 serializer.open(out);
 serializer.serialize(split);
-int currCount = out.size();
+long currCount = out.getPos();
 String[] locations = split.getLocations();
 final int max_loc = conf.getInt(MAX_SPLIT_LOCATIONS, 10);
 if (locations.length > max_loc) {
@@ -149,12 +149,12 @@ public class JobSplitWriter {
 SplitMetaInfo[] info = new SplitMetaInfo[splits.length];
 if (splits.length != 0) {
   int i = 0;
-  long offset = out.size();
+  long offset = out.getPos();
   for(org.apache.hadoop.mapred.InputSplit split: splits) {
-int prevLen = out.size();
+long prevLen = out.getPos();
 Text.writeString(out, split.getClass().getName());
 split.write(out);
-int currLen = out.size();
+long currLen = out.getPos();
 String[] locations = split.getLocations();
 final int max_loc = conf.getInt(MAX_SPLIT_LOCATIONS, 10);
 if (locations.length > max_loc) {




svn commit: r1442831 - in /hadoop/common/branches/branch-1: CHANGES.txt conf/task-log4j.properties src/docs/src/documentation/content/xdocs/cluster_setup.xml src/mapred/org/apache/hadoop/mapred/TaskRu

2013-02-05 Thread tucu
Author: tucu
Date: Wed Feb  6 01:48:16 2013
New Revision: 1442831

URL: http://svn.apache.org/viewvc?rev=1442831&view=rev
Log:
MAPREDUCE-4970. Child tasks (try to) create security audit log files. (sandyr 
via tucu)

Added:
hadoop/common/branches/branch-1/conf/task-log4j.properties
Modified:
hadoop/common/branches/branch-1/CHANGES.txt

hadoop/common/branches/branch-1/src/docs/src/documentation/content/xdocs/cluster_setup.xml

hadoop/common/branches/branch-1/src/mapred/org/apache/hadoop/mapred/TaskRunner.java

Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1442831&r1=1442830&r2=1442831&view=diff
==
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Wed Feb  6 01:48:16 2013
@@ -476,6 +476,9 @@ Release 1.2.0 - unreleased
 MAPREDUCE-4434. Backport MR-2779 (JobSplitWriter.java can't handle large 
 job.split file) to branch-1. (kkambatl via tucu)
 
+MAPREDUCE-4970. Child tasks (try to) create security audit log files.
+(sandyr via tucu)
+
 Release 1.1.2 - Unreleased
 
   INCOMPATIBLE CHANGES

Added: hadoop/common/branches/branch-1/conf/task-log4j.properties
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/conf/task-log4j.properties?rev=1442831&view=auto
==
--- hadoop/common/branches/branch-1/conf/task-log4j.properties (added)
+++ hadoop/common/branches/branch-1/conf/task-log4j.properties Wed Feb  6 
01:48:16 2013
@@ -0,0 +1,123 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=INFO,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+#
+# Job Summary Appender 
+#
+# Use following logger to send summary to separate file defined by 
+# hadoop.mapreduce.jobsummary.log.file rolled daily:
+# hadoop.mapreduce.jobsummary.logger=INFO,JSA
+# 
+hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
+hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
+
+# Define the root logger to the system property hadoop.root.logger.
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=ALL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add console to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.iscleanup=false
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
+
+
+# Custom Logging 

[Hadoop Wiki] Update of HadoopUserGroups by Jana Uhlig

2013-02-05 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on Hadoop Wiki for change 
notification.

The HadoopUserGroups page has been changed by Jana Uhlig:
http://wiki.apache.org/hadoop/HadoopUserGroups?action=diff&rev1=67&rev2=68

Comment:
Added Karlsruhe HUG

  
  === Europe ===
   * MunichOpenHadoopUserGroup: Hadoop Users/Developers in south of Germany
+  * 
[[http://www.meetup.com/Hadoop-and-Big-Data-User-Group-in-Karlsruhe-Stuttgart/|Hadoop
 User Group Karlsruhe/Stuttgart, Germany]]
   * [[http://www.meetup.com/hadoop-users-group-uk/|Hadoop Users Group UK]]
   * [[http://www.meetup.com/warsaw-hug|Warsaw Hadoop User Group]]: Hadoop 
users in Warsaw, Poland
   * [[http://www.meetup.com/cracow-hug|Cracow Hadoop User Group]]: Hadoop 
users in Cracow, Poland