Sorry, just check the attachment now.

Adarsh Sharma wrote:
Dear all,

Can someone please tell me how to apply a patch to the hadoop-0.20.2 package?

I attached the patch.
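(For reference, a diff like the attached one, with SVN-style "Index:" headers and paths relative to the source root, is normally applied from the top of the extracted tree with the standard patch tool and then rebuilt with Ant, which is what the 0.20.x line uses. The file name TestPipes.patch below is only a placeholder for wherever the attachment is saved, and the exact Ant targets may differ for your build:

    cd hadoop-0.20.2                       # top of the extracted source tree
    patch -p0 --dry-run < TestPipes.patch  # check that all hunks apply cleanly
    patch -p0 < TestPipes.patch            # apply the patch for real
    ant                                    # rebuild using the bundled build.xml
)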

Please find the attachment. I just followed the steps below for Hadoop:
1. Download Hadoop-0.20.2.tar.gz
2. Extract the file.
3. Set the configuration in the *-site.xml files (a minimal example follows this list).
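(For step 3, a minimal pseudo-distributed setup on 0.20.2 usually edits three files under conf/. This is only a sketch; the host names and ports below are examples and must match your own cluster:

    conf/core-site.xml:
      <configuration>
        <property>
          <name>fs.default.name</name>           <!-- NameNode URI -->
          <value>hdfs://localhost:9000</value>
        </property>
      </configuration>

    conf/hdfs-site.xml:
      <configuration>
        <property>
          <name>dfs.replication</name>           <!-- one replica on a single node -->
          <value>1</value>
        </property>
      </configuration>

    conf/mapred-site.xml:
      <configuration>
        <property>
          <name>mapred.job.tracker</name>        <!-- JobTracker host:port -->
          <value>localhost:9001</value>
        </property>
      </configuration>
)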

Thanks & best regards,
Adarsh Sharma


Index: src/test/org/apache/hadoop/mapred/pipes/TestPipes.java
===================================================================
--- src/test/org/apache/hadoop/mapred/pipes/TestPipes.java	(revision 565616)
+++ src/test/org/apache/hadoop/mapred/pipes/TestPipes.java	(working copy)
@@ -150,7 +150,8 @@
     JobConf job = mr.createJobConf();
     job.setInputFormat(WordCountInputFormat.class);
     FileSystem local = FileSystem.getLocal(job);
-    Path testDir = new Path(System.getProperty("test.build.data"), "pipes");
+    Path testDir = new Path("file:" + System.getProperty("test.build.data"), 
+                            "pipes");
     Path inDir = new Path(testDir, "input");
     Path outDir = new Path(testDir, "output");
     Path wordExec = new Path("/testing/bin/application");
Index: src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java
===================================================================
--- src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java	(revision 565616)
+++ src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java	(working copy)
@@ -35,7 +35,7 @@
     private String filename;
     WordCountInputSplit() { }
     WordCountInputSplit(Path filename) {
-      this.filename = filename.toString();
+      this.filename = filename.toUri().getPath();
     }
     public void write(DataOutput out) throws IOException { 
       Text.writeString(out, filename); 
Index: src/examples/pipes/impl/wordcount-nopipe.cc
===================================================================
--- src/examples/pipes/impl/wordcount-nopipe.cc	(revision 565616)
+++ src/examples/pipes/impl/wordcount-nopipe.cc	(working copy)
@@ -87,9 +87,15 @@
     const HadoopPipes::JobConf* job = context.getJobConf();
     int part = job->getInt("mapred.task.partition");
     std::string outDir = job->get("mapred.output.dir");
+    // remove the file: schema substring
+    std::string::size_type posn = outDir.find(":");
+    HADOOP_ASSERT(posn != std::string::npos, 
+                  "no schema found in output dir: " + outDir);
+    outDir.erase(0, posn+1);
     mkdir(outDir.c_str(), 0777);
     std::string outFile = outDir + "/part-" + HadoopUtils::toString(part);
     file = fopen(outFile.c_str(), "wt");
+    HADOOP_ASSERT(file != NULL, "can't open file for writing: " + outFile);
   }
 
   ~WordCountWriter() {
