Dear all,
I am puzzled by an error that occurred while running a C++ program
through Hadoop Pipes.
The exception below is thrown while the job runs; the failure happens
in the reduce phase:
[hadoop@ws37-mah-lin hadoop-0.20.2]$ bin/hadoop pipes -D
hadoop.pipes.java.recordreader=true -D
hadoop.pipes.java.recordwriter=true -input gutenberg -output
gutenberg_cuda_output_final -program bin/wordcount1
11/03/14 17:27:29 WARN mapred.JobClient: No job jar file set. User
classes may not be found. See JobConf(Class) or JobConf#setJar(String).
11/03/14 17:27:29 INFO mapred.FileInputFormat: Total input paths to
process : 3
11/03/14 17:27:30 INFO mapred.JobClient: Running job: job_201103141407_0003
11/03/14 17:27:31 INFO mapred.JobClient: map 0% reduce 0%
11/03/14 17:27:46 INFO mapred.JobClient: map 100% reduce 0%
11/03/14 17:27:54 INFO mapred.JobClient: map 100% reduce 33%
11/03/14 17:27:56 INFO mapred.JobClient: Task Id :
attempt_201103141407_0003_r_000000_0, Status : FAILED
java.net.SocketException: Broken pipe
at java.net.SocketOutputStream.socketWrite0(Native Method)
at
java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:92)
at java.net.SocketOutputStream.write(SocketOutputStream.java:136)
at
java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:65)
at java.io.BufferedOutputStream.write(BufferedOutputStream.java:109)
at java.io.DataOutputStream.write(DataOutputStream.java:90)
at
org.apache.hadoop.mapred.pipes.BinaryProtocol.writeObject(BinaryProtocol.java:333)
at
org.apache.hadoop.mapred.pipes.BinaryProtocol.reduceValue(BinaryProtocol.java:302)
at
org.apache.hadoop.mapred.pipes.PipesReducer.reduce(PipesReducer.java:66)
at
org.apache.hadoop.mapred.pipes.PipesReducer.reduce(PipesReducer.java:37)
at
org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:463)
at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:411)
at org.apache.hadoop.mapred.Child.main(Child.java:170)
11/03/14 17:27:57 INFO mapred.JobClient: map 100% reduce 0%
11/03/14 17:28:07 INFO mapred.JobClient: map 100% reduce 33%
11/03/14 17:28:09 INFO mapred.JobClient: Task Id :
attempt_201103141407_0003_r_000000_1, Status : FAILED
java.net.SocketException: Broken pipe
at java.net.SocketOutputStream.socketWrite0(Native Method)
at
java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:92)
at java.net.SocketOutputStream.write(SocketOutputStream.java:136)
at
java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:65)
at java.io.BufferedOutputStream.write(BufferedOutputStream.java:109)
at java.io.DataOutputStream.write(DataOutputStream.java:90)
at
org.apache.hadoop.mapred.pipes.BinaryProtocol.writeObject(BinaryProtocol.java:333)
at
org.apache.hadoop.mapred.pipes.BinaryProtocol.reduceValue(BinaryProtocol.java:302)
at
org.apache.hadoop.mapred.pipes.PipesReducer.reduce(PipesReducer.java:66)
at
org.apache.hadoop.mapred.pipes.PipesReducer.reduce(PipesReducer.java:37)
at
org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:463)
at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:411)
at org.apache.hadoop.mapred.Child.main(Child.java:170)
I have attached the source code below for reference.
Thanks & best regards,
Adarsh Sharma
#include <algorithm>
#include <limits>
#include <string>
#include <stdio.h>
#include<stdlib.h>
#include <cuda.h>
#include "stdint.h" // <--- to prevent uint64_t errors!
#include "hadoop/Pipes.hh"
#include "hadoop/TemplateFactory.hh"
#include "hadoop/StringUtils.hh"
//#include "/usr/local/cuda/include/shrUtils.h"
#include "/usr/local/cuda/include/cuda_runtime_api.h"
using namespace std;
class WordCountMapper : public HadoopPipes::Mapper {
public:
  // Constructor: no per-task state is needed.
  WordCountMapper( HadoopPipes::TaskContext& context ) {
  }

  // map: emits (CUDA device name, "abc") for every input record.
  //
  // BUG FIX: the original declared `cudaDeviceProp prop;` and immediately
  // read `prop.name` without ever calling cudaGetDeviceProperties(). The
  // struct was uninitialized, so constructing a std::string from the
  // possibly-unterminated garbage in prop.name is undefined behavior and
  // can write arbitrary bytes into the Pipes binary protocol — a likely
  // cause of the "Broken pipe" failures seen in the job log.
  void map( HadoopPipes::MapContext& context )
  {
    cudaDeviceProp prop = {};  // zero-initialize so name is a valid string even on partial failure
    if (cudaGetDeviceProperties(&prop, 0) != cudaSuccess) {
      // No usable CUDA device: emit nothing rather than garbage.
      return;
    }
    std::string sProfileString = prop.name;
    std::string s2 = "abc";
    context.emit(sProfileString, s2);
  }
};
class WordCountReducer : public HadoopPipes::Reducer {
public:
  // Constructor: no per-task state is needed.
  WordCountReducer(HadoopPipes::TaskContext& context) {
  }

  // reduce: re-emits every value grouped under the current key.
  //
  // BUG FIX: the original called context.getInputValue() without ever
  // calling context.nextValue(). The Pipes reduce contract (see the
  // stock wordcount example shipped with Hadoop) requires advancing the
  // value iterator with nextValue() before each read; skipping it reads
  // an un-advanced value and leaves the value stream unconsumed, which
  // can desynchronize/kill the C++ side — matching the Java side's
  // SocketException (Broken pipe) in BinaryProtocol.reduceValue.
  void reduce( HadoopPipes::ReduceContext& context ) {
    while (context.nextValue()) {
      context.emit(context.getInputKey(), context.getInputValue());
    }
  }
};
int main(int argc, char *argv[]) {
return HadoopPipes::runTask(HadoopPipes::TemplateFactory<WordCountMapper, WordCountReducer >() );
}