The problem I am facing is this:

1. I have one Windows system running Eclipse with the Hadoop plugin. It is
not part of the Hadoop cluster, but through the plugin I can connect to the
Hadoop machines and browse the DFS and MAPRED folders. Since I can view the
contents of HDFS this way, I am assuming that I can connect to the Hadoop
cluster from my Windows machine.
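
For reference, this is a minimal sketch of how I understand the same check can
be done from plain Java on the Windows side (the NameNode address is the one I
use in the job below; please treat the whole snippet as an assumption about my
setup rather than something from the plugin itself):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical connectivity check: list an HDFS directory from the Windows
// client, roughly what the Eclipse plugin shows when browsing DFS.
public class ListHdfs {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://162.192.100.46:54310"); // assumed NameNode address
        FileSystem fs = FileSystem.get(conf);
        for (FileStatus status : fs.listStatus(new Path("/user/hadoop"))) {
            System.out.println(status.getPath());
        }
    }
}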

2. Now I am writing a program on my Windows machine and trying to run it
on the Hadoop machines, but whenever I try to do that I get the following
error:


11/04/25 18:24:21 INFO jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
11/04/25 18:24:21 WARN mapred.JobClient: Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
11/04/25 18:24:21 INFO input.FileInputFormat: Total input paths to process : 1
Exception in thread "main" java.io.IOException: Cannot run program "chmod": CreateProcess error=2, The system cannot find the file specified
    at java.lang.ProcessBuilder.start(ProcessBuilder.java:460)
    at org.apache.hadoop.util.Shell.runCommand(Shell.java:149)
    at org.apache.hadoop.util.Shell.run(Shell.java:134)
    at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:286)
    at org.apache.hadoop.util.Shell.execCommand(Shell.java:354)
    at org.apache.hadoop.util.Shell.execCommand(Shell.java:337)
    at org.apache.hadoop.fs.RawLocalFileSystem.execCommand(RawLocalFileSystem.java:481)
    at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:473)
    at org.apache.hadoop.fs.FilterFileSystem.setPermission(FilterFileSystem.java:280)
    at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:372)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:484)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:465)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:372)
    at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:208)
    at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:142)
    at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:1216)
    at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:1197)
    at org.apache.hadoop.mapred.LocalJobRunner$Job.<init>(LocalJobRunner.java:92)
    at org.apache.hadoop.mapred.LocalJobRunner.submitJob(LocalJobRunner.java:373)
    at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:800)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:432)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:447)
    at WordCount.run(WordCount.java:94)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65)
    at WordCount.main(WordCount.java:98)
Caused by: java.io.IOException: CreateProcess error=2, The system cannot find the file specified
    at java.lang.ProcessImpl.create(Native Method)
    at java.lang.ProcessImpl.<init>(ProcessImpl.java:81)
    at java.lang.ProcessImpl.start(ProcessImpl.java:30)
    at java.lang.ProcessBuilder.start(ProcessBuilder.java:453)
    ... 24 more
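
From the stack trace it looks like the job is being picked up by LocalJobRunner,
and the failing chmod call comes from RawLocalFileSystem.setPermission. I am not
sure whether my client-side configuration is supposed to point at the cluster
explicitly; this is a sketch of what I understand the start of run() would then
look like (Hadoop 0.20-style keys; the JobTracker host:port is only a
placeholder, not my real setting):

    // Sketch only, based on my understanding: set the cluster addresses on the
    // client so the job is submitted to the JobTracker instead of LocalJobRunner.
    Configuration conf = new Configuration();
    conf.set("fs.default.name", "hdfs://162.192.100.46:54310");  // NameNode, same as in my job
    conf.set("mapred.job.tracker", "jobtracker-host:54311");     // placeholder JobTracker address
    Job job = new Job(conf, "WordCount example for hadoop 0.20.1");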


My program is:

import java.io.*;
import java.util.*;

import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.*;

public class WordCount extends Configured implements Tool {

    public static class MapClass extends Mapper<Object, Text, Text, IntWritable> {
        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            StringTokenizer itr = new StringTokenizer(line);
            while (itr.hasMoreTokens()) {
                word.set(itr.nextToken());
                context.write(word, one);
            }
        }
    }

    /**
     * A reducer class that just emits the sum of the input values.
     */
    public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    static int printUsage() {
        System.out.println("wordcount [-r <reduces>] <input> <output>");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    public int run(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf, "WordCount example for hadoop 0.20.1");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(MapClass.class);
        job.setCombinerClass(Reduce.class);
        job.setReducerClass(Reduce.class);
        // The keys are words (strings).
        job.setOutputKeyClass(Text.class);
        // The values are counts (ints).
        job.setOutputValueClass(IntWritable.class);

        List<String> other_args = new ArrayList<String>();
        for (int i = 0; i < args.length; ++i) {
            try {
                // The number of map tasks used to be configurable, but with
                // Hadoop 0.20.1 it is decided by the framework, since it depends
                // heavily on the input data size and how the input is split.
                if ("-r".equals(args[i])) {
                    job.setNumReduceTasks(Integer.parseInt(args[++i]));
                } else {
                    other_args.add(args[i]);
                }
            } catch (NumberFormatException except) {
                System.out.println("ERROR: Integer expected instead of " + args[i]);
                return printUsage();
            } catch (ArrayIndexOutOfBoundsException except) {
                System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
                return printUsage();
            }
        }

        // Make sure there are exactly 2 parameters left.
        /*
        if (other_args.size() != 2) {
            System.out.println("ERROR: Wrong number of parameters: "
                    + other_args.size() + " instead of 2.");
            return printUsage();
        }
        */

        // Input and output paths are currently hard-coded on HDFS.
        FileInputFormat.addInputPath(job,
                new Path("hdfs://162.192.100.46:54310/user/hadoop/gutenberg"));
        FileOutputFormat.setOutputPath(job,
                new Path("hdfs://162.192.100.46:54310/user/hadoop/gutenberg-output11"));

        // Submit the job and wait for completion, showing progress to the user.
        job.waitForCompletion(true);
        return 0;
    }

    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new WordCount(), args);
        System.exit(res);
    }
}

Any help will be greatly appreciated.

Regards,
Praveenesh
