Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification.
The "WordCount" page has been changed by RobinWenglewski. The comment on this change is: example updated to new API. http://wiki.apache.org/hadoop/WordCount?action=diff&rev1=12&rev2=13 -------------------------------------------------- import org.apache.hadoop.fs.Path; import org.apache.hadoop.conf.*; import org.apache.hadoop.io.*; - import org.apache.hadoop.mapred.*; + import org.apache.hadoop.mapreduce.*; - import org.apache.hadoop.util.*; + import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; + import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; + import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; + import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; public class WordCount { - public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> { + public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> { private final static IntWritable one = new IntWritable(1); private Text word = new Text(); - public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException { + public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); StringTokenizer tokenizer = new StringTokenizer(line); while (tokenizer.hasMoreTokens()) { word.set(tokenizer.nextToken()); - output.collect(word, one); + context.write(word, one); } } } - public static class Reduce extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable> { + public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> { - public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException { + public void reduce(Text key, Iterator<IntWritable> values, Context context) + throws IOException, InterruptedException { int sum = 0; while (values.hasNext()) { sum += values.next().get(); } - output.collect(key, new IntWritable(sum)); + context.write(key, new IntWritable(sum)); } } public static void main(String[] args) throws Exception { - JobConf conf = new JobConf(WordCount.class); - conf.setJobName("wordcount"); + Configuration conf = new Configuration(); + + Job job = new Job(conf, "wordcount"); + + job.setOutputKeyClass(Text.class); + job.setOutputValueClass(IntWritable.class); + + job.setMapperClass(Map.class); + job.setReducerClass(Reduce.class); + job.setInputFormatClass(TextInputFormat.class); + job.setOutputFormatClass(TextOutputFormat.class); - conf.setOutputKeyClass(Text.class); - conf.setOutputValueClass(IntWritable.class); - - conf.setMapperClass(Map.class); - conf.setCombinerClass(Reduce.class); - conf.setReducerClass(Reduce.class); - conf.setInputFormat(TextInputFormat.class); - conf.setOutputFormat(TextOutputFormat.class); + FileInputFormat.addInputPath(job, new Path(args[0])); + FileOutputFormat.setOutputPath(job, new Path(args[1])); + job.waitForCompletion(true); - FileInputFormat.setInputPaths(conf, new Path(args[0])); - FileOutputFormat.setOutputPath(conf, new Path(args[1])); - - JobClient.runJob(conf); } }