Hadoop Code

This Java code defines a WordCount program that uses MapReduce to count the frequency of words in a text file. It contains classes for the Mapper and Reducer that implement the map and reduce functions. The main method configures and runs a MapReduce job that takes an input path, runs the map and reduce tasks, and outputs the results.
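As a concrete illustration of the expected behavior (the sample text below is only an illustration, not part of the original code), an input file containing the two lines

hello world
hello hadoop

would produce one tab-separated line per distinct word, sorted by key, since TextOutputFormat uses a tab as its default key/value separator:

hadoop	1
hello	2
world	1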

import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

public class WordCount {

    // Mapper: splits each input line into tokens and emits (word, 1) for every token.
    public static class Map extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, IntWritable> {

        private static final IntWritable one = new IntWritable(1);
        private final Text word = new Text();

        @Override
        public void map(LongWritable key, Text value,
                        OutputCollector<Text, IntWritable> output, Reporter reporter)
                throws IOException {
            String line = value.toString();
            StringTokenizer tokenizer = new StringTokenizer(line);
            while (tokenizer.hasMoreTokens()) {
                word.set(tokenizer.nextToken());
                output.collect(word, one);
            }
        }
    }

    // Reducer: sums the counts emitted for each word and writes (word, total).
    public static class Reduce extends MapReduceBase
            implements Reducer<Text, IntWritable, Text, IntWritable> {

        @Override
        public void reduce(Text key, Iterator<IntWritable> values,
                           OutputCollector<Text, IntWritable> output, Reporter reporter)
                throws IOException {
            int sum = 0;
            while (values.hasNext()) {
                sum += values.next().get();
            }
            output.collect(key, new IntWritable(sum));
        }
    }

    // Configures and submits the job: args[0] is the input path, args[1] the output path.
    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf(WordCount.class);
        conf.setJobName("wordcount");

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(IntWritable.class);

        conf.setMapperClass(Map.class);
        conf.setReducerClass(Reduce.class);

        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);

        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        JobClient.runJob(conf);
    }
}
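One possible way to compile and run the job on a cluster is sketched below; the jar name, directory names, and HDFS paths are placeholders chosen for illustration, not part of the original code.

javac -classpath $(hadoop classpath) -d classes WordCount.java
jar -cvf wordcount.jar -C classes .
hadoop jar wordcount.jar WordCount /user/hadoop/input /user/hadoop/output
hdfs dfs -cat /user/hadoop/output/part-00000

Note that the output directory must not already exist; the job creates it and writes one part-NNNNN file per reducer.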
