BDA MapReduce Program
Max Temperature:
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class MaxTemperature {
// Mapper class: emits (year, temperature) for each input record
public static class TempMapper
extends Mapper<LongWritable, Text, Text, IntWritable>
{
private Text year = new Text();
private IntWritable temperature = new IntWritable();
@Override
protected void map(LongWritable key, Text value, Context context) throws
IOException, InterruptedException{
String line = value.toString();
String[] fields = line.split(" "); // split("") would split into single characters; records are assumed to be space-separated "year temperature" pairs
year.set(fields[0]);
temperature.set(Integer.parseInt(fields[1]));
context.write(year,temperature);
}
}
// Reducer Class: emits the maximum temperature per year (reduce() filled in
// to match the "Max Temperature" job; the notes left the body empty)
public static class TempReducer
extends Reducer<Text, IntWritable, Text, IntWritable>
{
private IntWritable maxTemp = new IntWritable();
@Override
protected void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException{
int max = Integer.MIN_VALUE;
for (IntWritable val : values) {
max = Math.max(max, val.get());
}
maxTemp.set(max);
context.write(key, maxTemp);
}
}
//Main Driver Method
public static void main(String[] args) throws Exception{
Configuration conf = new Configuration();
Job job = Job.getInstance(conf, "Max Temperature");
job.setJarByClass(MaxTemperature.class);
job.setMapperClass(TempMapper.class);
job.setReducerClass(TempReducer.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job,new Path(args[0]));
FileOutputFormat.setOutputPath(job,new Path(args[1]));
System.exit(job.waitForCompletion(true) ? 0 : 1);
}
}
Even Odd:
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
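// The notes jump from the imports straight to the job setup below; the class,
// mapper, reducer, and start of main() here are a minimal sketch reconstructed
// from the configuration calls (EvenOddCount, EvenOddMapper, EvenOddReducer),
// assuming one integer per input line; the job name string is also an assumption
public class EvenOddCount {
// Mapper class: emits ("Even", 1) or ("Odd", 1) for each number
public static class EvenOddMapper
extends Mapper<LongWritable, Text, Text, IntWritable>
{
private final static IntWritable one = new IntWritable(1);
private Text parity = new Text();
@Override
protected void map(LongWritable key, Text value, Context context) throws
IOException, InterruptedException{
int number = Integer.parseInt(value.toString().trim());
parity.set(number % 2 == 0 ? "Even" : "Odd");
context.write(parity, one);
}
}
// Reducer Class: sums the counts per parity; summing is associative, so the
// same class is safe to reuse as the combiner (as the setup below does)
public static class EvenOddReducer
extends Reducer<Text, IntWritable, Text, IntWritable>
{
private IntWritable result = new IntWritable();
@Override
protected void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException{
int sum = 0;
for (IntWritable val : values) {
sum += val.get();
}
result.set(sum);
context.write(key, result);
}
}
//Main Driver Method
public static void main(String[] args) throws Exception{
Configuration conf = new Configuration();
Job job = Job.getInstance(conf, "Even Odd Count");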
job.setJarByClass(EvenOddCount.class);
job.setMapperClass(EvenOddMapper.class);
job.setCombinerClass(EvenOddReducer.class);
job.setReducerClass(EvenOddReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
System.exit(job.waitForCompletion(true)?0:1);
}
}
Student Grade:
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
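// The class and field declarations are missing from the notes; this minimal
// reconstruction (the class name StudentGrade is an assumption) makes the
// map() method below compile
public class StudentGrade {
// Mapper class: emits (student name, score) for each space-separated record
public static class GradeMapper
extends Mapper<LongWritable, Text, Text, IntWritable>
{
private Text studentName = new Text();
private IntWritable score = new IntWritable();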
@Override
protected void map(LongWritable key, Text value, Context context) throws
IOException, InterruptedException{
String line = value.toString();
String[] fields = line.split(" ");
studentName.set(fields[0]);
score.set(Integer.parseInt(fields[1]));
context.write(studentName, score);
}
}
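// Reducer Class: missing from the notes; this sketch averages each student's
// scores and writes a letter grade as Text, matching the Text output value
// class set below; the grade thresholds are assumptions
public static class GradeReducer
extends Reducer<Text, IntWritable, Text, Text>
{
private Text grade = new Text();
@Override
protected void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException{
int sum = 0, count = 0;
for (IntWritable val : values) {
sum += val.get();
count++;
}
int avg = sum / count;
// assumed thresholds: >= 80 A, >= 60 B, >= 40 C, otherwise F
if (avg >= 80) grade.set("A");
else if (avg >= 60) grade.set("B");
else if (avg >= 40) grade.set("C");
else grade.set("F");
context.write(key, grade);
}
}
//Main Driver Method
public static void main(String[] args) throws Exception{
Configuration conf = new Configuration();
Job job = Job.getInstance(conf, "Student Grade");
job.setJarByClass(StudentGrade.class);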
job.setMapperClass(GradeMapper.class);
job.setReducerClass(GradeReducer.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
System.exit(job.waitForCompletion(true) ? 0 : 1);
}
}
Word Count:
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
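// The notes end at the imports; the rest is the standard Hadoop word count
// example (mapper, combiner/reducer, and driver) filled in for completeness
public class WordCount {
// Mapper class: tokenizes each line and emits (word, 1) per token
public static class TokenizerMapper
extends Mapper<Object, Text, Text, IntWritable>
{
private final static IntWritable one = new IntWritable(1);
private Text word = new Text();
public void map(Object key, Text value, Context context)
throws IOException, InterruptedException{
StringTokenizer itr = new StringTokenizer(value.toString());
while (itr.hasMoreTokens()) {
word.set(itr.nextToken());
context.write(word, one);
}
}
}
// Reducer Class: sums the counts for each word; also used as the combiner
public static class IntSumReducer
extends Reducer<Text, IntWritable, Text, IntWritable>
{
private IntWritable result = new IntWritable();
public void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException{
int sum = 0;
for (IntWritable val : values) {
sum += val.get();
}
result.set(sum);
context.write(key, result);
}
}
//Main Driver Method
public static void main(String[] args) throws Exception{
Configuration conf = new Configuration();
Job job = Job.getInstance(conf, "word count");
job.setJarByClass(WordCount.class);
job.setMapperClass(TokenizerMapper.class);
job.setCombinerClass(IntSumReducer.class);
job.setReducerClass(IntSumReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
System.exit(job.waitForCompletion(true) ? 0 : 1);
}
}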