import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Reused across map() calls to avoid allocating a new object per record
    private Text k = new Text();
    private IntWritable v = new IntWritable(1);

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws java.io.IOException, InterruptedException {
        String line = value.toString();
        // Split on single spaces; runs of spaces produce empty tokens,
        // which are filtered out below
        String[] words = line.split(" ");
        for (String word : words) {
            String trim = word.trim();
            // Skip empty tokens so blank keys are never emitted
            if (!trim.isEmpty()) {
                k.set(trim);
                context.write(k, v);
            }
        }
    }
}
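For example, given the input line "hello world hello", this mapper emits (hello, 1), (world, 1), (hello, 1); the MapReduce framework then groups values by key, so the reducer below receives hello with the iterable [1, 1] and world with [1].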
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    @Override
    protected void reduce(Text text, Iterable<IntWritable> values, Context context)
            throws java.io.IOException, InterruptedException {
        // Sum all counts emitted for this word across every mapper
        int sum = 0;
        for (IntWritable count : values) {
            sum += count.get();
        }
        // Defensive check: skip blank keys
        if (!text.toString().trim().isEmpty()) {
            context.write(text, new IntWritable(sum));
        }
    }
}
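The mapper and reducer on their own do not form a runnable job; a driver is needed to wire them together. Below is a minimal driver sketch, assuming the two classes above are on the job's classpath; the class name WordCountDriver and the use of args[0]/args[1] for the input and output paths are illustrative choices, not part of the original code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "word count");
        job.setJarByClass(WordCountDriver.class);

        // Wire in the mapper and reducer defined above
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);

        // Also run the reducer as a combiner to pre-aggregate map output
        job.setCombinerClass(WordCountReducer.class);

        // Key/value types emitted by the job
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Input and output paths taken from the command line (illustrative)
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

With the classes packaged into a jar, the job could then be launched along the lines of hadoop jar wordcount.jar WordCountDriver /input /output (paths illustrative). Registering the reducer as the combiner is safe here because summing integer counts is associative and commutative.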