寫了很多的 Hadoop MapReduce 程序,發現這些程序有一定的共用性,可以做成模板,之後直接拿來修改,就可以直接開發業務了。經過抽取刪減,大致的模板代碼如下:
package com.test.my.common; import com.test.my.WordCount; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import java.io.IOException; import java.util.StringTokenizer; public class ModuleMapReduce extends Configured implements Tool{ public static class ModuleMapper extends Mapper<IntWritable,Text,Text,IntWritable>{ Text mapKeyText=new Text(); IntWritable mapValueInt=new IntWritable(1); @Override protected void map(IntWritable key, Text value, Context context) throws IOException, InterruptedException { //TODO /* String lineValue=value.toString(); StringTokenizer stringTokenizer=new StringTokenizer(lineValue); while (stringTokenizer.hasMoreTokens()){ String wordValue=stringTokenizer.nextToken(); mapKeyText.set(wordValue); context.write(mapKeyText,new IntWritable(1)); }*/ } } public static class ModuleReducer extends Reducer<Text,IntWritable,Text,IntWritable>{ private static IntWritable outValueInt=new IntWritable(); @Override protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { //TODO int sum=0; /* for(IntWritable value:values){ sum+=value.get(); } outValueInt.set(sum); context.write(key,outValueInt);*/ } } @Override public int run(String[] strings) throws Exception { //讀取配置文件 Configuration configuration=getConf(); //creat job Job job=Job.getInstance(configuration,this.getClass().getSimpleName()); //run jar job.setJarByClass(this.getClass()); //set job //input->map-->reduce-->reduce //input set Path path=new Path(strings[0]); 
FileInputFormat.addInputPath(job,path); //set mapper job.setMapperClass(WordCount.WordCountMapper.class); //set mapper output //TODO job.setMapOutputKeyClass(Text.class); //TODO job.setMapOutputValueClass(IntWritable.class); job.setReducerClass(WordCount.WordCountReducer.class); //TODO job.setOutputKeyClass(Text.class); //TODO job.setOutputValueClass(IntWritable.class); //set output Path output=new Path(strings[1]); FileOutputFormat.setOutputPath(job,output); //submit job boolean isSuccess=job.waitForCompletion(true); return isSuccess?1:0; } public static void main(String[] args) throws Exception { args=new String[2]; args[0]="E:\\test\\wcinput.txt"; args[1]="E:\\test\\output13"; Configuration configuration=new Configuration(); int result=ToolRunner.run(configuration,new ModuleMapReduce(),args); System.out.println("result:"+result); } }