1.下載hadoop插件(hadoop下載包裏好像有這個插件)
hadoop-eclipse-plugin-2.7.1分享連接 https://pan.baidu.com/s/1sldBu9n
放到eclipse/plugins文件夾下,重啓eclipse
2.window -> preferences 點擊確定 找到 hadoop map/reduce 在右窗口填上hadoop安裝地址
3.出現一個和控制檯同樣位置的map/reduce location ,右擊空白處 選擇new hadoop location
location name填上名字,Map/Reduce (V2) Master的端口填mapred-site.xml端口 。DFS Master填core-site.xml端口,點擊確定。host都是填localhost。
4.File-->New-->Other-->Map/Reduce Project 建立文件 取名 新建java文件代碼以下
5.源代碼以下
package com.filex;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

/**
 * Classic Hadoop MapReduce word-count job: reads text files from an HDFS
 * input directory, tokenizes each line on whitespace, and writes
 * (word, total-count) pairs to an HDFS output directory.
 *
 * <p>Note: the original file imported both {@code Mapper.Context} and
 * {@code Reducer.Context}; two single-type imports sharing the simple name
 * {@code Context} do not compile in Java, so those imports are removed and
 * each inner class uses its own inherited {@code Context} type instead.
 */
public class WordCount {

    /**
     * Configures and submits the job, then blocks until it finishes.
     *
     * @param args optional: {@code args[0]} = HDFS input directory,
     *             {@code args[1]} = HDFS output directory (must not exist —
     *             Hadoop refuses to overwrite an existing output dir).
     *             When fewer than two args are given, the original
     *             hard-coded localhost paths are used, so existing callers
     *             keep working unchanged.
     * @throws Exception propagated from job setup/submission
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Job.getInstance(conf) replaces the deprecated new Job(conf) ctor.
        Job job = Job.getInstance(conf);
        job.setJarByClass(WordCount.class);
        job.setJobName("wordcount");
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setMapperClass(WordCountMap.class);
        job.setReducerClass(WordCountReduce.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        // Allow command-line paths (as the tutorial's own comment suggests),
        // falling back to the original hard-coded defaults.
        String in = args.length >= 2 ? args[0] : "hdfs://localhost:9000/in";
        String out = args.length >= 2 ? args[1] : "hdfs://localhost:9000/output";
        FileInputFormat.addInputPath(job, new Path(in));
        FileOutputFormat.setOutputPath(job, new Path(out));

        // Propagate success/failure as the process exit code so shell
        // scripts ("hadoop jar ...") can detect a failed job; the original
        // discarded waitForCompletion's boolean result.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /** Mapper: splits each input line on whitespace and emits (word, 1). */
    public static class WordCountMap
            extends Mapper<LongWritable, Text, Text, IntWritable> {

        // Reused across map() calls to avoid per-record allocations.
        private final IntWritable one = new IntWritable(1);
        private final Text word = new Text();

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // StringTokenizer's default delimiters split on any whitespace.
            StringTokenizer token = new StringTokenizer(value.toString());
            while (token.hasMoreTokens()) {
                this.word.set(token.nextToken());
                context.write(this.word, this.one);
            }
        }
    }

    /** Reducer: sums all counts emitted for the same word. */
    public static class WordCountReduce
            extends Reducer<Text, IntWritable, Text, IntWritable> {

        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }
}
ps:注意一下注釋部分,須要確認你須要計算的文件
6.直接運行 或者導出
7.若是導出,運行命令:
hadoop jar .jar路徑 運行的類(含包路徑) 類的參數
hadoop jar /home/user/xxx.jar com.filex.WordCount (輸入輸出文件已經設置好)
hadoop jar /home/user/xxx.jar com.filex.WordCount /in /output ( 輸入輸出文件未設置好)。