1. First, download the plugin that matches your Hadoop version. Here are two examples:
hadoop-1.2.1 plugin: http://download.csdn.net/download/hanyongan300/6238153
hadoop-2.2.0 plugin: http://blog.csdn.net/twlkyao/article/details/17334693
In the previous chapter I also explained how to build the plugin for a given version; these plugins can also be found online.
2. Copy the plugin into the dropins directory under the MyEclipse installation root.
3. Start MyEclipse and open the perspective:
【Window】->【Open Perspective】->【Other...】->【Map/Reduce】->【OK】
4. Open a view:
【Window】->【Show View】->【Other...】->【MapReduce Tools】->【Map/Reduce Locations】->【OK】
5. Add a Hadoop location:
Location name: I entered "hadoop"; you can use any name here.
In the Map/Reduce Master box:
These two parameters are the IP and port from the mapred.job.tracker property in mapred-site.xml (a sketch of that entry follows this list).
Host: the cluster machine where the JobTracker runs; here I enter 192.168.0.155. My MyEclipse runs on Windows, so to connect to Hadoop on Linux you must give the actual address.
Port: the JobTracker's port; here it is 9001.
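For reference, a minimal sketch of the matching mapred-site.xml entry, assuming the values entered above (substitute your own JobTracker address):

<!-- mapred-site.xml (sketch; IP and port taken from the dialog above) -->
<configuration>
  <property>
    <name>mapred.job.tracker</name>
    <value>192.168.0.155:9001</value>
  </property>
</configuration>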
In the DFS Master box:
These two parameters are the IP and port from the fs.default.name property in core-site.xml (a sketch follows below).
Host: the cluster machine where the NameNode runs; here enter 192.168.0.155.
Port: the NameNode's port; here enter 9000.
(Use M/R master host: if this checkbox is checked, the host defaults to the same value as in the Map/Reduce Master box; if unchecked, you can enter your own. Here the JobTracker and NameNode are on the same machine, so the values are the same and I check it.)
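Likewise, a minimal sketch of the matching core-site.xml entry, assuming the values above (substitute your own NameNode address):

<!-- core-site.xml (sketch; IP and port taken from the dialog above) -->
<configuration>
  <property>
    <name>fs.default.name</name>
    <value>hdfs://192.168.0.155:9000</value>
  </property>
</configuration>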
User name: the user name used to connect to Hadoop; the user I created is hadoop.
Then continue to fill in the Advanced parameters tab.
Here you only need to fill in the hadoop.tmp.dir entry, with the same value as configured in core-site.xml (a sketch follows).
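A sketch of what that property looks like inside the <configuration> element of core-site.xml; the path /home/hadoop/tmp is only a placeholder, use whatever your cluster actually configures:

<!-- core-site.xml (sketch; /home/hadoop/tmp is a placeholder value) -->
<property>
  <name>hadoop.tmp.dir</name>
  <value>/home/hadoop/tmp</value>
</property>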
Then close and restart MyEclipse, and you will see that the connection succeeds.
Connecting to DFS lets you browse the HDFS directory tree, and from here you can operate on HDFS directly. Right-click to get the following options:
Create new directory: create a directory
Refresh: refresh the view
Upload files to DFS: upload files
Upload directory to DFS: upload a directory
To create a Hadoop project, first configure 【Window】->【Preferences】->【Hadoop Map/Reduce】 and point it at your local Hadoop installation.
Then configure the remote Hadoop IP locally: open C:\Windows\System32\drivers\etc\hosts and add the IP and hostname of the server where Hadoop is installed (an example entry follows).
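For example, the entry might look like this (the hostname "master" is only a placeholder; use your server's actual hostname):

192.168.0.155   master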
Now you can create a MapReduce project in MyEclipse: 【File】->【New】->【Project...】.
Then write code as usual. Below is the code that operates on HBase:
package hbase;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Create, read, update and delete operations against HBase.
 * @author Administrator
 */
public class HbaseTest {
    private static Configuration conf = null;

    static {
        conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        conf.set("hbase.zookeeper.quorum", "192.168.0.26");
        // configuration.set("hbase.master", "192.168.1.25:6000");

        // Workaround for running the Hadoop client on Windows: point
        // hadoop.home.dir at the working directory and create an empty
        // bin/winutils.exe so the client does not fail when it looks for it.
        File workaround = new File(".");
        System.getProperties().put("hadoop.home.dir", workaround.getAbsolutePath());
        new File("./bin").mkdirs();
        try {
            new File("./bin/winutils.exe").createNewFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Create a table with the given column families.
     */
    public static void createTable(String tablename, String[] cfg) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (admin.tableExists(tablename)) {
            System.out.println("Table already exists!");
        } else {
            HTableDescriptor tableDesc = new HTableDescriptor(tablename);
            for (int i = 0; i < cfg.length; i++) {
                tableDesc.addFamily(new HColumnDescriptor(cfg[i]));
            }
            admin.createTable(tableDesc);
            System.out.println("Table created successfully!");
        }
    }

    /**
     * Delete a table (it must be disabled first).
     */
    public static void deleteTable(String tablename) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(conf);
        admin.disableTable(tablename);
        admin.deleteTable(tablename);
        System.out.println("Table deleted successfully");
    }

    /**
     * Insert a single cell (one row / family / qualifier / value).
     */
    public static void writeRow(String tablename, String rowkey, String family,
                                String qualifier, String value) {
        try {
            HTable table = new HTable(conf, tablename);
            Put put = new Put(Bytes.toBytes(rowkey));
            put.add(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(value));
            table.put(put);
            System.out.println("Data inserted successfully");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Scan the rows between a start and a stop row key.
     */
    public static void getAll(String tableName) {
        try {
            HTable table = new HTable(conf, tableName);
            Scan scan = new Scan(Bytes.toBytes("a"), Bytes.toBytes("z"));
            // scan.addColumn(Bytes.toBytes("a"), Bytes.toBytes("z"));
            // SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("a"),
            //         Bytes.toBytes("z"), CompareOp.NOT_EQUAL, Bytes.toBytes("0"));
            // filter.setFilterIfMissing(true);
            // scan.setFilter(filter);
            ResultScanner ss = table.getScanner(scan);
            for (Result r : ss) {
                for (KeyValue kv : r.raw()) {
                    System.out.print("row key: " + new String(kv.getRow()) + " ");
                    System.out.print("column family: " + new String(kv.getFamily()) + " ");
                    System.out.print("qualifier: " + new String(kv.getQualifier()) + " ");
                    System.out.print("timestamp: " + kv.getTimestamp() + " ");
                    System.out.println("value: " + new String(kv.getValue()));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Fetch a single row by row key.
     */
    @SuppressWarnings("deprecation")
    public static void getOne(String tablename, String rowKey) throws IOException {
        HTable table = new HTable(conf, tablename);
        Get get = new Get(rowKey.getBytes());
        Result rs = table.get(get);
        for (KeyValue kv : rs.raw()) {
            System.out.print(new String(kv.getRow()) + " ");
            System.out.print(new String(kv.getFamily()) + ":");
            System.out.print(new String(kv.getQualifier()) + " ");
            System.out.print(kv.getTimestamp() + " ");
            System.out.println(new String(kv.getValue()));
        }
    }

    /**
     * Print every row in the table.
     */
    public static void getAllRecord(String tableName) {
        try {
            HTable table = new HTable(conf, tableName);
            Scan s = new Scan();
            ResultScanner ss = table.getScanner(s);
            for (Result r : ss) {
                for (KeyValue kv : r.raw()) {
                    System.out.print("row key: " + new String(kv.getRow()) + " ");
                    System.out.print("column family: " + new String(kv.getFamily()) + " ");
                    System.out.print("qualifier: " + new String(kv.getQualifier()) + " ");
                    System.out.print("timestamp: " + kv.getTimestamp() + " ");
                    System.out.println("value: " + new String(kv.getValue()));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Delete a single row by row key.
     */
    public static void delRecord(String tableName, String rowKey) throws IOException {
        HTable table = new HTable(conf, tableName);
        List<Delete> list = new ArrayList<Delete>();
        Delete del = new Delete(rowKey.getBytes());
        list.add(del);
        table.delete(list);
        System.out.println("del record " + rowKey + " ok.");
    }

    public static void main(String[] args) {
        String tablename = "score";
        String[] familys = {"grade", "course"};
        try {
            HbaseTest.createTable(tablename, familys);
            // HbaseTest.delvalue(tablename, "zkb", "course", "click", "90");
            // HbaseTest.delRecord(tablename, "zkb");
        } catch (IOException e) {
            e.printStackTrace();
        }
        // HbaseTest.writeRow(tablename, "zkb", "grade", "title", "5");
        // HbaseTest.writeRow(tablename, "zkb", "course", "click", "90");
        // HbaseTest.writeRow(tablename, "zkb", "course", "url", "97");
        // HbaseTest.writeRow(tablename, "zkb", "course", "author", "87");
        //
        // HbaseTest.writeRow(tablename, "baoniu", "grade", "reply", "4");
        // HbaseTest.writeRow(tablename, "baoniu", "course", "siteName", "89");
        // HbaseTest.writeRow(tablename, "1", "grade", "title", "5");
        // HbaseTest.writeRow(tablename, "1", "course", "click", "90");
        // HbaseTest.writeRow(tablename, "2", "course", "url", "97");
        // HbaseTest.writeRow(tablename, "2", "course", "author", "87");
        //
        // HbaseTest.writeRow(tablename, "3", "grade", "reply", "4");
        // HbaseTest.writeRow(tablename, "3", "course", "siteName", "89");
        // HbaseTest.getOne(tablename, "zkb");
        // HbaseTest.getAllRecord(tablename);
        HbaseTest.getAll(tablename);
    }
}