HBase CRUD Java API

1. CreateNamespaceAndTable

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateNamespaceAndCreateTable {
    
    public static void main(String[] args) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
        Configuration conf = HBaseConfiguration.create();   //load the HBase configuration (hbase-default.xml / hbase-site.xml)
        HBaseAdmin admin = new HBaseAdmin(conf);    //HBaseAdmin performs administrative operations using that configuration
        admin.createNamespace(NamespaceDescriptor.create("HadoopHbase").build());  //create the namespace
        
        HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf("HadoopHbase:InsertCSV"));  //the table descriptor names the table and the namespace it belongs to
        tableDesc.setDurability(Durability.ASYNC_WAL);  //see the note on Durability below
        
        HColumnDescriptor hcd = new HColumnDescriptor("info");  //create a column family; for more than one family see the commented-out line below
        //HColumnDescriptor hcd1 = new HColumnDescriptor("contect");
        tableDesc.addFamily(hcd);       //add the column family to "HadoopHbase:InsertCSV"
        //tableDesc.addFamily(hcd1);
        admin.createTable(tableDesc); //finally create the table
        admin.close();  //close the HBaseAdmin connection
    }
 
}

Note on setDurability: it sets the level at which the WAL (Write-Ahead Log) is written:
public void setDurability(Durability d)
The parameter is an enum with the following options:
ASYNC_WAL : write the WAL asynchronously when data changes
SYNC_WAL : write the WAL synchronously when data changes
FSYNC_WAL : write the WAL synchronously when data changes and force it to disk
SKIP_WAL : do not write the WAL
USE_DEFAULT : use HBase's global default WAL level, which is SYNC_WAL
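
The same level can also be set per mutation instead of per table: Put.setDurability (used again in section 6 below) overrides the table default for that single write. A minimal sketch, assuming the "HadoopHbase:InsertCSV" table created above already exists and the usual imports are in place:

Configuration conf = HBaseConfiguration.create();
HTable table = new HTable(conf, "HadoopHbase:InsertCSV");
Put put = new Put(Bytes.toBytes("10001"));                                     // hypothetical row key
put.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("test"));
put.setDurability(Durability.SKIP_WAL);   // this single write skips the WAL; faster, but lost if the region server crashes
table.put(put);
table.close();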

2. Create Table

Table creation was already covered by the namespace-and-table example above, so this one needs little additional explanation.

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateHbaseTable {
    
    public static void main(String[] args) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);
        HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf("liupeng:student"));
        tableDesc.setDurability(Durability.ASYNC_WAL);
        
        HColumnDescriptor hcd = new HColumnDescriptor("info");
        //HColumnDescriptor hcd1 = new HColumnDescriptor("contect");
        tableDesc.addFamily(hcd);
        //tableDesc.addFamily(hcd1);
        admin.createTable(tableDesc);
        admin.close();
    }

}

3. DeleteNameSpace

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class DeleteNameSpace {
    
    public static void main(String[] args) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);
        admin.deleteNamespace("HadoopHbase"); //simply call the deleteNamespace method provided by HBaseAdmin (the namespace must not contain any tables)
        admin.close();
    }

}

4. DeleteTable

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class DeleteTable {
    
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);
        String tableName = "liupeng:StudentName"; //the table name is a String, so define a variable with the namespace-qualified name of the table to delete
        
        if(admin.tableExists(tableName)){   //only proceed if the table exists
            admin.disableTable(tableName);  //a table cannot be deleted directly in HBase; it must be disabled first, which is what disableTable does
            admin.deleteTable(tableName);   //once disabled, delete the table
        }
        //admin.deleteNamespace("HadoopHbase");   //if this is the only table in the namespace and no new tables will be created there, it is convenient to drop the namespace in the same run; deleting a namespace fails while it still contains other tables (see the sketch after this class)
        admin.close();
    }

}
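
As the comment above notes, deleteNamespace fails while the namespace still contains tables. A hedged sketch for checking that first, using listTableNamesByNamespace on the same HBaseAdmin API used above:

Configuration conf = HBaseConfiguration.create();
HBaseAdmin admin = new HBaseAdmin(conf);

TableName[] remaining = admin.listTableNamesByNamespace("HadoopHbase");   // tables still inside the namespace
if (remaining.length == 0) {
    admin.deleteNamespace("HadoopHbase");   // safe to drop: the namespace is empty
}
admin.close();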

5. DeleteColumns

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteColumns {
    
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "HadoopHbase:Person");   //load the configuration and open the target namespace:table whose columns will be deleted
        Delete del = new Delete(Bytes.toBytes("RowKey"));       //create a Delete for the given row key; row keys are stored as byte arrays, hence Bytes.toBytes()
        del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("address"));  //column family and column to delete
        del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("id"));
        del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
        del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("nation"));
        del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("sex"));
        table.delete(del); //execute the delete
        table.close();  //close the table
    }

}
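
Note that deleteColumn as used above only removes the most recent version of each cell; the same Delete class also offers deleteColumns (plural), which removes every stored version of a column. A minimal sketch with the same hypothetical "RowKey" and an already opened table:

Delete del = new Delete(Bytes.toBytes("RowKey"));
del.deleteColumns(Bytes.toBytes("info"), Bytes.toBytes("address"));   // plural: delete all versions of info:address
table.delete(del);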

6. Put (insert data)

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PutData {
    
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, Bytes.toBytes("liupeng:student"));   //load the configuration and open the namespace:table to write to
        
        Put put = new Put(Bytes.toBytes("10001"));    //create a Put for the row key "10001"
        put.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("劉鵬"));   //put.add takes the column family, the column name, and the value, in that order
        put.add(Bytes.toBytes("info"), Bytes.toBytes("city"), Bytes.toBytes("蘇州"));
        put.add(Bytes.toBytes("info"), Bytes.toBytes("mail"), Bytes.toBytes("liupeng@163.com"));
        put.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("37"));
        
        Put put1 = new Put(Bytes.toBytes("10002"));   //more than one row is inserted, so a second Put is built for row key 10002 in the same way
        put1.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("岳雲鵬"));
        put1.add(Bytes.toBytes("info"), Bytes.toBytes("city"), Bytes.toBytes("河南"));
        put1.add(Bytes.toBytes("info"), Bytes.toBytes("mail"), Bytes.toBytes("www.yunpeng@deyunshe.com"));
        put1.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("39"));
        
        Put put2 = new Put(Bytes.toBytes("10003"));
        put2.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("韓雪"));
        put2.add(Bytes.toBytes("info"), Bytes.toBytes("city"), Bytes.toBytes("蘇州"));
        put2.add(Bytes.toBytes("info"), Bytes.toBytes("mail"), Bytes.toBytes("www.hanxue@suzhou.com"));
        put2.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("35"));
        
        Put put3 = new Put(Bytes.toBytes("10004"));
        put3.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("范冰冰"));
        put3.add(Bytes.toBytes("info"), Bytes.toBytes("city"), Bytes.toBytes("山東煙臺"));
        put3.add(Bytes.toBytes("info"), Bytes.toBytes("mail"), Bytes.toBytes("www.fanbingbing@yantai.com"));
        put3.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("40"));
        
        
        put.setDurability(Durability.ASYNC_WAL);    //see the Durability note in section 1: ASYNC_WAL writes the WAL asynchronously when data changes
        put1.setDurability(Durability.ASYNC_WAL);
        table.put(put);  //write the rows one by one
        table.put(put1);
        table.put(put2);
        table.put(put3);
        table.close();  //close the table
    }

}
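
table.put is called once per row above; the same HTable API also accepts a list of Puts, which is convenient when many rows are written at once. A minimal sketch (the extra row key 10005 and its value are made up for illustration; java.util.List/ArrayList and the HBase imports above are assumed):

Configuration conf = HBaseConfiguration.create();
HTable table = new HTable(conf, "liupeng:student");

List<Put> puts = new ArrayList<Put>();
Put p = new Put(Bytes.toBytes("10005"));                            // hypothetical extra row
p.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("test"));
puts.add(p);
// ... build and add further Puts here ...

table.put(puts);   // send all buffered Puts in one call
table.close();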

7. GetDataFamily

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class GetDataFamily {
    
    //getTable loads the configuration and opens the table; the table name is a String, so the parameter is a String as well
    public static HTable getTable(String name) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, name);
        return table;
    }
    
    //getData reads the data; reading needs an open table first, so the parameter is the HTable returned above
    public static void getData(HTable table) throws IOException {
        Get get = new Get(Bytes.toBytes("10004"));            //create a Get for the row key "10004"
        //get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));   //addColumn would fetch a single column of a family
        get.addFamily(Bytes.toBytes("info"));                 //addFamily fetches the whole "info" column family
        Result rs = table.get(get);                           //get returns a Result holding every cell of the row
        
        //each row key maps to several cells (one per column), so iterate over them with an enhanced for loop
        for(Cell cell : rs.rawCells()){
            System.out.println(Bytes.toString(CellUtil.cloneFamily(cell))     //CellUtil.cloneFamily: column family; Bytes.toString turns the bytes into a String
                    +"=>"+ Bytes.toString(CellUtil.cloneQualifier(cell))      //cloneQualifier: column name
                    +"=>"+ Bytes.toString(CellUtil.cloneValue(cell))          //cloneValue: cell value
                    +"=>"+ cell.getTimestamp());                              //getTimestamp: cell timestamp
        }
    }
    
    public static void main(String[] args) throws IOException {
        HTable table = getTable("HadoopHbase:Person");   //namespace and table name
        getData(table);                                  //read the data
        table.close();
    }

}

8. GetDataColumn

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class GetDataColumn {
    
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "HadoopHbase:Person");
        
        Get get = new Get(Bytes.toBytes("10004"));
        get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
        
        Result rs = table.get(get);
        
        for(Cell cell:rs.rawCells()){
            System.out.println(Bytes.toString(rs.getRow())    //row key
                    +"\t"+ new String(CellUtil.cloneQualifier(cell))  //column name
                    +"=>"+ new String(CellUtil.cloneValue(cell))   //cell value
                    +"=>"+ cell.getTimestamp());  //timestamp
        }
        table.close();
    }

}

9. ScanAllData

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanAllData {

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, Bytes.toBytes("liupeng:getInfo"));
        
        Scan scan = new Scan();                        //an empty Scan returns every row and every cell of the table
        
        ResultScanner rs = table.getScanner(scan);     //iterate over the results row by row
        for(Result result:rs){
            for(Cell cell:result.rawCells()){
                System.out.println(new String(CellUtil.cloneRow(cell))
                        +"\t"+ new String(CellUtil.cloneFamily(cell))
                        +"=>"+ new String(CellUtil.cloneQualifier(cell))
                        +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                        +"=>"+ cell.getTimestamp());
            }
            System.out.println("========================================");
        }
        table.close();
    }
    
}
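
A Scan with no restrictions returns every cell of every row. When only part of the data is needed, the scan can be narrowed to a family (or column) and given a larger client-side cache; a minimal sketch using the same Scan API and table as above:

Scan scan = new Scan();
scan.addFamily(Bytes.toBytes("info"));    // only return cells of the "info" family
scan.setCaching(100);                     // fetch 100 rows per RPC round trip instead of the default
ResultScanner rs = table.getScanner(scan);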

10. SingleColumnValueFilter: given a column family, column, and value to query, list the values under every row key that matches the condition

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;

public class SingleColumnValueFilter_Demo {
    
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "HadoopHbase:Person");
        /**
         * List every column family and column under each row key that matches the condition.
         * 
         * Specify the column family (info), a column (name, age, city, mail, ...), the comparison CompareOp.EQUAL ("equal to"),
         * and the value to look for; the value must correspond to the chosen column.
         */
        SingleColumnValueFilter scvf = new SingleColumnValueFilter("info".getBytes(), "nation".getBytes(), CompareOp.EQUAL, "蒙古族".getBytes());
        Scan scan = new Scan();
        scan.setFilter(scvf);
        ResultScanner rs = table.getScanner(scan);
        for(Result result:rs){
            for(Cell cell:result.rawCells()){
                System.out.println(new String(CellUtil.cloneRow(cell))
                        +"\t"+ new String(CellUtil.cloneFamily(cell))
                        +"=>"+ new String(CellUtil.cloneQualifier(cell))
                        +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                        +"=>"+ cell.getTimestamp());
            }
            System.out.println("========================================");
        }
        table.close();
    }

}
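
One caveat with SingleColumnValueFilter: by default, rows that do not contain the tested column at all still pass the filter. If such rows should be dropped, setFilterIfMissing can be enabled; a minimal sketch in which only the filter setup changes:

SingleColumnValueFilter scvf = new SingleColumnValueFilter("info".getBytes(), "nation".getBytes(), CompareOp.EQUAL, "蒙古族".getBytes());
scvf.setFilterIfMissing(true);   // rows without an info:nation cell are filtered out instead of being returned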

11. SingleColumnValueFilter with a SubstringComparator to specify the value to look for (the scan output is the same as above: every matched column family, column, and value)

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;

/***
 * Requirement: fetch the information of everyone whose 'age' value contains the substring "23"
 * @author liupeng
 *
 */
public class SingleColumnValueFilter_Demo2 {
    
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "liupeng:employee");
        
        SubstringComparator comparator = new SubstringComparator("23");  //SubstringComparator takes a String; any cell value containing that substring matches, and the whole matching row (column family, column, value) is listed
        SingleColumnValueFilter scvf = new SingleColumnValueFilter("info".getBytes(), "age".getBytes(), CompareOp.EQUAL, comparator);
        Scan scan = new Scan();
        scan.setFilter(scvf);
        ResultScanner rs = table.getScanner(scan);
        for(Result result:rs){
            for(Cell cell:result.rawCells()){
                System.out.println(new String(CellUtil.cloneRow(cell))
                        +"\t"+ new String(CellUtil.cloneFamily(cell))
                        +"=>"+ new String(CellUtil.cloneQualifier(cell))
                        +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                        +"=>"+ cell.getTimestamp());
            }
            System.out.println("========================================");
        }
        table.close();
    }

}

12. RegexStringComparator: match the target value with a regular expression and list the rowkey, column family, column, and value of every match

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;

/***
 * Requirement: fetch the information of everyone in the table whose 'mail' column is a 163 mailbox
 * @author liupeng
 *
 */
public class SingleColumnValueFilter_Demo3 {
    
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "Alibaba:AliYun");
        
        //filter with a regular expression; "." matches any single character, so ".163" matches "163" anywhere in the value with one character in front of it
        RegexStringComparator comparator = new RegexStringComparator(".163");
        
        SingleColumnValueFilter scvf = new SingleColumnValueFilter("info".getBytes(), "mail".getBytes(), CompareOp.EQUAL, comparator);
        Scan scan = new Scan();
        scan.setFilter(scvf);
        ResultScanner rs = table.getScanner(scan);
        for(Result result:rs){
            for(Cell cell:result.rawCells()){
                System.out.println(new String(CellUtil.cloneRow(cell))
                        +"\t"+ new String(CellUtil.cloneFamily(cell))
                        +"=>"+ new String(CellUtil.cloneQualifier(cell))
                        +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                        +"=>"+ cell.getTimestamp());
            }
            System.out.println("========================================");
        }
        table.close();
    }

}
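
The pattern ".163" above is looser than "ends with 163.com": it matches any character followed by "163" anywhere in the address. If the stricter meaning is wanted, the expression can be anchored; a hedged sketch in which only the comparator line changes:

RegexStringComparator comparator = new RegexStringComparator("@163\\.com$");   // literal '.', anchored to the end of the mail address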

13. RangeData (query data by specifying a StartRow/StopRow range)

Note: setting only startRow gives the scan a lower bound with no upper bound; setting only stopRow gives it an upper bound with no lower bound. startRow is inclusive, while stopRow is exclusive.

To include the boundary row itself, append a trailing "0" to the startRow or stopRow value; for example, with startRow "10001" and stopRow "100030" the scan below returns rows 10001 through 10003 inclusive.

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class Scan_StartAndStopRow {
    
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "HadoopHbase:Person");
        
        Scan scan = new Scan();
        scan.setStartRow(Bytes.toBytes("10001"));
        scan.setStopRow(Bytes.toBytes("10003"));
        //appending a trailing 0 makes the result set include the original stopRow
        //scan.setStopRow(Bytes.toBytes("100030"));
        ResultScanner scanner = table.getScanner(scan);
        for(Result rs:scanner){
            for(Cell cell:rs.rawCells()){
                System.out.println(new String(CellUtil.cloneRow(cell))
                        +"\t"+ new String(CellUtil.cloneFamily(cell))
                        +"=>"+ new String(CellUtil.cloneQualifier(cell))
                        +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                        +"=>"+ cell.getTimestamp());
            }
            System.out.println("==============================================");
        }
        table.close();
    }

}

14. RangeData (combine the conditions of several SingleColumnValueFilter objects in a FilterList and query with them)

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class RangeDataDemo2 {
    
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, Bytes.toBytes("liupeng:employee"));
        
        FilterList filterlist = new FilterList(FilterList.Operator.MUST_PASS_ALL);   //MUST_PASS_ALL = logical AND of the filters added below
        SingleColumnValueFilter filter1 = new SingleColumnValueFilter(
                Bytes.toBytes("info"), Bytes.toBytes("age"), 
                CompareOp.GREATER_OR_EQUAL, Bytes.toBytes("40"));   //age >= "40" (byte-wise comparison of the stored strings)
        
        SingleColumnValueFilter filter2 = new SingleColumnValueFilter(
                Bytes.toBytes("info"), Bytes.toBytes("age"), 
                CompareOp.LESS_OR_EQUAL, Bytes.toBytes("46"));      //age <= "46"
        
        filterlist.addFilter(filter1);
        filterlist.addFilter(filter2);
        
        Scan scan = new Scan();
        scan.setFilter(filterlist);
        ResultScanner rs = table.getScanner(scan);
        for(Result result:rs){
            for(Cell cell:result.rawCells()){
                System.out.println(new String(CellUtil.cloneRow(cell))
                        +"\t"+ new String(CellUtil.cloneFamily(cell))
                        +"=>"+ new String(CellUtil.cloneQualifier(cell))
                        +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                        +"=>"+ cell.getTimestamp());
            }
            System.out.println("========================================");
        }
        table.close();
    }
    
}
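
FilterList.Operator.MUST_PASS_ALL combines the two filters with a logical AND (40 <= age <= 46). Switching the operator to MUST_PASS_ONE turns the combination into a logical OR; a minimal sketch in which only the FilterList construction changes:

FilterList filterlist = new FilterList(FilterList.Operator.MUST_PASS_ONE);   // a row passes if it matches at least one of the filters
filterlist.addFilter(filter1);
filterlist.addFilter(filter2);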

15. ChangeTableSetMaxVersions (change a table's maximum number of versions)

package com.HbaseTest.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;

public class ChangeTableSetMaxVersions {
    
    
    public static void main(String[] args) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin =  new HBaseAdmin(conf);
        String tablename = "Alibaba:AliYun";
        
        if(admin.tableExists(tablename)){
            admin.disableTable(tablename);   //the table must be disabled before its schema can be changed
            
            HTableDescriptor htd = admin.getTableDescriptor(Bytes.toBytes("Alibaba:AliYun"));
            HColumnDescriptor infocf = htd.getFamily(Bytes.toBytes("info"));
            infocf.setMaxVersions(50);       //keep up to 50 versions per cell in the "info" family
            
            admin.modifyTable(Bytes.toBytes("Alibaba:AliYun"), htd);   //apply the modified descriptor
            admin.enableTable(tablename);    //re-enable the table
        }
        admin.close();
    }
    

}
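
After the "info" family keeps up to 50 versions, a read still returns only the newest one unless more versions are requested explicitly. A minimal sketch of reading several versions of a cell (the row key 10001 is made up; the statements would live in a main that throws IOException, like the examples above):

Configuration conf = HBaseConfiguration.create();
HTable table = new HTable(conf, "Alibaba:AliYun");

Get get = new Get(Bytes.toBytes("10001"));                 // hypothetical row key
get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("mail"));
get.setMaxVersions(50);                                    // ask for up to 50 stored versions instead of just the latest

Result rs = table.get(get);
for (Cell cell : rs.rawCells()) {
    System.out.println(Bytes.toString(CellUtil.cloneValue(cell)) + " => " + cell.getTimestamp());
}
table.close();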