• HBase CRUD (create, read, update, delete) with the Java API


    1. CreateNamespaceAndTable (create a namespace and a table)

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.MasterNotRunningException;
    import org.apache.hadoop.hbase.NamespaceDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.ZooKeeperConnectionException;
    import org.apache.hadoop.hbase.client.Durability;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    
    public class CreateNamespaceAndCreateTable {
        
        public static void main(String[] args) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
            Configuration conf = HBaseConfiguration.create();   //load the HBase configuration (hbase-site.xml on the classpath)
            HBaseAdmin admin = new HBaseAdmin(conf);    //HBaseAdmin is the administrative client built from that configuration
            admin.createNamespace(NamespaceDescriptor.create("HadoopHbase").build());  //create the namespace
            
            HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf("HadoopHbase:InsertCSV"));  //the table descriptor names the table and the namespace it belongs to
            tableDesc.setDurability(Durability.ASYNC_WAL);  //see the WAL notes below
            
            HColumnDescriptor hcd = new HColumnDescriptor("info");  //create a column family; for a second family see the commented-out "contect" lines below
            //HColumnDescriptor hcd1 = new HColumnDescriptor("contect");
            tableDesc.addFamily(hcd);       //attach the column family to "HadoopHbase:InsertCSV"
            //tableDesc.addFamily(hcd1);
            admin.createTable(tableDesc); //finally create the table
            admin.close();  //release the HBaseAdmin connection
        }
     
    }

    Note on setDurability: it sets the WAL (Write-Ahead Log) level.
    public void setDurability(Durability d)
    The parameter is an enum with the following values:
    ASYNC_WAL : write the WAL asynchronously when data changes
    SYNC_WAL : write the WAL synchronously when data changes
    FSYNC_WAL : write the WAL synchronously and force it to disk
    SKIP_WAL : do not write the WAL at all
    USE_DEFAULT : use HBase's global default, which is SYNC_WAL
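
    As a quick illustration, the same WAL level can also be set on an individual mutation rather than on the table descriptor. Below is a minimal sketch, assuming the "liupeng:student" table used later in this post and a made-up rowkey 10005; SKIP_WAL trades durability for write throughput and only makes sense for data that can be re-loaded.

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Durability;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class PutWithSkipWal {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "liupeng:student");   //assumed table name, for illustration only
            
            Put put = new Put(Bytes.toBytes("10005"));            //hypothetical rowkey
            put.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("demo"));
            put.setDurability(Durability.SKIP_WAL);               //this single Put bypasses the WAL
            table.put(put);
            table.close();
        }
    
    }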

    2. Create Table

    The namespace example above already covered creating a table, so this section is only a brief recap.

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.MasterNotRunningException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.ZooKeeperConnectionException;
    import org.apache.hadoop.hbase.client.Durability;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    
    public class CreateHbaseTable {
        
        public static void main(String[] args) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
            Configuration conf = HBaseConfiguration.create();
            HBaseAdmin admin = new HBaseAdmin(conf);
            HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf("liupeng:student"));
            tableDesc.setDurability(Durability.ASYNC_WAL);
            
            HColumnDescriptor hcd = new HColumnDescriptor("info");
            //HColumnDescriptor hcd1 = new HColumnDescriptor("contect");
            tableDesc.addFamily(hcd);
            //tableDesc.addFamily(hcd1);
            admin.createTable(tableDesc);
            admin.close();
        }
    
    }

    3.  DeleteNameSpace

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.MasterNotRunningException;
    import org.apache.hadoop.hbase.ZooKeeperConnectionException;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    
    public class DeleteNameSpace {
        
        public static void main(String[] args) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
            Configuration conf = HBaseConfiguration.create();
            HBaseAdmin admin = new HBaseAdmin(conf);
            admin.deleteNamespace("HadoopHbase"); //HBaseAdmin.deleteNamespace() does the job directly; note that it fails while the namespace still contains tables
            admin.close();
        }
    
    }

    4.  DeleteTable

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    
    public class DeleteTable {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HBaseAdmin admin = new HBaseAdmin(conf);
            String tableName = "liupeng:StudentName"; //the table name is a String, so first name the namespace:table to delete
            
            if(admin.tableExists(tableName)){   //only proceed if the table actually exists
                admin.disableTable(tableName);  //an enabled table cannot be dropped directly in HBase, so disable it first with disableTable()
                admin.deleteTable(tableName);   //then delete the disabled table
            }
            //admin.deleteNamespace("HadoopHbase");   //if this is the only table in the namespace and no new tables will be created there, the namespace can be dropped in the same run; deleteNamespace() fails while other tables still exist in it (a cleanup sketch follows this class)
            admin.close();
        }
    
    }
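
    As noted in the comment above, deleteNamespace() only succeeds on an empty namespace. Below is a minimal sketch of a full cleanup, assuming the 0.96+-era HBaseAdmin methods that take TableName (such as listTableNamesByNamespace) and the "HadoopHbase" namespace from the earlier examples:

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    
    public class DropNamespaceWithTables {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HBaseAdmin admin = new HBaseAdmin(conf);
            
            //disable and delete every table that still lives in the namespace
            for(TableName tn : admin.listTableNamesByNamespace("HadoopHbase")){
                if(admin.isTableEnabled(tn)){
                    admin.disableTable(tn);
                }
                admin.deleteTable(tn);
            }
            admin.deleteNamespace("HadoopHbase");   //now the namespace is empty and can be dropped
            admin.close();
        }
    
    }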

    5.  DeleteColumns

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class DeleteColumns {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "HadoopHbase:Person");   //load the configuration and open the namespace:table whose columns will be deleted
            Delete del = new Delete(Bytes.toBytes("RowKey"));       //the Delete targets one rowkey; rowkeys are stored as byte arrays, hence Bytes.toBytes()
            del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("address"));  //name the column family and column to delete
            del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("id"));
            del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
            del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("nation"));
            del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("sex"));
            table.delete(del); //execute the delete
            table.close();  //close the table
        }
    
    }
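
    Note that in this client version Delete.deleteColumn() removes only the newest version of the cell, while deleteColumns() (plural) removes every stored version. Below is a minimal sketch of the all-versions variant, reusing the table and rowkey from the class above:

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class DeleteAllVersionsOfColumn {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "HadoopHbase:Person");
            
            Delete del = new Delete(Bytes.toBytes("RowKey"));
            del.deleteColumns(Bytes.toBytes("info"), Bytes.toBytes("address"));  //plural form: drop every stored version of info:address
            table.delete(del);
            table.close();
        }
    
    }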

     6.  Put (insert data)

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Durability;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class PutData {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, Bytes.toBytes("liupeng:student"));   //load the configuration and open the namespace:table to write to
            
            Put put = new Put(Bytes.toBytes("10001"));    //each Put targets one rowkey
            put.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("刘鹏"));   //put.add() takes column family, column, value
            put.add(Bytes.toBytes("info"), Bytes.toBytes("city"), Bytes.toBytes("苏州"));
            put.add(Bytes.toBytes("info"), Bytes.toBytes("mail"), Bytes.toBytes("liupeng@163.com"));
            put.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("37"));
            
            Put put1 = new Put(Bytes.toBytes("10002"));   //a second row uses a second Put with rowkey 10002, built the same way
            put1.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("岳云鹏"));
            put1.add(Bytes.toBytes("info"), Bytes.toBytes("city"), Bytes.toBytes("河南"));
            put1.add(Bytes.toBytes("info"), Bytes.toBytes("mail"), Bytes.toBytes("www.yunpeng@deyunshe.com"));
            put1.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("39"));
            
            Put put2 = new Put(Bytes.toBytes("10003"));
            put2.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("韩雪"));
            put2.add(Bytes.toBytes("info"), Bytes.toBytes("city"), Bytes.toBytes("苏州"));
            put2.add(Bytes.toBytes("info"), Bytes.toBytes("mail"), Bytes.toBytes("www.hanxue@suzhou.com"));
            put2.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("35"));
            
            Put put3 = new Put(Bytes.toBytes("10004"));
            put3.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("范冰冰"));
            put3.add(Bytes.toBytes("info"), Bytes.toBytes("city"), Bytes.toBytes("山东烟台"));
            put3.add(Bytes.toBytes("info"), Bytes.toBytes("mail"), Bytes.toBytes("www.fanbingbing@yantai.com"));
            put3.add(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes("40"));
            
            
            put.setDurability(Durability.ASYNC_WAL);    //see the WAL notes above: ASYNC_WAL writes the WAL asynchronously when data changes
            put1.setDurability(Durability.ASYNC_WAL);
            table.put(put);  //submit the Puts one by one
            table.put(put1);
            table.put(put2);
            table.put(put3);
            table.close();//close the table
        }
    
    }
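
    Calling table.put() once per row works, but the client also accepts a list of Puts, which batches the round trips. Below is a minimal sketch reusing the table above; the rowkeys 10005..10008 and values are made up for illustration:

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class PutDataBatch {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "liupeng:student");
            
            List<Put> puts = new ArrayList<Put>();
            for(int i = 10005; i <= 10008; i++){          //hypothetical rowkeys 10005..10008
                Put put = new Put(Bytes.toBytes(String.valueOf(i)));
                put.add(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("user_" + i));
                puts.add(put);
            }
            table.put(puts);   //one call sends the whole batch
            table.close();
        }
    
    }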

    7.  GetDataFamily

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class GetDataFamily {
        
        //getTable(): loads the configuration and opens the named table; the table name is a String, so the parameter is a String as well
        public static HTable getTable(String name) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, name);
            return table;
        }
        
        //getData(): reading data needs an open table first, so the parameter is the HTable returned above
        public static void getData(HTable table) throws IOException {
            Get get = new Get(Bytes.toBytes("10004"));    //the Get targets one rowkey
            //get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name")); //addColumn() would fetch a single column of a family
            get.addFamily(Bytes.toBytes("info"));  //addFamily() fetches the whole column family
            Result rs = table.get(get);  //get() returns a Result; the individual cells are read from it
            //each rowkey maps to several cells, so iterate over all raw cells with an enhanced for loop
            for(Cell cell : rs.rawCells()){
                System.out.println(Bytes.toString(CellUtil.cloneFamily(cell))      //CellUtil.cloneFamily() -> column family, turned into a String by Bytes.toString()
                        +"=>"+ Bytes.toString(CellUtil.cloneQualifier(cell))       //cloneQualifier() -> column name
                        +"=>"+ Bytes.toString(CellUtil.cloneValue(cell))           //cloneValue() -> value
                        +"=>"+ cell.getTimestamp());                               //getTimestamp() -> cell timestamp
            }
            table.close();
        }
        
        public static void main(String[] args) throws IOException {
            HTable table = getTable("HadoopHbase:Person");  //namespace:table to read from
            getData(table); //fetch and print the data
        }
    }
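
    When only a single value is needed, Result also exposes getValue(), so the cell loop can be skipped. Below is a minimal sketch assuming the same "HadoopHbase:Person" table and rowkey 10004:

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class GetSingleValue {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "HadoopHbase:Person");
            
            Get get = new Get(Bytes.toBytes("10004"));
            get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
            Result rs = table.get(get);
            
            byte[] value = rs.getValue(Bytes.toBytes("info"), Bytes.toBytes("name"));  //null if the cell does not exist
            if(value != null){
                System.out.println("info:name => " + Bytes.toString(value));
            }
            table.close();
        }
    
    }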

     8.  GetDataColumn

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class GetDataColumn {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "HadoopHbase:Person");
            
            Get get = new Get(Bytes.toBytes("10004"));
            get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
            
            Result rs = table.get(get);
            
            for(Cell cell:rs.rawCells()){
                System.out.println(Bytes.toString(rs.getRow())    //rowkey
                        +"\t"+ new String(CellUtil.cloneQualifier(cell))  //column name
                        +"=>"+ new String(CellUtil.cloneValue(cell))   //value
                        +"=>"+ cell.getTimestamp());  //timestamp
            }
            table.close();
        }
    
    }

    9. ScanAllData

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class ScanAllData {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, Bytes.toBytes("liupeng:getInfo"));
            
            Scan scan = new Scan();
            
            ResultScanner rs = table.getScanner(scan);
            for(Result result:rs){
                for(Cell cell:result.rawCells()){
                    System.out.println(new String(CellUtil.cloneRow(cell))
                            +"	"+ new String(CellUtil.cloneFamily(cell))
                            +"=>"+ new String(CellUtil.cloneQualifier(cell))
                            +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                            +"=>"+ cell.getTimestamp());
                }
                System.out.println("========================================");
            }
            table.close();
        }
        
    }
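
    A full-table scan fetches rows from the server in chunks, so on large tables it is usually worth tuning how many rows each RPC returns. Below is a minimal sketch using Scan's caching and batch settings, reusing the table above; the values 100 and 10 are only illustrative:

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    
    public class ScanWithCaching {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "liupeng:getInfo");
            
            Scan scan = new Scan();
            scan.setCaching(100);   //rows fetched per RPC (larger = fewer round trips, more client memory)
            scan.setBatch(10);      //max cells returned per Result, useful for very wide rows
            
            ResultScanner scanner = table.getScanner(scan);
            try {
                for(Result result : scanner){
                    for(Cell cell : result.rawCells()){
                        System.out.println(new String(CellUtil.cloneRow(cell))
                                +"=>"+ new String(CellUtil.cloneValue(cell), "UTF-8"));
                    }
                }
            } finally {
                scanner.close();   //always release the scanner
                table.close();
            }
        }
        
    }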

    10.  SingleColumnValueFilter: match on a column family, column, and value, and list every cell of the matching rowkeys

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    
    public class SingleColumnValueFilter_Demo {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "HadoopHbase:Person");
            /**
             * List every column family and column under each rowkey that matches the condition.
             * 
             * Name the column family (info) and one of its columns (name, age, city, mail, ...); CompareOp.EQUAL means "equals", and the value to compare against must belong to that column.
             */
            SingleColumnValueFilter scvf = new SingleColumnValueFilter("info".getBytes(), "nation".getBytes(), CompareOp.EQUAL, "蒙古族".getBytes());
            Scan scan = new Scan();
            scan.setFilter(scvf);
            ResultScanner rs = table.getScanner(scan);
            for(Result result:rs){
                for(Cell cell:result.rawCells()){
                    System.out.println(new String(CellUtil.cloneRow(cell))
                            +"	"+ new String(CellUtil.cloneFamily(cell))
                            +"=>"+ new String(CellUtil.cloneQualifier(cell))
                            +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                            +"=>"+ cell.getTimestamp());
                }
                System.out.println("========================================");
            }
            table.close();
        }
    
    }
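
    One caveat with SingleColumnValueFilter: by default, rows that do not contain the tested column at all still pass the filter. Calling setFilterIfMissing(true) drops such rows. Below is a minimal sketch reusing the table and condition from the class above and printing only the matching rowkeys:

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class SingleColumnValueFilter_FilterIfMissing {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "HadoopHbase:Person");
            
            SingleColumnValueFilter scvf = new SingleColumnValueFilter(
                    Bytes.toBytes("info"), Bytes.toBytes("nation"),
                    CompareOp.EQUAL, Bytes.toBytes("蒙古族"));
            scvf.setFilterIfMissing(true);   //rows without an info:nation cell are now excluded
            
            Scan scan = new Scan();
            scan.setFilter(scvf);
            ResultScanner rs = table.getScanner(scan);
            for(Result result : rs){
                System.out.println(Bytes.toString(result.getRow()));   //print only the matching rowkeys
            }
            table.close();
        }
    
    }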

    11. SingleColumnValueFilter with a SubstringComparator to specify the value to look for (as above, the scan lists every matching column family, column, and value)

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.RegexStringComparator;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.filter.SubstringComparator;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    
    /***
     * Requirement: list every row whose 'age' value contains the substring "23"
     * @author liupeng
     *
     */
    public class SingleColumnValueFilter_Demo2 {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "liupeng:employee");
            
            SubstringComparator comparator = new SubstringComparator("23");  //the SubstringComparator takes a String; any value containing that substring matches, and every column family, column, and value of the matching rowkeys is listed
            SingleColumnValueFilter scvf = new SingleColumnValueFilter("info".getBytes(), "age".getBytes(), CompareOp.EQUAL, comparator);
            Scan scan = new Scan();
            scan.setFilter(scvf);
            ResultScanner rs = table.getScanner(scan);
            for(Result result:rs){
                for(Cell cell:result.rawCells()){
                    System.out.println(new String(CellUtil.cloneRow(cell))
                            +"	"+ new String(CellUtil.cloneFamily(cell))
                            +"=>"+ new String(CellUtil.cloneQualifier(cell))
                            +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                            +"=>"+ cell.getTimestamp());
                }
                System.out.println("========================================");
            }
            table.close();
        }
    
    }

    12. RegexStringComparator: match values with a regular expression and list every matching rowkey, column family, column, and value

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.RegexStringComparator;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    
    /***
     * Requirement: list everyone in the table whose 'mail' column is a 163 mailbox
     * @author liupeng
     *
     */
    public class SingleColumnValueFilter_Demo3 {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "Alibaba:AliYun");
            
            //filter with a regular expression; ".163" matches any single character followed by "163" anywhere in the value
            RegexStringComparator comparator = new RegexStringComparator(".163");
            
            SingleColumnValueFilter scvf = new SingleColumnValueFilter("info".getBytes(), "mail".getBytes(), CompareOp.EQUAL, comparator);
            Scan scan = new Scan();
            scan.setFilter(scvf);
            ResultScanner rs = table.getScanner(scan);
            for(Result result:rs){
                for(Cell cell:result.rawCells()){
                    System.out.println(new String(CellUtil.cloneRow(cell))
                            +"	"+ new String(CellUtil.cloneFamily(cell))
                            +"=>"+ new String(CellUtil.cloneQualifier(cell))
                            +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                            +"=>"+ cell.getTimestamp());
                }
                System.out.println("========================================");
            }
            table.close();
        }
    
    }
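
    Note that the pattern ".163" only requires some character followed by "163" anywhere in the value. To match specifically addresses ending in "@163.com", an anchored pattern can be used instead; the lines below are a sketch that would replace the comparator and filter construction in the class above:

            //anchored pattern: the value must end with "@163.com"
            RegexStringComparator comparator = new RegexStringComparator(".*@163\\.com$");
            SingleColumnValueFilter scvf = new SingleColumnValueFilter("info".getBytes(), "mail".getBytes(), CompareOp.EQUAL, comparator);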

    13. RangeData (list data by specifying a StartRow/StopRow range)

    Note: StartRow is the lower bound of the scan and is inclusive (the scan starts at, and returns, this rowkey).

          StopRow is the upper bound and is exclusive (the scan stops before this rowkey).

    Tip: appending a trailing 0 to the rowkey makes the range effectively include a given StopRow (or exclude a given StartRow).

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class Scan_StartAndStopRow {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "HadoopHbase:Person");
            
            Scan scan = new Scan();
            scan.setStartRow(Bytes.toBytes("10001"));
            scan.setStopRow(Bytes.toBytes("10003"));
            //appending a trailing 0 makes the result set include the StopRow
            //scan.setStopRow(Bytes.toBytes("100030"));
            ResultScanner scanner = table.getScanner(scan);
            for(Result rs:scanner){
                for(Cell cell:rs.rawCells()){
                    System.out.println(new String(CellUtil.cloneRow(cell))
                            +"	"+ new String(CellUtil.cloneFamily(cell))
                            +"=>"+ new String(CellUtil.cloneQualifier(cell))
                            +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                            +"=>"+ cell.getTimestamp());
                }
                System.out.println("==============================================");
            }
            
            table.close();
        }
    
    }
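
    A related way to bound a scan by rowkey is a PrefixFilter, which keeps only rows whose rowkey starts with a given prefix. Below is a minimal sketch, assuming the rowkeys in the examples above all share the prefix "1000":

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.PrefixFilter;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class Scan_RowKeyPrefix {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "HadoopHbase:Person");
            
            Scan scan = new Scan();
            scan.setFilter(new PrefixFilter(Bytes.toBytes("1000")));   //keep only rowkeys starting with "1000"
            
            ResultScanner scanner = table.getScanner(scan);
            for(Result rs : scanner){
                System.out.println(Bytes.toString(rs.getRow()));   //print the matching rowkeys
            }
            table.close();
        }
    
    }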

    14. RangeData (combine several SingleColumnValueFilter conditions with a FilterList)

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class RangeDataDemo2 {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, Bytes.toBytes("liupeng:employee"));
            
            FilterList filterlist = new FilterList(FilterList.Operator.MUST_PASS_ALL);
            SingleColumnValueFilter filter1 = new SingleColumnValueFilter(
                    Bytes.toBytes("info"), Bytes.toBytes("age"), 
                    CompareOp.GREATER_OR_EQUAL, Bytes.toBytes("40"));
            
            SingleColumnValueFilter filter2 = new SingleColumnValueFilter(
                    Bytes.toBytes("info"), Bytes.toBytes("age"), 
                    CompareOp.LESS_OR_EQUAL, Bytes.toBytes("46"));
            
            filterlist.addFilter(filter1);
            filterlist.addFilter(filter2);
            
            Scan scan = new Scan();
            scan.setFilter(filterlist);
            ResultScanner rs = table.getScanner(scan);
            for(Result result:rs){
                for(Cell cell:result.rawCells()){
                    System.out.println(new String(CellUtil.cloneRow(cell))
                            +"	"+ new String(CellUtil.cloneFamily(cell))
                            +"=>"+ new String(CellUtil.cloneQualifier(cell))
                            +"=>"+ new String(CellUtil.cloneValue(cell),"UTF-8")
                            +"=>"+ cell.getTimestamp());
                }
                System.out.println("========================================");
            }
            table.close();
        }
        
    }
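
    FilterList.Operator.MUST_PASS_ALL ANDs the filters together; MUST_PASS_ONE ORs them instead. Below is a minimal sketch of the OR variant, reusing the same table and column (the age values are made up) and printing only the matching rowkeys:

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class RangeDataDemo3 {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, Bytes.toBytes("liupeng:employee"));
            
            FilterList filterlist = new FilterList(FilterList.Operator.MUST_PASS_ONE);  //OR semantics: a row passes if any filter matches
            filterlist.addFilter(new SingleColumnValueFilter(
                    Bytes.toBytes("info"), Bytes.toBytes("age"),
                    CompareOp.EQUAL, Bytes.toBytes("40")));
            filterlist.addFilter(new SingleColumnValueFilter(
                    Bytes.toBytes("info"), Bytes.toBytes("age"),
                    CompareOp.EQUAL, Bytes.toBytes("46")));
            
            Scan scan = new Scan();
            scan.setFilter(filterlist);
            ResultScanner rs = table.getScanner(scan);
            for(Result result : rs){
                System.out.println(Bytes.toString(result.getRow()));   //rowkeys whose age is 40 or 46
            }
            table.close();
        }
        
    }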

    15. ChangeTableSetMaxVersions (change a table's maximum number of versions)

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.MasterNotRunningException;
    import org.apache.hadoop.hbase.ZooKeeperConnectionException;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class ChangeTableSetMaxVersions {
        
        
        public static void main(String[] args) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
            Configuration conf = HBaseConfiguration.create();
            HBaseAdmin admin =  new HBaseAdmin(conf);
            String tablename = "Alibaba:AliYun";
            
            if(admin.tableExists(tablename)){
                admin.disableTable(tablename);
                
                HTableDescriptor htd = admin.getTableDescriptor(Bytes.toBytes("Alibaba:AliYun"));
                HColumnDescriptor infocf = htd.getFamily(Bytes.toBytes("info"));
                infocf.setMaxVersions(50);
                
                admin.modifyTable(Bytes.toBytes("Alibaba:AliYun"), htd);
                admin.enableTable(tablename);
            }
            admin.close();
        }
        
    
    }
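
    Once the column family keeps more versions, a Get has to ask for them explicitly: by default only the newest version is returned. Below is a minimal sketch, assuming the "Alibaba:AliYun" table above and a hypothetical rowkey 10001:

    package com.HbaseTest.hdfs;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class GetAllVersions {
        
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HTable table = new HTable(conf, "Alibaba:AliYun");
            
            Get get = new Get(Bytes.toBytes("10001"));   //hypothetical rowkey
            get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("mail"));
            get.setMaxVersions();   //return every stored version instead of only the newest one
            
            Result rs = table.get(get);
            for(Cell cell : rs.rawCells()){
                System.out.println(new String(CellUtil.cloneValue(cell), "UTF-8")
                        +"=>"+ cell.getTimestamp());   //one line per version, with its timestamp
            }
            table.close();
        }
    
    }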
  • Original post: https://www.cnblogs.com/liupengpengg/p/9197394.html