• HBase Programming: API introduction series, HTable pool (6)


      HTable is a fairly heavyweight object: creating one means loading the configuration file, connecting to ZooKeeper, querying the meta table, and so on. Under high concurrency this drags down system performance, which is why the concept of a "pool" was introduced.

      The purpose of introducing a "connection pool in HBase" is:

                       to improve the program's concurrency and access speed.

      Take a connection from the "pool", and when you are done with it, put it back into the "pool".

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;

    public class TableConnection {
        private TableConnection(){
        }

        private static HConnection connection = null;

        // synchronized so that concurrent callers cannot race and create two connections
        public static synchronized HConnection getConnection(){
            if(connection == null){
                ExecutorService pool = Executors.newFixedThreadPool(10);// create a fixed-size thread pool of 10 threads
                Configuration conf = HBaseConfiguration.create();
                conf.set("hbase.zookeeper.quorum","HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
                try{
                    connection = HConnectionManager.createConnection(conf,pool);// create the connection from the configuration and thread pool
                }catch (IOException e){
                    e.printStackTrace();// do not swallow the failure silently
                }
            }
            return connection;
        }
    }
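
      Side note: on HBase 1.x and later, HConnection and HConnectionManager are deprecated in favor of Connection and ConnectionFactory. Below is a minimal sketch of the same singleton written against that newer API; the class name TableConnection2 is my own placeholder, and the quorum setting is assumed to match the cluster above.

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class TableConnection2 {
        private static Connection connection = null;

        private TableConnection2(){
        }

        // synchronized: only one thread may create the shared connection
        public static synchronized Connection getConnection() throws IOException {
            if (connection == null) {
                ExecutorService pool = Executors.newFixedThreadPool(10);// worker threads shared by all table handles
                Configuration conf = HBaseConfiguration.create();
                conf.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
                connection = ConnectionFactory.createConnection(conf, pool);// heavyweight: create once, share everywhere
            }
            return connection;
        }
    }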

      So, in a program, how do we actually use this "pool"?

      That is, TableConnection is the shared, ready-made "pool"; from here on it can be reused as a template.

     

    1. Using the "pool" to improve on

    HBase Programming: API introduction series, put (client-side) (1)

      i.e., the approach shown above.

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;

    import zhouls.bigdata.HbaseProject.Pool.TableConnection;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HBaseTest {
        public static void main(String[] args) throws Exception {
    //        HTable table = new HTable(getConfig(),TableName.valueOf("test_table"));// table name is test_table
    //        Put put = new Put(Bytes.toBytes("row_04"));// row key is row_04
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy1"));// column family f, qualifier name, value Andy1
    //        put.add(Bytes.toBytes("f2"),Bytes.toBytes("name"),Bytes.toBytes("Andy3"));// column family f2, qualifier name, value Andy3
    //        table.put(put);
    //        table.close();

    //        Get get = new Get(Bytes.toBytes("row_04"));
    //        get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"));// if no column is specified, all columns are returned by default
    //        org.apache.hadoop.hbase.client.Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_2"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
    //        table.delete(delete);
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_04"));
    ////        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn deletes only the newest timestamp version of a column
    //        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns deletes all timestamp versions of a column
    //        table.delete(delete);
    //        table.close();

    //        Scan scan = new Scan();
    //        scan.setStartRow(Bytes.toBytes("row_01"));// start row is inclusive
    //        scan.setStopRow(Bytes.toBytes("row_03"));// stop row is exclusive
    //        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        ResultScanner rst = table.getScanner(scan);// scanner over all matching rows
    //        System.out.println(rst.toString());
    //        for (org.apache.hadoop.hbase.client.Result next = rst.next();next !=null;next = rst.next() ){
    //            for(Cell cell:next.rawCells()){// loop over the cells of one row key
    //                System.out.println(next.toString());
    //                System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
    //                System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
    //                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
    //            }
    //        }
    //        table.close();

            HBaseTest hbasetest = new HBaseTest();
            hbasetest.insertValue();
        }

        public void insertValue() throws Exception{
            HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
            Put put = new Put(Bytes.toBytes("row_04"));// row key is row_04
            put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("北京"));// column family f, qualifier name, value 北京
            table.put(put);
            table.close();// returns the table handle; the pooled connection stays open
        }

        public static Configuration getConfig(){
            Configuration configuration = new Configuration();
    //        configuration.set("hbase.rootdir","hdfs://HadoopMaster:9000/hbase");
            configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
            return configuration;
        }
    }

    hbase(main):035:0> scan 'test_table'
    ROW COLUMN+CELL
    row_01 column=f:col, timestamp=1478095650110, value=maizi
    row_01 column=f:name, timestamp=1478095741767, value=Andy2
    row_02 column=f:name, timestamp=1478095849538, value=Andy2
    row_03 column=f:name, timestamp=1478095893278, value=Andy3
    row_04 column=f:name, timestamp=1478096702098, value=Andy1
    4 row(s) in 0.1190 seconds

    hbase(main):036:0> scan 'test_table'
    ROW COLUMN+CELL
    row_01 column=f:col, timestamp=1478095650110, value=maizi
    row_01 column=f:name, timestamp=1478095741767, value=Andy2
    row_02 column=f:name, timestamp=1478095849538, value=Andy2
    row_03 column=f:name, timestamp=1478095893278, value=Andy3
    row_04 column=f:name, timestamp=1478097220790, value=\xE5\x8C\x97\xE4\xBA\xAC
    4 row(s) in 0.5970 seconds

    hbase(main):037:0>
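
      Note: the shell shows row_04's new value as \xE5\x8C\x97\xE4\xBA\xAC because HBase stores plain bytes and the shell escapes anything non-printable; those are exactly the UTF-8 bytes of the string 北京. A quick way to confirm this (my own snippet, not from the original post):

    byte[] b = Bytes.toBytes("北京");
    System.out.println(Bytes.toStringBinary(b));// prints \xE5\x8C\x97\xE4\xBA\xAC (the same escape format the shell uses)
    System.out.println(Bytes.toString(b));      // prints 北京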

     

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;

    import zhouls.bigdata.HbaseProject.Pool.TableConnection;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HBaseTest {
        public static void main(String[] args) throws Exception {
    //        HTable table = new HTable(getConfig(),TableName.valueOf("test_table"));// table name is test_table
    //        Put put = new Put(Bytes.toBytes("row_04"));// row key is row_04
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy1"));// column family f, qualifier name, value Andy1
    //        put.add(Bytes.toBytes("f2"),Bytes.toBytes("name"),Bytes.toBytes("Andy3"));// column family f2, qualifier name, value Andy3
    //        table.put(put);
    //        table.close();

    //        Get get = new Get(Bytes.toBytes("row_04"));
    //        get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"));// if no column is specified, all columns are returned by default
    //        org.apache.hadoop.hbase.client.Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_2"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
    //        table.delete(delete);
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_04"));
    ////        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn deletes only the newest timestamp version of a column
    //        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns deletes all timestamp versions of a column
    //        table.delete(delete);
    //        table.close();

    //        Scan scan = new Scan();
    //        scan.setStartRow(Bytes.toBytes("row_01"));// start row is inclusive
    //        scan.setStopRow(Bytes.toBytes("row_03"));// stop row is exclusive
    //        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        ResultScanner rst = table.getScanner(scan);// scanner over all matching rows
    //        System.out.println(rst.toString());
    //        for (org.apache.hadoop.hbase.client.Result next = rst.next();next !=null;next = rst.next() ){
    //            for(Cell cell:next.rawCells()){// loop over the cells of one row key
    //                System.out.println(next.toString());
    //                System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
    //                System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
    //                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
    //            }
    //        }
    //        table.close();

            HBaseTest hbasetest = new HBaseTest();
            hbasetest.insertValue();
        }

        public void insertValue() throws Exception{
            HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
            Put put = new Put(Bytes.toBytes("row_05"));// row key is row_05
            put.add(Bytes.toBytes("f"),Bytes.toBytes("address"),Bytes.toBytes("beijng"));// column family f, qualifier address, value beijng
            table.put(put);
            table.close();
        }

        public static Configuration getConfig(){
            Configuration configuration = new Configuration();
    //        configuration.set("hbase.rootdir","hdfs://HadoopMaster:9000/hbase");
            configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
            return configuration;
        }
    }

    2016-12-11 14:22:14,784 INFO [org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x19d12e87 connecting to ZooKeeper ensemble=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181
    2016-12-11 14:22:14,796 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
    (ZooKeeper client environment lines omitted)
    2016-12-11 14:22:14,801 INFO [org.apache.zookeeper.ZooKeeper] - Initiating client connection, connectString=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181 sessionTimeout=90000 watcher=hconnection-0x19d12e870x0, quorum=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181, baseZNode=/hbase
    2016-12-11 14:22:14,853 INFO [org.apache.zookeeper.ClientCnxn] - Opening socket connection to server HadoopMaster/192.168.80.10:2181. Will not attempt to authenticate using SASL (unknown error)
    2016-12-11 14:22:14,855 INFO [org.apache.zookeeper.ClientCnxn] - Socket connection established to HadoopMaster/192.168.80.10:2181, initiating session
    2016-12-11 14:22:14,960 INFO [org.apache.zookeeper.ClientCnxn] - Session establishment complete on server HadoopMaster/192.168.80.10:2181, sessionid = 0x1582556e7c5001c, negotiated timeout = 40000

    hbase(main):035:0> scan 'test_table'
    ROW COLUMN+CELL
    row_01 column=f:col, timestamp=1478095650110, value=maizi
    row_01 column=f:name, timestamp=1478095741767, value=Andy2
    row_02 column=f:name, timestamp=1478095849538, value=Andy2
    row_03 column=f:name, timestamp=1478095893278, value=Andy3
    row_04 column=f:name, timestamp=1478096702098, value=Andy1
    4 row(s) in 0.1190 seconds

    hbase(main):036:0> scan 'test_table'
    ROW COLUMN+CELL
    row_01 column=f:col, timestamp=1478095650110, value=maizi
    row_01 column=f:name, timestamp=1478095741767, value=Andy2
    row_02 column=f:name, timestamp=1478095849538, value=Andy2
    row_03 column=f:name, timestamp=1478095893278, value=Andy3
    row_04 column=f:name, timestamp=1478097220790, value=\xE5\x8C\x97\xE4\xBA\xAC
    4 row(s) in 0.5970 seconds

    hbase(main):037:0> scan 'test_table'
    ROW COLUMN+CELL
    row_01 column=f:col, timestamp=1478095650110, value=maizi
    row_01 column=f:name, timestamp=1478095741767, value=Andy2
    row_02 column=f:name, timestamp=1478095849538, value=Andy2
    row_03 column=f:name, timestamp=1478095893278, value=Andy3
    row_04 column=f:name, timestamp=1478097227253, value=\xE5\x8C\x97\xE4\xBA\xAC
    row_05 column=f:address, timestamp=1478097364649, value=beijng
    5 row(s) in 0.2630 seconds

    hbase(main):038:0>

       That is exactly the "pool" concept: the connection is created once and then kept alive for reuse.

      Detailed analysis

          Here I configured a fixed thread pool of size 10.

      It is really quite simple: one caller takes a connection to use, another caller takes another. When you are done, you return it (much like borrowing books from a library).

      Someone may ask: if all 10 in the fixed pool have been taken and an 11th request arrives, what happens? Is there nothing left to take?

          Answer: you simply wait until someone returns one. It is the same principle as a queue.

      The rationale for doing it this way is simple: with the pool, we no longer need to load the configuration file and connect to ZooKeeper by hand every time, because all of that is already written in TableConnection.java.
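
      To make the "borrowing" picture concrete, here is a minimal sketch of several threads sharing the pooled connection; the class name PoolDemo and the row keys are my own placeholders, not part of the original post. Each worker borrows a lightweight table handle, writes one row, and closes the handle, while the heavyweight connection stays open.

    package zhouls.bigdata.HbaseProject.Pool;

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PoolDemo {
        public static void main(String[] args) throws Exception {
            ExecutorService workers = Executors.newFixedThreadPool(4);// 4 client threads sharing one connection
            for (int i = 0; i < 4; i++) {
                final int id = i;
                workers.submit(new Runnable() {
                    public void run() {
                        try {
                            // getTable() is cheap: it borrows a handle from the shared connection
                            HTableInterface table = TableConnection.getConnection()
                                    .getTable(TableName.valueOf("test_table"));
                            Put put = new Put(Bytes.toBytes("row_demo_" + id));
                            put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes("worker" + id));
                            table.put(put);
                            table.close();// returns the handle; the connection itself stays open
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                });
            }
            workers.shutdown();
        }
    }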

    2. Using the "pool" to improve on

    HBase Programming: API introduction series, get (client-side) (2)

      i.e., the approach shown above.

      This is to give readers a deeper feel for the appeal of the "pool"; it is also the first-choice, strongly recommended practice in real-world company development.

     

    hbase(main):038:0> scan 'test_table'
    ROW COLUMN+CELL
    row_01 column=f:col, timestamp=1478095650110, value=maizi
    row_01 column=f:name, timestamp=1478095741767, value=Andy2
    row_02 column=f:name, timestamp=1478095849538, value=Andy2
    row_03 column=f:name, timestamp=1478095893278, value=Andy3
    row_04 column=f:name, timestamp=1478097227253, value=\xE5\x8C\x97\xE4\xBA\xAC
    row_05 column=f:address, timestamp=1478097364649, value=beijng
    5 row(s) in 0.2280 seconds

    hbase(main):039:0>

     

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;

    import zhouls.bigdata.HbaseProject.Pool.TableConnection;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HBaseTest {
        public static void main(String[] args) throws Exception {
    //        HTable table = new HTable(getConfig(),TableName.valueOf("test_table"));// table name is test_table
    //        Put put = new Put(Bytes.toBytes("row_04"));// row key is row_04
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy1"));// column family f, qualifier name, value Andy1
    //        put.add(Bytes.toBytes("f2"),Bytes.toBytes("name"),Bytes.toBytes("Andy3"));// column family f2, qualifier name, value Andy3
    //        table.put(put);
    //        table.close();

    //        Get get = new Get(Bytes.toBytes("row_04"));
    //        get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"));// if no column is specified, all columns are returned by default
    //        org.apache.hadoop.hbase.client.Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_2"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
    //        table.delete(delete);
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_04"));
    ////        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn deletes only the newest timestamp version of a column
    //        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns deletes all timestamp versions of a column
    //        table.delete(delete);
    //        table.close();

    //        Scan scan = new Scan();
    //        scan.setStartRow(Bytes.toBytes("row_01"));// start row is inclusive
    //        scan.setStopRow(Bytes.toBytes("row_03"));// stop row is exclusive
    //        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        ResultScanner rst = table.getScanner(scan);// scanner over all matching rows
    //        System.out.println(rst.toString());
    //        for (org.apache.hadoop.hbase.client.Result next = rst.next();next !=null;next = rst.next() ){
    //            for(Cell cell:next.rawCells()){// loop over the cells of one row key
    //                System.out.println(next.toString());
    //                System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
    //                System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
    //                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
    //            }
    //        }
    //        table.close();

            HBaseTest hbasetest = new HBaseTest();
    //        hbasetest.insertValue();
            hbasetest.getValue();
        }

    //    public void insertValue() throws Exception{
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Put put = new Put(Bytes.toBytes("row_05"));// row key is row_05
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("address"),Bytes.toBytes("beijng"));
    //        table.put(put);
    //        table.close();
    //    }

        public void getValue() throws Exception{
            HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
            Get get = new Get(Bytes.toBytes("row_03"));
            get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
            org.apache.hadoop.hbase.client.Result rest = table.get(get);
            System.out.println(rest.toString());
            table.close();
        }

        public static Configuration getConfig(){
            Configuration configuration = new Configuration();
    //        configuration.set("hbase.rootdir","hdfs://HadoopMaster:9000/hbase");
            configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
            return configuration;
        }
    }

    2016-12-11 14:37:12,030 INFO [org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x7660aac9 connecting to ZooKeeper ensemble=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181
    2016-12-11 14:37:12,040 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
    (ZooKeeper client environment lines omitted)
    2016-12-11 14:37:12,044 INFO [org.apache.zookeeper.ZooKeeper] - Initiating client connection, connectString=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181 sessionTimeout=90000 watcher=hconnection-0x7660aac90x0, quorum=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181, baseZNode=/hbase
    2016-12-11 14:37:12,091 INFO [org.apache.zookeeper.ClientCnxn] - Opening socket connection to server HadoopMaster/192.168.80.10:2181. Will not attempt to authenticate using SASL (unknown error)
    2016-12-11 14:37:12,094 INFO [org.apache.zookeeper.ClientCnxn] - Socket connection established to HadoopMaster/192.168.80.10:2181, initiating session
    2016-12-11 14:37:12,162 INFO [org.apache.zookeeper.ClientCnxn] - Session establishment complete on server HadoopMaster/192.168.80.10:2181, sessionid = 0x1582556e7c5001d, negotiated timeout = 40000
    keyvalues={row_03/f:name/1478095893278/Put/vlen=5/seqid=0}
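
      The keyvalues line above is just Result.toString(). To read the cell back as a plain string, getValue() could be extended as in the sketch below (my own illustration; same table and row as above):

    public void getValue() throws Exception{
        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
        Get get = new Get(Bytes.toBytes("row_03"));
        get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
        org.apache.hadoop.hbase.client.Result rest = table.get(get);
        byte[] raw = rest.getValue(Bytes.toBytes("f"), Bytes.toBytes("name"));// null if the cell does not exist
        System.out.println("f:name = " + (raw == null ? "<absent>" : Bytes.toString(raw)));// expected here: f:name = Andy3
        table.close();
    }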

    3.1. Using the "pool" to improve on

    HBase Programming: API introduction series, delete (client-side) (3)

    HBase Programming: API introduction, the difference between delete.deleteColumn and delete.deleteColumns (client-side) (4)

      i.e., the approach shown above.

        From the oldest timestamp version to the newest, the values are Andy2 -> Andy1 -> Andy0

                   (Andy2 was written first, Andy0 last)

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;

    import zhouls.bigdata.HbaseProject.Pool.TableConnection;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HBaseTest {
        public static void main(String[] args) throws Exception {
    //        HTable table = new HTable(getConfig(),TableName.valueOf("test_table"));// table name is test_table
    //        Put put = new Put(Bytes.toBytes("row_04"));// row key is row_04
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy1"));// column family f, qualifier name, value Andy1
    //        put.add(Bytes.toBytes("f2"),Bytes.toBytes("name"),Bytes.toBytes("Andy3"));// column family f2, qualifier name, value Andy3
    //        table.put(put);
    //        table.close();

    //        Get get = new Get(Bytes.toBytes("row_04"));
    //        get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"));// if no column is specified, all columns are returned by default
    //        org.apache.hadoop.hbase.client.Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_2"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
    //        table.delete(delete);
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_04"));
    ////        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn deletes only the newest timestamp version of a column
    //        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns deletes all timestamp versions of a column
    //        table.delete(delete);
    //        table.close();

    //        Scan scan = new Scan();
    //        scan.setStartRow(Bytes.toBytes("row_01"));// start row is inclusive
    //        scan.setStopRow(Bytes.toBytes("row_03"));// stop row is exclusive
    //        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        ResultScanner rst = table.getScanner(scan);// scanner over all matching rows
    //        System.out.println(rst.toString());
    //        for (org.apache.hadoop.hbase.client.Result next = rst.next();next !=null;next = rst.next() ){
    //            for(Cell cell:next.rawCells()){// loop over the cells of one row key
    //                System.out.println(next.toString());
    //                System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
    //                System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
    //                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
    //            }
    //        }
    //        table.close();

            HBaseTest hbasetest = new HBaseTest();
    //        hbasetest.insertValue();
    //        hbasetest.getValue();
            hbasetest.delete();
        }

    //    public void insertValue() throws Exception{
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Put put = new Put(Bytes.toBytes("row_01"));// row key is row_01
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy0"));
    //        table.put(put);
    //        table.close();
    //    }

    //    public void getValue() throws Exception{
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Get get = new Get(Bytes.toBytes("row_03"));
    //        get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        org.apache.hadoop.hbase.client.Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();
    //    }

        public void delete() throws Exception{
            HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
            Delete delete = new Delete(Bytes.toBytes("row_01"));
    //        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn deletes only the newest timestamp version of a column
            delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns deletes all timestamp versions of a column
            table.delete(delete);
            table.close();
        }

        public static Configuration getConfig(){
            Configuration configuration = new Configuration();
    //        configuration.set("hbase.rootdir","hdfs://HadoopMaster:9000/hbase");
            configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
            return configuration;
        }
    }

       

    The difference between delete.deleteColumn and delete.deleteColumns is:

        deleteColumn deletes only the newest timestamp version of the given column.

        deleteColumns deletes all timestamp versions of the given column.
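
      A quick way to see the difference for yourself is the sketch below (my own illustration, not from the original post): it writes three versions of f:name on a hypothetical row row_demo, then deletes with one variant or the other. This assumes the older versions have not yet been cleaned up by a flush/compaction, which is the same situation the Andy2 -> Andy1 -> Andy0 example above relies on.

    public void versionDemo() throws Exception{
        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));

        // write three versions of f:name on the same row: Andy2 (oldest), then Andy1, then Andy0 (newest)
        for (String v : new String[]{"Andy2", "Andy1", "Andy0"}) {
            Put put = new Put(Bytes.toBytes("row_demo"));
            put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes(v));
            table.put(put);
            Thread.sleep(10);// make sure each version gets a distinct timestamp
        }

        Delete delete = new Delete(Bytes.toBytes("row_demo"));
        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// removes only Andy0; a later get sees Andy1
//        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// would remove Andy2, Andy1 and Andy0
        table.delete(delete);
        table.close();
    }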

         

    3.2. Using the "pool" to improve on

    HBase Programming: API introduction, delete (client-side)

    HBase Programming: API introduction, the difference between delete.deleteColumn and delete.deleteColumns (client-side)

      i.e., the approach shown above.

          Again, from the oldest timestamp version to the newest: Andy2 -> Andy1 -> Andy0 (Andy2 written first, Andy0 last).

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;

    import zhouls.bigdata.HbaseProject.Pool.TableConnection;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HBaseTest {
        public static void main(String[] args) throws Exception {
    //        HTable table = new HTable(getConfig(),TableName.valueOf("test_table"));// table name is test_table
    //        Put put = new Put(Bytes.toBytes("row_04"));// row key is row_04
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy1"));// column family f, qualifier name, value Andy1
    //        put.add(Bytes.toBytes("f2"),Bytes.toBytes("name"),Bytes.toBytes("Andy3"));// column family f2, qualifier name, value Andy3
    //        table.put(put);
    //        table.close();

    //        Get get = new Get(Bytes.toBytes("row_04"));
    //        get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"));// if no column is specified, all columns are returned by default
    //        org.apache.hadoop.hbase.client.Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_2"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
    //        table.delete(delete);
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_04"));
    ////        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn deletes only the newest timestamp version of a column
    //        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns deletes all timestamp versions of a column
    //        table.delete(delete);
    //        table.close();

    //        Scan scan = new Scan();
    //        scan.setStartRow(Bytes.toBytes("row_01"));// start row is inclusive
    //        scan.setStopRow(Bytes.toBytes("row_03"));// stop row is exclusive
    //        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        ResultScanner rst = table.getScanner(scan);// scanner over all matching rows
    //        System.out.println(rst.toString());
    //        for (org.apache.hadoop.hbase.client.Result next = rst.next();next !=null;next = rst.next() ){
    //            for(Cell cell:next.rawCells()){// loop over the cells of one row key
    //                System.out.println(next.toString());
    //                System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
    //                System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
    //                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
    //            }
    //        }
    //        table.close();

            HBaseTest hbasetest = new HBaseTest();
    //        hbasetest.insertValue();
    //        hbasetest.getValue();
            hbasetest.delete();
        }

    //    public void insertValue() throws Exception{
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Put put = new Put(Bytes.toBytes("row_01"));// row key is row_01
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy0"));
    //        table.put(put);
    //        table.close();
    //    }

    //    public void getValue() throws Exception{
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Get get = new Get(Bytes.toBytes("row_03"));
    //        get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        org.apache.hadoop.hbase.client.Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();
    //    }

        public void delete() throws Exception{
            HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
            Delete delete = new Delete(Bytes.toBytes("row_01"));
            delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn deletes only the newest timestamp version of a column
    //        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns deletes all timestamp versions of a column
            table.delete(delete);
            table.close();
        }

        public static Configuration getConfig(){
            Configuration configuration = new Configuration();
    //        configuration.set("hbase.rootdir","hdfs://HadoopMaster:9000/hbase");
            configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
            return configuration;
        }
    }

          

     

                From the oldest timestamp version to the newest: Andy2 -> Andy1 -> Andy0 (Andy2 written first, Andy0 last); this run's deleteColumn removes only the newest of these.

     To recap, the difference between delete.deleteColumn and delete.deleteColumns is:

        deleteColumn deletes only the newest timestamp version of the given column.

        deleteColumns deletes all timestamp versions of the given column.

    4. Using the "pool" to improve on

    HBase Programming: API introduction, scan (client-side)

      i.e., the approach shown above.

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;

    import zhouls.bigdata.HbaseProject.Pool.TableConnection;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HBaseTest {
        public static void main(String[] args) throws Exception {
    //        HTable table = new HTable(getConfig(),TableName.valueOf("test_table"));// table name is test_table
    //        Put put = new Put(Bytes.toBytes("row_04"));// row key is row_04
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy1"));// column family f, qualifier name, value Andy1
    //        put.add(Bytes.toBytes("f2"),Bytes.toBytes("name"),Bytes.toBytes("Andy3"));// column family f2, qualifier name, value Andy3
    //        table.put(put);
    //        table.close();

    //        Get get = new Get(Bytes.toBytes("row_04"));
    //        get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"));// if no column is specified, all columns are returned by default
    //        org.apache.hadoop.hbase.client.Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_2"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
    //        table.delete(delete);
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_04"));
    ////        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn deletes only the newest timestamp version of a column
    //        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns deletes all timestamp versions of a column
    //        table.delete(delete);
    //        table.close();

    //        Scan scan = new Scan();
    //        scan.setStartRow(Bytes.toBytes("row_01"));// start row is inclusive
    //        scan.setStopRow(Bytes.toBytes("row_03"));// stop row is exclusive
    //        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        ResultScanner rst = table.getScanner(scan);// scanner over all matching rows
    //        System.out.println(rst.toString());
    //        for (org.apache.hadoop.hbase.client.Result next = rst.next();next !=null;next = rst.next() ){
    //            for(Cell cell:next.rawCells()){// loop over the cells of one row key
    //                System.out.println(next.toString());
    //                System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
    //                System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
    //                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
    //            }
    //        }
    //        table.close();

            HBaseTest hbasetest = new HBaseTest();
    //        hbasetest.insertValue();
    //        hbasetest.getValue();
    //        hbasetest.delete();
            hbasetest.scanValue();
        }

    //    public void insertValue() throws Exception{
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Put put = new Put(Bytes.toBytes("row_01"));// row key is row_01
    //        put.add(Bytes.toBytes("f"),Bytes.toBytes("name"),Bytes.toBytes("Andy0"));
    //        table.put(put);
    //        table.close();
    //    }

    //    public void getValue() throws Exception{
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Get get = new Get(Bytes.toBytes("row_03"));
    //        get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        org.apache.hadoop.hbase.client.Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();
    //    }

    //    public void delete() throws Exception{
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Delete delete = new Delete(Bytes.toBytes("row_01"));
    //        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn deletes only the newest timestamp version of a column
    ////        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns deletes all timestamp versions of a column
    //        table.delete(delete);
    //        table.close();
    //    }

        public void scanValue() throws Exception{
            HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
            Scan scan = new Scan();
            scan.setStartRow(Bytes.toBytes("row_02"));// start row is inclusive
            scan.setStopRow(Bytes.toBytes("row_04"));// stop row is exclusive
            scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
            ResultScanner rst = table.getScanner(scan);// scanner over all matching rows
            System.out.println(rst.toString());
            for (org.apache.hadoop.hbase.client.Result next = rst.next();next !=null;next = rst.next() ){
                for(Cell cell:next.rawCells()){// loop over the cells of one row key
                    System.out.println(next.toString());
                    System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
                    System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
                    System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
                }
            }
            rst.close();// close the scanner to release server-side resources
            table.close();
        }

        public static Configuration getConfig(){
            Configuration configuration = new Configuration();
    //        configuration.set("hbase.rootdir","hdfs://HadoopMaster:9000/hbase");
            configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
            return configuration;
        }
    }
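
      One caveat in scanValue(): the ResultScanner holds server-side resources, so it should be closed even when iteration throws. A minimal exception-safe variant (my own sketch; the method name scanValueSafely is a placeholder) wraps both handles in try/finally and uses the fact that ResultScanner is Iterable:

    public void scanValueSafely() throws Exception{
        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
        try {
            Scan scan = new Scan();
            scan.setStartRow(Bytes.toBytes("row_02"));// start row is inclusive
            scan.setStopRow(Bytes.toBytes("row_04"));// stop row is exclusive
            scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
            ResultScanner rst = table.getScanner(scan);
            try {
                for (org.apache.hadoop.hbase.client.Result next : rst) {// ResultScanner is Iterable<Result>
                    for (Cell cell : next.rawCells()) {
                        System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell))
                                + " col:" + Bytes.toString(CellUtil.cloneQualifier(cell))
                                + " value:" + Bytes.toString(CellUtil.cloneValue(cell)));
                    }
                }
            } finally {
                rst.close();// always release the scanner
            }
        } finally {
            table.close();// always return the table handle to the pooled connection
        }
    }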

    2016-12-11 15:14:56,940 INFO [org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper] - Process identifier=hconnection-0x278a676 connecting to ZooKeeper ensemble=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181
    2016-12-11 15:14:56,954 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
    2016-12-11 15:14:56,954 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:host.name=WIN-BQOBV63OBNM
    2016-12-11 15:14:56,954 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.version=1.7.0_51
    2016-12-11 15:14:56,954 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.vendor=Oracle Corporation
    2016-12-11 15:14:56,954 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.home=C:Program FilesJavajdk1.7.0_51jre
    2016-12-11 15:14:56,954 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.class.path=D:\Code\MyEclipseJavaCode\HbaseProject\bin;D:\SoftWare\hbase-1.2.3\lib\activation-1.1.jar;... (the rest of the HBase 1.2.3 lib classpath is omitted here)
    2016-12-11 15:14:56,955 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.library.path=C:\Program Files\Java\jdk1.7.0_51\bin;... (omitted)
    2016-12-11 15:14:56,956 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.io.tmpdir=C:\Users\ADMINI~1\AppData\Local\Temp
    2016-12-11 15:14:56,956 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:java.compiler=<NA>
    2016-12-11 15:14:56,956 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:os.name=Windows 7
    2016-12-11 15:14:56,956 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:os.arch=amd64
    2016-12-11 15:14:56,956 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:os.version=6.1
    2016-12-11 15:14:56,957 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:user.name=Administrator
    2016-12-11 15:14:56,957 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:user.home=C:\Users\Administrator
    2016-12-11 15:14:56,957 INFO [org.apache.zookeeper.ZooKeeper] - Client environment:user.dir=D:\Code\MyEclipseJavaCode\HbaseProject
    2016-12-11 15:14:56,958 INFO [org.apache.zookeeper.ZooKeeper] - Initiating client connection, connectString=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181 sessionTimeout=90000 watcher=hconnection-0x278a6760x0, quorum=HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181, baseZNode=/hbase
    2016-12-11 15:14:57,015 INFO [org.apache.zookeeper.ClientCnxn] - Opening socket connection to server HadoopMaster/192.168.80.10:2181. Will not attempt to authenticate using SASL (unknown error)
    2016-12-11 15:14:57,018 INFO [org.apache.zookeeper.ClientCnxn] - Socket connection established to HadoopMaster/192.168.80.10:2181, initiating session
    2016-12-11 15:14:57,044 INFO [org.apache.zookeeper.ClientCnxn] - Session establishment complete on server HadoopMaster/192.168.80.10:2181, sessionid = 0x1582556e7c50024, negotiated timeout = 40000
    org.apache.hadoop.hbase.client.ClientScanner@4362f2fe
    keyvalues={row_02/f:name/1478095849538/Put/vlen=5/seqid=0}
    family:f
    col:name
    value:Andy2
    keyvalues={row_03/f:name/1478095893278/Put/vlen=5/seqid=0}
    family:f
    col:name
    value:Andy3

      The scan output above covers only row_02 and row_03, because the start row is inclusive while the stop row (row_04) is exclusive. The remaining operations all follow the same pattern, so I won't walk through each of them here; try them out yourself.

    Finally, to summarize:

      In real-world development, you absolutely must master the thread-pool / shared-connection pattern!
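
      One caveat on API versions: HConnection and HConnectionManager, as used in this post, belong to the HBase 0.98/1.x client API; they have been deprecated since 1.0 and were removed in 2.0 in favor of Connection and ConnectionFactory. Below is a minimal sketch of the same pool pattern on the newer API. The class name ModernTableConnection is my own; everything else mirrors TableConnection:

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    // Sketch only: the same singleton pool pattern on the HBase 1.0+ client API.
    public class ModernTableConnection {
        private ModernTableConnection() {
        }

        private static volatile Connection connection = null;

        public static synchronized Connection getConnection() throws IOException {
            if (connection == null) {
                // fixed-size thread pool handed to the connection, as in TableConnection below
                ExecutorService pool = Executors.newFixedThreadPool(10);
                Configuration conf = HBaseConfiguration.create();
                conf.set("hbase.zookeeper.quorum",
                        "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
                connection = ConnectionFactory.createConnection(conf, pool);
            }
            return connection;
        }
    }

      Table handles then come from connection.getTable(TableName.valueOf("test_table")), which returns a Table rather than an HTableInterface; the rest of the calling code is unchanged.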

    The complete code is attached below.

    package zhouls.bigdata.HbaseProject.Pool;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class HBaseTest {

        public static void main(String[] args) throws Exception {
    //        HTable table = new HTable(getConfig(), TableName.valueOf("test_table"));// table name: test_table
    //        Put put = new Put(Bytes.toBytes("row_04"));// row key: row_04
    //        put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes("Andy1"));// family f, qualifier name, value Andy1
    //        put.add(Bytes.toBytes("f2"), Bytes.toBytes("name"), Bytes.toBytes("Andy3"));// family f2, qualifier name, value Andy3
    //        table.put(put);
    //        table.close();

    //        Get get = new Get(Bytes.toBytes("row_04"));
    //        get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"));// if no column is specified, as here, the whole row is returned by default
    //        Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();

    //        Delete delete = new Delete(Bytes.toBytes("row_2"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
    //        delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
    //        table.delete(delete);
    //        table.close();


    //        Delete delete = new Delete(Bytes.toBytes("row_04"));
    ////        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn removes only the newest version of a cell in the family
    //        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns removes all versions of the cell
    //        table.delete(delete);
    //        table.close();


    //        Scan scan = new Scan();
    //        scan.setStartRow(Bytes.toBytes("row_01"));// start row key, inclusive
    //        scan.setStopRow(Bytes.toBytes("row_03"));// stop row key, exclusive
    //        scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        ResultScanner rst = table.getScanner(scan);// iterate over the whole result set
    //        System.out.println(rst.toString());
    //        for (Result next : rst) {
    //            for (Cell cell : next.rawCells()) {// loop over the cells of one row key
    //                System.out.println(next.toString());
    //                System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
    //                System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
    //                System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
    //            }
    //        }
    //        table.close();

            HBaseTest hbasetest = new HBaseTest();
    //        hbasetest.insertValue();
    //        hbasetest.getValue();
    //        hbasetest.delete();
            hbasetest.scanValue();
        }


        // In production code, go through the pooled connection like this
    //    public void insertValue() throws Exception {
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Put put = new Put(Bytes.toBytes("row_01"));// row key: row_01
    //        put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes("Andy0"));
    //        table.put(put);
    //        table.close();
    //    }


        // In production code, go through the pooled connection like this
    //    public void getValue() throws Exception {
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Get get = new Get(Bytes.toBytes("row_03"));
    //        get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
    //        Result rest = table.get(get);
    //        System.out.println(rest.toString());
    //        table.close();
    //    }


        // In production code, go through the pooled connection like this
    //    public void delete() throws Exception {
    //        HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
    //        Delete delete = new Delete(Bytes.toBytes("row_01"));
    //        delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumn removes only the newest version of a cell in the family
    ////        delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name"));// deleteColumns removes all versions of the cell
    //        table.delete(delete);
    //        table.close();
    //    }

        // In production code, go through the pooled connection like this
        public void scanValue() throws Exception {
            HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
            Scan scan = new Scan();
            scan.setStartRow(Bytes.toBytes("row_02"));// start row key, inclusive
            scan.setStopRow(Bytes.toBytes("row_04"));// stop row key, exclusive
            scan.addColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));
            ResultScanner rst = table.getScanner(scan);// iterate over the whole result set
            System.out.println(rst.toString());
            for (Result next : rst) {
                for (Cell cell : next.rawCells()) {// loop over the cells of one row key
                    System.out.println(next.toString());
                    System.out.println("family:" + Bytes.toString(CellUtil.cloneFamily(cell)));
                    System.out.println("col:" + Bytes.toString(CellUtil.cloneQualifier(cell)));
                    System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)));
                }
            }
            rst.close();// release the scanner before returning the table handle
            table.close();
        }



        public static Configuration getConfig() {
            Configuration configuration = HBaseConfiguration.create();// loads hbase-default.xml / hbase-site.xml defaults
    //        configuration.set("hbase.rootdir", "hdfs://HadoopMaster:9000/hbase");
            configuration.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
            return configuration;
        }
    }
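
      A usage note on the pooled connection: because getTable() hands out lightweight handles, it pairs naturally with batched writes, which is where the shared pool pays off most under load. Below is a hedged sketch of one more method that could be added to HBaseTest; the method name insertBatch and the row keys row_05 to row_07 are my own illustration, and it additionally needs java.util.List and java.util.ArrayList imports:

        // Sketch only: batch several Puts through one pooled table handle.
        public void insertBatch() throws Exception {
            HTableInterface table = TableConnection.getConnection().getTable(TableName.valueOf("test_table"));
            List<Put> puts = new ArrayList<Put>();
            for (int i = 5; i <= 7; i++) {
                Put put = new Put(Bytes.toBytes("row_0" + i));// hypothetical row keys row_05..row_07
                put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes("Andy" + i));
                puts.add(put);
            }
            table.put(puts);// the client sends the whole batch in one round of RPCs
            table.close();// returns the handle; the shared connection and its pool stay open
        }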

    package zhouls.bigdata.HbaseProject.Pool;

    import java.io.IOException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;


    public class TableConnection {

        private TableConnection() {
        }

        private static HConnection connection = null;

        // synchronized so that concurrent callers cannot race and create two connections
        public static synchronized HConnection getConnection() {
            if (connection == null) {
                ExecutorService pool = Executors.newFixedThreadPool(10);// create a fixed-size thread pool
                Configuration conf = HBaseConfiguration.create();
                conf.set("hbase.zookeeper.quorum", "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
                try {
                    connection = HConnectionManager.createConnection(conf, pool);// the connection takes the configuration and the thread pool
                } catch (IOException e) {
                    e.printStackTrace();// don't swallow the failure silently
                }
            }
            return connection;
        }
    }
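
      One last point: the shared HConnection lives for the whole JVM, so it is worth releasing it explicitly when the application shuts down, for example from a JVM shutdown hook. Here is a minimal sketch of a companion method one could add to TableConnection; closeConnection is my suggestion, not part of the original class:

        // Sketch only: companion method for TableConnection to release the shared connection.
        public static synchronized void closeConnection() {
            if (connection != null) {
                try {
                    connection.close();// releases the ZooKeeper session and the worker threads
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    connection = null;// allow a fresh connection to be created later if needed
                }
            }
        }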
