• hive javaapi 002


    HiveServer2 listens on port 10000 by default.
    Before starting it, edit hive-site.xml to configure impersonation and avoid HDFS permission problems: with impersonation enabled, HiveServer2 executes statements as the user who submitted them; if the property below is set to false, statements are executed as the admin user that started the HiveServer2 daemon. (A quick connectivity check follows the property snippet below.)

    <property>    
      <name>hive.server2.enable.doAs</name>    
      <value>false</value>    
    </property>  
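
    Before running the full example below, it can help to confirm that HiveServer2 is actually reachable on port 10000. The following is only a minimal sketch, assuming the same host (192.168.231.137) and the hadoop/hadoop credentials used later in this post:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HiveConnectivityCheck {
        public static void main(String[] args) throws Exception {
            // Assumed host, port and credentials -- adjust to your cluster.
            String url = "jdbc:hive2://192.168.231.137:10000/default";
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            try (Connection conn = DriverManager.getConnection(url, "hadoop", "hadoop");
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("show databases")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        }
    }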

    pom.xml

    <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
      xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
      <modelVersion>4.0.0</modelVersion>
    
      <groupId>xinwei</groupId>
      <artifactId>Hive</artifactId>
      <version>0.0.1-SNAPSHOT</version>
      <packaging>jar</packaging>
    
      <name>Hive</name>
      <url>http://maven.apache.org</url>
    
      <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
      </properties>
    
      <dependencies>
        <dependency>
          <groupId>junit</groupId>
          <artifactId>junit</artifactId>
          <version>3.8.1</version>
          <scope>test</scope>
        </dependency>
        
        
        <dependency>
          <groupId>org.apache.hive</groupId>
          <artifactId>hive-jdbc</artifactId>
          <version>0.14.0</version>
        </dependency>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-common</artifactId>
          <version>2.4.1</version>
        </dependency>
        <dependency>
          <groupId>jdk.tools</groupId>
          <artifactId>jdk.tools</artifactId>
          <version>1.8</version>
          <scope>system</scope>
          <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
      </dependencies>
    </project>
    
    import java.sql.Connection;  
    import java.sql.DriverManager;  
    import java.sql.ResultSet;  
    import java.sql.SQLException;  
    import java.sql.Statement;  
    //Hive 0.11.0 introduced a new service, HiveServer2, which fixes the security and concurrency problems of the original HiveServer. The startup script lives in ${HIVE_HOME}/bin/hiveserver2, and the service can be started with:
    //nohup ./hive --service hiveserver2 >/dev/null 2>&1 &
    //It listens on port 10000 by default.
    //Before starting it, edit hive-site.xml to configure impersonation and avoid HDFS permission problems: with impersonation enabled, HiveServer2 executes statements as the submitting user; if set to false, it executes them as the admin user that started the HiveServer2 daemon.
    public class HiveTest {  
        private static String driverName = "org.apache.hive.jdbc.HiveDriver";  
        private static String url = "jdbc:hive2://192.168.231.137:10000/default";  
        private static String username = "hadoop";  
        private static String password = "hadoop";  
        private static Connection conn = null;  
        private static Statement stmt = null;  
        private static String sql = "";  
        private static ResultSet res = null;  
        static {  
            try {  
                Class.forName(driverName);  
                conn = DriverManager.getConnection(url, username, password);  
                stmt = conn.createStatement();  
            } catch (Exception e) {  
                e.printStackTrace();  
            }  
        }  
      
        public static void main(String[] args) throws Exception {  
            dropTable("hivetest");  
            createTable("hivetest");  
            showTables("hivetest");  
            describeTables("hivetest");  
            insert("hivetest", new String[]{"10000","tom","23"});  
            insert("hivetest", new String[]{"10001","zhangshan","80"});  
            insert("hivetest", new String[]{"10002","lisi","30"});  
            insert("hivetest", new String[]{"10003","lucy","40"});  
            selectData("hivetest");  
    //        dropTable("hivetest");  
        }  
      
        // Query data
        public static void selectData(String tableName) throws SQLException {  
            sql = "select * from " + tableName;  
            res = stmt.executeQuery(sql);  
            while (res.next()) {  
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }  
        }  
      
        // Insert data
        public static void insert(String tableName, String[] datas) throws SQLException {  
            sql = "insert into table " + tableName + " values ('" + datas[0] + "','" + datas[1] + "'," + Integer.valueOf(datas[2]) + ")";  
            stmt.execute(sql);  
        }  
      
        // Describe the table structure
        public static void describeTables(String tableName) throws SQLException {  
            sql = "describe " + tableName;  
            res = stmt.executeQuery(sql);  
            while (res.next()) {  
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }  
        }  
      
        // Show the table
        public static void showTables(String tableName) throws SQLException {  
            sql = "show tables '" + tableName + "'";  
            res = stmt.executeQuery(sql);  
            if (res.next()) {  
                System.out.println(res.getString(1));  
            }  
        }  
      
        // Create the table
        public static void createTable(String tableName) throws SQLException {  
            sql = "create table " + tableName + " (id string, name string,age int)  row format delimited fields terminated by '	'";  
            stmt.execute(sql);  
        }  
      
        // Drop the table
        public static String dropTable(String tableName) throws SQLException {  
            // Name of the table to drop
            sql = "drop table " + tableName;  
            stmt.execute(sql);  
            return tableName;  
        }  
    }  
    
    // Sample output:
    //hivetest
    //id    string
    //name    string
    //age    int
    //10000    tom
    //10001    zhangshan
    //10002    lisi
    //10003    lucy
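
    One caveat with the class above: the Connection and Statement are opened in a static initializer and never closed. Below is a minimal sketch of the same query reworked with try-with-resources so the connection, statement and result set are released when done (same assumed URL and credentials; this is not part of the original post):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HiveSelectExample {
        public static void main(String[] args) throws Exception {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            // Assumed connection details -- adjust to your cluster.
            String url = "jdbc:hive2://192.168.231.137:10000/default";
            try (Connection conn = DriverManager.getConnection(url, "hadoop", "hadoop");
                 Statement stmt = conn.createStatement();
                 ResultSet res = stmt.executeQuery("select * from hivetest")) {
                while (res.next()) {
                    // Columns: id (string), name (string), age (int)
                    System.out.println(res.getString(1) + "\t" + res.getString(2) + "\t" + res.getInt(3));
                }
            } // connection, statement and result set are closed automatically here
        }
    }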
  • Original post: https://www.cnblogs.com/alamps/p/7985102.html