• Connecting to Hive via JDBC from Java and fetching execution logs in real time (repost)


    Reposted from: https://blog.csdn.net/weixin_43455443/article/details/115343995

    4. Connecting to Hive on Spark from Java code with hive-jdbc
    Add the hive-jdbc dependency to the pom.xml:

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>2.1.1</version>
    </dependency>

    import java.sql.*;
    import java.util.Scanner;

    public class HiveJDBC {
        public static void main(String[] args) throws ClassNotFoundException, SQLException, InterruptedException {
            String driverName = "org.apache.hive.jdbc.HiveDriver";
            String url = "jdbc:hive2://hadoop162:10000";
            String dbName = "default";
            Connection con = null;
            Statement state = null;
            ResultSet res = null;

            Class.forName(driverName);
            //A username and password must be supplied (the password may be an empty string);
            //otherwise HiveServer2 reports an error that the SparkSession failed to start
            con = DriverManager.getConnection(url + "/" + dbName, "hive", "");
            state = con.createStatement();
            Scanner scan = new Scanner(System.in);
            String sql = null;
            //register a temporary UDF (optional)
            state.execute("create temporary function pinjie as 'com.topnet.MyUDF' using jar 'hdfs:///user/hive/udf/hiveUDF-1.0.jar'");
            while (true) {
                System.out.println("Please enter SQL:");
                if (scan.hasNext()) {
                    sql = scan.nextLine();
                }
                System.out.println(sql);
                res = state.executeQuery(sql);
                while (res.next()) {
                    System.out.println(res.getString(1));
                }
                Thread.sleep(100);
            }
        }
    }
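
    The example above never closes the connection, statement, or result set. As a side note, the minimal sketch below (not from the original post) shows the same connection wrapped in try-with-resources so the JDBC resources are released automatically; it assumes the same hadoop162:10000 HiveServer2 address and "hive" user as above, and the class name and the "show tables" query are only illustrative:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HiveJdbcTryWithResources {
        public static void main(String[] args) throws Exception {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            //same HiveServer2 address and user as in the example above; adjust to your cluster
            String url = "jdbc:hive2://hadoop162:10000/default";
            try (Connection con = DriverManager.getConnection(url, "hive", "");
                 Statement state = con.createStatement();
                 //"show tables" is just a placeholder query for this sketch
                 ResultSet res = state.executeQuery("show tables")) {
                while (res.next()) {
                    System.out.println(res.getString(1));
                }
            } //all three resources are closed here, even if an exception was thrown
        }
    }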


    5. Fetching the execution log while a SQL statement runs
    When a statement is run through hive-jdbc, its execution log can be retrieved through three methods on HiveStatement: List<String> getQueryLog(), List<String> getQueryLog(boolean incremental, int fetchSize), and boolean hasMoreLogs(). Since a Hive query can take a long time to finish, these methods can also be used to display the query's progress in real time.

    To display the progress in real time, start a separate thread that keeps fetching and printing the log.

    import org.apache.hive.jdbc.HiveStatement;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    import java.sql.*;
    import java.util.List;
    import java.util.Scanner;

    public class HiveJDBC {
        public static void main(String[] args) throws ClassNotFoundException, SQLException, InterruptedException {
            Logger log = LoggerFactory.getLogger(HiveJDBC.class);
            String driverName = "org.apache.hive.jdbc.HiveDriver";
            String url = "jdbc:hive2://hadoop162:10000";
            String dbName = "default";
            Connection con = null;
            Statement state = null;
            ResultSet res = null;

            Class.forName(driverName);
            //A username and password must be supplied (the password may be an empty string);
            //otherwise HiveServer2 reports an error that the SparkSession failed to start
            con = DriverManager.getConnection(url + "/" + dbName, "hive", "");
            state = con.createStatement();
            Scanner scan = new Scanner(System.in);
            String sql = null;

            //start a daemon thread that fetches the execution log of the running SQL
            Thread logThread = new Thread(new HiveLog((HiveStatement) state));
            logThread.setDaemon(true);
            logThread.start();

            //register a temporary UDF (optional)
            state.execute("create temporary function pinjie as 'com.topnet.MyUDF' using jar 'hdfs:///user/hive/udf/hiveUDF-1.0.jar'");
            while (true) {
                System.out.println("Please enter SQL:");
                if (scan.hasNext()) {
                    sql = scan.nextLine();
                }
                log.error("SQL statement: " + sql);
                res = state.executeQuery(sql);
                while (res.next()) {
                    System.out.println(res.getString(1));
                }
                Thread.sleep(100);
            }
        }

        static class HiveLog implements Runnable {
            private final HiveStatement state;

            HiveLog(HiveStatement state) {
                this.state = state;
            }

            private void updateQueryLog() {
                try {
                    List<String> queryLogs = state.getQueryLog();
                    for (String log : queryLogs) {
                        System.out.println("progress --> " + log);
                    }
                } catch (Exception e) {
                    //the log cannot be fetched once the statement is closed or cancelled; ignore
                }
            }

            @Override
            public void run() {
                try {
                    //poll continuously for the execution log of the running SQL
                    while (true) {
                        if (state.hasMoreLogs()) {
                            updateQueryLog();
                        }
                        Thread.sleep(100);
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    }
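
    The HiveLog thread above uses the no-argument getQueryLog(), which fetches the accumulated log. Below is a minimal sketch (not from the original post) of the incremental overload named earlier, getQueryLog(boolean incremental, int fetchSize): with incremental set to true each call returns only the lines produced since the previous call. The class name IncrementalHiveLog and the fetch size of 500 are illustrative choices, not part of the original example:

    import org.apache.hive.jdbc.HiveStatement;

    import java.util.List;

    class IncrementalHiveLog implements Runnable {
        private final HiveStatement state;

        IncrementalHiveLog(HiveStatement state) {
            this.state = state;
        }

        @Override
        public void run() {
            try {
                while (true) {
                    if (state.hasMoreLogs()) {
                        //incremental = true: only lines produced since the last call; 500 = max lines per fetch
                        List<String> logs = state.getQueryLog(true, 500);
                        for (String line : logs) {
                            System.out.println("progress --> " + line);
                        }
                    }
                    Thread.sleep(100);
                }
            } catch (Exception e) {
                //the statement may have been closed while polling; stop the log thread
            }
        }
    }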
  • Original post: https://www.cnblogs.com/wangbin2188/p/16646668.html