• Example of using an abstract JDBC connection method
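  The snippets below apply the template method pattern to JDBC: BaseDataSource implements query() and result mapping once and leaves getConnection() abstract, CustomDataSource supplies a concrete connection through the small ConnectUtil wrapper around DriverManager, and App runs a query against a MySQL database.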


    package com.zdlt.auth.api.base.datasource;
    
    import java.sql.*;
    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;
    
    /**
     * BaseDataSource
     * <p>
     * Created by shiyanjun on 2019/12/19.
     */
    public abstract class BaseDataSource {
        /**
         * Obtain a connection; implemented by concrete subclasses.
         *
         * @return a JDBC connection
         */
        abstract Connection getConnection() throws SQLException;
    
        /**
         * Execute a query and return all rows.
         *
         * @param sql the SQL to execute
         * @return one map per row, keyed by column label
         * @throws SQLException if the query fails
         */
        public List<Map<String, Object>> query(String sql) throws SQLException {
            // try-with-resources closes the connection, statement and result set
            try (Connection connection = getConnection();
                 PreparedStatement statement = connection.prepareStatement(sql);
                 ResultSet resultSet = statement.executeQuery()) {
                return resolveResult(resultSet);
            }
        }
    
        /**
         * Convert a ResultSet into a list of row maps.
         *
         * @param resultSet the result set to read
         * @return one LinkedHashMap per row, preserving column order
         * @throws SQLException if reading the result set fails
         */
        public List<Map<String, Object>> resolveResult(ResultSet resultSet) throws SQLException {
            List<Map<String, Object>> resultList = new ArrayList<>();
            ResultSetMetaData metaData = resultSet.getMetaData();
            while (resultSet.next()) {
                Map<String, Object> resultMap = new LinkedHashMap<>();
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    String columnLabel = metaData.getColumnLabel(i);
                    // getObject keeps each column's natural Java type
                    Object columnValue = resultSet.getObject(i);
                    resultMap.put(columnLabel, columnValue);
                }
                resultList.add(resultMap);
            }
            return resultList;
        }
    }
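
  Note that query() executes whatever SQL string it receives, so callers must never concatenate user input into it. A minimal sketch of a parameterized overload that could be added to BaseDataSource is shown below; the varargs signature is an assumption, not part of the original class:

        /**
         * Hypothetical parameterized variant of query(): binds each argument
         * with PreparedStatement.setObject before executing.
         */
        public List<Map<String, Object>> query(String sql, Object... params) throws SQLException {
            try (Connection connection = getConnection();
                 PreparedStatement statement = connection.prepareStatement(sql)) {
                for (int i = 0; i < params.length; i++) {
                    statement.setObject(i + 1, params[i]); // JDBC parameters are 1-based
                }
                try (ResultSet resultSet = statement.executeQuery()) {
                    return resolveResult(resultSet);
                }
            }
        }

  A call would then look like customDataSource.query("select * from auth_code where code = ?", someCode), where someCode is any value supplied by the caller.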
    package com.zdlt.auth.api.base.datasource;
    
    import java.sql.Connection;
    import java.sql.SQLException;
    
    /**
     * CustomDataSource
     * <p>
     * Created by shiyanjun on 2019/12/19.
     */
    public class CustomDataSource extends BaseDataSource {
        private String url;
        private String username;
        private String password;
    
        CustomDataSource(String url, String username, String password) {
            this.url = url;
            this.username = username;
            this.password = password;
        }
    
        /**
         * Obtain a connection using the configured URL and credentials.
         *
         * @return a JDBC connection
         */
        @Override
        public Connection getConnection() throws SQLException {
            return ConnectUtil.getConnection(url, username, password);
        }
    }
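
  Because the connection source is abstracted away, any other supplier can be plugged in without touching the query logic. As a hypothetical example (not in the original post), a subclass could delegate to a pooled javax.sql.DataSource instead of opening raw DriverManager connections:

    package com.zdlt.auth.api.base.datasource;

    import java.sql.Connection;
    import java.sql.SQLException;

    /**
     * Hypothetical adapter: reuses BaseDataSource.query() on top of any
     * javax.sql.DataSource, e.g. a connection pool.
     */
    public class PooledDataSource extends BaseDataSource {
        private final javax.sql.DataSource delegate;

        PooledDataSource(javax.sql.DataSource delegate) {
            this.delegate = delegate;
        }

        @Override
        public Connection getConnection() throws SQLException {
            // borrow a connection from the pool rather than opening a new one
            return delegate.getConnection();
        }
    }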
    package com.zdlt.auth.api.base.datasource;
    
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    
    /**
     * ConnectUtil
     * <p>
     * Created by shiyanjun on 2019/12/19.
     */
    public class ConnectUtil {
        /**
         * Obtain a JDBC connection via DriverManager.
         *
         * @param url      the JDBC URL
         * @param username the database user
         * @param password the user's password
         * @return a new JDBC connection
         * @throws SQLException if the connection cannot be established
         */
        public static Connection getConnection(String url, String username, String password) throws SQLException {
            return DriverManager.getConnection(url, username, password);
        }
    }
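
  DriverManager.getConnection only succeeds if a suitable driver is on the classpath. Since JDBC 4.0, drivers that declare themselves in META-INF/services/java.sql.Driver (the MySQL connector does) are registered automatically; only pre-4.0 drivers needed an explicit load such as:

    // Only needed for pre-JDBC 4.0 drivers; modern drivers self-register.
    // Class name shown for MySQL Connector/J 8.x.
    Class.forName("com.mysql.cj.jdbc.Driver");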
    package com.zdlt.auth.api.base.datasource;
    
    import java.sql.SQLException;
    import java.util.List;
    import java.util.Map;
    
    public class App {
        public static void main(String[] args) {
            CustomDataSource customDataSource = new CustomDataSource(
                    "jdbc:mysql://127.0.0.1:3306/authapi", "root", "123456");
            try {
                List<Map<String, Object>> resultList = customDataSource.query("select * from auth_code");
                System.out.println(resultList);
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
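
  When run with the MySQL driver on the classpath against a database containing an auth_code table, main prints the rows as a list of maps, each row rendered as {columnLabel=value, ...} with columns in declaration order thanks to LinkedHashMap.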
  • Original post: https://www.cnblogs.com/jun1019/p/12081311.html