• Crawling page data with HttpClient + Jsoup


    Why not use a crawler framework?

      I originally used the WebMagic framework, but it threw a protocol-version-mismatch exception that none of the fixes I found online resolved; since this was just a hobby project anyway, I switched approaches. The exception was:

    javax.net.ssl.SSLException: Received fatal alert: protocol_version

    Maven dependencies:

            <!-- parse web page data -->
            <dependency>
                <groupId>org.jsoup</groupId>
                <artifactId>jsoup</artifactId>
                <version>1.10.2</version>
            </dependency>
    
            <dependency>
                <groupId>org.apache.httpcomponents</groupId>
                <artifactId>httpclient</artifactId>
                <version>4.5.2</version>
            </dependency>

    Code:

    import com.maxinhai.world.utils.JdbcUtils;
    import org.apache.http.HttpEntity;
    import org.apache.http.StatusLine;
    import org.apache.http.client.config.RequestConfig;
    import org.apache.http.client.methods.CloseableHttpResponse;
    import org.apache.http.client.methods.HttpGet;
    import org.apache.http.config.RegistryBuilder;
    import org.apache.http.conn.socket.ConnectionSocketFactory;
    import org.apache.http.conn.socket.PlainConnectionSocketFactory;
    import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClientBuilder;
    import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
    import org.apache.http.ssl.SSLContexts;
    import org.apache.http.util.EntityUtils;
    import org.jsoup.Jsoup;
    import org.jsoup.nodes.Document;
    import org.jsoup.nodes.Element;
    import org.jsoup.select.Elements;
    
    import javax.net.ssl.SSLContext;
    import java.io.IOException;
    import java.sql.*;
    import java.sql.Date;
    import java.util.*;
    
    /**
     * @program: world
     * @description: Yalayi image crawler
     * @author: XinHai.Ma
     * @create: 2021-10-21 20:33
     */
    public class YaLaYiImageReptile {
    
        // Fixes: javax.net.ssl.SSLException: Received fatal alert: protocol_version
        private static final PoolingHttpClientConnectionManager HTTP_CLIENT_CONNECTION_MANAGER;
        private static final CloseableHttpClient HTTP_CLIENT;
        static {
            SSLContext ctx = SSLContexts.createSystemDefault();
            // Restrict the socket factory to TLSv1.2 (plus the SSLv2Hello handshake format);
            // the TLSv1 default is what triggers the protocol_version alert.
            // Note: ALLOW_ALL_HOSTNAME_VERIFIER disables hostname verification.
            SSLConnectionSocketFactory fac =
                    new SSLConnectionSocketFactory(ctx, new String[]{"SSLv2Hello", "TLSv1.2"}, null, SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
    
            HTTP_CLIENT_CONNECTION_MANAGER = new PoolingHttpClientConnectionManager(RegistryBuilder
                    .<ConnectionSocketFactory> create().register("http", PlainConnectionSocketFactory.getSocketFactory())
                    .register("https", fac).build());
            HTTP_CLIENT_CONNECTION_MANAGER.setDefaultMaxPerRoute(100);
            HTTP_CLIENT_CONNECTION_MANAGER.setMaxTotal(200);
            RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(60000).setConnectTimeout(60000)
                    .setSocketTimeout(60000).build();
    
            HTTP_CLIENT = HttpClientBuilder.create().setConnectionManager(HTTP_CLIENT_CONNECTION_MANAGER)
                    .setDefaultRequestConfig(requestConfig).build();
        }
    
        /**
         * Previously threw => javax.net.ssl.SSLException: Received fatal alert: protocol_version
         * @param args
         */
        public static void main(String[] args) {
            // Pages to crawl; the first gallery page has no index suffix
            for(int i=1; i<=10; i++) {
                if(i != 1) {
                    reptile("https://www.yalayi.com/gallery/index_" + i + ".html");
                } else {
                    reptile("https://www.yalayi.com/gallery/");
                }
            }
        }
    
        private static void reptile(String url) {
            try {
                // Fetch the listing page with the TLSv1.2-enabled client
                HttpGet httpGet = new HttpGet(url);
                CloseableHttpResponse httpResponse = HTTP_CLIENT.execute(httpGet);
                StatusLine statusLine = httpResponse.getStatusLine();
                if(statusLine.getStatusCode() == 200) {
                    HttpEntity httpEntity = httpResponse.getEntity();
                    String result = EntityUtils.toString(httpEntity, "utf-8");
                    //System.out.println(statusLine + "=>" + result);
    
                    // Parse the HTML; the selectors below were copied from DevTools ("Copy selector")
                    Document document = Jsoup.parse(result);
                    Elements urlElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.img-box > a");
                    Elements imageElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.img-box > a > img");
                    Elements titleElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.text-box > p > a");
                    Elements sizeElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.img-box > em");
                    List<Map<String, String>> dataList = new ArrayList<>();
                    for (int i = 0; i < titleElements.size(); i++) {
                        Element urlElement = urlElements.get(i);
                        Element imageElement = imageElements.get(i);
                        Element titleElement = titleElements.get(i);
                        Element sizeElement = sizeElements.get(i);
                        String href = urlElement.attr("href");
                        String src = imageElement.attr("src");
                        String title = titleElement.text();
                        String size = sizeElement.text();
                        System.out.println("Title: " + title + " Cover: " + src + " Quality: " + size + " Detail page: " + href);
    
                        Map<String, String> data = new HashMap<>();
                        data.put("pageUrl", href);
                        data.put("imageUrl", src);
                        data.put("title", title);
                        data.put("size", size);
                        dataList.add(data);
                    }
                    saveData(dataList);
                }
    
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    
        /**
         * Save the crawled data
         * @param dataList
         */
        private static void saveData(List<Map<String, String>> dataList) {
            // Collect the titles from this batch for the duplicate check
            List<String> titleList = new ArrayList<>();
            for (int i = 0; i < dataList.size(); i++) {
                Map<String, String> data = dataList.get(i);
                String title = data.get("title");
                titleList.add(title);
            }
    
            // Look up titles that were already crawled and filter out duplicates
            List<String> resultList = query(titleList);
            Iterator<Map<String, String>> iterator = dataList.iterator();
            while (iterator.hasNext()) {
                Map<String, String> next = iterator.next();
                if(resultList.contains(next.get("title"))) {
                    iterator.remove();
                }
            }
    
            // Get the database connection
            Connection connection = JdbcUtils.getConnection();
    
            // Insert the new rows
            String insertSql = "INSERT INTO `world`.`yalayi_image_reptile`(`is_active`, `is_delete`, `create_time`, `title`, `size`, `cover_url`, `page_url`) " +
                    "VALUES (?, ?, ?, ?, ?, ?, ?)";
    
            try {
                PreparedStatement preparedStatement = connection.prepareStatement(insertSql);
                for (int i = 0; i < dataList.size(); i++) {
                    Map<String, String> data = dataList.get(i);
                    preparedStatement.setInt(1, 0);
                    preparedStatement.setInt(2, 0);
                    preparedStatement.setDate(3, new Date(System.currentTimeMillis()));
                    preparedStatement.setString(4, data.get("title"));
                    preparedStatement.setString(5, data.get("size"));
                    preparedStatement.setString(6, data.get("imageUrl"));
                    preparedStatement.setString(7, data.get("pageUrl"));
                    preparedStatement.addBatch();
                }
    
                int[] row = preparedStatement.executeBatch();
                System.out.println("Inserted " + row.length + " rows");
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    
    
        /**
         * IN-clause lookup of existing titles
         * @param titleList
         * @return titles already present in the table
         */
        private static List<String> query(List<String> titleList) {
            List<String> titles = new ArrayList<>();
            String sql = "SELECT title FROM `yalayi_image_reptile` where is_delete=0 and title in (?)";
            // Splice the quoted titles into the IN clause (see the parameterized sketch after the class for a safer variant)
            String stringFromList = getStringFromList(titleList);
            String formatSql = String.format(sql.replace("?", "%s"), stringFromList);
            try {
                Connection connection = JdbcUtils.getConnection();
                PreparedStatement preparedStatement = connection.prepareStatement(formatSql);
                ResultSet resultSet = preparedStatement.executeQuery();
                while (resultSet.next()) {
                    String title = resultSet.getString(1);
                    titles.add(title);
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
            return titles;
        }
    
        /**
         * Format the parameters as a quoted, comma-separated list for the IN clause
         * @param paramList
         * @return
         */
        private static String getStringFromList(List<String> paramList) {
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < paramList.size(); i++) {
                sb.append("'").append(paramList.get(i)).append("'");
                if (i < paramList.size() - 1) {
                    sb.append(",");
                }
            }
            return sb.toString();
        }
    
    }
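
    Since query() splices the titles directly into the SQL string, a title containing a quote character breaks the statement, and it is an injection risk. Below is a minimal sketch of a parameterized alternative, assuming the same JdbcUtils helper and the imports already present in the class above; it also shows try-with-resources cleanup of the statement and result set:

    // Hypothetical replacement for query(): builds one ? placeholder per title
    private static List<String> queryParameterized(List<String> titleList) {
        List<String> titles = new ArrayList<>();
        if (titleList.isEmpty()) {
            return titles; // "IN ()" is invalid SQL, so bail out early
        }
        // Build "?, ?, ?" with one placeholder per title
        String placeholders = String.join(", ", Collections.nCopies(titleList.size(), "?"));
        String sql = "SELECT title FROM `yalayi_image_reptile` WHERE is_delete = 0 AND title IN (" + placeholders + ")";
        Connection connection = JdbcUtils.getConnection();
        // try-with-resources closes the statement and result set, but leaves the shared connection open
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            for (int i = 0; i < titleList.size(); i++) {
                ps.setString(i + 1, titleList.get(i)); // JDBC parameters are 1-indexed
            }
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    titles.add(rs.getString(1));
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return titles;
    }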

    Ps: 1. The code above never closes the database connection; that is intentional, because other code reuses the same connection object;

      2. If you can't write the CSS selector for document.select() by hand, no problem: find the DOM node you want on the page, right-click → Inspect to see its HTML, select the target tag, then right-click → Copy → Copy selector. A small standalone example follows below.
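
    As a quick illustration of point 2, here is a minimal, self-contained sketch; the HTML fragment and selector are made up to mirror the gallery markup above:

    import org.jsoup.Jsoup;
    import org.jsoup.nodes.Document;
    import org.jsoup.select.Elements;

    public class SelectorDemo {
        public static void main(String[] args) {
            // A tiny HTML fragment standing in for the real gallery page
            String html = "<div class=\"img-box\"><a href=\"/gallery/1.html\">"
                    + "<img src=\"/cover1.jpg\"></a><em>1920x1080</em></div>";
            Document document = Jsoup.parse(html);
            // The selector string is exactly what DevTools' "Copy selector" produces
            Elements links = document.select("div.img-box > a");
            System.out.println(links.attr("href"));                          // /gallery/1.html
            System.out.println(document.select("div.img-box > em").text()); // 1920x1080
        }
    }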

    About the protocol-version mismatch:

      WebMagic's default HttpClient only negotiates TLSv1, so requests to sites that accept only TLS 1.2 (for example https://juejin.im/) fail with the error above;

      Fix: https://github.com/code4craft/webmagic/issues/701; a quick way to check what your JVM supports is shown below.
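
    If you want to confirm which protocol versions the running JVM can actually negotiate before changing any client code, this short sketch uses only the standard javax.net.ssl API:

    import javax.net.ssl.SSLContext;

    public class TlsSupportCheck {
        public static void main(String[] args) throws Exception {
            // Print the TLS/SSL protocol versions this JVM supports;
            // if TLSv1.2 is not listed, no HttpClient configuration will help.
            SSLContext ctx = SSLContext.getDefault();
            for (String protocol : ctx.getSupportedSSLParameters().getProtocols()) {
                System.out.println(protocol);
            }
        }
    }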
