温馨提示×

温馨提示×

您好,登录后才能下订单哦!

密码登录×
登录注册×
其他方式登录
点击 登录注册 即表示同意《亿速云用户服务条款》

使用OkHttpClient和Jsoup进行网页爬取

发布时间:2020-07-21 11:21:38 来源:网络 阅读:611 作者:lifeneedyou 栏目:编程语言
通过http请求,返回一个json格式的数据,然后将json数据转化为java对象返回给调用方。Http采用OkHttp库,json转化采用fastjson库。

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<groupId>com.ok.http.client</groupId>
<artifactId>okhttp</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>

<name>okhttp</name>
<url>http://maven.apache.org</url>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>

<dependencies>
<!-- HTML parsing / CSS-selector extraction (used by Analysis) -->
<dependency>
<groupId>org.jsoup</groupId>
<artifactId>jsoup</artifactId>
<version>1.11.3</version>
</dependency>

<!-- NOTE(review): bson 3.6.4 is pinned explicitly while mongo-java-driver
     below is 3.0.0; the driver already bundles its own bson version and a
     mismatch can cause NoSuchMethodError at runtime — confirm these two
     versions are meant to diverge, or align them. -->
<dependency>
    <groupId>org.mongodb</groupId>
    <artifactId>bson</artifactId>
    <version>3.6.4</version>
</dependency>

<dependency>
    <groupId>com.google.cloud.trace.instrumentation.jdbc</groupId>
    <artifactId>driver</artifactId>
    <version>0.1.1</version>
    <type>pom</type>
</dependency>
<dependency>
    <groupId>ch.qos.logback.contrib</groupId>
    <artifactId>logback-mongodb-access</artifactId>
    <version>0.1.5</version>
</dependency>
<!-- MongoDB数据库连接驱动 (MongoDB connection driver, used by StoreData) -->
<dependency>
    <groupId>org.mongodb</groupId>
    <artifactId>mongo-java-driver</artifactId>
    <version>3.0.0</version>
</dependency>
<!-- okio is a transitive dependency of okhttp; pinned here explicitly -->
<dependency>
    <groupId>com.squareup.okio</groupId>
    <artifactId>okio</artifactId>
    <version>1.11.0</version>

</dependency>
<!-- HTTP client (used by CrawlData) -->
<dependency>
    <groupId>com.squareup.okhttp3</groupId>
    <artifactId>okhttp</artifactId>
    <version>3.6.0</version>
</dependency>

<!-- JSON serialization (used by StoreData) -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.47</version>
</dependency>

<dependency>
  <groupId>junit</groupId>
  <artifactId>junit</artifactId>
  <version>3.8.1</version>
  <scope>test</scope>
</dependency>

</dependencies>
</project>

package com.ok.http.client.okhttp;

import java.util.List;
import java.util.Map;

public class ExecuteTask {

    /**
     * Entry point: downloads the chinaz ranking page, parses it into one
     * record per site, and stores each record in MongoDB.
     *
     * @param args unused
     * @throws Exception if the download or parsing step fails
     */
    public static void main(String[] args) throws Exception {
        // Page listing site rankings; everything below is driven by it.
        String url = "http://top.chinaz.com/all/index.html";

        // Step 1: download the page HTML. downloadHtml is static — no need
        // for a throwaway CrawlData instance as in the original.
        System.out.println("开始爬取,请等待.");
        String htmlBody = CrawlData.downloadHtml(url);
        System.out.println("爬取成功");

        // Step 2: parse the HTML into one map per listed site.
        List<Map<String, Object>> dataList = Analysis.analysisData(htmlBody);
        System.out.println("数据解析成功");

        // Step 3: persist each record individually.
        for (Map<String, Object> data : dataList) {
            StoreData.adds(data);
            System.out.println("存储成功");
        }
    }
}

package com.ok.http.client.okhttp;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;

/**
 * Downloads raw HTML pages over HTTP using OkHttp.
 */
public class CrawlData {

    // One shared client: OkHttp clients hold a connection pool and thread
    // pools, so creating a new one per request (as the original did) wastes
    // resources and defeats connection reuse.
    private static final OkHttpClient CLIENT = new OkHttpClient();

    /**
     * Fetches the page at {@code url} and returns its body as text.
     *
     * @param url the absolute URL to download
     * @return the decoded response body, or {@code null} if the request failed
     */
    public static String downloadHtml(String url) {
        Request request = new Request.Builder().url(url).build();
        // try-with-resources closes the Response, releasing the underlying
        // connection — the original leaked it on every call.
        try (Response response = CLIENT.newCall(request).execute()) {
            // body().string() decodes using the charset declared in the
            // Content-Type header (falling back to UTF-8); the original
            // new String(bytes()) used the platform default charset, which
            // garbles non-ASCII pages on mismatched systems.
            return response.body().string();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
}

    package com.ok.http.client.okhttp;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.util.*;

public class Analysis {

    /**
     * Parses the chinaz "all" ranking page into one map per listed site.
     *
     * <p>Keys deliberately mix English and Chinese to stay compatible with
     * existing consumers of the returned maps; the {@code "score"} key is
     * new — the original extracted the score but never stored it.
     *
     * @param htmlBody raw HTML of the ranking page
     * @return one map per {@code <li>} entry in {@code ul.listCentent}
     * @throws Exception propagated from the per-site detail-page download
     */
    public static List<Map<String, Object>> analysisData(String htmlBody) throws Exception {
        List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
        Document doc = Jsoup.parse(htmlBody);
        // Each ranked site is rendered as an <li> inside <ul class="listCentent">.
        Elements elements = doc.select("ul.listCentent").select("li");
        System.out.println(" 数据集合大小=====" + elements.size());
        for (Element element : elements) {
            Map<String, Object> record = new HashMap<String, Object>();
            // Site/company name
            String siteName = element.select("div.CentTxt > h4.rightTxtHead > a").text();
            System.out.println("siteName=====" + siteName);
            // Domain name
            String domainName = element.select("div.CentTxt > h4.rightTxtHead > span").text();
            System.out.println("domainName=====" + domainName);
            // Alexa rank
            String alexaRank = element.select("li.clearfix >div.CentTxt > div.RtCPart >p").text();
            System.out.println("AlexaRank=====" + alexaRank);
            // Company synopsis
            String synopsis = element.select("div.CentTxt> p").text();
            System.out.println("公司简介====" + synopsis);
            // Score
            String score = element.select("div.RtCRateCent>span").text();
            System.out.println(score);
            // Site rank
            String siteRank = element.select("div.RtCRateCent> strong").text();
            System.out.println("排名:" + siteRank);
            // Detail-page URL (first <a> href is relative, so prefix the host)
            String webSite = "http://top.chinaz.com" + element.select("a").first().attr("href");
            System.out.println("网址:" + webSite);
            // ICP record/registration info requires fetching the detail page.
            String recordInformation = getRecordInformation(webSite);
            System.out.println("备案信息" + recordInformation);
            System.out.println("\t");
            record.put("siteName", siteName);
            record.put("domainName", domainName);
            record.put("AlexaRank", alexaRank);
            record.put("公司简介", synopsis);
            // Bug fix: score was parsed but never added to the map.
            record.put("score", score);
            record.put("排名", siteRank);
            record.put("网址", webSite);
            record.put("备案信息", recordInformation);
            list.add(record);
        }
        return list;
    }

    /**
     * Downloads a site's detail page and extracts its ICP record information.
     *
     * @param url absolute detail-page URL
     * @return the record-information text, or {@code null} if the page
     *         could not be downloaded
     * @throws Exception propagated from the download
     */
    private static String getRecordInformation(String url) throws Exception {
        String htmlBody = CrawlData.downloadHtml(url);
        if (htmlBody != null) {
            Document doc = Jsoup.parse(htmlBody);
            return doc.select("li.TMain06List-Left>p").text();
        }
        return null;
    }
}

    package com.ok.http.client.okhttp;
    import com.alibaba.fastjson.JSONObject;
    import com.mongodb.MongoClient;
    import com.mongodb.client.MongoCollection;
    import com.mongodb.client.MongoDatabase;
    import org.bson.Document;
    import java.util.Map;

public class StoreData {

    /**
     * Persists one parsed site record into the MongoDB collection
     * {@code sit_rank.information}.
     *
     * <p>Bug fixes vs. the original: (1) the original fed
     * {@code dataMap.toString()} to fastjson — {@code Map.toString()} is not
     * valid JSON, so parsing always failed; (2) it then overwrote every field
     * with {@code null} locals, so no real data was ever stored; (3) the
     * client was not closed on failure.
     *
     * @param dataMap parsed fields produced by {@code Analysis.analysisData}
     */
    public static void adds(Map<String, Object> dataMap) {
        MongoClient mongoClient = null;
        try {
            // 连接到 mongodb 服务 (connect to the MongoDB server)
            mongoClient = new MongoClient("localhost", 27017);
            MongoDatabase mongoDatabase = mongoClient.getDatabase("sit_rank");
            System.out.println(mongoDatabase);
            System.out.println("成功连接数据库");

            MongoCollection<Document> collection = mongoDatabase.getCollection("information");
            System.out.println(collection);
            System.out.println("集合 information 选择成功");

            // Build the BSON document directly from the map — no JSON
            // round-trip needed; org.bson.Document accepts a Map as-is.
            Document document = new Document(dataMap);
            // Use the site name as the primary key so re-inserting the same
            // site fails fast instead of silently accumulating duplicates.
            document.put("_id", dataMap.get("siteName"));
            collection.insertOne(document);
            System.out.println("文档插入成功");
        } catch (Exception e) {
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
        } finally {
            // 关闭mongodb连接 — always close, even when the insert fails.
            if (mongoClient != null) {
                mongoClient.close();
                System.out.println("MongoDB连接已关闭");
            }
        }
    }

}

向AI问一下细节

免责声明:本站发布的内容(图片、视频和文字)以原创、转载和分享为主,文章观点不代表本网站立场,如果涉及侵权请联系站长邮箱:is@yisu.com进行举报,并提供相关证据,一经查实,将立刻删除涉嫌侵权内容。

AI