目前说起爬虫,很多人都会想到 Python 语言,因为近年来关于 Python 的爬虫技术层出不穷。但爬虫不是 Python 的专利,本篇文章将会带大家用 Java 语言来爬取网页内容,并把爬取的数据导出到 Excel 文档中。
爬虫技术简述
网络爬虫(Web Crawler)是一种按照一定的规则,自动抓取万维网信息的程序或者脚本,如今被广泛地应用于互联网搜索引擎或者其他类似网站。
爬虫在功能上分为采集、处理和储存三个部分。
爬虫基本上可以分为三大类:分布式爬虫、Java爬虫以及非Java爬虫。
在Java爬虫中又可以细分出三种,Crawler4j、WebMagic、WebCollector。
实例代码
添加依赖
<!-- JSON (de)serialization -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<!-- 2.12.7.1 is the patched release of the 2.12 line; 2.12.0 has known
     deserialization CVEs -->
<version>2.12.7.1</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<!-- 1.2.47 is vulnerable to the well-known fastjson autoType RCE;
     1.2.83 is the fixed release and is API-compatible for JSON.parse /
     JSON.toJSONString used below -->
<version>1.2.83</version>
</dependency>
<!--excel-->
<dependency>
<groupId>net.sourceforge.jexcelapi</groupId>
<artifactId>jxl</artifactId>
<version>2.6.12</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>3.17</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>3.17</version>
</dependency>
<!--爬虫-->
<dependency>
<groupId>org.jsoup</groupId>
<artifactId>jsoup</artifactId>
<version>1.13.1</version>
</dependency>
创建一个 Weather 实体类
/**
 * Immutable-shaped value holder for one scraped day of weather data.
 * All fields are kept as raw strings exactly as extracted from the page.
 */
public class Weather {

    /** Record date. */
    private String date;
    /** Daily maximum temperature. */
    private String maxTemperature;
    /** Daily minimum temperature. */
    private String minTemperature;
    /** Daytime weather description. */
    private String dayTimeWeather;
    /** Night-time weather description. */
    private String nightWeather;
    /** Wind direction. */
    private String windDirection;
    /** Wind power (strength). */
    private String windPower;

    public String getDate() {
        return date;
    }

    public String getMaxTemperature() {
        return maxTemperature;
    }

    public String getMinTemperature() {
        return minTemperature;
    }

    public String getDayTimeWeather() {
        return dayTimeWeather;
    }

    public String getNightWeather() {
        return nightWeather;
    }

    public String getWindDirection() {
        return windDirection;
    }

    public String getWindPower() {
        return windPower;
    }

    public void setDate(String date) {
        this.date = date;
    }

    public void setMaxTemperature(String maxTemperature) {
        this.maxTemperature = maxTemperature;
    }

    public void setMinTemperature(String minTemperature) {
        this.minTemperature = minTemperature;
    }

    public void setDayTimeWeather(String dayTimeWeather) {
        this.dayTimeWeather = dayTimeWeather;
    }

    public void setNightWeather(String nightWeather) {
        this.nightWeather = nightWeather;
    }

    public void setWindDirection(String windDirection) {
        this.windDirection = windDirection;
    }

    public void setWindPower(String windPower) {
        this.windPower = windPower;
    }

    @Override
    public String toString() {
        // Builds the exact "Weather{field='value', ...}" representation.
        StringBuilder sb = new StringBuilder("Weather{");
        sb.append("date='").append(date).append('\'')
                .append(", maxTemperature='").append(maxTemperature).append('\'')
                .append(", minTemperature='").append(minTemperature).append('\'')
                .append(", dayTimeWeather='").append(dayTimeWeather).append('\'')
                .append(", nightWeather='").append(nightWeather).append('\'')
                .append(", windDirection='").append(windDirection).append('\'')
                .append(", windPower='").append(windPower).append('\'');
        return sb.append('}').toString();
    }
}
创建一个 WeatherTest 测试类
public class WeatherTest {

    public static void main(String[] args) throws IOException {
        // Scrape 12 monthly pages starting from January, print each row,
        // then export everything to an Excel workbook.
        List<Weather> list = getInfo("http://www.tianqi234.com/2020shanghai/1yue.html", 12);
        for (Weather weather : list) {
            System.out.println(weather.toString());
        }
        testHSSFWorkbook(list);
    }

    /**
     * Scrapes daily weather rows starting at {@code url} and continuing for
     * {@code month} monthly pages in total. Pages after the first follow the
     * fixed "<n>yue.html" naming scheme of the target site.
     *
     * @param url   first page to fetch
     * @param month total number of monthly pages to scrape
     * @return one Weather per surviving table row; the first element is
     *         presumed to be the column-caption row later used as the Excel
     *         header — TODO confirm against the live page layout
     */
    public static List<Weather> getInfo(String url, int month) {
        List<Weather> weatherList = new ArrayList<Weather>();
        for (int i = 1; i < month + 1; i++) {
            try {
                System.out.println("url:" + url);
                Document doc = Jsoup.connect(url).get();
                Elements table = doc.select(".graybox_cnt");
                Elements trList = table.select("tr");
                // NOTE(review): each remove() shifts the indices of the rows
                // that follow, so the index sequences below are tied to the
                // exact page structure — verify if the site layout changes.
                trList.remove(0);
                if (i > 1) {
                    trList.remove(0);
                    trList.remove(10);
                    trList.remove(10);
                    trList.remove(20);
                    trList.remove(20);
                    trList.remove(20);
                } else {
                    trList.remove(11);
                    trList.remove(11);
                    trList.remove(21);
                    trList.remove(21);
                    trList.remove(21);
                }
                for (Element tr : trList) {
                    Elements tdList = tr.select("td");
                    // The date cell is sometimes wrapped in an <a> link;
                    // select() never returns null, an empty result means
                    // the cell holds plain text.
                    Elements aList = tdList.select("a");
                    Weather weather = new Weather();
                    if (aList.size() > 0) {
                        weather.setDate(aList.get(0).html());
                    } else {
                        weather.setDate(tdList.get(0).html());
                    }
                    weather.setMaxTemperature(tdList.get(1).html());
                    weather.setMinTemperature(tdList.get(2).html());
                    weather.setDayTimeWeather(tdList.get(3).html());
                    weather.setNightWeather(tdList.get(4).html());
                    weather.setWindDirection(tdList.get(5).html());
                    weather.setWindPower(tdList.get(6).html());
                    weatherList.add(weather);
                }
            } catch (IOException e) {
                // Best-effort: a failed month should not abort the whole scrape.
                e.printStackTrace();
            }
            // Next month's page URL (pages 2..12).
            url = "http://www.tianqi234.com/2020shanghai/" + (i + 1) + "yue.html";
        }
        return weatherList;
    }

    /**
     * Writes the scraped rows to a legacy-format Excel workbook on disk.
     *
     * @param list scraped rows; element 0 is used as the header row
     * @throws IOException if the file cannot be written
     */
    public static void testHSSFWorkbook(List<Weather> list) throws IOException {
        HSSFWorkbook workbook = new HSSFWorkbook();
        HSSFSheet sheet = workbook.createSheet("2020年上海天气统计");
        HSSFRow row = sheet.createRow(0);
        // Shared cell style: centered both horizontally and vertically.
        HSSFCellStyle style = workbook.createCellStyle();
        style.setAlignment(HorizontalAlignment.CENTER);
        style.setVerticalAlignment(VerticalAlignment.CENTER);
        sheet.setDefaultColumnWidth(30);
        row.setHeightInPoints(25);
        // list.get(0) is presumed to hold the scraped column captions, so its
        // values become the header cells. NOTE(review): getMap is backed by a
        // HashMap, so column order is not guaranteed stable — use an ordered
        // map if a fixed column order is required.
        Map<String, String> map = (Map<String, String>) getMap(list.get(0));
        int c = 0;
        for (String key : map.keySet()) {
            HSSFCell cell = row.createCell(c);
            cell.setCellValue(map.get(key));
            cell.setCellStyle(style);
            c++;
        }
        // Data rows: index 0 was consumed as the header above.
        for (int i = 1; i < list.size(); i++) {
            HSSFRow rowInfo = sheet.createRow(i);
            rowInfo.setHeightInPoints(30);
            Map<String, String> map1 = (Map<String, String>) getMap(list.get(i));
            int j = 0;
            for (String key : map1.keySet()) {
                HSSFCell cellInfo = rowInfo.createCell(j);
                cellInfo.setCellValue(map1.get(key));
                cellInfo.setCellStyle(style);
                j++;
            }
        }
        // HSSFWorkbook emits the legacy BIFF (.xls) format; the previous
        // ".xlsx" extension made Excel reject the generated file.
        try (FileOutputStream out = new FileOutputStream("D:\\weather1.xls")) {
            workbook.write(out);
        } finally {
            workbook.close();
        }
    }

    /**
     * Converts a bean to a Map by round-tripping it through fastjson.
     *
     * @param object bean to convert; must not be null
     * @return field-name to value map; empty when conversion fails
     * @throws RuntimeException if {@code object} is null
     */
    public static Map<?, ?> getMap(Object object) {
        if (object == null) {
            throw new RuntimeException("对象为空,转json失败");
        }
        Map<String, Object> map = new HashMap<>();
        try {
            map = (Map) JSON.parse(JSON.toJSONString(object));
        } catch (Exception e) {
            // Best-effort: fall back to the empty map, but keep the cause visible.
            System.out.println("对象转map转换失败");
            e.printStackTrace();
        }
        return map;
    }
}
最后结果展示
总结
本篇关于使用Java爬虫爬取网页内容,并导出到Excel文档的内容就介绍到此结束了。想要了解更多和Java有关的知识,请关注W3Cschool以前的文章或者继续浏览接下来的内容,也希望大家能够多多支持我们。