import { createRequire } from 'node:module';

import * as puppeteer from 'puppeteer-core';
(async () => {
    // Drive a locally installed Edge through puppeteer-core (no bundled Chromium).
    const browser = await puppeteer.launch({
        executablePath: '/Applications/Microsoft Edge.app/Contents/MacOS/Microsoft Edge',
        args: ['--start-maximized', '--disable-extensions', '--disable-infobars', '--disable-web-security', '--disable-features=IsolateOrigins,site-per-process'],
        headless: true,
    });
    try {
        // The old code used a floating `browser.newPage().then(...)` promise:
        // the IIFE resolved before any work ran and failures were unhandled.
        const page = await browser.newPage();
        await page.setViewport({ width: 1920, height: 1080 });
        await page.goto('http://www.petsworld.cn/Html/zhzx/guonei/2023/3/6376401601033.html', { waitUntil: 'networkidle2' });
        // XPath of the article body row on the target page.
        const xpath = "/html/body/table[2]/tbody/tr/td[1]/table/tbody/tr[2]/td/table/tbody/tr[4]/td/table[1]/tbody/tr[1]";
        const images = await getImages(page, xpath);
        console.log(images);
        const html = await getHtml(page, xpath);
        console.log(html);
        await page.close();
    } finally {
        // Always close the browser, even when scraping throws, so the process exits.
        await browser.close();
    }
})().catch((err) => {
    // Surface failures instead of leaving an unhandled promise rejection.
    console.error(err);
    process.exitCode = 1;
});
/**
 * Return the cleaned innerHTML of the first node matching `xpath`,
 * after rewriting every <img src> inside it to an absolute URL.
 *
 * @param {object} page  puppeteer Page.
 * @param {string} xpath XPath selecting the content root.
 * @returns {Promise<string>} cleaned, beautified HTML fragment.
 * @throws {Error} when no element matches `xpath`.
 */
const getHtml = async (page, xpath) => {
    const matches = await page.$x(xpath);
    if (matches.length === 0) {
        // The old code silently passed `undefined` into page.evaluate() here.
        throw new Error(`getHtml: no element matches XPath ${xpath}`);
    }
    const [root] = matches;
    // Absolutize image URLs so the extracted HTML works out of context.
    await page.evaluate((el) => {
        for (const image of el.querySelectorAll('img')) {
            image.src = new URL(image.src, window.location.href).href;
        }
    }, root);
    const html = await page.evaluate((el) => el.innerHTML, root);
    return clean_html(html);
};
/**
 * Collect the absolute URL of every <img> under the first node matching
 * `xpath`, rewriting the live page's src attributes to absolute form too.
 *
 * @param {object} page  puppeteer Page.
 * @param {string} xpath XPath selecting the content root.
 * @returns {Promise<string[]>} absolute image URLs, in document order.
 * @throws {Error} when no element matches `xpath`.
 */
const getImages = async (page, xpath) => {
    const matches = await page.$x(xpath);
    if (matches.length === 0) {
        // The old code silently passed `undefined` into page.evaluate() here.
        throw new Error(`getImages: no element matches XPath ${xpath}`);
    }
    return page.evaluate((el) => {
        const imgs = el.querySelectorAll('img');
        for (const image of imgs) {
            image.src = new URL(image.src, window.location.href).href;
        }
        return Array.from(imgs, (image) => image.src);
    }, matches[0]);
};
/**
 * Strip presentational attributes and inline event handlers from an HTML string.
 *
 * @param {string} html raw HTML fragment.
 * @returns {string} HTML with the listed attributes removed.
 */
function clean_property(html) {
    // Attributes to remove: layout/presentation plus every on* event handler.
    const properties = [
        'style', 'class', 'size', 'face', 'lang', 'width', 'height', 'valign', 'align', 'border', 'cellpadding',
        'cellspacing', 'onload', 'onunload', 'onclick', 'ondblclick', 'onmousedown', 'onmouseup',
        'onmouseover', 'onmousemove', 'onmouseout', 'onkeypress', 'onkeydown', 'onkeyup', 'onfocus',
        'onblur', 'onselect', 'onchange', 'onsubmit', 'onreset', 'onselectstart', 'onerrorupdate',
        'onhelp', 'onrowexit', 'onrowenter', 'onbeforeunload', 'onbeforeupdate', 'onafterupdate',
        'oncellchange', 'oncontextmenu', 'ondataavailable', 'ondatasetchanged', 'ondatasetcomplete',
        'onmousewheel', 'onreadystatechange', 'onstop', 'onrowsdelete', 'onrowsinserted', 'onbeforeeditfocus',
    ];
    for (const property of properties) {
        // Require leading whitespace so e.g. `data-width="3"` is not mangled
        // into `data-` (the old pattern matched the substring anywhere).
        // "i" flag: attribute names are case-insensitive in HTML.
        const reg = new RegExp('\\s+' + property + '="[^"]*"', 'gi');
        html = html.replace(reg, '');
    }
    return html;
}
/**
 * Remove unwanted elements from an HTML string.
 * Tags in `tags` are deleted together with their contents; tags in
 * `unwrapped` have only their open/close tags deleted (contents kept).
 *
 * @param {string} html raw HTML fragment.
 * @returns {string} cleaned HTML.
 */
function clean_tags(html) {
    const tags = ['script', 'style', 'iframe', 'link', 'meta', 'noscript', 'object', 'param', 'applet', 'embed'];
    for (const tag of tags) {
        // [\s\S] instead of `.` so multi-line bodies match (`.` stops at \n);
        // \b so e.g. `<object` does not match `<objectgroup`; "i" because
        // tag names are case-insensitive.
        const paired = new RegExp('<' + tag + '\\b[^>]*>[\\s\\S]*?</' + tag + '\\s*>', 'gi');
        html = html.replace(paired, '');
        // Void/unclosed usages (<meta ...>, <link ...> have no closing tag,
        // so the paired pattern above can never remove them).
        const unclosed = new RegExp('<' + tag + '\\b[^>]*/?>', 'gi');
        html = html.replace(unclosed, '');
    }
    // Inline wrappers: drop the tags but keep their text content.
    const unwrapped = ['font', 'span', 'o:p'];
    for (const tag of unwrapped) {
        const reg = new RegExp('</?' + tag + '\\b[^>]*>', 'gi');
        html = html.replace(reg, '');
    }
    return html;
}
/**
 * Full cleanup pipeline: strip attributes, unwanted tags, comments and
 * whitespace noise, drop empty elements, then re-indent with js-beautify.
 *
 * @param {string} html raw HTML fragment (e.g. an element's innerHTML).
 * @returns {string} cleaned, beautified HTML.
 */
function clean_html(html) {
    html = clean_property(html);
    html = clean_tags(html);
    // Strip comments; [\s\S] so multi-line comments match (`.` stops at \n).
    html = html.replace(/<!--[\s\S]*?-->/g, "");
    // Normalise whitespace. The old code deleted EVERY space (/ /g) and every
    // pair of whitespace chars (/\s{2}/g), which fused surviving attributes
    // together (`<img src=` -> `<imgsrc=`). Collapse runs to a single space
    // instead; it presumably also meant to drop &nbsp; entities.
    html = html.replace(/&nbsp;/g, " ");
    html = html.replace(/[\r\n]+/g, " ");
    html = html.replace(/\s{2,}/g, " ");
    // Drop empty elements; loop so parents emptied by a pass are removed too
    // (the old single pass left e.g. <td><p></p></td> as <td></td>).
    let before;
    do {
        before = html;
        html = html.replace(/<(\w+)[^>]*>\s*<\/\1>/g, "");
    } while (html !== before);
    return html_beautify(html);
}
/**
 * Pretty-print an HTML fragment with js-beautify (4-space indent, CRLF).
 *
 * @param {string} html HTML to format.
 * @returns {string} formatted HTML.
 */
function html_beautify(html) {
    // This file is an ES module (it uses `import` at the top), where the
    // CommonJS `require` global does not exist; createRequire builds one so
    // js-beautify can still be loaded lazily and synchronously.
    const require = createRequire(import.meta.url);
    const beautify = require('js-beautify').html;
    const options = {
        indent_size: 4,
        indent_char: ' ',
        eol: '\r\n',
        indent_level: 0,
        indent_with_tabs: false,
        preserve_newlines: true,
        max_preserve_newlines: 10,
    };
    return beautify(html, options);
}
/*
 * NOTE(review): the text below is residue scraped from the blog page this
 * template was copied from (article title, copyright footer and
 * "recommended reading" teasers). It is not code; it is kept verbatim
 * inside this comment so the file remains valid JavaScript.
 *
 * puppeteer 爬虫模板
 * ©著作权归作者所有,转载或内容合作请联系作者
 * - 文/潘晓璐 我一进店门,熙熙楼的掌柜王于贵愁眉苦脸地迎上来,“玉大人,你说我怎么就摊上这事。” “怎么了?”我有些...
 * - 文/花漫 我一把揭开白布。 她就那样静静地躺着,像睡着了一般。 火红的嫁衣衬着肌肤如雪。 梳的纹丝不乱的头发上,一...
 * - 文/苍兰香墨 我猛地睁开眼,长吁一口气:“原来是场噩梦啊……” “哼!你这毒妇竟也来了?” 一声冷哼从身侧响起,我...
 * 推荐阅读更多精彩内容
 * - 确保自己熟练使用async与await以及分析dom结构 npm i puppeteer 在node引用 尝试在无...
 * - 上一节我们完成了并发数的控制,这一节完成连接池。 因为单纯只用并发数来控制的话,每次爬取新的页面都会开启新的浏览器...
 * - 介绍 爬虫就是自动化浏览网站程序,收集我们所需要的数据信息,不需要人为频繁的执行一些操作。什么是Puppeteer...
 */