I'm trying to learn how to use Puppeteer to scrape a reddit page. The new reddit has dynamically added content and infinite scroll. I'm getting very inconsistent results from my code, and I'm having a hard time debugging it and figuring out how to make this work.
The main server.js file, nothing much going on here.
'use strict';

const express = require('express');
const cors = require('cors');
const app = express();
const cheerio = require('./redditScraper');

app.use(express.json());
app.use(
    cors({
        origin: ['http://localhost:3000']
    })
);

app.get('/', (req, res) => {
    let { dynamicScraper } = cheerio;

    dynamicScraper()
        .then(html => {
            console.log('data was sent');
            res.json(html);
        })
        .catch(err => {
            console.log(err);
        });
});

app.listen(process.env.PORT || 8080, () => {
    console.log(`Your app is listening on port ${process.env.PORT || 8080}`);
});
The file with the scraper:
'use strict';

const rp = require('request-promise');
const $ = require('cheerio');
const puppeteer = require('puppeteer');

const url2 = 'https://www.reddit.com/r/GameDeals/';

const cheerio = {
    dynamicScraper: function() {
        return puppeteer
            .launch()
            .then(browser => {
                return browser.newPage();
            })
            .then(page => {
                return page.goto(url2)
                    .then(() => {
                        // attempting to scroll through the page to trigger loading of more elements, doesn't seem to work
                        page.evaluate(() => {
                            window.scrollBy(0, window.innerHeight);
                        })
                        return page.content()
                    });
            })
            .then(html => {
                // should log the first post's a tag's href value
                console.log($('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a', html).attr('href'));
                const urls = [];
                // should be the total number of a tags across all posts
                const numLinks = $('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a', html).attr('href').length;
                const links = $('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a', html);
                // was using numLinks as the loop limit but was getting undefined, as the page only seems to load an inconsistent 10-20 elements
                for (let i = 0; i < 10; i++) {
                    urls.push(links[i].attribs.href);
                }
                console.log('start of urls:', urls);
                console.log('scraped urls:', urls.length);
                console.log('intended number of urls to be scraped:', numLinks);
                // console.log($('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a', html).length);
            })
            .catch(err => console.log(err));
    }
}

module.exports = cheerio;
The code above currently works, but as you can see from the comments, I hardcoded the counter to 10, which is obviously not the total number of <a href=#> elements on the page.
Here is the output of the above:
[nodemon] starting `node server.js`
Your app is listening on port 8080
https://www.gamebillet.com/garfield-kart
start of urls: [ 'https://www.gamebillet.com/garfield-kart',
  'https://www.humblebundle.com/store/deep-rock-galactic?hmb_source=humble_home&hmb_medium=product_tile&hmb_campaign=mosaic_section_1_layout_index_9_layout_type_twos_tile_index_1',
  'https://www.greenmangaming.com/games/batman-arkham-asylum-game-of-the-year/',
  'https://www.humblebundle.com/store/ftl-faster-than-light',
  'https://www.greenmangaming.com/vip/vip-deals/',
  'https://store.steampowered.com/app/320300/',
  'https://store.steampowered.com/app/356650/Deaths_Gambit/',
  'https://www.chrono.gg/?=Turmoil',
  'https://www.fanatical.com/en/game/slain',
  'https://freebies.indiegala.com/super-destronaut/?dev_id=freebies' ]
scraped urls: 10
numLinks: 40
data was sent
And here is the output when the for loop is changed to use numLinks:
for (let i = 0; i < numLinks; i++) {
    urls.push(links[i].attribs.href);
}
[nodemon] starting `node server.js`
Your app is listening on port 8080
https://www.gamebillet.com/garfield-kart
TypeError: Cannot read property 'attribs' of undefined
at puppeteer.launch.then.then.then.html (/file.js:49:40)
at process._tickCallback (internal/process/next_tick.js:68:7)
data was sent
I hope this isn't too messy a read. I would appreciate any help. Thanks.
I tried to implement this the async way, but I can't figure out how to return the value so it can be used in the route handler?
dynamicScraper: function() {
    async function f() {
        const browser = await puppeteer.launch();
        const [page] = await browser.pages();
        await page.goto(url2, { waitUntil: 'networkidle0' });
        const links = await page.evaluate(async () => {
            const scrollfar = document.body.clientHeight;
            console.log(scrollfar); // trying to find the height
            window.scrollBy(0, scrollfar);
            await new Promise(resolve => setTimeout(resolve, 5000));
            return [...document.querySelectorAll('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a')]
                .map((el) => el.href);
        });
        console.log(links, links.length);
        await browser.close();
        // how do I return the value to pass to the route handler?
        return (links);
    };
    return (f());
}
Here is what I get from the console.log:
Your app is listening on port 8080
[ 'https://www.nintendo.com/games/detail/celeste-switch',
'https://store.steampowered.com/app/460790/Bayonetta/',
'https://www.greenmangaming.com/de/vip/vip-deals/']
But the response the server sends to the client is an empty object {} in the browser.
Never mind, figured it out. The promise returned by the async function needs to be handled:
dynamicScraper().then((output) => {
    res.json(output);
});
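For reference, the same fix can also be written with async/await directly in the route handler. This is only a sketch of how it would slot into the existing server.js route; the 500 response on error is my own addition, not part of the original code.

app.get('/', async (req, res) => {
    try {
        // dynamicScraper() resolves with the array of hrefs once the async function finishes
        const output = await dynamicScraper();
        res.json(output);
    } catch (err) {
        console.log(err);
        res.status(500).json({ error: 'scraping failed' });
    }
});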
There are a couple of problems in your code:
page.goto(url2)

By default, page.goto only waits for the load event. Changing it to page.goto(url2, { waitUntil: 'networkidle0' }) will wait until all requests are finished.
page.evaluate(() => {
    window.scrollBy(0, window.innerHeight);
})
You are missing an await in front of that statement (or you need to embed it into your promise flow). In addition, you are not scrolling to the end of the page but only by the window height. You should use document.body.clientHeight to scroll to the end of the page.
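Embedded into your original .then flow, the corrected navigation and scroll step could look roughly like this (a sketch only; the complete example further down does the same thing with async/await):

.then(page => {
    // networkidle0 waits until no more requests are in flight
    return page.goto(url2, { waitUntil: 'networkidle0' })
        .then(() => page.evaluate(() => {
            // scroll by the full document height instead of just one window height
            window.scrollBy(0, document.body.clientHeight);
        }))
        .then(() => page.content());
})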
Also, after scrolling you have to wait for some time (or for some selector you expect to appear). You can use this code to wait a few seconds so the page has enough time to load more content:

new Promise(resolve => setTimeout(resolve, 5000))
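If you know a selector that only shows up once the additional posts have loaded, waiting for that element on the page object (outside of evaluate) is usually more reliable than a fixed delay. The selector below is just a hypothetical placeholder, not taken from the actual page:

// wait up to 10 seconds for a 20th post to appear after scrolling
await page.waitForSelector('.scrollerItem:nth-of-type(20)', { timeout: 10000 });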
Regarding your general idea, I recommend using only puppeteer instead of navigating with puppeteer, then extracting all the data and feeding it into cheerio. If you use only puppeteer, your code can be as simple as this (you would still have to wrap it into a function):
const puppeteer = require('puppeteer');

(async () => {
    const browser = await puppeteer.launch();
    const [page] = await browser.pages();
    await page.goto('https://www.reddit.com/r/GameDeals/', { waitUntil: 'networkidle0' });
    const links = await page.evaluate(async () => {
        window.scrollBy(0, document.body.clientHeight);
        await new Promise(resolve => setTimeout(resolve, 5000)); // wait for some time, you might need to figure out a good value for this yourself
        return [...document.querySelectorAll('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a')]
            .map((el) => el.href);
    });
    console.log(links, links.length);
    await browser.close();
})();
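Wrapped into a function and exported in the shape the existing server.js expects, it might look like the sketch below. It keeps the names from the question, and the 5 second delay is the same placeholder value as above, which you may need to tune:

'use strict';

const puppeteer = require('puppeteer');

const url2 = 'https://www.reddit.com/r/GameDeals/';

module.exports = {
    dynamicScraper: async function() {
        const browser = await puppeteer.launch();
        const [page] = await browser.pages();
        await page.goto(url2, { waitUntil: 'networkidle0' });
        const links = await page.evaluate(async () => {
            // scroll the full document height, then give reddit time to load more posts
            window.scrollBy(0, document.body.clientHeight);
            await new Promise(resolve => setTimeout(resolve, 5000));
            return [...document.querySelectorAll('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a')]
                .map(el => el.href);
        });
        await browser.close();
        return links;
    }
};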