gpt4 book ai didi

javascript - 如何使用 Puppeteer 抓取 reddit 页面?

转载 作者:行者123 更新时间:2023-11-29 23:01:53 27 4
gpt4 key购买 nike

我正在尝试学习使用 Puppeteer 来抓取 reddit 页面。新的 reddit 具有动态添加的内容和无限滚动。我从代码中得到非常不一致的结果,并且很难调试和弄清楚如何使它工作。

主 server.js 文件,这里没有太多内容。

'use strict';

const express = require('express');
const cors = require('cors');
const app = express();
const cheerio = require('./redditScraper');

app.use(express.json());
app.use(
cors({
origin: ['http://localhost:3000']
})
);

app.get('/', (req, res) => {
let { dynamicScraper } = cheerio;

dynamicScraper()
.then(html => {
console.log('data was sent');
res.json(html);
})
.catch(err => {
console.log(err);
});
});

app.listen(process.env.PORT || 8080, () => {
console.log(`Your app is listening on port ${process.env.PORT || 8080}`);
});

用抓取工具归档

'use strict';

const rp = require('request-promise');
const $ = require('cheerio');
const puppeteer = require('puppeteer');
const url2 = 'https://www.reddit.com/r/GameDeals/';



const cheerio = {
dynamicScraper: function() {
return puppeteer
.launch()
.then(browser => {
return browser.newPage();
})
.then(page => {
return page.goto(url2)
.then(() => {
//attempting to scroll through page to cause loading in more elements, doesn't seem to work
page.evaluate(() => {
window.scrollBy(0, window.innerHeight);
})
return page.content()
});
})
.then(html => {
//should log the the first post's a tag's href value
console.log($('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a', html).attr('href'));

const urls = [];

//should be the total number of a tag's across all posts
const numLinks = $('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a', html).attr('href').length;

const links = $('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a', html);

//was using numLinks as increment counter but was getting undefined, as the pages only seems to load inconsistent between 10-20 elements
for (let i=0; i<10; i++) {
urls.push(links[i].attribs.href);
}

console.log('start of urls:', urls);
console.log('scraped urls:', urls.length);
console.log('intended number of urls to be scraped:', numLinks);
// console.log($('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a', html).length);
})
.catch(err => console.log(err));
}

}

module.exports = cheerio;

上面的代码目前有效,但是正如您从评论中看到的那样,我将计数器硬编码为 10,这显然不是 <a href=#> 的总数。在页面上。

这是上面的输出:

[nodemon] starting `node server.js`
Your app is listening on port 8080
https://www.gamebillet.com/garfield-kart
start of urls: [ 'https://www.gamebillet.com/garfield-kart',
'https://www.humblebundle.com/store/deep-rock-galactic?hmb_source=humble_home&hmb_medium=product_tile&hmb_campaign=mosaic_section_1_layout_index_9_layout_type_twos_tile_index_1', 'https://www.greenmangaming.com/games/batman-arkham-asylum-game-of-the-year/',
'https://www.humblebundle.com/store/ftl-faster-than-light',
'https://www.greenmangaming.com/vip/vip-deals/',
'https://store.steampowered.com/app/320300/',
'https://store.steampowered.com/app/356650/Deaths_Gambit/',
'https://www.chrono.gg/?=Turmoil',
'https://www.fanatical.com/en/game/slain',
'https://freebies.indiegala.com/super-destronaut/?dev_id=freebies' ]
scraped urls: 10
numLinks: 40
data was sent

这是将 for 循环更改为 numlinks 时的输出

for (let i=0; i<numLinks; i++) {
urls.push(links[i].attribs.href);
}
[nodemon] starting `node server.js`
Your app is listening on port 8080
https://www.gamebillet.com/garfield-kart
TypeError: Cannot read property 'attribs' of undefined
at puppeteer.launch.then.then.then.html (/file.js:49:40)
at process._tickCallback (internal/process/next_tick.js:68:7)
data was sent

我希望这不是一团乱麻。我将不胜感激任何帮助。谢谢。

更新/编辑:

我正在尝试实现异步方式,但我不确定如何返回要在路由处理程序中使用的值?

    dynamicScraper: function() {
async function f() {
const browser = await puppeteer.launch();
const [page] = await browser.pages();

await page.goto(url2, { waitUntil: 'networkidle0' });
const links = await page.evaluate(async () => {
const scrollfar = document.body.clientHeight;
console.log(scrollfar); //trying to find the height
window.scrollBy(0, scrollfar);
await new Promise(resolve => setTimeout(resolve, 5000));
return [...document.querySelectorAll('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a')]
.map((el) => el.href);
});
console.log(links, links.length);

await browser.close();
//how do I return the value to pass to the route handler?
return (links);
};
return(f());
}

我从 console.log .

Your app is listening on port 8080
[ 'https://www.nintendo.com/games/detail/celeste-switch',
'https://store.steampowered.com/app/460790/Bayonetta/',
'https://www.greenmangaming.com/de/vip/vip-deals/']

但是从服务器到客户端的响应在浏览器上是一个空对象 {}

更新/编辑 2:

没关系,我认为它需要处理来自异步函数的 promise 。

dynamicScraper().then((output) => {
res.json(output);
});

最佳答案

你的代码有几个问题:

page.goto(url2)

默认情况下,page.goto只会等待 load 事件。将其更改为 page.goto(url2, { waitUntil: 'networkidle0' }) 将等待所有请求完成。

page.evaluate(() => {
window.scrollBy(0, window.innerHeight);
})

您在该语句前面缺少 await(或者您需要将其嵌入到您的 promise 流程中)。此外,您不会滚动到页面末尾,而只会滚动到窗口高度。您应该使用 document.body.clientHeight 滚动到页面末尾。

此外,您必须在滚动后等待一段时间(或等待您期望的某些选择器)。您可以使用此代码等待一秒钟,以便页面有足够的时间加载更多内容:

new Promise(resolve => setTimeout(resolve, 5000))

关于您的总体想法,我建议只使用 puppeteer 而不是使用 puppeteer 进行导航,然后提取所有数据并将其放入 cheerio。如果你只使用 puppeteer,你的代码可以像这样简单(你仍然需要将它包装成一个函数):

const puppeteer = require('puppeteer');

(async () => {
const browser = await puppeteer.launch();
const [page] = await browser.pages();

await page.goto('https://www.reddit.com/r/GameDeals/', { waitUntil: 'networkidle0' });
const links = await page.evaluate(async () => {
window.scrollBy(0, document.body.clientHeight);
await new Promise(resolve => setTimeout(resolve, 5000)); // wait for some time, you might need to figure out a good value for this yourself
return [...document.querySelectorAll('.scrollerItem div:nth-of-type(2) article div div:nth-of-type(3) a')]
.map((el) => el.href);
});
console.log(links, links.length);

await browser.close();
})();

关于javascript - 如何使用 Puppeteer 抓取 reddit 页面?,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/55438812/

27 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com