Reputation: 83
I'm trying to make a simple feed reader in Node and I'm facing a problem with multiple requests in node.js. For example, I've got this code:
// NOTE(review): `var i` is function-scoped and request() is asynchronous,
// so every callback below runs AFTER the loop has finished — by then
// i === url.length (2), and neither branch inside the callback matches.
var url = [
"https://site1.com",
"https://site2.com"
];
var items = [];
var i;
for(i=0; i<url.length; i++){
request(url[i], function(err, respone, html){
if(!err) {
var $ = cheerio.load(html);
// Dead code in practice: i is already 2 when this callback fires.
if(i == 0){
$(".class1").find("a").each(function (index, element){
items.push($(element).text());
});
}else if(i == 1){
$(".class1").find("a").each(function (index, element){
items.push($(element).text());
});
}
}
});
}
How can I scrape multiple sites in a loop?
Upvotes: 0
Views: 1226
Reputation: 995
You can use a Promise with Promise.all in this case, since network requests are asynchronous.
// Scrape every URL in parallel: one Promise per request, each resolving
// with the array of link texts found under ".class1" on that page.
var urls = [
  "https://site1.com",
  "https://site2.com"
];
var promises = [];
for (const url of urls) {
  promises.push(new Promise((resolve, reject) => {
    request(url, function (err, response, html) {
      if (err) {
        return reject(err);
      }
      var $ = cheerio.load(html);
      var items = [];
      $(".class1").find("a").each(function (index, element) {
        items.push($(element).text());
      });
      return resolve(items);
    });
  })); // <-- the original was missing "));" here, a syntax error
}
// results is an array of per-site item arrays, in the same order as urls;
// a single failed request rejects the whole batch and lands in .catch().
Promise.all(promises).then((results) => {
  console.log(results);
}).catch((error) => {
  console.log(error);
});
Upvotes: 1