Reputation: 63
I'm trying to use a recursive loop and Promises to scrape a website, but it fails: it makes the request only for the first page, and on the second one the program stops, giving me an unhandled promise rejection warning.
I have these three JS files:
scrapeAll.js:
var indexPage = 0;

scrapePage(indexPage).then((json) => {
    console.log(JSON.stringify(json, null, 4));
    if (indexPage === Number.MAX_SAFE_INTEGER) {
        console.log("MAX SAFE INTEGER");
        return;
    }
    save(json);
    indexpage++;
    scrapePage(indexPage);
}).catch((data) => {
    console.log(data);
    if (indexPage === Number.MAX_SAFE_INTEGER) {
        console.log("MAX SAFE INTEGER");
        return;
    }
    indexPage++;
    scrapePage(indexPage);
});
ScrapePage.js:
let makeRequestCounter = 0;

function scrapePage(number) {
    return new Promise((resolve, reject) => {
        let url = URL + number;
        let options = {
            url: url,
            headers: {
                Host: SITE,
                Connection: "keep-alive",
                Accept: "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
                "Accept-Language": "it-IT,it;q=0.9,en-US;q=0.8,en;q=0.7",
                "Cache-Control": "max-age=0",
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.92 Safari/537.36",
                "Cookie": restoreCookieToString()
            }
        };

        makeRequest(options).then((jsonData) => {
            resolve(jsonData);
        }).catch((error) => {
            // REQUEST_LIMIT_EXCEEDED
            if (error === CONSTANTS.REQUEST_LIMIT_EXCEEDED) {
                reject(CONSTANTS.REQUEST_LIMIT_EXCEEDED);
            }
            // ALREADY_EXIST
            else if (error === CONSTANTS.ALREADY_EXIST) {
                reject(CONSTANTS.ALREADY_EXIST);
            }
            else if (error === 404) {
                reject("no data found at this page");
            }
            // error can be econnrefused or econnreset
            else if (error.code !== undefined) {
                // econnrefused
                if (error.code === CONSTANTS.ECONNREFUSED) {
                    reject("WRONG_URL", url);
                }
                // econnreset
                else if (error.code === CONSTANTS.ECONNRESET) {
                    console.log("\neconnreset error\n");
                    makeRequest(options);
                }
            }
        });
    });
}
function makeRequest(options) {
    return new Promise((resolve, reject) => {
        let json = {
            category: [],
            imgs: [],
            title: "",
            description: "",
            url: ""
        };

        if (makeRequestCounter === CONSTANTS.REQUEST_LIMIT) {
            reject(CONSTANTS.REQUEST_LIMIT_EXCEEDED);
        }
        makeRequestCounter++;

        console.log("request to: ", options.url);

        request(options, function (error, response, html) {
            if (error) {
                // error: possible econnreset econnrefused
                reject(error);
            } else {
                if (response.statusCode === 200) {
                    cookieSave(response.headers);

                    //---------- check if in db the url is already saved -------------//
                    check(response.request.uri.href, (err) => {
                        if (!err) {
                            reject(CONSTANTS.ALREADY_EXIST);
                        }
                    });
                    //---------- finish checking, is new -------------------//

                    // GETTING TITLE
                    title(html, json_recipe).then((json) => {
                        // GETTING category
                        category(html, json).then((json) => {
                            // GETTING images
                            imgs(html, json).then((json) => {
                                description(html, json).then((json) => {
                                    json.url = response.request.uri.href;
                                    resolve(json);
                                // description error
                                }).catch((error) => {
                                    console.log(error);
                                });
                            // images error
                            }).catch((error) => {
                                console.log(error);
                            });
                        // category error
                        }).catch((error) => {
                            console.log(error);
                        });
                    // title error
                    }).catch((error) => {
                        console.log(error);
                    });
                }

                // no data in this page
                if (response.statusCode === 404) {
                    reject(response.statusCode);
                }
            }
        });
    });
}
scrapeComponents.js:
...
function description(html, json) {
    return new Promise((resolve, reject) => {
        const $ = cheerio.load(html);
        let description = $('.submitter__description').text().trim();
        json.description = JSON.parse(description);
        resolve(json);
    });
}
...
error:
UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 1): no data found at this page
The program makes the first request and returns correctly to scrapeAll.js, which correctly calls scrapePage(indexPage = 1). The second time, the program does exactly the same as the first time, but when it is time to return to scrapeAll.js (reject("no data found at this page"); in ScrapePage.js), the program ends with the error. Both pages have no data, but the program also fails with good pages, saving only the first one. I think I made a big mistake with promises. Thank you very much, guys.
Upvotes: 0
Views: 408
Reputation: 1218
Your call to the scrapePage function runs only once; you are not calling it iteratively. You might have to call it iteratively inside a function. Update your scrapeAll.js:
function callScrapPage() {
    var indexPage = 0;
    while (indexPage < Number.MAX_SAFE_INTEGER) {
        scrapePage(indexPage).then((json) => {
            console.log(JSON.stringify(json, null, 4));
            save(json);
            indexPage++;
        });
    }
}
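One way to make the iteration wait for each page before requesting the next is async/await. This is only a minimal sketch, assuming scrapePage(n) and save(json) behave as in the question's code:

async function callScrapPage() {
    // Each iteration awaits the previous page before requesting the next one.
    for (let indexPage = 0; indexPage < Number.MAX_SAFE_INTEGER; indexPage++) {
        try {
            const json = await scrapePage(indexPage); // assumed from ScrapePage.js
            console.log(JSON.stringify(json, null, 4));
            save(json); // assumed from the question's code
        } catch (error) {
            // Rejections ("no data found at this page", REQUEST_LIMIT_EXCEEDED, ...) land here,
            // so no rejection is left unhandled and scraping continues with the next index.
            console.log(error);
        }
    }
}

callScrapPage();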
Upvotes: 1
Reputation: 31983
The problem is that one or more of your calls to scrapePage(indexPage) in scrapeAll.js are failing. You cannot recursively call a promise the way you might with other code, so you need a .then and .catch on the additional calls as well. Adding a .catch to the other calls will enable you to see the true source of the failure.
scrapePage(indexPage)
    .then((json) => {
        console.log(JSON.stringify(json, null, 4));
        if (indexPage === Number.MAX_SAFE_INTEGER) {
            console.log("MAX SAFE INTEGER");
            return;
        }
        save(json);
        indexPage++;
        scrapePage(indexPage).catch(e => console.log(e));
    })
    .catch((data) => {
        console.log(data);
        if (indexPage === Number.MAX_SAFE_INTEGER) {
            console.log("MAX SAFE INTEGER");
            return;
        }
        indexPage++;
        scrapePage(indexPage).catch(e => console.log(e));
    });
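Another option is to keep everything in a single promise chain by returning the next call from inside the handlers, so every rejection is handled in one place. A minimal sketch, again assuming scrapePage(n) and save(json) from the question:

function scrapeFrom(indexPage) {
    if (indexPage === Number.MAX_SAFE_INTEGER) {
        console.log("MAX SAFE INTEGER");
        return Promise.resolve();
    }
    return scrapePage(indexPage)
        .then((json) => {
            console.log(JSON.stringify(json, null, 4));
            save(json);
        })
        .catch((data) => {
            // A rejected page is logged and scraping continues with the next index.
            console.log(data);
        })
        .then(() => scrapeFrom(indexPage + 1));
}

scrapeFrom(0);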
Upvotes: 1