Reputation: 103
I am trying to scrape images from wikipedia page using puppeteer and node js and i am successfully getting the links . Also i am able to download 1 image from the page. But whenever i am trying to call my download function in a loop it download the invalid images. The download function work when i try to scrape one image but this function is not working in loop. if anyone has any suggestion let me know . Thanks in advance
Here is my code
const puppeteer = require('puppeteer');
const fs = require('fs');
const request = require('request');
// download function
function download(uri, filename, callback) {
request.head(uri, function(err, res, body) {
request(uri)
.pipe(fs.createWriteStream(filename))
.on("close", callback);
});
}
let scrape = async ()=> {
const browser = await puppeteer.launch({
"headless": false
});
const page =await browser.newPage(); //opening new page
await page.goto("https://www.wikipedia.org/"); // go to url
const xpathselector = `//span[contains(text(), "Commons")]`; //click on the commons buttons
const commonlinks = await page.waitForXPath(xpathselector);
await page.waitFor(3000);
await commonlinks.click();
await page.waitFor(2000)
const xpath = '//*[@id="mainpage-potd"]/div[1]/a/img';
const imageXpath = await page.waitForXPath(xpath);
const src = await imageXpath.evaluate(el => el.src)
//downloading the 1st image from the page
download(src, "image.jpg", function() {
console.log("Image downloaded");
});
await page.waitFor(2000)
//here we are going to another page
const xpathselector1 ='//*[@id="mf-picture-picture"]/div[2]/ul/li[4]/a'
const previousPictture = await page.waitForXPath(xpathselector1);
await page.waitFor(2000);
previousPictture.click();
await page.waitFor(1500);
//getting urls of images
const link_start = 0;
const cue_card_links = await page.evaluate((selector) => {
const anchors_node_list = document.querySelectorAll(selector);
const anchors = [...anchors_node_list];
return anchors.map(link => link.href);
}, '#mw-content-text > div > table > tbody > tr > td > div > div > a');
console.log("[#] Done getting links\n");
for (let i = link_start; i < cue_card_links.length; i++) {
let link = cue_card_links[i];
//downloading all images from second page
download(link, `image${i}.jpg`, function() {
console.log("Image downloaded");
});
}
}
scrape();
Upvotes: 0
Views: 1274
Reputation: 2319
Download Should look like this
// download function
function download(files, callback) {
let index = 0;
var data = setInterval(async () => {
let i = index++
if (i === files.length)
clearInterval(data)
else {
request.head(files[i % files.length], function (err, res, body) {
request(files[i % files.length])
.pipe(fs.createWriteStream(`image${i}.jpg`))
.on("close", callback);
});
}
}, 4000);
}
and remove the function calling from for loop, for is sync fn, call the Fn this way
download(cue_card_links, function () { console.log("Image downloaded"); });
Upvotes: 2