climboid

Reputation: 6962

Fetch 100 zip files in node

So I'm trying to fetch a bunch of files from a server. The current code is basically as follows.

var http = require('http');
var fs = require('fs');

var arr = [{id: 'fileOne'}, {id: 'fileTwo'}, {id: 'fileThree'}, ...];

function fetchData() {
  for (var i = 0; i < arr.length; i++) {
    var file = fs.createWriteStream("../path/file.zip");
    var request = http.get("url/AFG_adm.zip", function(response) {
      response.pipe(file);
    });   
  } 
}

I don't think this is the best approach; I'm trying to figure out how to handle errors and how to make sure each file finishes downloading before the next iteration starts. Any help is much appreciated.

Upvotes: 2

Views: 133

Answers (1)

Gntem

Reputation: 7155

You should use the async module to handle the asynchronous part; the request module will also save you a lot of effort.

You can handle this in several ways, using either async.cargo or async.map. The idea is to take a collection of items (or batches of items), apply an asynchronous action to each, and then react once everything has completed; a sketch of the cargo variant appears after the map example below.

So a basic async.map over an array of file URLs to download would look like this.

// required modules
var async = require('async');
var fs = require('fs');
var request = require('request');
// array of urls
var URLs = ['http://.../ZipFile1.zip', 'http://.../ZipFile2.zip'];
// destination directory
var destinationDirectory = 'downloads';
// asyncDownload function
function asyncDownload(url, callback) {
  // get the filename (everything after the last "/")
  var filename = url.substring(url.lastIndexOf("/") + 1);
  // create write stream
  var stream = fs.createWriteStream(destinationDirectory + "/" + filename);
  // listen for the open event to start the request and pipe
  stream.on('open', function () {
    // forward request errors to the callback so async.map sees them
    request(url).on('error', callback).pipe(stream);
  });
  // when the file has been fully written, call back with its path
  stream.on('finish', function () {
    callback(null, destinationDirectory + "/" + filename);
  });
}

async.map(URLs, asyncDownload, function (err, results) {
  if (err) {
    return console.error(err);
  }
  console.log(results);
});
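
For reference, here is a minimal sketch of the async.cargo variant mentioned above. It assumes the same URLs array and asyncDownload function from the example, uses the classic async 1.x API (where drain is assigned as a property), and the batch size of 5 is an arbitrary choice.

// queue URLs into a cargo; the worker receives batches of up to
// 5 urls and downloads each batch in parallel via asyncDownload
var cargo = async.cargo(function (urls, done) {
  async.map(urls, asyncDownload, done);
}, 5);

// push every URL onto the cargo
URLs.forEach(function (url) {
  cargo.push(url);
});

// called once the queue is empty and all workers have finished
cargo.drain = function () {
  console.log('all downloads finished');
};

If the files must be downloaded strictly one after another, async.mapSeries is a drop-in replacement for async.map; and for 100 files, async.mapLimit lets you cap how many downloads run in parallel without serializing everything.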

Upvotes: 1
