user180574
user180574

Reputation: 6084

nodejs: write multiple files in for loop

I am still learning nodejs. This question is related to a few others (e.g., Writing multiple files in a loop in Nodejs) but a bit different. It is actually quite simple. I want to write a number of files and, when they are all done, continue with other tasks.

Without for loop, I am doing like this,

// Each read/write pair is nested inside the previous write's callback, so the
// files are copied strictly one after another — the classic "callback pyramid"
// this question is asking how to flatten into a loop.
fs.readFile(f1.path, function(err, data) {
    fs.writeFile("/tmp/" + f1.path, data, function(err) {
        fs.readFile(f2.path, function(err, data) {
            fs.writeFile("/tmp/" + f2.path, data, function(err) {
                ...
                if (err) throw err;

                // do something when all files are written

If I want to convert it using for loop, how to do it? Suppose I could put f1, f2 ... into an array and iterate them.

Thank you for the help.

Upvotes: 4

Views: 11361

Answers (4)

Aakash
Aakash

Reputation: 23737

STEP 1 : Install fs-extra

npm i fs-extra

Documentation : https://www.npmjs.com/package/fs-extra

STEP 2 : Write files with fs.outputFile

const fs = require('fs-extra'); // drop-in superset of the built-in fs module
// Async — the square brackets below are signature notation for optional
// arguments (as in the fs-extra docs), not an array literal.
fs.outputFile(file, data, [options, callback])
// Sync
fs.outputFileSync(file, data, [options])

If output directories are not there, they will be created recursively.

Good Luck...

Upvotes: 0

Abdullah Shahin
Abdullah Shahin

Reputation: 1052

Here is another way

    import fs from 'node:fs';

    const listOfFiles = [{fileName:"a.txt",data:"dummy data!"},{fileName:"b.txt",data:"dummy data b!"},{fileName:"c.txt",data:"dummy data c!"},{fileName:"d.txt",data:"dummy data d!"},{fileName:"e.txt",data:"dummy data e!"}];

    // FIX: the original seeded reduce with the `writeData` function itself and
    // ignored the accumulator, so the writes all started in parallel and there
    // was no promise to wait on. Seed with a resolved promise and chain each
    // write onto the previous one; `allWritten` resolves when every file is
    // written (or rejects on the first error).
    const allWritten = listOfFiles.reduce(
        (chain, file) => chain.then(() => writeData(file)),
        Promise.resolve()
    );

    // These run immediately — the writes above are asynchronous.
    console.log("Another Code to be executed!");
    console.log("Another Code to be executed!");
    console.log("Another Code to be executed!");
    console.log("Another Code to be executed!");

    allWritten.catch((err) => console.error(err));

    // Write a single file; resolves once fs.writeFile's callback fires.
    // @param {{fileName: string, data: string}} params
    // @returns {Promise<void>}
    function writeData(params){
      return new Promise((resolve,reject)=>{
        fs.writeFile(params.fileName,params.data,'utf8',(err)=>{
           if(err)
              reject(err);
           else
              resolve();
          });
     });
    }

Upvotes: 0

Brett Beatty
Brett Beatty

Reputation: 5963

You can save promises in an array and use Promise.all to wait for them to all finish:

const fs = require('fs');
const path = require('path');

const files = [f1, f2, ...]; // placeholder: your own file objects, each exposing a .path

/**
 * Copy one file via streams.
 * @param {string} source - path of the file to read
 * @param {string} destination - path to write the copy to
 * @returns {Promise<void>} resolves when the copy is fully flushed
 */
function copyFile(source, destination) {
    const input = fs.createReadStream(source);
    const output = fs.createWriteStream(destination);
    return new Promise((resolve, reject) => {
        output.on('error', reject);
        input.on('error', reject);
        // FIX: resolve on the writer's 'finish', not the reader's 'end'.
        // 'end' fires when the source has been fully read, but the
        // destination stream may still hold buffered, unflushed data.
        output.on('finish', resolve);
        input.pipe(output);
    });
}

// Start every copy immediately and remember the pending promises.
const pendingCopies = [];
for (const file of files) {
    const source = file.path;
    const destination = path.join('/tmp', file.path);
    // Use these instead of line above if you have files in different
    // directories and want them all at the same level:
    // const filename = path.parse(file.path).base;
    // const destination = path.join('/tmp', filename);
    pendingCopies.push(copyFile(source, destination));
}

// Wait until every copy has settled successfully.
Promise.all(pendingCopies)
    .then(() => {
        // do what you want
        console.log('done');
    })
    .catch((err) => {
        // handle I/O error
        console.error(err);
    });

Upvotes: 6

peteb
peteb

Reputation: 19428

You can use recursion to do this without another library. The below code will copy files from an Array, waiting for the prior file to finish copying before moving on to the next asynchronously.

Approach using fs.readFile() and fs.writeFile()

const fs = require('fs')
const path = require('path')

// your files array — placeholder: each entry must expose a .path property
let files = [f1, f2]

// Copy files[index] to /tmp, then recurse to index + 1.
// Calls cb(null) after the last file, or cb(err) on the first failure.
function copyFile (index, cb) {
  let file = files[index]
  // FIX: check for the end of the array BEFORE touching file.path.
  // The original computed `dest` first, so files[index] === undefined
  // threw a TypeError instead of reaching this "done" branch.
  if (!file) {
    // done copying
    return cb(null)
  }
  let dest = path.join('/tmp', file.path)
  fs.readFile(file.path, (err, data) => {
    if (err) {
      // return callback with error
      return cb(err)
    } else {
      fs.writeFile(dest, data, (err) => {
        if (err) {
          return cb(err)
        } else {
          // this file is fully written; move on to the next one
          copyFile(index + 1, cb)
        }
      })
    }
  })
}

// Start the recursive copy at index 0; the callback fires exactly once —
// with the first error, or with null when every file has been copied.
copyFile(0, (err) => {
    if (err) {
      // Handle Error
        console.log(err)
    } else {
      console.log('Files Copied Successfully!')
    }
})

Approach using streams, better in my opinion

const fs = require('fs')
const path = require('path')

// your files array — placeholder: each entry must expose a .path property
let files = [f1, f2]

// Stream-copy files[index] to /tmp, then recurse to index + 1.
// Calls cb(null) after the last file, or cb(err) on the first failure.
function copyFile(index, cb) {
    let file = files[index]

    // FIX: bail out BEFORE dereferencing file.path. The original computed
    // `dest` first, so files[index] === undefined threw a TypeError on the
    // final recursive call instead of reporting success via cb(null).
    if (!file) {
        return cb(null)
    }

    let dest = path.join('/tmp', file.path)

    let source = fs.createReadStream(file.path)
    let copy = fs.createWriteStream(dest)

    source.on('error', err => {
      // explicitly close writer
      copy.end()
      return cb(err)
    })

    copy.on('error', err => {
      return cb(err)
    })

    // 'finish' fires once all piped data is flushed to dest
    copy.on('finish', () => {
      copyFile(index + 1, cb)
    })

    source.pipe(copy)
}

// Start the recursive copy at index 0; the callback fires exactly once —
// with the first error, or with null when every file has been copied.
copyFile(0, (err) => {
    if (err) {
      // Handle Error
        console.log(err)
    } else {
      console.log('Files Copied Successfully!')
    }
})

Upvotes: 1

Related Questions