user2405589

Reputation: 968

Reading files in a while loop in Node.js

I am reading all the JSON files in a directory and performing CRUD operations on them in Couchbase in a cyclic manner for some performance benchmarks.

function readFiles(dirName, crudOps, onError) {
  fs.readdir(dirName, function (err, filenames) {
    if (err) {
      onError(err);
      return;
    }
    var circularIterator = cyclicIterator(filenames);

    while (1) {
      fname = circularIterator.next().value;

      fs.readFile(dirName + fname, function (err, content) {
        console.log(fname); // NEVER REACHES HERE
        if (err) {
          console.log(err);
          return;
        }
        crudOps(fname, content);
      });
    }
  });
}

However, it never seems to execute the fs.readFile callback. How can I make it iterate circularly through the list of filenames and use each file's content in my crudOps function?

EDIT:

Per the suggestion by Ry, I have switched to a promisified readFile.

const fs = require('fs');
const util = require('util');

const readFile = util.promisify(fs.readFile);

async function getStuff(filename) {
  return await readFile(filename);
}

function readFiles(dirName, onFileContent, onError) {
  fs.readdir(dirName, function (err, filenames) {
    if (err) {
      onError(err);
      return;
    }
    var iterator = circularIterator(filenames);

    while (1) {
      fname = iterator.next().value;
      getStuff(dirName + fname).then(data => {
        console.log(data);
        onFileContent(fname, data);
      });
    }
  });
}

This hangs, crashes my Chrome browser, and produces the following error on my console:

FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - JavaScript heap out of memory

Is there a way to make this better?

Upvotes: 1

Views: 1282

Answers (1)

HMR

Reputation: 39250

You cannot have a limitless number of file reads in flight; find the maximum your machine can handle and limit the program to that.

To limit activity, you can recursively call a function that reads a file, does something with it, and, once it has finished, calls itself again. Start only a bounded number of these chains, so the number of active reads never exceeds that limit.

function readFiles(dirName, onFileContent, onError) {
  fs.readdir(dirName, function (err, filenames) {
    if (err) {
      onError(err);
      return;
    }
    const recur = (iterator) => {
      const fname = iterator.next().value;
      getStuff(dirName + fname)
      .then(
        data => { 
          console.log(data);
          //if the following is an asynchronous function then
          //  you should wait for it to finish before calling recur
          onFileContent(fname, data);
          //recursively call itself
          recur(iterator);
        }
      );
    };
    const iterator = circularIterator(filenames);
    var i = 0;
    while (i++ < 1000) { // maximum amount of active tasks
      recur(iterator);
    }
  })
}
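
Neither the question nor the answer shows the circularIterator helper (called cyclicIterator in the question's first snippet). Assuming it is simply a generator that cycles over the filenames forever, a minimal sketch could look like this:

// A minimal sketch, assuming circularIterator is meant to be a generator
// that yields the given filenames in order and wraps around forever.
function* circularIterator(filenames) {
  let i = 0;
  while (true) {
    yield filenames[i];
    i = (i + 1) % filenames.length;
  }
}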

Working example:

var later = time => value =>
  new Promise(
    (resolve,reject)=>
      setTimeout(
        ()=>resolve(value),
        time
      )
  )
;
var waitTwoSeconds = later(2000);
function *myIterator(){
  var i = 1;
  while(i<=15){
    yield i++;
  }
}

function readFiles() {
  const recur = (iterator) => {
    const value = iterator.next().value;
    if(value===undefined){
      console.log("----- Done")
      return;
    }
    waitTwoSeconds(value)
    .then(
      data => { 
        console.log("returned",data);
        recur(iterator);
      }
    );
  };
  const iterator = myIterator();
  var i = 0;
  while (i++ < 5) {
    recur(iterator);
  }
}
readFiles();

Upvotes: 1
