TR Alruwaily

Reputation: 21

How can I read very large files in a folder in Node.js?

I am using the script below and running into a problem: I need to read about 10 million lines and pass them to a function, and the process crashes. I think the solution is to not add everything to an array at once, but I can't solve it on my own. Thanks for your help.

const fs = require('fs');
const path = require('path');

// joining path of directory
const directoryPath = path.join(__dirname, 'Documents');

// passing directoryPath and callback function
fs.readdir(directoryPath, function (err, files) {
    // handle a possible error while scanning the directory
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    // list all files using forEach
    files.forEach(function (file) {
        // do whatever you want to do with the file
        console.log(file);
    });
});


Error:
<--- Last few GCs --->

[1564:00000280010DEA30]    80020 ms: Mark-sweep 2055.1 (2057.5) -> 2055.1 (2058.8) MB, 2339.5 / 0.0 ms  (average mu = 0.159, current mu = 0.093) allocation failure scavenge might not succeed
[1564:00000280010DEA30]    82029 ms: Mark-sweep 2056.4 (2058.8) -> 2056.4 (2060.3) MB, 1874.6 / 0.0 ms  (average mu = 0.118, current mu = 0.067) allocation failure scavenge might not succeed


<--- JS stacktrace --->

==== JS stack trace =========================================

Upvotes: 1

Views: 176

Answers (1)

eol

Reputation: 24565

Starting from Node.js >= v12.12.0, fs.promises.opendir is available; it returns an async iterator over the directory entries, so they do not all have to be loaded into memory at once:

const fs = require('fs');

async function processDir(path) {
  const dir = await fs.promises.opendir(path);
  for await (const entry of dir) {
    console.log(entry.name); // process the directory entry here
  }
}

processDir('./path-to-dir').catch(console.error);

EDIT: As the files themselves are big, you also need to read them as streams, line by line, instead of loading their contents into memory at once. There are a number of ways to do this; this SO question addresses the same issue: Parsing huge logfiles in Node.js - read in line-by-line
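If you'd rather not add a dependency, Node's built-in readline module can also iterate a file line by line over a stream. A minimal sketch (readFileByLine is just an illustrative name):

const fs = require('fs');
const readline = require('readline');

// Reads a file line by line over a stream, so only one line
// is held in memory at a time.
async function readFileByLine(file) {
    const rl = readline.createInterface({
        input: fs.createReadStream(file),
        crlfDelay: Infinity, // treat \r\n as a single line break
    });
    for await (const line of rl) {
        // process the line here
    }
}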

So, for example, using the line-by-line package you can do the following (error handling is still minimal, but it should give you a start):

const fs = require('fs');
const path = require('path');
const LineByLineReader = require('line-by-line');

async function processDir(pathToDir) {
    const dir = await fs.promises.opendir(pathToDir);
    const processFilePromises = [];
    for await (const entry of dir) {
        processFilePromises.push(processFile(path.resolve(pathToDir, entry.name)));
    }
    return Promise.all(processFilePromises);
}

function processFile(file) {
    return new Promise((resolve, reject) => {
        const lr = new LineByLineReader(file);

        lr.on('line', (line) => {
           // process the line here
        });

        lr.on('error', (err) => {
            reject(err); // propagate read errors to the caller
        });

        lr.on('end', () => {
            resolve();
        });
    });
}

processDir('./path-to-your-dir').catch(console.error);
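Note that Promise.all starts processing every file in the directory concurrently. If the directory holds many large files, a sequential variation may be gentler on memory and open file descriptors. A sketch reusing the processFile helper from above (processDirSequentially is an illustrative name):

// Processes files one at a time: each file is fully read before
// the next one is opened.
async function processDirSequentially(pathToDir) {
    const dir = await fs.promises.opendir(pathToDir);
    for await (const entry of dir) {
        await processFile(path.resolve(pathToDir, entry.name));
    }
}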

Upvotes: 2
