Reputation: 36715
My Node.js application needs to read several files. I understand that reading files asynchronously is recommended, so apparently I have to do something like this:
fs.readFile("file1", function(...) {
fs.readFile("file2", function(...) {
fs.readFile("file3", function(...) {
[my application]
}
}
}
However, in this case, the files will be read sequentially.
Is there a way to read many files in parallel, and still make sure that the application starts after ALL of them have been loaded?
Upvotes: 2
Views: 2802
Reputation: 16233
You can also use the async module:
const fs = require('fs')
const async = require('async')

// async.each runs the iteratee on every file in parallel and calls
// the final callback once all of them have finished (or one errored).
async.each(
  ['dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt'],
  (file, callback) => {
    fs.readFile(file, (err, data) => {
      if (err) {
        callback(err) // err is already an Error; pass it through as-is
      } else {
        console.log(data)
        callback()
      }
    })
  },
  (err) => {
    if (err) {
      console.error(err)
    } else {
      console.log('All files have been read OK')
    }
  })
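If you also need the file contents collected in input order rather than just logged, async.map from the same module hands the final callback an array of results. A minimal sketch, reusing the placeholder paths above:

const fs = require('fs')
const async = require('async')

// async.map runs the iteratee on all items in parallel and gathers
// each result, preserving the input order in the final array.
async.map(
  ['dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt'],
  (file, callback) => fs.readFile(file, callback),
  (err, contents) => {
    if (err) console.error(err)
    else console.log('All files read:', contents)
  })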
Upvotes: 0
Reputation: 10801
You can use a modern Promise-based API:
import {promises as fs} from 'fs';

function readFiles(files) {
  return Promise.all(
    files.map(path => fs.readFile(path))
  );
}

readFiles(['file1', 'file2', 'file3'])
  .then(fileContents => {
    console.log('All the files are read', fileContents);
  });
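One thing to keep in mind: Promise.all rejects as soon as any single read fails, so you will usually want a .catch, or try/catch with async/await. A minimal sketch of the same idea using async/await:

import {promises as fs} from 'fs';

async function main() {
  try {
    // The reads start in parallel; await resolves once all have finished.
    const fileContents = await Promise.all(
      ['file1', 'file2', 'file3'].map(path => fs.readFile(path))
    );
    console.log('All the files are read', fileContents);
  } catch (err) {
    // The first failed read rejects the whole Promise.all.
    console.error(err);
  }
}

main();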
Upvotes: 2
Reputation: 152956
Pretty easy.
var files = [...], remaining = files.length;

var callback = function () {
  remaining--;
  if (!remaining) {
    go();
  }
};

files.forEach(function (file) {
  fs.readFile(file, callback);
});
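Note that the callback above ignores the (err, data) arguments fs.readFile passes it, so errors go unnoticed and the contents are discarded. A sketch of a variant that keeps both (the contents object is an addition here, keyed by file path):

var files = [...], remaining = files.length, contents = {};

files.forEach(function (file) {
  fs.readFile(file, function (err, data) {
    if (err) throw err;     // or pass the error into go()
    contents[file] = data;  // keep each file's buffer, keyed by path
    remaining--;
    if (!remaining) {
      go(contents);         // every read has completed
    }
  });
});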
Upvotes: 3