d-_-b
d-_-b

Reputation: 23171

NodeJS concatenate all files in a directory

Is there a faster or more succinct way to concatenate all of the files located in a directory using NodeJS?

In bash I could do something like this:

# Concatenate every file matched by the first argument into the file
# named by the second argument, with a newline appended after each file.
# NOTE(review): $1 is deliberately left unquoted so the shell word-splits /
# glob-expands it into multiple filenames; "$file" is quoted to protect
# names containing spaces. $2 is assumed to be a single safe filename.
for file in $1
do
  cat "$file"
  echo
done > $2;

Here is what I'm doing now:

var fs = require('fs');
var Promise = require('bluebird');

module.exports = function(directory, destination) {
    return new Promise((resolve, reject) => {
        fs.readdir(directory, (err, files) => {
            if (err) {
                return reject(err);
            }

            (function next() {
                var file = files.shift();
                if (!file) {
                    return resolve();
                }

                fs.readFile(directory + '/' + file, (err, content) => {
                    if (err) {
                        return reject(err);
                    }

                    fs.appendFile(destination, '\n' + content, (err) => {
                        if (err) {
                            return reject(err);
                        }

                        return next();
                    });
                });
            })();
        });
    });
};

Upvotes: 14

Views: 25637

Answers (5)

Nicolae Iotu
Nicolae Iotu

Reputation: 435

All in one line:

// Concatenate every regular file in `dir` into `out`.
// Fix vs. the original: lstatSync/readFileSync were called with the bare
// basename returned by readdirSync, which only works when the directory is
// the CWD ('./'). Joining with the directory makes the snippet correct for
// any target directory. (Assumes `fs` and `path` have been required.)
const dir = './';            // replace with target directory
const out = './output.file'; // replace with target destination

fs.readdirSync(dir).forEach((entry) => {
  const full = path.join(dir, entry);
  // NOTE(review): if `out` lives inside `dir` and already exists, its old
  // contents are re-read and appended into itself — place it elsewhere.
  if (fs.lstatSync(full).isFile())
    fs.appendFileSync(out, fs.readFileSync(full).toString());
});

Replace './' with target directory and './output.file' with target destination.

Upvotes: 3

oury.ba
oury.ba

Reputation: 76

You can do the same thing without bluebird, since Node 8.x includes the util package, which can promisify the fs API.

This example shows how I use it in my project to concatenate minified files (so remove the filter if necessary).


const {promisify} = require("util");  //requires node 8.X
const readdir = promisify(fs.readdir);
const readFile = promisify(fs.readFile);
const appendFile = promisify(fs.appendFile);


// Append all minified and obsfucated files in source directory
// The resulting file is generated in destination
function appendFile(directory, destination) {
	readdir(directory)
		.then((files) => {
			console.log('FILES CONTENT:', files);
			files.filter(file => {
				console.log('FILTER > ' + file);
				return (file.indexOf('-min.js') != -1 && file.indexOf('-min.js.map') == -1) 
			})
			.map(file => {
				console.log('MAP ('+destination+') > ' + path.join(directory, file));
				readFile(path.join(directory, file), 'utf8')
				.then(data => {
					//console.log('DATA:', data);
					appendFile(destination, data+'\n')
					.then(() => {
						console.log('append done');
					})
					.catch((err) => {
						displayError(err);
					});
				});
			});
		})
		.catch((err) => {
			console.log('ERROR:', err);
			displayError(err);
		});
}

Upvotes: 1

JLM
JLM

Reputation: 599

That ?

require('child_process').execSync('cat *').toString('UTF-8')

:D

Upvotes: 24

peteb
peteb

Reputation: 19428

If you're going to use bluebird then you get the benefit of promisification. You can use promisifyAll() to convert all error-first callback-accepting async functions in the fs module to return a promise. You can read more about it in the above promisification link.

The below code reads in all of the files as strings and then reduces all of their contents into a single string and writes that string to the destination.

It's probably best not to catch() any returned errors here. Rather, the caller should attach a catch() to handle any returned errors as they need.

const Promise = require('bluebird')
const fs = Promise.promisifyAll(require('fs'))
const path = require('path')

module.exports = (directory, destination) => {
  return fs.readdirAsync(directory)
    .map(file => fs.readFileAsync(path.join(directory, file), 'utf8'))
    .then(contents => fs.writeFileAsync(destination, contents.join('\n')))
}

Upvotes: 4

Marcos Casagrande
Marcos Casagrande

Reputation: 40404

Using the async library you can easily read files in parallel and then join the results.

const fs = require("fs");
const async = require("async");
const path = require("path");

module.exports = function(directory, destination) {

  return new Promise((resolve, reject) => {

    fs.readdir(directory, (err, files) => {
        if (err)
            return reject(err);

        files = files.map(file => path.join(directory,file));

        //Read all files in parallel
        async.map(files, fs.readFile, (err, results) => {
            if (err)
                return reject(err);

           //results[0] contents of file #1
           //results[1] contents of file #2
           //results[n] ...

            //Write the joined results to destination
            fs.writeFile(destination, results.join("\n"), (err) => {
                if (err)
                    return reject(err);

                resolve();
            });
        });

    });
  });
}

Upvotes: 4

Related Questions