crawf

Reputation: 9658

Node.js fs.readdir recursive directory search

Any ideas on an async directory search using fs.readdir? I realize that we could introduce recursion and call the read directory function with the next directory to read, but I'm a little worried about it not being async...

Any ideas? I've looked at node-walk, which is great, but it doesn't give me just the files in an array, like readdir does.

Looking for output like...

['file1.txt', 'file2.txt', 'dir/file3.txt']

Upvotes: 404

Views: 448517

Answers (30)

qwtel

Reputation: 5057

This one uses the maximum amount of new, buzzwordy features available in node 8, including Promises, util/promisify, destructuring, async-await, map+reduce and more, making your co-workers scratch their heads as they try to figure out what is going on.

Node 8+

No external dependencies.

const { promisify } = require('util');
const { resolve } = require('path');
const fs = require('fs');
const readdir = promisify(fs.readdir);
const stat = promisify(fs.stat);

async function getFiles(dir) {
  const subdirs = await readdir(dir);
  const files = await Promise.all(subdirs.map(async (subdir) => {
    const res = resolve(dir, subdir);
    return (await stat(res)).isDirectory() ? getFiles(res) : res;
  }));
  return files.reduce((a, f) => a.concat(f), []);
}

Usage

getFiles(__dirname)
  .then(files => console.log(files))
  .catch(e => console.error(e));

Node 10.10+

Updated for node 10+ with even more whizbang:

const { resolve } = require('path');
const { readdir } = require('fs').promises;

async function getFiles(dir) {
  const dirents = await readdir(dir, { withFileTypes: true });
  const files = await Promise.all(dirents.map((dirent) => {
    const res = resolve(dir, dirent.name);
    return dirent.isDirectory() ? getFiles(res) : res;
  }));
  return Array.prototype.concat(...files);
}

Note that starting with node 11.15.0 you can use files.flat() instead of Array.prototype.concat(...files) to flatten the files array.
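
With that, the last line of getFiles simply becomes:

return files.flat();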

Node 11+

If you want to blow everybody's head up completely, you can use the following version using async iterators. In addition to being really cool, it also allows consumers to pull results one-at-a-time, making it better suited for really large directories.

const { resolve } = require('path');
const { readdir } = require('fs').promises;

async function* getFiles(dir) {
  const dirents = await readdir(dir, { withFileTypes: true });
  for (const dirent of dirents) {
    const res = resolve(dir, dirent.name);
    if (dirent.isDirectory()) {
      yield* getFiles(res);
    } else {
      yield res;
    }
  }
}

Usage has changed because the return type is now an async iterator instead of a promise:

;(async () => {
  for await (const f of getFiles('.')) {
    console.log(f);
  }
})()

In case somebody is interested, I've written more about async iterators here: https://qwtel.com/posts/software/async-generators-in-the-wild/

Node 20+

As of Node 20, fs.readdir has a { recursive: true } option:

const files = await fs.promises.readdir(dir, { recursive: true });

Upvotes: 416

Mir-Ismaili

Reputation: 17224

Short, Modern and Efficient:

import {readdir} from 'node:fs/promises'
import {join} from 'node:path'

const walk = async (dirPath) => Promise.all(
  await readdir(dirPath, { withFileTypes: true }).then((entries) => entries.map((entry) => {
    const childPath = join(dirPath, entry.name)
    return entry.isDirectory() ? walk(childPath) : childPath
  })),
)

Special thanks to Function for the hint: {withFileTypes: true}.


This automatically keeps the tree structure of the source directory (which you may need). For example, if:

const allFiles = await walk('src')

then allFiles would be a TREE like this:

[
  [
    'src/client/api.js',
    'src/client/http-constants.js',
    'src/client/index.html',
    'src/client/index.js',
    [ 'src/client/res/favicon.ico' ],
    'src/client/storage.js'
  ],
  [ 'src/crypto/keygen.js' ],
  'src/discover.js',
  [
    'src/mutations/createNewMutation.js',
    'src/mutations/newAccount.js',
    'src/mutations/transferCredit.js',
    'src/mutations/updateApp.js'
  ],
  [
    'src/server/authentication.js',
    'src/server/handlers.js',
    'src/server/quick-response.js',
    'src/server/server.js',
    'src/server/static-resources.js'
  ],
  [ 'src/util/prompt.js', 'src/util/safeWriteFile.js' ],
  'src/util.js'
]

Flatten it if you don't want the tree structure:

allFiles.flat(Number.POSITIVE_INFINITY)
[
  'src/client/api.js',
  'src/client/http-constants.js',
  'src/client/index.html',
  'src/client/index.js',
  'src/client/res/favicon.ico',
  'src/client/storage.js',
  'src/crypto/keygen.js',
  'src/discover.js',
  'src/mutations/createNewMutation.js',
  'src/mutations/newAccount.js',
  'src/mutations/transferCredit.js',
  'src/mutations/updateApp.js',
  'src/server/authentication.js',
  'src/server/handlers.js',
  'src/server/quick-response.js',
  'src/server/server.js',
  'src/server/static-resources.js',
  'src/util/prompt.js',
  'src/util/safeWriteFile.js',
  'src/util.js'
]

Upvotes: 39

Lots of good answers here, but the most obvious one seems to be missing: don't use recursion, and don't even use Node itself for this. You don't need it.

Both Windows and *nix (including macOS) have commands for this, and getting the result of those commands is basically a one-liner.

If you want easy-to-maintain, bug-free code, the real trick is to not use Node's file system API at all: just ask the operating system to do the work via an exec call and capture its output. This has the added benefit of giving you a fully qualified list of paths that you can plug straight into file stat/read/write operations:

const { execSync } = require("child_process");

const onWindows = process.platform === `win32`;

function getEverything(dir) {
  const listCommand = onWindows ? `dir /b/o/s "${dir}"` : `find "${dir}"`;
  return execSync(listCommand).toString(`utf-8`).split(/\r?\n/);
}

console.log(`tree in the current dir:`, getEverything(`.`));

Done.

Short of "getting the command wrong", there is basically nothing to mess up here, there's barely enough code to introduce bugs into.

That said, you may need to add a { maxBuffer: some_large_number } to the execSync call if you're trying to list huge directories, but that doesn't really make things any more complex.
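
For example (the 64 MB value is just an illustration; size it to your expected output):

const output = execSync(listCommand, { maxBuffer: 1024 * 1024 * 64 }).toString(`utf-8`);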

Upvotes: 1

am0wa

Reputation: 8407

The simplest approach is to use fast-glob:

npm install fast-glob

dir/**/*.?? — matches files at any nesting level under 'dir' (note that *.?? only matches two-character extensions; adjust the pattern as needed)

const fg = require('fast-glob');

const getAllFiles = (dirPath, extension = '*.??') => {
  return fg([`${dirPath}/**/${extension}`], { dot: true });
};

const getAllTxtFiles = async (dirPath) => {
  const files = await getAllFiles(dirPath, '*.txt');
  console.log('Found files:', files);
};

Upvotes: 0

EuberDeveloper

Reputation: 1048

You can use this npm module:

npm dree

It recursively walks the whole directory tree and returns it as a string or as an object. Its file callback lets you collect exactly the paths you want.

Example:

const dree = require('dree');
const options = {
    followLinks: true,               // If you want to follow the folders pointed by symbolic links
    depth: 5,                        // If you want to stop after 5 directory levels
    exclude: /dir_to_exclude/,       // If you want to exclude some paths with a regexp
    extensions: [ 'txt', 'jpg' ]     // If you want only some extensions
};

const paths = [];
const fileCallback = function (file) {
    paths.push(file.relativePath);
};

let tree;
// Do it synchronously
tree = dree.scan('./dir', options, fileCallback);

// Do it asynchronously (returns promise)
tree = await dree.scanAsync('./dir', options, fileCallback);

// Now paths contains the paths you want
console.log(paths);

// tree contains an object representing the directory tree (filtered in base of the conditions)

Note that if you use await, this code must be inside an async function. Alternatively, since a promise is returned, you can use the .then() method.
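
For example, outside of an async function:

dree.scanAsync('./dir', options, fileCallback).then(tree => {
    console.log(paths);
});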

Upvotes: 0

Fawaz Ahmed

Reputation: 1594

Tested in Node.js v21:

const fs = require('fs/promises')
const path = require('path')

async function listDirRecursive(pathToDir, onlyFiles, onlyDir) {
    let result = await fs.readdir(pathToDir, { withFileTypes: true, recursive: true })
    if (onlyFiles)
        result = result.filter(e => e.isFile())
    else if (onlyDir)
        result = result.filter(e => e.isDirectory())

    return result.map(e => path.join(e.path, e.name))
}

Upvotes: 0

mbelsky

Reputation: 6618

Shortest native solution, available since the v20.1 release:

import fs from 'node:fs'

const results = await fs.promises.readdir('/tmp', { recursive: true })

The recursive option is also supported by the callback-based fs.readdir and the synchronous fs.readdirSync.
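
For example, the synchronous and callback forms (a minimal sketch; same v20.1+ requirement):

import fs from 'node:fs'

// synchronous
const files = fs.readdirSync('/tmp', { recursive: true })

// callback style
fs.readdir('/tmp', { recursive: true }, (err, names) => {
  if (err) throw err
  console.log(names)
})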

Upvotes: 7

toddmo

Reputation: 22476

Well, this is:

  • short
  • readable
  • no external libs
  • no explicit stat
  • no extra arguments
  • flat list

import { readdir } from 'fs/promises'
import * as path from 'path'

export const files = async (directory: string): Promise<string[]> =>
  (await readdir(directory, { withFileTypes: true }))
    .reduce(async (acc, entry) => {
      const _path = path.join(directory, entry.name)
      return [...await acc, ...(entry.isDirectory() ? await files(_path) : [_path])]
    }, Promise.resolve([] as string[]))

use:

var myfiles: string[] = await files('/home/toddmo/Pictures')

Upvotes: 0

Andrew Odri

Reputation: 9450

Vanilla ES6 + async/await + small & readable

I didn't find the answer I was looking for in this thread; there were a few similar elements spread across different answers, but I just wanted something simple and readable.

Just in case it helps anyone in the future (i.e. myself in a couple of months), this is what I ended up using:

const { readdir } = require('fs/promises');
const { join } = require('path');

const readdirRecursive = async dir => {
  const files = await readdir( dir, { withFileTypes: true } );

  const paths = files.map( async file => {
    const path = join( dir, file.name );

    if ( file.isDirectory() ) return await readdirRecursive( path );

    return path;
  } );

  return ( await Promise.all( paths ) ).flat( Infinity );
}

module.exports = {
  readdirRecursive,
}

Upvotes: 9

Elli Zorro

Reputation: 501

One more approach; I'll just leave it here. Maybe it will be useful for someone in the future.

const fs = require("fs");
const { promisify } = require("util");
const p = require("path");
const readdir = promisify(fs.readdir);

async function getFiles(path) {
  try {
    const entries = await readdir(path, { withFileTypes: true });

    const files = entries
      .filter((file) => !file.isDirectory())
      .map((file) => ({
        path: `${path}/${file.name}`,
        ext: p.extname(`${path}/${file.name}`),
        pathDir: path,
      }));

    const folders = entries.filter((folder) => folder.isDirectory());

    for (const folder of folders) {
      files.push(...(await getFiles(`${path}/${folder.name}`)));
    }
    return files;
  } catch (error) {
    return error;
  }
}

Usage:

getFiles(rootFolderPath)
  .then(files => console.log(files))
  .catch(err => console.error(err));

Upvotes: 1

Tilo

Reputation: 635

A modern, promise-based recursive readdir version:

const fs = require('fs');
const path = require('path');

const readDirRecursive = async (filePath) => {
    const dir = await fs.promises.readdir(filePath);
    const files = await Promise.all(dir.map(async relativePath => {
        const absolutePath = path.join(filePath, relativePath);
        const stat = await fs.promises.lstat(absolutePath);

        return stat.isDirectory() ? readDirRecursive(absolutePath) : absolutePath;
    }));

    return files.flat();
}

Upvotes: 8

Kabeer Jaffri

Reputation: 708

Simple, async, promise-based:

const fs = require('fs/promises');
const getDirRecursive = async (dir) => {
    try {
        const items = await fs.readdir(dir);
        let files = [];
        for (const item of items) {
            if ((await fs.lstat(`${dir}/${item}`)).isDirectory()) files = [...files, ...(await getDirRecursive(`${dir}/${item}`))];
            else files.push({file: item, path: `${dir}/${item}`, parents: dir.split("/")});
        }
        return files;
    } catch (e) {
        return e
    }
};

Usage: await getDirRecursive("./public");

Upvotes: 6

Got To Figure

Reputation: 422

Here is a simple synchronous recursive solution:

const fs = require('fs')

const getFiles = path => {
    const files = []
    for (const file of fs.readdirSync(path)) {
        const fullPath = path + '/' + file
        if(fs.lstatSync(fullPath).isDirectory())
            getFiles(fullPath).forEach(x => files.push(file + '/' + x))
        else files.push(file)
    }
    return files
}

Usage:

const files = getFiles(process.cwd())

console.log(files)

You could write it asynchronously, but there is no need. Just make sure that the input directory exists and is accessible.
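
For example, a quick guard before calling it (a hypothetical addition using the standard fs.existsSync):

const dir = process.cwd()
if (!fs.existsSync(dir)) throw new Error(`No such directory: ${dir}`)
console.log(getFiles(dir))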

Upvotes: 8

iolibitz

Reputation: 442

There is a newer module called cup-readdir that searches directories recursively and very quickly. It is promise-based and outperforms many popular modules when dealing with deep directory structures.

It can return all files in an array and sort them by their properties, but it lacks features like file filtering and entering symlinked directories. It could be useful for large projects where you simply want to get every file from a directory. Here is a link to their project homepage.

Upvotes: 1

Gander

Reputation: 2001

A variant of qwtel's answer, in TypeScript:

import { resolve } from 'path';
import { readdir } from 'fs/promises';

async function* getFiles(dir: string): AsyncGenerator<string> {
    const entries = await readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
        const res = resolve(dir, entry.name);
        if (entry.isDirectory()) {
            yield* getFiles(res);
        } else {
            yield res;
        }
    }
}

Upvotes: 7

Just a simple walk:

const fs = require('fs')
const path = require('path')

const pending = [baseFolderPath]
function walk () {
    const current = pending.shift()
    // do stuff with `current`, then queue any subdirectories
    for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
        if (entry.isDirectory()) pending.push(path.join(current, entry.name))
    }
    if (pending.length) walk()
}
walk()

Upvotes: 0

For whoever wants a synchronous alternative to the accepted answer (I know I did):

var fs = require('fs');
var path = require('path');
var walk = function(dir) {
    var results = [];
    var list;
    try {
        list = fs.readdirSync(dir);
    } catch (e) {
        return e.toString();
    }
    var i = 0;
    return (function next() {
        var file = list[i++];
        if (!file) return results;
        file = path.resolve(dir, file);
        var stat = fs.statSync(file);
        if (stat && stat.isDirectory()) {
            results = results.concat(walk(file));
        } else {
            results.push(file);
        }
        return next();
    })();
};

console.log(
    walk("./")
)

Upvotes: 1

Patrick Michalina

Reputation: 1379

A promise-based recursive solution in TypeScript, using Array.flat() to handle the nested returns:

import { resolve } from 'path'
import { Dirent } from 'fs'
import * as fs from 'fs'

function getFiles(root: string): Promise<string[]> {
  return fs.promises
    .readdir(root, { withFileTypes: true })
    .then(dirents => {
      const mapToPath = (r: string) => (dirent: Dirent): string => resolve(r, dirent.name)
      const directoryPaths = dirents.filter(a => a.isDirectory()).map(mapToPath(root))
      const filePaths = dirents.filter(a => a.isFile()).map(mapToPath(root))

      return Promise.all([
        ...directoryPaths.map(a => getFiles(a)),
        ...filePaths
      ]).then(results => results.flat())
    })
}

Upvotes: 1

chjj

Reputation: 14602

There are basically two ways of accomplishing this. In an async environment you'll notice that there are two kinds of loops: serial and parallel. A serial loop waits for one iteration to complete before moving on to the next, which guarantees that the iterations complete in order. A parallel loop starts all iterations at the same time, and one may complete before another, but it is much faster than a serial loop. So in this case it's probably better to use a parallel loop, because it doesn't matter what order the walk completes in, as long as it completes and returns the results (unless you want them in order).

A parallel loop would look like this:

var fs = require('fs');
var path = require('path');
var walk = function(dir, done) {
  var results = [];
  fs.readdir(dir, function(err, list) {
    if (err) return done(err);
    var pending = list.length;
    if (!pending) return done(null, results);
    list.forEach(function(file) {
      file = path.resolve(dir, file);
      fs.stat(file, function(err, stat) {
        if (stat && stat.isDirectory()) {
          walk(file, function(err, res) {
            results = results.concat(res);
            if (!--pending) done(null, results);
          });
        } else {
          results.push(file);
          if (!--pending) done(null, results);
        }
      });
    });
  });
};

A serial loop would look like this:

var fs = require('fs');
var path = require('path');
var walk = function(dir, done) {
  var results = [];
  fs.readdir(dir, function(err, list) {
    if (err) return done(err);
    var i = 0;
    (function next() {
      var file = list[i++];
      if (!file) return done(null, results);
      file = path.resolve(dir, file);
      fs.stat(file, function(err, stat) {
        if (stat && stat.isDirectory()) {
          walk(file, function(err, res) {
            results = results.concat(res);
            next();
          });
        } else {
          results.push(file);
          next();
        }
      });
    })();
  });
};

And to test it out on your home directory (WARNING: the results list will be huge if you have a lot of stuff in your home directory):

walk(process.env.HOME, function(err, results) {
  if (err) throw err;
  console.log(results);
});

EDIT: Improved examples.

Upvotes: 445

Afanasii Kurakin

Reputation: 3479

Async

const fs = require('fs')
const path = require('path')

const readdir = (p, done, a = [], i = 0) => fs.readdir(p, (e, d = []) =>
  d.map(f => readdir(a[a.push(path.join(p, f)) - 1], () =>
    ++i == d.length && done(a), a)).length || done(a))

readdir(__dirname, console.log)

Sync

const fs = require('fs')
const path = require('path')

const readdirSync = (p, a = []) => {
  if (fs.statSync(p).isDirectory())
    fs.readdirSync(p).map(f => readdirSync(a[a.push(path.join(p, f)) - 1], a))
  return a
}

console.log(readdirSync(__dirname))

Async readable

function readdir (currentPath, done, allFiles = [], i = 0) {
  fs.readdir(currentPath, function (e, directoryFiles = []) {
    if (!directoryFiles.length)
      return done(allFiles)
    directoryFiles.map(function (file) {
      var joinedPath = path.join(currentPath, file)
      allFiles.push(joinedPath)
      readdir(joinedPath, function () {
        i = i + 1
        if (i == directoryFiles.length)
          done(allFiles)}
      , allFiles)
    })
  })
}

readdir(__dirname, console.log)

Note: both versions will follow symlinks (same as the original fs.readdir)
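
If you'd rather not enter symlinked directories, here is a sketch of a no-follow variant of the sync version (not part of the original answer), using lstatSync:

const fs = require('fs')
const path = require('path')

const readdirSyncNoFollow = (p, a = []) => {
  // lstatSync inspects the link itself rather than its target,
  // so symlinked directories are listed but never entered
  if (!fs.lstatSync(p).isSymbolicLink() && fs.statSync(p).isDirectory())
    fs.readdirSync(p).map(f => readdirSyncNoFollow(a[a.push(path.join(p, f)) - 1], a))
  return a
}

console.log(readdirSyncNoFollow(__dirname))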

Upvotes: 13

mpen

Reputation: 283355

For Node 10.3+, here is a for-await solution:

#!/usr/bin/env node

const FS = require('fs');
const Util = require('util');
const readDir = Util.promisify(FS.readdir);
const Path = require('path');

async function* readDirR(path) {
    const entries = await readDir(path,{withFileTypes:true});
    for(let entry of entries) {
        const fullPath = Path.join(path,entry.name);
        if(entry.isDirectory()) {
            yield* readDirR(fullPath);
        } else {
            yield fullPath;
        }
    }
}

async function main() {
    const start = process.hrtime.bigint();
    for await(const file of readDirR('/mnt/home/media/Unsorted')) {
        console.log(file);
    }
    console.log((process.hrtime.bigint()-start)/1000000n);
}

main().catch(err => {
    console.error(err);
});

The benefit of this solution is that you can start processing the results immediately; e.g. it takes 12 seconds to read all the files in my media directory, but if I do it this way I can get the first result within a few milliseconds.

Upvotes: 2

mpen

Reputation: 283355

Using async/await, this should work:

const FS = require('fs');
const Path = require('path');
// `promisify` comes from Bluebird, Node's util (8+), or the helper below
const readDir = promisify(FS.readdir);
const fileStat = promisify(FS.stat);

async function getFiles(dir) {
    let files = await readDir(dir);

    let result = files.map(file => {
        let path = Path.join(dir,file);
        return fileStat(path).then(stat => stat.isDirectory() ? getFiles(path) : path);
    });

    return flatten(await Promise.all(result));
}

function flatten(arr) {
    return Array.prototype.concat(...arr);
}

You can use bluebird.promisify or this:

/**
 * Returns a function that will wrap the given `nodeFunction`. Instead of taking a callback, the returned function will return a promise whose fate is decided by the callback behavior of the given node function. The node function should conform to node.js convention of accepting a callback as last argument and calling that callback with error as the first argument and success value on the second argument.
 *
 * @param {Function} nodeFunction
 * @returns {Function}
 */
module.exports = function promisify(nodeFunction) {
    return function(...args) {
        return new Promise((resolve, reject) => {
            nodeFunction.call(this, ...args, (err, data) => {
                if(err) {
                    reject(err);
                } else {
                    resolve(data);
                }
            })
        });
    };
};

Node 8+ has promisify built in.
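
That is, the helper above can be replaced with the built-in version:

const { promisify } = require('util');
const readDir = promisify(FS.readdir);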

See my other answer for a generator approach that can give results even faster.

Upvotes: 5

Erik Vullings

Reputation: 5770

Yet another answer, but this time using TypeScript:

import * as fs from 'fs';
import * as path from 'path';

/**
 * Recursively walk a directory asynchronously and obtain all file names (with full path).
 *
 * @param dir Folder name you want to recursively process
 * @param done Callback function, returns all files with full path.
 * @param filter Optional filter to specify which files to include, 
 *   e.g. for json files: (f: string) => /.json$/.test(f)
 */
const walk = (
  dir: string,
  done: (err: NodeJS.ErrnoException | null, results?: string[]) => void,
  filter?: (f: string) => boolean
) => {
  let results: string[] = [];
  fs.readdir(dir, (err, list) => {
    if (err) {
      return done(err);
    }
    let pending = list.length;
    if (!pending) {
      return done(null, results);
    }
    list.forEach((file: string) => {
      file = path.resolve(dir, file);
      fs.stat(file, (err2, stat) => {
        if (stat && stat.isDirectory()) {
          walk(file, (err3, res) => {
            if (res) {
              results = results.concat(res);
            }
            if (!--pending) {
              done(null, results);
            }
          }, filter);
        } else {
          if (typeof filter === 'undefined' || (filter && filter(file))) {
            results.push(file);
          }
          if (!--pending) {
            done(null, results);
          }
        }
      });
    });
  });
};

Upvotes: 1

Victor Powell

Reputation: 1848

Just in case anyone finds it useful, I also put together a synchronous version.

var fs = require('fs');

var walk = function(dir) {
    var results = [];
    var list = fs.readdirSync(dir);
    list.forEach(function(file) {
        file = dir + '/' + file;
        var stat = fs.statSync(file);
        if (stat && stat.isDirectory()) { 
            /* Recurse into a subdirectory */
            results = results.concat(walk(file));
        } else { 
            /* Is a file */
            results.push(file);
        }
    });
    return results;
}

Tip: To use fewer resources when filtering, filter within this function itself, e.g. replace results.push(file); with the code below. Adjust as required:

    file_type = file.split(".").pop();
    file_name = file.split(/(\\|\/)/g).pop();
    if (file_type == "json") results.push(file);

Upvotes: 152

Domenic

Reputation: 112937

If you want to use an npm package, wrench is pretty good.

var wrench = require("wrench");

var files = wrench.readdirSyncRecursive("directory");

wrench.readdirRecursive("directory", function (error, files) {
    // live your dreams
});

EDIT (2018):
Anyone reading this recently: the author deprecated this package in 2015:

wrench.js is deprecated, and hasn't been updated in quite some time. I heavily recommend using fs-extra to do any extra filesystem operations.

Upvotes: 17

Jason Clay

Reputation: 11

This is how I use the Node.js fs.readdir function to recursively search a directory:

const fs = require('fs');
const mime = require('mime-types');
const readdirRecursivePromise = path => {
    return new Promise((resolve, reject) => {
        fs.readdir(path, (err, directoriesPaths) => {
            if (err) {
                reject(err);
            } else {
                if (directoriesPaths.indexOf('.DS_Store') != -1) {
                    directoriesPaths.splice(directoriesPaths.indexOf('.DS_Store'), 1);
                }
                directoriesPaths.forEach((e, i) => {
                    directoriesPaths[i] = statPromise(`${path}/${e}`);
                });
                Promise.all(directoriesPaths).then(out => {
                    resolve(out);
                }).catch(err => {
                    reject(err);
                });
            }
        });
    });
};
const statPromise = path => {
    return new Promise((resolve, reject) => {
        fs.stat(path, (err, stats) => {
            if (err) {
                reject(err);
            } else {
                if (stats.isDirectory()) {
                    readdirRecursivePromise(path).then(out => {
                        resolve(out);
                    }).catch(err => {
                        reject(err);
                    });
                } else if (stats.isFile()) {
                    resolve({
                        'path': path,
                        'type': mime.lookup(path)
                    });
                } else {
                    reject(`Error parsing path: ${path}`);
                }
            }
        });
    });
};
const flatten = (arr, result = []) => {
    for (let i = 0, length = arr.length; i < length; i++) {
        const value = arr[i];
        if (Array.isArray(value)) {
            flatten(value, result);
        } else {
            result.push(value);
        }
    }
    return result;
};

Let's say you have a path called '/database' in your Node project's root. Once the promise resolves, it should spit out an array of every file under '/database'.

readdirRecursivePromise('database').then(out => {
    console.log(flatten(out));
}).catch(err => {
    console.log(err);
});

Upvotes: 0

clinyong

Reputation: 1845

Another simple and helpful one:

const fs = require('fs');

function walkDir(root) {
    const stat = fs.statSync(root);

    if (stat.isDirectory()) {
        const dirs = fs.readdirSync(root).filter(item => !item.startsWith('.'));
        let results = dirs.map(sub => walkDir(`${root}/${sub}`));
        return [].concat(...results);
    } else {
        return root;
    }
}

Upvotes: 0

Daniel

Reputation: 9534

Here's a recursive method of getting all files including subdirectories.

const FileSystem = require("fs");
const Path = require("path");

//...

function getFiles(directory) {
    directory = Path.normalize(directory);
    let files = FileSystem.readdirSync(directory).map((file) => directory + Path.sep + file);

    files.forEach((file, index) => {
        if (FileSystem.statSync(file).isDirectory()) {
            Array.prototype.splice.apply(files, [index, 1].concat(getFiles(file)));
        }
    });

    return files;
}

Upvotes: 0

dat

Reputation: 1720

klaw and klaw-sync are worth considering for this sort of thing. These were part of node-fs-extra.
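
A minimal sketch of klaw's streaming usage, per its README (verify against the current docs):

const klaw = require('klaw');

const items = [];
klaw('/some/dir')
  .on('data', item => items.push(item.path))
  .on('end', () => console.log(items));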

Upvotes: 2

nickool

Reputation: 844

A library called Filehound is another option. It will recursively search a given directory (working directory by default). It supports various filters, callbacks, promises and sync searches.

For example, search the current working directory for all files (using callbacks):

const Filehound = require('filehound');

Filehound.create()
.find((err, files) => {
    if (err) {
        return console.error(`error: ${err}`);
    }
    console.log(files); // array of files
});

Or promises and specifying a specific directory:

const Filehound = require('filehound');

Filehound.create()
.paths("/tmp")
.find()
.each(console.log);

Consult the docs for further use cases and examples of usage: https://github.com/nspragg/filehound

Disclaimer: I'm the author.

Upvotes: 4
