Async and recursive directory scan, for file listing in Node.js and Express

Async and recursive directory scan, for file listing in Nodejs and Expressjs


You can simplify the task a bunch if you promisify the fs functions you're using so that all async logic is promises and then use async/await to help you serialize the flow of control.

Here's one way to do that:

const promisify = require('util').promisify;const path = require('path');const fs = require('fs');const readdirp = promisify(fs.readdir);const statp = promisify(fs.stat);async function scan(directoryName = './data', results = []) {    let files = await readdirp(directoryName);    for (let f of files) {        let fullPath = path.join(directoryName, f);        let stat = await statp(fullPath);        if (stat.isDirectory()) {            await scan(fullPath, results);        } else {            results.push(fullPath);        }    }    return results;}

The above code was tested in node v10.14.1.

You could then use that the same way you were:

// GET / — scan the data directory and render the 'list' view with the results.
router.get('/', async (req, res, next) => {
    try {
        const data = await scan();
        res.render('list', {
            title: 'List',
            data
        });
    } catch (err) {
        // forward scan/render failures to the Express error handler
        next(err);
    }
});

FYI, there is a newer promise-based API for the fs module (experimental at the time of writing; it has since been stabilized in later Node.js releases). You can use that like this:

const path = require('path');const fsp = require('fs').promises;async function scan2(directoryName = './data', results = []) {    let files = await fsp.readdir(directoryName, {withFileTypes: true});    for (let f of files) {        let fullPath = path.join(directoryName, f.name);        if (f.isDirectory()) {            await scan2(fullPath, results);        } else {            results.push(fullPath);        }    }    return results;}

Note, this new version also uses the new withFileTypes option that saves having to call stat() on every file.


The examples above all create one big result array before processing the found entries.

Here is a solution that 'streams' all found file entries of the given directory and sub-directories into an iterator.

Now a filter can be added into the stream to reduce the result according to the filter rules. In this example only the markdown files are accepted.

const fsp = require('fs').promises;const path = require('path');// scan the directory recursively and push each filename into the iterator.async function* scan3(dir) {  const entries = await fsp.readdir(dir, { withFileTypes: true });  for (const de of entries) {    const res = path.resolve(dir, de.name);    // console.log('>' + res);    if (de.isDirectory()) {      yield* scan3(res);    } else {      yield res;    }  }}// get all filenames from the iterator param// and push each filename with valid extension into the resulting iterator.async function* filterExt(it, ext) {  for await (const e of it) {    if (e.endsWith(ext)) {      // console.log('>>' + e);      yield e;    }  }}async function main() {  const it_files = scan3('.')  const it_mdFiles = filterExt(it_files, '.md');  for await (const f of it_mdFiles) {    console.log('>>>' + f);  }}main();console.log("done.");

Just enable the console.log lines to see which filename is handled in which stage.