Pedro Dalla

Reputation: 45

How to wait for loop of stream.write() to end

I am using Node's fs.WriteStream to write data into files. I am looping through an array of objects and writing each object with the write() function.

The problem is that I would like to know when this loop is over and all the write() calls have completed, but I cannot make it work.

I've attempted a few solutions, such as not using a stream, but that would create other problems. The last solution I attempted was checking in the loop whether the current item was the last one and, if so, closing, ending, or destroying the stream.

None of these worked. The finish event is emitted before the file is actually written.

Here is my code; I appreciate any help I can get. Thank you very much.

    const fs = require("fs");

    async function writeFile(path, data) {
        try {
            const writeStream = fs.createWriteStream(path, {
                flags: "w"
            });

            data.forEach((file, index) => {
                writeStream.write(`${file.name}\n`, (err) => {
                    if (err) throw err;
                    // I attempted close() and destroy() too; none worked
                    if (index === (data.length - 1)) writeStream.end();
                });
            });

            writeStream.on("finish", () => {
                console.log("All files were written."); // Currently emitted before everything is written.
            });
        } catch (err) {
            throw err;
        }
    }

Upvotes: 1

Views: 4201

Answers (2)

jfriend00

Reputation: 707158

Since your file data is already in memory and thus doesn't seem that large, I would just transform it in memory and write it out in one call to fs.writeFile() or fs.promises.writeFile().

const fs = require("fs");

function writeFile(path, data) {
    // build the whole file contents in memory, then write it out in one call
    let fileData = data.map(file => file.name + "\n").join("");
    return fs.promises.writeFile(path, fileData);
}
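
A quick usage sketch (the file path and the files array below are just placeholder values, assuming each object has a name property):

// Hypothetical usage: files stands in for your array of objects
const files = [{ name: "a.txt" }, { name: "b.txt" }];

writeFile("./names.txt", files)
    .then(() => console.log("All names were written."))
    .catch(err => console.error(err));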

If you really want to use a stream, then you have to pay careful attention to what .write() returns, so that when the write buffer gets full you can wait for the drain event.

const fs = require('fs');

function writeFile(path, data, completionCallback) {
    let index = 0;
    const writeStream = fs.createWriteStream(path, { flags: "w" });

    writeStream.on('error', function(err) {
        // stream will be automatically closed here
        completionCallback(err);
    });

    // write one piece of data and call callback
    // when ready to write the next piece of data
    function writeData(data, cb) {
        if (!writeStream.write(data)) {
            // when .write() returns false, you have to wait for the drain
            // event before doing any more writing
            writeStream.once('drain', cb);
        } else {
            // .write() returned true, so it's safe to write again right away;
            // call the callback asynchronously so there is no stack buildup
            process.nextTick(cb);
        }
    }

    function run() {
        if (index < data.length) {
            let line = data[index++].name + "\n";
            writeData(line, run);
        } else {
            // all done with no errors
            writeStream.end(completionCallback);
        }
    }

    run();
}
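
For completeness, a minimal usage sketch of the callback version above (the path, the files array, and the messages are placeholders):

// Hypothetical usage: files stands in for your array of objects
const files = [{ name: "a.txt" }, { name: "b.txt" }];

writeFile("./names.txt", files, (err) => {
    if (err) {
        console.error("Write failed:", err);
    } else {
        console.log("All files were written.");
    }
});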

Upvotes: 4

Tony

Reputation: 910

Can you please try this method?

const fs = require("fs");
const util = require("util");

async function writeFile(path, data) {
    try {
        const writeStream = fs.createWriteStream(path, {
            flags: "w"
        });

        // promisify write() and bind it so `this` is still the stream
        const writeAsync = util.promisify(writeStream.write).bind(writeStream);

        for (const file of data) {
            await writeAsync(`${file.name}\n`);
        }

        writeStream.end();
        writeStream.on("finish", () => {
            console.log("All files were written.");
        });

    } catch (error) {
        console.log("error", error);
        throw error;
    }
}
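
A variant of the sketch above (not part of the original answer): if you also want the async function to resolve only after the data has been flushed, you can await the finish event with once() from Node's events module. The function name writeFileAndWait is just a placeholder.

const fs = require("fs");
const util = require("util");
const { once } = require("events");

async function writeFileAndWait(path, data) {
    const writeStream = fs.createWriteStream(path, { flags: "w" });
    // bind so the promisified write() keeps the stream as its `this`
    const writeAsync = util.promisify(writeStream.write).bind(writeStream);

    for (const file of data) {
        await writeAsync(`${file.name}\n`);
    }

    writeStream.end();
    await once(writeStream, "finish"); // resolves once the data has been flushed
    console.log("All files were written.");
}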

Upvotes: 0
