Elliott de Launay
Elliott de Launay

Reputation: 1168

How to resolve a promise once a json stringify writeStream has completed?

I'm using npm's big-json to get around a limitation with JSON.stringify() for a large nested object.

Once the object is stringified, I'm then saving it to the file system.

However this whole process has slowed down my overall processing - and I'm assuming it has something to do with the fact that I might now be waiting for a stream to timeout or something before it's closed.

This is my code:

const fs = require('fs-extra');
const json = require('big-json');

function offlineSave(objArr){
  return new Promise((resolve, reject) => {
    let promises = [];
    for(let obj of objArr){
      const filePath = path.join(__dirname, `${obj}.json`);
      //this used to be a promisified fs.writeFile() - which seemed to be faster
      promises.push(saveStreamPromise(filePath,obj));
    }
    Promise.all(promises)
    .then((msg) =>{
      resolve(`Saved ${msg.length} files`);
    });
  });
}

let saveStreamPromise = function(filePath, obj){
  return new Promise((resolve, reject) =>{
    let stream = fs.createWriteStream(filePath, { flag: 'w+' });
    const stringifyStream = json.createStringifyStream({ body: obj});
    stringifyStream.on('data', strChunk =>{
      stream.write(strChunk);
    });

    //is this the correct way to return a promise upon stringify complete?
    stringifyStream.on('end', () =>{
      stream.end();
      resolve(`${obj} saved!`);
    });
  });
}

Upvotes: 0

Views: 1401

Answers (1)

jfriend00
jfriend00

Reputation: 708156

Your code is listening for the end of the readstream. While that might work, it would be better to listen for the end of the write stream so you only resolve the promise once the data is committed to disk.

It would also be best to add error handling for both streams to any error on either stream rejects the promise:

function saveStreamPromise(filePath, obj){
  return new Promise((resolve, reject) =>{
    let stream = fs.createWriteStream(filePath, { flag: 'w+' });
    const stringifyStream = json.createStringifyStream({ body: obj});
    stringifyStream.on('data', strChunk =>{
      stream.write(strChunk);
    });

    // resolve when read stream and write stream are done
    stringifyStream.on('end', () =>{
      stream.end(function() {
          resolve('object saved!');
      });
    }).on('error', reject);
    stream.on('error', reject);
  });
}

I'd also suggest that you use .pipe() to automatically feed one stream to another like this:

function saveStreamPromise(filePath, obj){
  return new Promise((resolve, reject) =>{
    let stream = fs.createWriteStream(filePath, { flag: 'w+' });
    const stringifyStream = json.createStringifyStream({ body: obj});
    stringifyStream.pipe(stream);

    // when using pipe, the write stream will automatically close when the
    // readable gets to the end of its data

    stream.on('error', reject).on('close', function() {
        resolve('object saved!');
    });
    stringifyStream.on('error', reject);
}

And, ${obj} saved! will just give you something like "[object object] saved!" which won't likely be what you want.

Upvotes: 1

Related Questions