Reputation: 10060
I'm using Node.js streams to go through a text file line by line, apply some transforms, and output to an SVG file.
I am trying to write one last piece of data (</svg>) after the processing is done. However, by the time the write stream emits the finish event, attempting to write() throws Error: write after end.
Is there an elegant way I can solve this?
Note: The input file is large (around 1GB), so there is no getting around the pipe() method, given its I/O and memory management.
var fs = require('fs');
var split2 = require('split2');
var through2 = require('through2');
var read_stream = fs.createReadStream('input.txt');
var write_stream = fs.createWriteStream('output.svg');
write_stream.write('<svg>');
write_stream.on('finish', function() {
  this.write('</svg>'); // doesn't work: throws Error: write after end
});
read_stream
  .pipe(split2())
  .pipe(through2.obj(function(line, encoding, next) {
    this.push(line);
    next();
  }))
  .pipe(write_stream);
Thank you Jordan & pNre for helping me figure this out.
Option 1: pipe() into the write stream with the { end: false } option and manually end() the stream.
var fs = require('fs');
var split2 = require('split2');
var through2 = require('through2');
var read_stream = fs.createReadStream('input.txt');
var write_stream = fs.createWriteStream('output.svg');
write_stream.write('<svg>');
read_stream
  .pipe(split2())
  .pipe(through2.obj(function(line, encoding, next) {
    this.push(line);
    next();
  }))
  .pipe(write_stream, { end: false });

read_stream.on('end', function() {
  write_stream.end('</svg>');
});
Option 2 (only applies to through/through2 transform streams): through2 has a flush function that can be used to write the final data.
var fs = require('fs');
var split2 = require('split2');
var through2 = require('through2');
var read_stream = fs.createReadStream('input.txt');
var write_stream = fs.createWriteStream('output.svg');
write_stream.write('<svg>');
read_stream
  .pipe(split2())
  .pipe(through2.obj(function(line, encoding, next) {
    this.push(line);
    next();
  }, function(flush) {
    this.push('</svg>');
    flush();
  }))
  .pipe(write_stream);
Upvotes: 7
Views: 15491
Reputation: 2432
Recently ran into this issue and found a more elegant solution: there's a (well-)documented _flush method on the native Transform stream.
https://nodejs.org/api/stream.html#stream_transform_flush_callback
The solution looks something like this:
const fs = require('fs')
const split2 = require('split2')
const { Transform } = require('stream')
const input = fs.createReadStream('input.txt')
const output = fs.createWriteStream('output.svg')
class SVGWrapper extends Transform {
  constructor(){
    super()               // required before `this` can be used
    this.push('<svg>')    // opening tag goes out before the first line
  }
  _transform(line, enc, next){
    this.push(line)
    next()
  }
  _flush(done){
    this.push('</svg>')   // closing tag once all input has been consumed
    done()                // signal that flushing is complete
  }
}
input
  .pipe(split2())
  .pipe(new SVGWrapper())
  .pipe(output)
Upvotes: 0
Reputation: 4025
It seems that there is an undocumented event called 'prefinish'.
I have not used it myself, though.
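For reference, here is a minimal sketch (not part of the original answer; the file name is just a placeholder) that only demonstrates where 'prefinish' fires relative to 'finish' on a writable stream:
var fs = require('fs');

var ws = fs.createWriteStream('prefinish-demo.txt');

ws.on('prefinish', function() {
  console.log('prefinish'); // fires after end() has been called and pending writes are flushed
});

ws.on('finish', function() {
  console.log('finish');    // fires last
});

ws.end('some data\n');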
Upvotes: 1
Reputation: 338
It would seem that pipe() ends the destination stream when the source is finished.
The documentation at http://nodejs.org/api/stream.html states:
By default end() is called on the destination when the source stream emits end, so that destination is no longer writable. Pass { end: false } as options to keep the destination stream open.
This keeps writer open so that "Goodbye" can be written at the end.
reader.pipe(writer, { end: false });

reader.on('end', function() {
  writer.end('Goodbye\n');
});
Upvotes: 10
Reputation: 5376
Have you considered creating a new stream to append the </svg> tag? through can help you with that:
var fs = require('fs');
var split2 = require('split2');
var through = require('through');
var read_stream = fs.createReadStream('input.txt');
var write_stream = fs.createWriteStream('output.svg');
write_stream.write('<svg>');
var tag = through(function write(data) {
  this.queue(data);
}, function end() {
  this.queue('</svg>'); // append the closing tag
  this.queue(null);     // then signal the end of the readable side
});

// some_transform stands in for the through2 transform from the question
read_stream.pipe(split2()).pipe(some_transform).pipe(tag).pipe(write_stream);
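For completeness, a minimal sketch of what some_transform could look like here (a hypothetical pass-through line transform, reusing the through required above and mirroring the through2 transform from the question):
var some_transform = through(function write(line) {
  this.queue(line); // forward each line unchanged
});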
Upvotes: 3