Reputation: 392
The following stream never emits the 'end' event. The 'data' event does fire, and I can see every row of data logged to the console.
var AWS = require('aws-sdk');
var ogr2ogr = require('ogr2ogr');
var JSONStream = require('JSONStream');
var S3 = new AWS.S3();
var source = S3.getObject({bucket: ..., key: ...}).createReadStream();
var stream = ogr2ogr(source).format("GeoJSON").stream()
.pipe(JSONStream.parse('features.*'));
stream.on('data', function(data){
console.log(data); // Correctly outputs 70 rows of data.
})
stream.on('end', function(){
console.log('end'); // This code is never executed.
})
stream.on('error', function(err){
console.log(err); // No errors...
})
The process works if I create a write -> read stream after the ogr2ogr transform.
Upvotes: 10
Views: 15881
Reputation: 9034
Take a look at the docs: https://nodejs.org/api/stream.html#stream_event_end
Note that the 'end' event will not fire unless the data is completely consumed. This can be done by switching into a flowing mode, or by calling stream.read() repeatedly until you get to the end.
Upvotes: 4