Reputation: 125
I am sure this is a fundamental misunderstanding of how streams work, but I am banging my head against the wall.
I have some sensor data in json format that I want to append to a csv file using the csv-write-stream package. The data is sent as a post request to a node server with the intention of having it appended to a csv file. It writes a line to the csv file fine the first time, but then I get "Error: write after end" error if I try to send another post request.
function write_csv(obj) {
// NOTE(review): `writer` is a single shared csv-write-stream instance created
// once at module load; calling .end() here closes it permanently, so every
// subsequent call to write_csv() throws "Error: write after end".
writer.pipe(fs.createWriteStream('tides.csv', { flags: 'a' }));
writer.write(obj);
writer.end();
};
If I comment out "writer.end()" it works fine, but won't this eventually throw a memory error? If so, what is the correct way to append to a csv file and avoid this error?
EDIT: Here is the entire server.js file
const express = require('express');
const bodyParser = require('body-parser');
const path = require('path');
const exphbs = require('express-handlebars');
const fs = require('fs');
const csvWriter = require('csv-write-stream');
// NOTE(review): one app-wide writer shared by every request; once .end() is
// called on it (see write_csv below), later writes throw
// ERR_STREAM_WRITE_AFTER_END.
const writer = csvWriter({ sendHeaders: false });
const app = express();
// Handlebars view-engine configuration
app.set('views', path.join(__dirname, 'views'));
app.engine('handlebars', exphbs({ defaultLayout: 'main' }));
app.set('view engine', 'handlebars');
app.set('port', (process.env.PORT || 3000));
// Static assets and request-body parsing (form-encoded and JSON)
app.use(express.static(path.join(__dirname, 'public')));
app.use(bodyParser.urlencoded({ extended: false }))
app.use(bodyParser.json())
// Render the home view for the root path.
app.get('/', (req, res) => {
  res.render('home');
});
// Receives a sensor reading and echoes back the timestamped record as JSON.
app.post('/test', function (req, res, next) {
// console.log("post received");
// NOTE(review): missing `let`/`const` — this creates an implicit global
// `distance`, which test() below also reads instead of its own parameter.
// Declare it locally and rely on the explicit argument instead; TODO confirm
// after fixing test() to use `dist`.
distance = req.body.distance;
let result = test(distance);
let result_str = JSON.stringify(result);
res.end(result_str)
});
/**
 * Append one record to out.csv.
 *
 * Fix: create a fresh csv-write-stream writer per call. The previous version
 * reused the module-level `writer` and called .end() on it, so the second
 * request failed with ERR_STREAM_WRITE_AFTER_END — an ended stream can never
 * be written to again.
 *
 * @param {Object} obj - key/value record to append as one CSV row
 */
function write_csv(obj) {
  const writer = csvWriter({ sendHeaders: false });
  // flags 'a' opens the file in append mode so existing rows are preserved
  writer.pipe(fs.createWriteStream('out.csv', { flags: 'a' }));
  writer.write(obj);
  writer.end();
};
/**
 * Build a timestamped record for a sensor reading, log it, append it to the
 * CSV file, and return it.
 *
 * Fix: the record now uses the `dist` parameter. The original read the
 * implicit global `distance` set in the /test handler, silently coupling
 * this function to the route instead of its own argument.
 *
 * @param {*} dist - distance value taken from the request body
 * @returns {{time: string, distance: *}} the record that was written
 */
function test(dist) {
  const d = new Date();
  const YYYY = d.getFullYear();
  const MM = d.getMonth(); // 0-indexed; +1 below for a human-readable month
  const DD = d.getDate();
  const HH = d.getHours();
  const mm = d.getMinutes();
  const ss = d.getSeconds();
  const date = YYYY + ':' + (MM + 1) + ':' + DD + ':' + HH + ':' + mm + ':' + ss;
  const time_distance = { 'time': date, 'distance': dist };
  console.log(time_distance);
  write_csv(time_distance);
  return time_distance;
};
// Start the HTTP server on the configured port.
app.listen(app.get('port'), function () {
  // Fixed typo in the startup message: "Sever" -> "Server"
  console.log('Server started on port ' + app.get('port'));
})
Upvotes: 1
Views: 5214
Reputation: 40374
Without seeing the full code, I can imagine that you're calling write_csv
multiple times, since you're trying to write multiple objects to that file.
The issue is that the first time you call write_csv
you're ending the writer
, that's why the second time you call it you get:
Error [ERR_STREAM_WRITE_AFTER_END]: write after end
function write_csv(obj) {
    writer.pipe(fs.createWriteStream('out.csv', { flags: 'a' }))
    // Fixed: pass the record through — the snippet previously had writer.write()
    // with no argument, which doesn't reproduce the asker's code.
    writer.write(obj)
    writer.end();
}
write_csv({ hello: 'world', foo: 'bar', baz: 'taco'});
// By this point writer.end() has already closed the stream.
// The following call therefore triggers the error:
write_csv({ hello: 'world', foo: 'bar', baz: 'taco'});
What you should do instead, is close the writer only when you're done writing to it.
// Create the writer once and open the output file in append mode.
const writer = csvWriter();
writer.pipe(fs.createWriteStream('out.csv', { flags: 'a' }));
// Write as many records as you wish...
for (const obj of objects) {
  writer.write(obj);
}
// ...and only end the stream when you are done writing.
writer.end();
Now the problem you have is that if you perform many .writes in a row,
you may hit the memory limit, because you're not handling backpressure correctly.
I recommend reading the following question:
why does attempting to write a large file cause js heap to run out of memory
To deal with that, you will need to wait for the drain
event to be emitted.
Here is a wrapper around csvWriter
that will handle backpressure.
const fs = require('fs');
const csvWriter = require('csv-write-stream');
/**
 * Backpressure-aware wrapper around csv-write-stream.
 */
class Writer {
  /**
   * @param {string} file - path of the CSV file to append to
   */
  constructor(file) {
    this.writer = csvWriter();
    this.writer.pipe(fs.createWriteStream(file, { flags: 'a' }));
  }
  /**
   * Write one record, honoring backpressure.
   * @param {Object} obj - record to write
   * @returns {true|Promise<void>} true when the stream can take more data
   *   immediately; otherwise a Promise that resolves on 'drain'.
   */
  write(obj) {
    // If .write returns false we have to wait until `drain` is emitted
    // before pushing more data, or memory will grow unboundedly.
    if (!this.writer.write(obj))
      return new Promise(resolve => this.writer.once('drain', resolve))
    return true;
  }
  /**
   * End the stream.
   * @returns {Promise<void>} resolves once the stream has flushed
   *   (stream.end accepts a callback invoked when finished).
   */
  end() {
    return new Promise(resolve => this.writer.end(resolve));
  }
}
(async () => {
  const csv = new Writer('out.csv');
  for (let i = 0; i < 1e8; i++) {
    const pending = csv.write({ hello: 'world', foo: 'bar', baz: 'taco' });
    // Only await when write() actually returned a Promise (i.e. the stream
    // is backed up). You could simply `await csv.write(...)` every time,
    // but skipping the await on the fast path is quicker.
    if (pending instanceof Promise) {
      await pending; // resolves when the stream emits 'drain'
    }
  }
  csv.end();
})();
Update: Now with the actual code, the above answer still stands. Since you're writing to the file when receiving a request, you can choose either to open the file once, write to it on every request, and close it when the server shuts down (or whenever you choose) — or to open the file, write to it, and close it on every request.
For the former, you should use the answer above, for the latter, all you need to do, is create a new writer every time you call write_csv
instead of having one global writer.
// Append one record to out.csv using a short-lived writer.
function write_csv(obj) {
  // A fresh writer per call — an ended stream can never be reused.
  const writer = csvWriter({ sendHeaders: false });
  const out = fs.createWriteStream('out.csv', { flags: 'a' });
  writer.pipe(out);
  writer.write(obj);
  writer.end();
};
Upvotes: 3
Reputation: 374
You can try this code instead; I think it solves your problem. Use this function in your route and pass it the JSON data.
First install this package: npm i --save json2csv
const Json2csvParser = require("json2csv").Parser;
const fs = require("fs");
/**
 * Convert a JSON payload to CSV and append it to a report file.
 *
 * @param {Object|Object[]} jsonData - data to convert
 * @param {function(Error|null, Object|null)} cb - node-style callback;
 *   receives { msg, file_address } on success.
 */
function csvConverter(jsonData, cb) {
  const parser = new Json2csvParser();
  const csvText = parser.parse(jsonData);
  const reportName = "report";
  // TODO: Change file path accordingly
  const file_path = `/Users/public/csv_files/${reportName}.csv`;
  fs.appendFile(file_path, csvText, (err) => {
    if (err) {
      console.log(err);
      cb(err, null);
      return;
    }
    cb(null, {
      msg: "successful",
      file_address: file_path
    });
  });
}
Upvotes: 0