Reputation: 2147
Using Meteor, on the server I try to generate a large CSV file by looping through a Meteor collection and inserting a row for each document. At some point the server gets an out-of-memory error - my guess is that I run out of memory before the loop finishes, depending on the collection size. How can I fix this (clear the memory somehow)? Here's the code:
var job = Jobs.findOne();
var fs = Npm.require('fs');
var file = '/tmp/csv-' + job._id + '.csv';
var headers = ["Email", "Processed?", "Integration", "Passed?", "Reason", "Date"];
var first_line = headers.join() + '\n';
var wstream = fs.createWriteStream(file);
var emails = rawEmails.find();
wstream.write(first_line);
emails.forEach(function (rawemail) {
    // build one CSV row per document, skipping fields that are missing
    var line_item = [];
    line_item.push(rawemail.email);
    if (rawemail.processed === true || rawemail.processed === false)
        line_item.push(rawemail.processed);
    if (rawemail.integration)
        line_item.push(rawemail.integration);
    if (rawemail.passed === true || rawemail.passed === false)
        line_item.push(rawemail.passed);
    if (rawemail.reason)
        line_item.push(rawemail.reason);
    if (rawemail.updated_at)
        line_item.push(rawemail.updated_at);
    var to_write = line_item.join() + '\n';
    wstream.write(to_write);
});
wstream.end();
Upvotes: 1
Views: 426
Reputation: 9925
var emails = rawEmails.find();
is not good. You will need to limit and paginate the query, writing the records to the file in batches:
var skip = 0;
var emails = rawEmails.find({}, {limit: 100, skip: skip}).fetch();
while (emails.length > 0) {
    // write this batch of rows to the file (see the sketch below)
    skip += 100;
    emails = rawEmails.find({}, {limit: 100, skip: skip}).fetch();
}
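To make the placeholder concrete, here is a minimal sketch of how each batch could feed the write stream from the question. This is only an illustration: batchSize is a made-up name, and the row building is simplified to a fixed column order rather than the conditional pushes in the original code.

var batchSize = 100;
var skip = 0;
wstream.write(headers.join() + '\n');

var batch = rawEmails.find({}, {limit: batchSize, skip: skip}).fetch();
while (batch.length > 0) {
    batch.forEach(function (rawemail) {
        // simplified row: one value per header column
        var line_item = [rawemail.email, rawemail.processed, rawemail.integration,
                         rawemail.passed, rawemail.reason, rawemail.updated_at];
        wstream.write(line_item.join() + '\n');
    });
    skip += batchSize;   // advance to the next page of the collection
    batch = rawEmails.find({}, {limit: batchSize, skip: skip}).fetch();
}
wstream.end();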
Note that if the number of records is huge, the node process will also consume a lot of memory for the writeStream (the stream buffers whatever the disk has not flushed yet), so you can hit the out-of-memory exception again. Consider writing to multiple files and zipping them to send back to the client (if the client wants to download the export).
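As a rough sketch of the multi-file + zip idea, this uses the third-party archiver npm package - an assumption, since the answer does not name a library, and the file names are made up. The package would have to be made available to the Meteor server bundle.

var fs = Npm.require('fs');
var archiver = Npm.require('archiver');   // assumes the archiver package is installed

var zipPath = '/tmp/csv-' + job._id + '.zip';
var output = fs.createWriteStream(zipPath);
var archive = archiver('zip');

archive.pipe(output);                      // stream the zip straight to disk
archive.file('/tmp/csv-' + job._id + '-part1.csv', { name: 'part1.csv' });
archive.file('/tmp/csv-' + job._id + '-part2.csv', { name: 'part2.csv' });
archive.finalize();                        // finish the archive; 'close' fires on output when done

The zip file can then be served to the client instead of one huge CSV.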
Upvotes: 2