Reputation: 1396
I am trying to use babyParse to convert a JSON object to CSV and output the resulting csv format to a file on the system.
module.exports.downloadItemCsv = function(req, res){
Item.find({})
.sort({date:-1})
.exec(function(err, allItems){
if(err){
res.error(err)
} else{
var configuration = {
quotes: false,
delimiter: ",",
newline: "\r\n"
};
console.log(allItems);
console.log("Adding items to object.");
var csv = baby.unparse(allItems, configuration);
var targetPath = path.join(__dirname,"../../uploads/" + "newFile01");
fs.writeFile(targetPath, csv, function(err){
if(err){
console.log("Write complete!")
}
});
console.log("The file was saved!");
res.json({status: 200})
}
})
};
The console.log(allItems);
outputs the correct JSON object but when I do a console.log for the csv variable, the output is what appears to be a page of functions from the baby Parse modules.
As far as I can tell in the PapaParse documentation, I should just need to pass the JSON object in the line var csv = baby.unparse(allItems, configuration);
.
Once I have the unparse data in the variable "csv", i should be able to then write csv to a file. Does anyone know why the JSON object is not being unparsed to a csv object?
Below is what the data in allItems looks like:
[ { __v: 0,
itemId: 2507,
item: 'TEST',
description: 'TEST',
brand: 'TEST',
category: 'TEST',
subcategory: 'TEST',
size: '10',
gender: 'F',
costPrice: 10,
salePrice: 10,
saleDate: '2016-01-31',
purchaseDate: '2016-01-31',
_id: 56ae7972049ce640150453b7 } ]
Below is an exert of the result populated into the variable "csv". The full result is to large to put below.
$__,isNew,errors,_doc,$__original_save,save,_pres,_posts,db,discriminators,__v,id,_id,purchaseDate,saleDate,salePrice,costPrice,gender,size,subcategory,category,brand,description,item,itemId,schema,collection,$__handleSave,$__save,$__delta,$__version,increment,$__where,remove,model,on,once,emit,listeners,removeListener,setMaxListeners,removeAllListeners,addListener,$__buildDoc,init,$__storeShard,hook,pre,post,removePre,removePost,_lazySetupHooks,update,set,$__shouldModify,$__set,getValue,setValue,get,$__path,markModified,modifiedPaths,isModified,$isDefault,isDirectModified,isInit,isSelected,validate,$__validate,validateSync,invalidate,$markValid,$isValid,$__reset,$__dirty,$__setSchema,$__getArrayPathsToValidate,$__getAllSubdocs,$__registerHooksFromSchema,$__handleReject,$toObject,toObject,toJSON,inspect,toString,equals,populate,execPopulate,populated,depopulate,$__fullPath
[object Object],false,,[object Object],"function () {
var self = this
, hookArgs // arguments eventually passed to the hook - are mutable
, lastArg = arguments[arguments.length-1]
, pres = this._pres[name]
, posts = this._posts[name]
, _total = pres.length
, _current = -1
, _asyncsLeft = proto[name].numAsyncPres
, _asyncsDone = function(err) {
if (err) {
return handleError(err);
}
--_asyncsLeft || _done.apply(self, hookArgs);
}
, handleError = function(err) {
if ('function' == typeof lastArg)
return lastArg(err);
if (errorCb) return errorCb.call(self, err);
throw err;
Upvotes: 0
Views: 2079
Reputation: 4398
The problem is related to allItems
being a collection of Mongoose Documents, not plain javascript objects. You can converts those objects using .toObject() or simply add the lean option to your query:
module.exports.downloadItemCsv = function(req, res){
Item.find({})
.sort({date:-1})
.lean()
.exec(function(err, allItems){
...
});
};
Upvotes: 2