Reputation: 101
I am parsing CSV in node.js using csv-parser
lib. But I need the parsed data to be available in the whole project, not only inside the 'fs' callback. I know that there is an fs.readFileSync
option, but it is not useful since CSV is a binary file (at least in node.js's interpretation). What should I do?
const csv = require("csv-parser");
const fs = require("fs");

// Parsed rows accumulate here. The array is EMPTY until the stream's
// 'end' event fires, because csv-parser is asynchronous — that is why
// consumers that read it synchronously at require-time see [].
const cities = [];

// Resolves with the fully-populated `cities` array once parsing is done,
// rejects if the file cannot be read or parsed.
const ready = new Promise((resolve, reject) => {
  fs.createReadStream('data.csv')
    .pipe(csv())
    .on('data', (row) => {
      cities.push(row);
    })
    .on('error', reject)
    .on('end', () => resolve(cities));
});

var city_data = {
  // Kept for backward compatibility: returns whatever has been parsed
  // so far (empty until `ready` has resolved).
  createArrayId: function(){
    console.log(cities);
    return cities;
  },
  // New: `await city_data.ready` anywhere in the project to get the
  // complete array once parsing has finished.
  ready: ready
}

module.exports = city_data;
As you can see, I need to export the "cities" array. Right now it returns an empty value (the initial value).
Upvotes: 2
Views: 2524
Reputation: 41
If you work with async functions, you can wrap the call to csv-parser in a promise and then use await to wait for it to finish before moving on to the next task. Here is an example of how to do it:
const fs = require('fs');
const csv = require('csv-parser');
/**
 * Parses a CSV file into an array of row objects.
 * @param {string} csvFileName - Path to the CSV file to parse.
 * @returns {Promise<Object[]>} Resolves with all parsed rows; rejects on
 *   a read or parse error.
 */
function readCsv(csvFileName) {
  // `new Promise` is appropriate here: it adapts csv-parser's
  // event-based API to a Promise. Note the function is no longer
  // `async` — it already returns a Promise, and `async` around an
  // explicit Promise construction is redundant.
  return new Promise((resolve, reject) => {
    const results = [];
    fs.createReadStream(csvFileName)
      .pipe(csv())
      .on('data', (data) => results.push(data))
      // Was missing in the original: without this, stream errors were
      // silently dropped and the promise never settled.
      .on('error', reject)
      .on('end', () => resolve(results));
  });
}
/**
 * Entry point: reads the CSV file named on the command line and prints
 * every parsed row.
 * @param {string[]} argv - process.argv; the file name is expected at index 2.
 */
async function main(argv) {
  console.log("BEGIN...\n");
  const results = await readCsv(argv[2]);
  for (const row of results) {
    console.log(row);
  }
  console.log("\n...END");
}

// Attach a rejection handler so a missing/unreadable file does not
// become an unhandled promise rejection (the original left this
// promise floating).
main(process.argv).catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
Upvotes: 0
Reputation: 168913
csv-parser
seems to always be asynchronous, so you're out of luck there.
If data.csv
doesn't change often, I'd recommend parsing it into JSON once and then require
the JSON directly.
Otherwise, you can either implement synchronous CSV parsing yourself, or alternately refactor your code to wait until the asynchronous parsing is complete before carrying on with that data.
Upvotes: 3