Reputation: 111
I am trying to write a simple express/node.js app that responds to GET requests using data found in a CSV file. I would like to read this CSV file to generate a javascript object (essentially a key-value mapping), and then make that generated map available for the HTTP request handling logic in the controller.
I wrote a module that reads the CSV files and exports the desired objects, but I'm not sure how to ensure that the file is fully read (and the map fully built) before the server starts handling requests, and that the file is read only once rather than on every request.
How can I organize my code to meet these goals in the context of an express app?
This is how I am processing the CSV file:
var myMap = {};
fs.createReadStream('filename.csv')
.pipe(csv())
.on('data', (row) => {
// Build javascript object
myMap[row['key']] = row['value'];
})
.on('end', () => {
console.log('Done.');
});
// Does this work? — Not reliably: the stream above is asynchronous, but this
// export runs synchronously as soon as the module is first required, so the
// importing module gets a reference to `myMap` while it may still be empty.
// (Because the same object is mutated in place, entries do appear later, but
// nothing signals when the map is complete.)
module.exports = myMap;
Upvotes: 11
Views: 12461
Reputation: 41
A simple way to "synchronously" parse a CSV file is to wrap the parser code in a Promise, turning it into an async function so that you can use await to wait for it to complete. Here is a code example:
const fs = require('fs');
const csv = require('csv-parser');
/**
 * Read a CSV file and resolve with an array of parsed row objects.
 *
 * Fixes vs. the original: the `async` keyword was redundant on a function
 * that explicitly constructs a Promise, and there was no `'error'` handler,
 * so a missing or unreadable file left the promise pending forever.
 *
 * @param {string} csvFileName - path to the CSV file to parse
 * @returns {Promise<object[]>} rows produced by csv-parser
 */
function readCsv(csvFileName) {
  return new Promise((resolve, reject) => {
    const results = [];
    fs.createReadStream(csvFileName)
      .pipe(csv())
      .on('data', (row) => results.push(row))
      // Surface stream/parse failures instead of hanging.
      .on('error', reject)
      .on('end', () => resolve(results));
  });
}
/**
 * Entry point: parse the CSV named on the command line and print each row.
 * @param {string[]} argv - process.argv; argv[2] is the CSV file path
 */
async function main(argv) {
  console.log("BEGIN...\n");
  const results = await readCsv(argv[2]);
  for (const row of results) {
    console.log(row);
  }
  console.log("\n...END");
}

// Fix: the original call was a floating promise — a rejection (e.g. bad file
// path) would be an unhandled rejection. Report it and exit non-zero.
main(process.argv).catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
Upvotes: 1
Reputation: 19929
Update:
Below one is deprecated, not maintained anymore.
Deprecated:
Hi I have created an npm package to read CSV synchronously or as a promise :
https://www.npmjs.com/package/csv-parser-sync-plus-promise
Description:
csv-parser-sync-plus-promise
A module to read csv synchronously or as promise
Features
Read any CSV synchronously or as a promise — the choice is yours.
Usage
// Neither binding is ever reassigned, so use `const` rather than `let`.
const parser = require('csv-parser-sync-plus-promise');

// Synchronous read: returns the parsed rows directly.
const a = parser.readCsvSync('<filepath>');

// Promise-based read: await (or .then) the result.
const b = parser.readCsvPromise('<filepath>');
Note: you can use either a fully qualified or a relative path for <filepath>.
Errors
All errors will be printed as console.error and the process will exit with exit code 222
Upvotes: 2
Reputation: 664
How about ensuring the HTTP server only starts listening after the file has been loaded into memory:
// server.js
var myMap = {};
function readCsv(cb){
fs.createReadStream('filename.csv')
.pipe(csv())
.on('data', (row) => {
// Build javascript object
myMap[row['key']] = row['value'];
})
.on('end', () => {
console.log('Done.');
cb();
});
}
var app = express();
exports = Object.freeze({
server: http.createServer(app)
init(){
readCsv(() => {
this.server.listen(80)
})
}
})
Something like that.
You can also utilize Promise
// server.js
var myMap = {};
function readCsv(){
return new Promise((resolve, reject) => {
fs.createReadStream('filename.csv')
.pipe(csv())
.on('data', (row) => {
// Build javascript object
myMap[row['key']] = row['value'];
})
.on('end', () => {
console.log('Done.');
resolve();
})
.on('error', reject)
})
}
var app = express();
exports = Object.freeze({
server: http.createServer(app)
init(){
return readCsv().then(() => {
this.server.listen(80)
})
}
})
Upvotes: 9
Reputation: 1083
In order to meet both of your goals, you can include the code in the app.js file. app.js runs only once, when the Express server starts — it doesn't re-run on every page request. You can call app.listen after the read stream ends.
// Accumulates key -> value pairs parsed out of the CSV file.
var myMap = {};

const stream = fs.createReadStream('filename.csv').pipe(csv());

// Each parsed row contributes one entry to the map.
stream.on('data', function (row) {
  myMap[row.key] = row.value;
});

// Only start accepting HTTP requests once the whole file is in memory.
stream.on('end', function () {
  app.listen(port, () => console.log(`Example app listening on port ${port}!`));
});
However, since I don't think you're going to have a lot of data, it's better to use a synchronous (blocking) methods, for both the csv parser and file reader. This just makes it easier to understand. I use csv-parse below.
const express = require('express')
const fs = require('fs')
const parse = require('csv-parse/lib/sync')
const app = express()
const port = 3000
/* In this example assume myMap will be
/ `
/ "key_1","key_2"
/ "value 1","value 2"
/ `
*/
var myMap = fs.readFileSync('sample.csv', 'utf8');
/* parsing the csv will return:
/ [Object {key_1: "value 1", key_2: "value 2"}]
*/
const records = parse(myMap, {
columns: true,
skip_empty_lines: true
})
app.get('/', (req, res) => res.send('Hello World!' + records[0].key_1))
app.listen(port, () => console.log(`Example app listening on port ${port}!`))
Upvotes: 1
Reputation: 6809
I would look for a more synchronous-looking way to read the file and handle the HTTP request. Here is sample code showing what it could look like:
import fs from 'fs';
// Orchestrates the flow: load the CSV, then hand the map to the HTTP layer.
async function processCSV() {
  try {
    const map = await readCsv();
    // Handle the HTTP request in another function, in the same async/await style.
    const http = await processHttpRequest(map);
    // ...process the http response here.
  } catch (e) {
    console.log('e', e);
  }
}
/**
 * Read filename.csv and resolve with a key -> value map built from its rows.
 *
 * Fixes vs. the original: it returned nothing, so `await readCsv()` always
 * yielded `undefined` before parsing finished; the accumulator was an array
 * misused as a map; the `return` inside the 'data' handler had no effect;
 * and stream errors went unhandled. Wrapping the stream in a Promise makes
 * the result genuinely awaitable.
 *
 * @returns {Promise<object>} map of row.key -> row.value
 */
function readCsv() {
  return new Promise((resolve, reject) => {
    // Plain object, not an array: the keys are CSV values, not indices.
    const myMap = {};
    fs.createReadStream('filename.csv')
      .pipe(csv())
      .on('data', (row) => {
        // Build javascript object
        myMap[row['key']] = row['value'];
      })
      .on('error', reject)
      .on('end', () => {
        console.log('Done.');
        resolve(myMap);
      });
  });
}
/**
 * Forward the parsed map to the HTTP layer and return the response.
 *
 * Fixes vs. the original: the empty catch block silently swallowed every
 * failure, and the response was discarded. Errors now propagate to
 * processCSV's catch, and the response is returned to the caller.
 *
 * @param {object} map - key -> value map produced by readCsv
 * @returns {Promise<*>} whatever httpReuqest resolves with
 */
async function processHttpRequest(map) {
  const reqres = await httpReuqest(map); // Your defined function for httpReuqest
  return reqres;
}
// Fix: the original also called `processHttpReuqet()`, which is not defined
// anywhere (a typo for processHttpRequest) and throws a ReferenceError.
// processCSV() already drives the whole flow, including the HTTP step.
processCSV();
Upvotes: 2