Reputation: 353
I am using Node to pre-process .csv files into .json files; for that I'm using the csvtojson npm package.
I want to wait for the parsing to finish and then start uploading the results to the database.
I found that Node requires chaining functions with callbacks in order to execute them asynchronously, but I can't figure out how to apply that to my program.
Here is the code.
// 1. Read *.csv file + Parse fields and escape; @dir "raw_data" => @dir "processed"
fs.readdir(rawDataPath, function (err, files) {
  if (err) return console.log("Unable to scan raw_data : " + err);
  console.log("Processing csv files to JSON...");
  console.log("+++++++++++++++++++++++++++++++++++++++++++++++++++++++");
  files.forEach(function (file) {
    console.log(`CSV ${file.split(".")[0]} being converted...`);
    csv({ ignoreEmpty: true })
      .fromFile("raw_data/" + file)
      .then((jsonObj) => {
        // stringify JSON Object
        var jsonContent = JSON.stringify(jsonObj);
        fs.writeFile(
          `processed_data/${file.split(".")[0]}.json`,
          jsonContent,
          "utf8",
          function (err) {
            if (err) {
              console.log(
                "An error occurred while writing JSON Object to File."
              );
              return console.log(err);
            }
            console.log(
              `${file} successfully converted to ${file.split(".")[0]}.json`
            );
          }
        );
      });
  });
});
// 2. Upload to Cloud Firestore
fs.readdir(processedDataPath, function (err, files) {
  if (err) return console.log("Unable to scan processed_data : " + err);
  files.forEach(function (file) {
    var quiz = require("./processed_data/" + file);
    console.log(`Collection ${file.split(".")[0]} being updated...`);
    quiz.forEach(function (obj) {
      firestore
        .collection(`${file.split(".")[0].toUpperCase()}`)
        .doc(obj.id)
        .set(obj)
        .then(function (docRef) {
          console.log(
            `Document ${obj.id} successfully uploaded to Cloud Firestore!`
          );
        })
        .catch(function (error) {
          console.error("Error adding document: ", error);
        });
    });
  });
});
Upvotes: 0
Views: 129
Reputation: 368
There are various ways to handle the asynchronous nature of JavaScript. I will use fs.readFile()
as an example to keep things simple. These are some of the approaches -
Callback -
const fs = require('fs');

fs.readFile('./some-file.txt', (err, res) => {
  if (err) { // err is null on success
    return console.log(err); // handle error
  }
  console.log(res); // handle success
});
Promise -
fs.promises.readFile('./some-file.txt').then((res) => {
  console.log(res); // handle success
}).catch((err) => { // only gets executed if there is an error
  console.log(err); // handle error
});
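Note that csvtojson's fromFile() already returns a promise (the code in the question calls .then() on it), so it can be chained with fs.promises.writeFile() in the same style. A minimal sketch, reusing the csv, fs and file variables from the question, showing how each conversion can report its own completion:
csv({ ignoreEmpty: true })
  .fromFile('raw_data/' + file)
  .then((jsonObj) => fs.promises.writeFile(
    `processed_data/${file.split('.')[0]}.json`,
    JSON.stringify(jsonObj),
    'utf8'
  ))
  .then(() => console.log(`${file} converted`)) // runs only after the file is written
  .catch((err) => console.log(err)); // handles errors from both steps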
Chaining -
fs.promises.readFile('./some-1.txt').then((res) => {
  // handle success - 1
  return fs.promises.readFile('./some-2.txt');
}).then((res) => {
  // handle success - 2
  return fs.promises.readFile('./some-3.txt');
}).then((res) => {
  // handle success - 3
}).catch((err) => {
  console.log(err); // handle error
});
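When several independent operations have to finish before the next step, for example one conversion per file as in the question, Promise.all() can wait for all of them instead of chaining them one after another. A small sketch, using the same hypothetical file names as above:
Promise.all([
  fs.promises.readFile('./some-1.txt'),
  fs.promises.readFile('./some-2.txt'),
  fs.promises.readFile('./some-3.txt'),
]).then(([res1, res2, res3]) => {
  // all three reads have finished
  console.log(res1, res2, res3);
}).catch((err) => {
  console.log(err); // first error, if any read fails
});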
Async/await -
const main = async () => {
  try {
    const res = await fs.promises.readFile('./some-file.txt');
    console.log(res);
    // handle success
  } catch (err) {
    console.log(err);
    // handle error
  }
};

main();
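Putting it together for the code in the question, a minimal sketch could look like the following. It is not a drop-in replacement; it assumes the csv, firestore, rawDataPath and processedDataPath variables defined in the question, and it uses fs.promises together with Promise.all() so the Firestore upload only starts once every CSV has been converted and written.
const fs = require("fs");
const path = require("path");

// Convert every .csv in rawDataPath and resolve when all files are written.
async function convertAll() {
  const files = await fs.promises.readdir(rawDataPath);
  await Promise.all(files.map(async (file) => {
    const name = file.split(".")[0];
    const jsonObj = await csv({ ignoreEmpty: true }).fromFile(path.join(rawDataPath, file));
    await fs.promises.writeFile(
      path.join(processedDataPath, `${name}.json`),
      JSON.stringify(jsonObj),
      "utf8"
    );
    console.log(`${file} successfully converted to ${name}.json`);
  }));
}

// Upload every processed .json document to Cloud Firestore.
async function uploadAll() {
  const files = await fs.promises.readdir(processedDataPath);
  for (const file of files) {
    const name = file.split(".")[0];
    const quiz = JSON.parse(
      await fs.promises.readFile(path.join(processedDataPath, file), "utf8")
    );
    await Promise.all(
      quiz.map((obj) => firestore.collection(name.toUpperCase()).doc(obj.id).set(obj))
    );
    console.log(`Collection ${name.toUpperCase()} updated`);
  }
}

// Step 2 only starts after step 1 has fully completed.
convertAll()
  .then(uploadAll)
  .catch((err) => console.error(err));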
Further reading -
Upvotes: 1