Reputation: 99
Using Mongodb, Nodejs, Async.js and Express.js
I am trying to update multiple documents at the same time, where each document gets its own update. I want to wait for all of the documents to finish updating so that I can notify the user that everything has been updated.
The issue I am having is that my callback function is not firing, or if it is, nothing is happening. Here is my progress:
db.client.collection('page').find({page_id: page_id}).toArray(function(page_err, document_page) {
    if(page_err) {
        throw page_err;
    } else if(document_page === '' || document_page === undefined || document_page === null) {
        throw page_err;
    } else {
        var count = 0;
        async.each(data, function iteratee(i, callback) {
            var item_id = (i.item_id === '') ? new ObjectId() : new ObjectId(i.item_id);
            var query = {item_id: item_id};
            var update = {
                _id : new ObjectId(),
                page_id : page_id,
                section_id : null,
                item_id : item_id,
                created : new Date().toISOString(),
                item_type : "dish",
                item: {
                    title: i.title,
                    description: i.description,
                    price: i.price,
                    star: false,
                    double_star: false
                },
                last_modified: new Date().toISOString()
            };
            var options = { upsert: true };
            db.client.collection('item').updateOne(query, {$set: update}, options, function(item_err, results) {
                if(item_err) {
                    res.sendStatus(500);
                } else if(results === '' || results === undefined || results === null) {
                    res.sendStatus(400);
                } else {
                    ++count;
                    if(count === data.length) {
                        callback();
                        return;
                    }
                }
            });
        }, function() {
            console.log('sending 200 status');
            res.sendStatus(200);
        });
    }
});
When I run the code I do reach the if statement where I call callback(), but the final callback never seems to fire. I have been stuck on this for a few hours and cannot get it to work. If you need more info, I'd be happy to provide it. For simplicity's sake I have removed most of the console.logs to avoid clutter.
Upvotes: 0
Views: 712
Reputation: 4017
All iterations need to fire the callback, otherwise async.each will hang indefinitely. callback must be called in every iteration. Always. If you encounter an error, you need to call callback(error). The problem you'll have is that async.each schedules all iterations beforehand, so iteratee will fire data.length times regardless of whether an error is encountered half way through execution or not. If you need to run the updates in series you can use async.eachSeries, which takes more time but gives you better control and removes the need to roll back.
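As a minimal sketch of that failure mode (with hypothetical data, just to illustrate the hang): if only one iteration ever calls its callback, the final callback never fires.
var async = require('async');

// Only the last item reports completion; async.each waits for all
// three per-item callbacks, so the final callback never runs.
async.each([1, 2, 3], function iteratee(n, callback) {
    if (n === 3) {
        callback();
    }
    // items 1 and 2 never call callback()
}, function() {
    console.log('never reached');
});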
So, code-wise, it looks like this:
db.client.collection('page').find({page_id: page_id}).toArray(function(page_err, document_page) {
    if(page_err) {
        throw page_err;
    } else if(document_page === '' || document_page === undefined || document_page === null) {
        throw page_err;
    } else {
        async.each(data, function iteratee(i, callback) {
            var item_id = (i.item_id === '') ? new ObjectId() : new ObjectId(i.item_id);
            var query = {item_id: item_id};
            var update = {
                _id : new ObjectId(),
                page_id : page_id,
                section_id : null,
                item_id : item_id,
                created : new Date().toISOString(),
                item_type : "dish",
                item: {
                    title: i.title,
                    description: i.description,
                    price: i.price,
                    star: false,
                    double_star: false
                },
                last_modified: new Date().toISOString()
            };
            var options = { upsert: true };
            db.client.collection('item').updateOne(query, {$set: update}, options, function(item_err, results) {
                if(item_err) {
                    callback(500);
                } else if(results === '' || results === undefined || results === null) {
                    callback(400);
                } else {
                    callback();
                }
            });
        }, function(err) {
            // Passing the status code only for the example.
            // `err` should probably be an object with more metadata.
            if(err) {
                res.sendStatus(err);
                return;
            }
            console.log('sending 200 status');
            res.sendStatus(200);
        });
    }
});
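If you do want the updates to run in series, as mentioned above, a minimal sketch (assuming the same data, res, and iteratee body as in the code above) only swaps async.each for async.eachSeries; everything else stays the same:
// Items are processed one at a time; as soon as an iteration calls
// callback(err) with a truthy error, the remaining items are skipped
// and the final callback fires with that error.
async.eachSeries(data, function iteratee(i, callback) {
    // ... identical body to the async.each version above ...
}, function(err) {
    if(err) {
        res.sendStatus(err);
        return;
    }
    res.sendStatus(200);
});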
Upvotes: 1