Reputation: 11
Whenever I run my 'save_to_db' function at location B, it fails.
The code below shows the problem...
//location A
var i = 0;
while (true)
{
    i++;
    if (i == 100)
    {
        //location B
        save_to_db(newUnit, 11214.1, 'TEST');
    }
}
//location C
The function 'save_to_db' works fine at location A and location C, but fails at location B.
I think it is because the while loop is infinite and synchronous, so it never gives Node.js a chance to run its event loop. How can we properly give Node.js a chance to run its event loop in this case?
===================== updates ===============================
The code above is a very simplified version of the real code. I have a class/module called 'db_util' with two methods, 'save_to_db' and 'read_from_db'. I think that because these methods use the 'mysql' module, they access the database asynchronously, but my while(true) loop blocks the Node.js event loop, so the callbacks of the methods called inside the loop never get a chance to run.
var mysql = require('mysql');
var synaptic = require('synaptic'); // used by read_from_db below

var db_util = function ()
{};

db_util.prototype = {
    save_to_db: function (neural_network, fitness, type) {
        var connection = mysql.createConnection({
            host: 'localhost',
            user: 'root',
            password: '',
            database: 'nodejstestdb'
        });
        connection.connect();
        var addSql = 'INSERT INTO mytable(id, nn, type, fitness) VALUES(?,?,?,?)';
        var addSqlParams = [null, neural_network, type, fitness];
        // The query callback only runs when the event loop gets to process I/O
        connection.query(addSql, addSqlParams, function (err, result) {
            if (err) {
                console.log('[INSERT ERROR] - ', err.message);
                return;
            }
            console.log('INSERT ID:', result);
        });
        connection.end();
    },
    read_from_db: function () {
        var connection = mysql.createConnection({
            host: 'localhost',
            user: 'root',
            password: '',
            database: 'nodejstestdb'
        });
        connection.connect();
        var sql = 'SELECT * FROM mytable ORDER BY fitness DESC';
        connection.query(sql, function (err, result) {
            if (err) {
                console.log('[SELECT ERROR] - ', err.message);
                return;
            }
            console.log(result);
            var nn = result[0].nn;
            var neural_network = synaptic.Network.fromJSON(JSON.parse(nn));
            return neural_network; // returns from the callback, not from read_from_db itself
        });
        connection.end();
    }
};

module.exports = db_util;
===================== update 2 (because of the answers) ===============================
let i = 0;
(function tick() {
    ++i;
    if (i % 100 == 0) {
        save_to_db();
    }
    setTimeout(tick, 0); // Queue a callback on next(ish) event loop cycle
}());
@T.J. Crowder, sir, thanks for your answer. But what's the difference between your code above and the code below?
var i = 0;
function tick() {
    ++i;
    if (i % 100 == 0) {
        save_to_db();
    }
    setTimeout(tick, 0); // Queue a callback on next(ish) event loop cycle
}
tick();
Upvotes: 0
Views: 164
Reputation: 1075209
As VLAZ says, I wonder why you need an infinite loop, rather than just using the event loop.
I think it is because the while loop is infinite and synchronous, so it never gives Node.js a chance to run its event loop.
Yes, you're exactly correct.
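As a minimal sketch of what that blocking looks like, the timer callback below never fires, because the synchronous loop never yields back to the event loop (the MySQL callbacks in save_to_db get starved the same way):

setTimeout(() => console.log('never printed'), 0); // queued, but never gets to run
while (true) {
    // synchronous busy loop: the event loop can't process the timer above,
    // nor any I/O callbacks such as mysql's query results
}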
It's hard to answer this question without knowing how save_to_db works, but in general you have three options:

1. Use a chained series of setTimeout (or setImmediate) callbacks, doing the work in chunks and using setTimeout to schedule the next block.
2. Use an async function and await (if save_to_db returns a promise, or can be modified to return a promise).
3. Use a worker thread, which posts a message to the main thread when it should do the save_to_db (then the worker thread's event loop being blocked doesn't prevent events on the main thread that save_to_db needs). (I probably wouldn't use a worker for this.)
#1 looks roughly like this:
let i = 0;
(function tick() {
    ++i;
    if (i == 100) {
        save_to_db();
    }
    setTimeout(tick, 0); // Queue a callback on next(ish) event loop cycle
}());
Note that Node.js rate-limits timers, so setTimeout(tick, 0) won't necessarily schedule the next call on the next event loop iteration. You might use setImmediate instead, because it

    Schedules the "immediate" execution of the callback after I/O events' callbacks.
For example, this code:
let i = 0;
let last = Date.now();
let sum = 0;
(function tick() {
    ++i;
    sum += Date.now() - last;
    if (i < 1000) {
        last = Date.now();
        setTimeout(tick, 0);
    } else {
        console.log(`Average: ${sum / i}`);
    }
})();
reports an average elapsed time of 1.155ms on my Node v12.4 installation on my Linux box. The equivalent code using setImmediate reports an average of just 0.004ms instead.
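For reference, a sketch of that setImmediate version is the same measurement with the scheduling call swapped out:

let i = 0;
let last = Date.now();
let sum = 0;
(function tick() {
    ++i;
    sum += Date.now() - last;
    if (i < 1000) {
        last = Date.now();
        setImmediate(tick); // no minimum delay, unlike setTimeout(tick, 0)
    } else {
        console.log(`Average: ${sum / i}`);
    }
})();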
You've said in a comment that you didn't understand how to implement doing this in "chunks." Here's an example:
const chunkSize = 1000; // Or whatever
let i = 0;
(function chunk() {
    const chunkEnd = i + chunkSize;
    while (i++ < chunkEnd) {
        if (/*time to save to the DB*/) {
            // Note that we don't wait for it to complete, you've said it's
            // okay not to wait and that it's okay if they overlap
            save_to_db();
        }
    }
    setImmediate(chunk); // Schedule the next chunk after pending I/O callbacks
})();
Doing it that way, it does a chunk of work on the main thread, then yields to allow processing of any I/O callbacks that save_to_db might need, then keeps going. You could optimize it a bit to only yield back when it knows that save_to_db still has work to do, but that's probably overengineering it.
#2 looks roughly like this:
(async () => {
    try {
        let i = 0;
        while (true) {
            ++i;
            if (i == 100) {
                await save_to_db();
            }
        }
    } catch (e) {
        // Handle/report error
    }
})();
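For #2 to work, save_to_db has to return a promise. A minimal sketch of a promise-returning version, assuming the mysql-based code from the question's db_util module:

var mysql = require('mysql');

// Sketch only: wrap the callback-based query in a Promise so it can be awaited
function save_to_db(neural_network, fitness, type) {
    return new Promise(function (resolve, reject) {
        var connection = mysql.createConnection({
            host: 'localhost',
            user: 'root',
            password: '',
            database: 'nodejstestdb'
        });
        connection.connect();
        var addSql = 'INSERT INTO mytable(id, nn, type, fitness) VALUES(?,?,?,?)';
        var addSqlParams = [null, neural_network, type, fitness];
        connection.query(addSql, addSqlParams, function (err, result) {
            connection.end();
            if (err) {
                reject(err); // surfaces in the await's try/catch
            } else {
                resolve(result);
            }
        });
    });
}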
#3 I'll leave as an exercise for the reader, but I probably wouldn't use a worker for this.
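For what it's worth, a rough sketch of the shape #3 could take with Node's worker_threads module (assuming the question's save_to_db and its arguments are available on the main thread):

const { Worker, isMainThread, parentPort } = require('worker_threads');

if (isMainThread) {
    // Main thread: its event loop stays free, so save_to_db's I/O callbacks can run
    const worker = new Worker(__filename);
    worker.on('message', () => {
        save_to_db(newUnit, 11214.1, 'TEST'); // values taken from the question
    });
} else {
    // Worker thread: the blocking loop lives here and just asks the main thread to save
    let i = 0;
    while (true) {
        ++i;
        if (i == 100) {
            parentPort.postMessage('save');
        }
    }
}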
Upvotes: 1