Reputation: 8520
We set up a live ticker for one of our events last night and ran into serious performance issues. Socket.io wasn't able to serve socket.io.js with more than 400 to 500 clients connected. The ticker is really simple: there is just one message every few minutes, which is broadcast to all clients, so I don't think there is much room for improvement in the code. The server hardware is not the best, but we monitored the processes while the ticker was running and none of them was causing the problem.
Do you have any idea how to solve the problem, or at least what else could be the reason for it? It looks like socket.io was struggling, but not due to a lack of hardware power.
Server Structure
var io = require('socket.io').listen(443);
io.set('log level', 9);
//SQL CONNECTION
io.sockets.on('connection', function (socket) {
    // send the full list of entries to every new client
    var sql_items = 'SELECT * FROM entries ORDER BY tstamp DESC';
    db_query(sql_items, function(res_items) {
        socket.emit('init', res_items);
    });

    socket.on('new_entry', function (data) {
        //SECURE
        if (!checkedSocketUsers[socket.id]) return false;

        var currentTime = new Date();
        var minutes = currentTime.getMinutes();
        if (minutes < 10) minutes = "0" + minutes;
        var hours = currentTime.getHours();
        if (hours < 10) hours = "0" + hours;
        var tstamp = currentTime.getTime() / 1000;
        var time = hours + ":" + minutes;

        // NOTE: values are interpolated directly into the SQL string (not escaped)
        var sqli = "INSERT INTO entries (uid, tstamp, text, type) VALUES (null, " + tstamp + ", '" + data.text + "', '" + data.type + "')";
        client.query(sqli, function(err, info) {
            var br_data = {};
            br_data.time = time;
            br_data.text = data.text;
            br_data.uid = info.insertId;
            br_data.type = data.type;
            socket.broadcast.emit('broadcast_entry', br_data);
            socket.emit('broadcast_entry', br_data);
        });
    });

    socket.on('update_entry', function (data) {
        //SECURE
        if (!checkedSocketUsers[socket.id]) return false;

        var sqlu = "UPDATE entries SET text = '" + data.text + "' WHERE uid = " + data.uid;
        client.query(sqlu, function(err, info) {
            var br_data = data;
            socket.broadcast.emit('broadcast_update_entry', br_data);
        });
    });

    socket.on('remove_entry', function (data) {
        //SECURE
        if (!checkedSocketUsers[socket.id]) return false;

        var uid = data.uid;
        var sqld = "DELETE FROM entries WHERE uid = " + uid;
        client.query(sqld, function(err, info) {
            var br_data = {};
            br_data.uid = uid;
            socket.broadcast.emit('broadcast_remove_entry', br_data);
            socket.emit('broadcast_remove_entry', br_data);
        });
    });
});
Client Structure
var socket = io.connect("http://localhost:443");

socket.on('init', function(data) {
    //DOM Manipulation
});
socket.on('broadcast_entry', function(data) {
    //DOM Manipulation
});
socket.on('broadcast_remove_entry', function(data) {
    //DOM Manipulation
});
socket.on('broadcast_update_entry', function(data) {
    //DOM Manipulation
});
Upvotes: 2
Views: 5911
Reputation: 126
It's a little hard to tell what bottleneck you're hitting. A couple of possibilities for you to explore:
You've possibly hit the limit on the number of open file handles in your process. Your Linux kernel is configured with a maximum number of file handles per process, and each socket connection uses a file handle. You can see the current open-file limit by running "ulimit -a", and you can raise it for the shell that launches your process with "ulimit -n <limit>" before starting it; search for "ulimit open files" for distribution-specific details.
Another thought... How many messages do you have at any given time, at most? You're doing a database query to retrieve all of them for every new connection, so if the number of connections rises sharply you could be issuing quite a few queries just to load the initial list. As an experiment, you could skip the initial on('connection') query and see whether scaling improves. If that helps significantly, you could easily cache the query results in a JavaScript variable rather than hitting the database for every connection; see the sketch below.
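A minimal sketch of that caching idea, reusing the db_query helper and the 'init' event from your code; the cachedEntries / refreshEntryCache names and the refresh strategy are illustrative, not part of your original setup:

// Cache the entry list in memory and refresh it only when the data changes,
// so new connections don't each trigger their own SELECT.
var cachedEntries = null;

function refreshEntryCache(done) {
    db_query('SELECT * FROM entries ORDER BY tstamp DESC', function(res_items) {
        cachedEntries = res_items;
        if (done) done();
    });
}

// Warm the cache once at startup.
refreshEntryCache();

io.sockets.on('connection', function (socket) {
    if (cachedEntries) {
        socket.emit('init', cachedEntries);   // served from memory, no query
    } else {
        refreshEntryCache(function() {
            socket.emit('init', cachedEntries);
        });
    }
    // Call refreshEntryCache() again in the new_entry / update_entry /
    // remove_entry handlers after their INSERT/UPDATE/DELETE succeeds.
});

That keeps the connection path free of database round trips; the cache only needs to be refreshed when an entry is inserted, updated or removed.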
Some interesting reading...
http://drewww.github.com/socket.io-benchmarking/
Upvotes: 5