Reputation: 989
I have a Web server that reads and writes to a data file on disk. I'd like the file to be written to by only one Web request at a time.
Here's an example program that illustrates my problem. It keeps a state file in "/tmp/rw.txt" and increments the integer contents on each Web hit. Running this program, and then running something like ab -n 10000 -c 1000 http://localhost:3000/, shows that the same value is read from the file by multiple hits, and it's written multiple times.
NOTE: I know about flock() and fs-ext. However, flock() will lock the file to the current process; since all the access here is in the same process, flock() doesn't work (and complicates this example considerably).
Also note that I'd usually use express, async, etc. to get most of this done; just sticking to the basics for the sake of example.
var http = require("http"),
fs = require("fs");
var stateFile = "/tmp/rw.txt";
var server = http.createServer(function(req, res) {
var writeNum = function(num) {
var ns = num.toString(10);
console.log("Writing " + ns);
fs.writeFile(stateFile, ns, function(err) {
if (err) {
res.writeHead(500, {"Content-Type": "text/plain"});
res.end(err.message);
} else {
res.writeHead(200, {"Content-Type": "text/plain"});
res.end(ns);
}
});
};
switch (req.url) {
case "/reset":
writeNum(0);
break;
case "/":
fs.readFile(stateFile, function(err, data) {
if (err && err.code == "ENOENT") {
// First time, set it to zero
writeNum(0);
} else if (err) {
res.writeHead(500, {"Content-Type": "text/plain"});
res.end(err.message);
} else {
writeNum(parseInt(data, 10) + 1);
}
});
break;
default:
res.writeHead(404, {"Content-Type": "text/plain"});
res.end("No such resource: " + req.url);
}
});
server.listen(3000);
Upvotes: 6
Views: 6183
Reputation: 989
I wasn't able to find a library that did what I want, so I created one here:
https://npmjs.org/package/schlock
Here's the example program from above using the library's read/write locking. I also used Step to make the whole thing more readable.
var http = require("http"),
fs = require("fs"),
Schlock = require("schlock"),
Step = require("step");
var stateFile = "/tmp/rw.txt";
var schlock = new Schlock();
var server = http.createServer(function(req, res) {
var num;
Step(
function() {
schlock.writeLock(stateFile, this);
},
function(err) {
if (err) throw err;
fs.readFile(stateFile, this);
},
function(err, data) {
if (err && err.code == "ENOENT") {
num = 0;
} else if (err) {
throw err;
} else {
num = parseInt(data, 10) + 1;
}
fs.writeFile(stateFile, num.toString(10), this);
},
function(err) {
if (err) throw err;
schlock.writeUnlock(stateFile, this);
},
function(err) {
if (err) {
res.writeHead(500, {"Content-Type": "text/plain"});
res.end(err.message);
} else {
res.writeHead(200, {"Content-Type": "text/plain"});
res.end(num.toString(10));
}
}
);
});
server.listen(3000);
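If you'd rather not pull in Step, here's roughly the same flow with plain nested callbacks. It's only a sketch, uses just the Schlock calls shown above (writeLock and writeUnlock), and handles only the counter path for brevity:

var http = require("http"),
    fs = require("fs"),
    Schlock = require("schlock");

var stateFile = "/tmp/rw.txt";
var schlock = new Schlock();

var server = http.createServer(function(req, res) {
    function fail(err) {
        res.writeHead(500, {"Content-Type": "text/plain"});
        res.end(err.message);
    }
    function unlockAndFail(err) {
        // Always give the lock back, then report the original error.
        schlock.writeUnlock(stateFile, function() { fail(err); });
    }
    schlock.writeLock(stateFile, function(err) {
        if (err) return fail(err);
        fs.readFile(stateFile, function(err, data) {
            var num;
            if (err && err.code === "ENOENT") {
                num = 0; // first hit: start counting at zero
            } else if (err) {
                return unlockAndFail(err);
            } else {
                num = parseInt(data, 10) + 1;
            }
            fs.writeFile(stateFile, num.toString(10), function(err) {
                if (err) return unlockAndFail(err);
                schlock.writeUnlock(stateFile, function(err) {
                    if (err) return fail(err);
                    res.writeHead(200, {"Content-Type": "text/plain"});
                    res.end(num.toString(10));
                });
            });
        });
    });
});

server.listen(3000);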
Upvotes: 2
Reputation: 289
And another way to do this (in case there's something behind your question that rules out the simple solutions ;)) is to create a processing queue. Below is a simple queue pattern: it executes requests in the order they were submitted and passes any error to the provided callback once the function has run.
var fs = require("fs");            // same state file as in the question
var stateFile = "/tmp/rw.txt";

var Queue = function(fn) {
    var queue = [];
    var processingFn = fn;
    var iterator = function(callback) {
        return function(err) {
            queue.shift(); // remove processed value
            callback(err);
            var next = queue[0];
            if (next)
                processingFn(next.arg, iterator(next.cb));
        };
    };
    this.emit = function(obj, callback) {
        var empty = !queue.length;
        queue.push({ arg: obj, cb: callback });
        if (empty) { // start processing
            processingFn(obj, iterator(callback));
        }
    };
};

function writeNum(inc, continueCb) {
    fs.readFile(stateFile, function(err, data) {
        if (err && err.code !== "ENOENT")
            return continueCb(err);
        var value = parseInt(data, 10) || 0; // missing or empty file counts as zero
        fs.writeFile(stateFile, (value + inc).toString(10), function(err) {
            continueCb(err);
        });
    });
}

var writer = new Queue(writeNum);

// on each request
writer.emit(1, function(err) {
    if (err) {
        // respond with error
    } else {
        // value written
    }
});
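To tie this back to the original example, the per-request side might look something like this. It's just a sketch: it reuses the writer queue above and only handles the counter path:

var http = require("http");

var server = http.createServer(function(req, res) {
    if (req.url !== "/") {
        res.writeHead(404, {"Content-Type": "text/plain"});
        return res.end("No such resource: " + req.url);
    }
    // Every increment goes through the queue, so writes run one at a time.
    writer.emit(1, function(err) {
        if (err) {
            res.writeHead(500, {"Content-Type": "text/plain"});
            res.end(err.message);
        } else {
            res.writeHead(200, {"Content-Type": "text/plain"});
            res.end("ok");
        }
    });
});

server.listen(3000);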
Upvotes: 2
Reputation: 289
Storing data in files is not the preferred approach in a multi-user environment like a web server; databases are more suitable for this. But if you really want to stick with a file, I suggest using a buffer: that is, an in-memory object that you read from and write to, plus a separate function that periodically dumps its contents to disk, like this:
var server = http.createServer(function(req, res) {
    ----- cut ----
    switch (req.url) {
    case "/reset":
        value = 0;
        break;
    case "/":
        value++;
        break;
    default:
        res.writeHead(404, {"Content-Type": "text/plain"});
        res.end("No such resource: " + req.url);
    }
    ----- cut ----
});

(function dump() {
    fs.writeFile(stateFile, value.toString(10), function(err) {
        // handle error
        setTimeout(dump, 100); // schedule next call
    });
})();
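Filled out, a complete version of this idea might look like the sketch below; the in-memory value variable and the 100 ms flush interval are just assumptions carried over from the snippet above:

var http = require("http"),
    fs = require("fs");

var stateFile = "/tmp/rw.txt";
var value = 0; // in-memory state; requests only ever touch this variable

var server = http.createServer(function(req, res) {
    switch (req.url) {
    case "/reset":
        value = 0;
        break;
    case "/":
        value++;
        break;
    default:
        res.writeHead(404, {"Content-Type": "text/plain"});
        return res.end("No such resource: " + req.url);
    }
    res.writeHead(200, {"Content-Type": "text/plain"});
    res.end(value.toString(10));
});

// Flush the in-memory value to disk every 100 ms, independently of requests.
(function dump() {
    fs.writeFile(stateFile, value.toString(10), function(err) {
        if (err) console.error("Error writing " + stateFile + ": " + err.message);
        setTimeout(dump, 100);
    });
})();

server.listen(3000);

Like the original snippet, it doesn't reload any previously saved value on startup, and a crash can lose up to 100 ms of updates.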
Upvotes: 3
Reputation: 989
One solution I just found via npm search is the locker server: https://github.com/bobrik/locker. I think it's a good solution to the problem, and it's about how I'd design it.
The big drawback is that it handles the general case (multiple processes sharing a resource), which requires running a separate server.
I'm still looking for an in-process solution that does about the same thing.
Upvotes: 1