Reputation: 13522
I have a very simple MongoDB database with one collection, named "test", and I need to insert 133,306 records into it. The records are stored in a JSON file; the size of this file is 21 MB. Inserting 50,000 records succeeds in one second. A 70,000-record insert hangs the script.
The code:
var path = require('path'),
    fs = require('fs'),
    mongodb = require('mongodb'),
    safe = { safe: true },
    rowset;

rowset = JSON.parse(fs.readFileSync(path.join(__dirname, 'test.js')));
console.log('Total records: ' + rowset.length);

rowset = rowset.slice(0, 50000); // OK
// rowset = rowset.slice(0, 70000); // FAIL

console.log('Inserting ' + rowset.length + ' records');

mongodb.MongoClient.connect('mongodb://127.0.0.1:27017/browser',
    function (err, client) {
        if (err) throw err;
        client.createCollection('test', safe, function (err, collection) {
            if (err) throw err;
            collection.insert(rowset, safe, function (err) {
                if (err) throw err;
                client.close(function (err) {
                    if (err) throw err;
                    console.log('done');
                });
            });
        });
    });
The last few lines in the mongod output:
Wed Dec 26 16:50:46 [initandlisten] connection accepted from 127.0.0.1:52003 #854 (4 connections now open)
Wed Dec 26 16:50:46 [initandlisten] connection accepted from 127.0.0.1:52004 #855 (5 connections now open)
Wed Dec 26 16:50:46 [initandlisten] connection accepted from 127.0.0.1:52005 #856 (6 connections now open)
This is a typical record from the rowset:
{ _id: 133306,
product: 23089,
version: '1.0.0',
update: null,
edition: null,
lang: null,
entries: [ 54344, 54345 ] }
Does the script hit some threshold or limitation?
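In case it matters, a workaround I am considering is to split the insert into smaller batches (a sketch only, not tested at full scale; insertInBatches is my own helper and the 10,000 batch size is a guess), plugged into the code above:

function insertInBatches(collection, docs, batchSize, done) {
    // Insert docs in slices of batchSize, one batch at a time,
    // recursing only after the previous batch's callback fires.
    if (docs.length === 0) return done(null);
    collection.insert(docs.slice(0, batchSize), safe, function (err) {
        if (err) return done(err);
        insertInBatches(collection, docs.slice(batchSize), batchSize, done);
    });
}

// e.g. insertInBatches(collection, rowset, 10000, function (err) { ... });

But I would still like to understand why the single large insert hangs.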
Upvotes: 3
Views: 1219
Reputation: 9973
I tested your script on my computer with 150,000 entries of the type you described and it worked like a charm. The process required an additional 140 MB of RAM for a 20 MB JSON file.
You can monitor the open connections in MongoDB with this command:
db.$cmd.sys.inprog.findOne( { $all : true } )
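For what it's worth, the shell helper below should be equivalent (my assumption; as far as I know db.currentOp() wraps that $cmd.sys.inprog query):

db.currentOp(true) // true includes operations on idle connections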
UPDATE:
I tried to insert 600,000 entries and it hung. You're right. In this case you should use mongoimport. I generated a file with 1,000,000 entries and mongoimport inserted them in less than a minute. A few points to take care of: the import file should be newline-delimited JSON, formatted like this:
{"product": 23089,"version": "1.0.0","update": null,"edition": null,"lang": null,"entries": [ 54344, 54345 ]}
{"product": 23089,"version": "1.0.0","update": null,"edition": null,"lang": null,"entries": [ 54344, 54345 ]}
{"product": 23089,"version": "1.0.0","update": null,"edition": null,"lang": null,"entries": [ 54344, 54345 ]}
- one document per line
- no comma separators between documents
- the documents must not be wrapped in an array []
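If your data starts out as a single JSON array, a short Node script can do the conversion (a sketch; the input and output file names here are assumptions):

var fs = require('fs'),
    path = require('path');

// Read the JSON array, then write one document per line,
// with no commas and no surrounding [].
var rowset = JSON.parse(fs.readFileSync(path.join(__dirname, 'test.js')));
var lines = rowset.map(function (doc) {
    return JSON.stringify(doc);
});
fs.writeFileSync(path.join(__dirname, 'veryBigjson.json'), lines.join('\n'));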
Here is the command that I used for importing:
c:\mongodb\bin>
mongoimport --collection browser12 --file E:\Nodejs\StackOverflow.com\Mongodb\veryBigjson.json --dbpath C:\mongodb\data --port 27016 -d browser12 --ignoreBlanks
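If mongod is already running on the default port, a simpler invocation along these lines should also work (my understanding is that --dbpath is only needed when importing directly against the data files, and --port only for a non-default port; database and collection names here match the question):

mongoimport -d browser -c test --file veryBigjson.json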
Upvotes: 3