Reputation: 3407
I am trying to move mysql data to MongoDB. The record is over 3million rows. And every time it runs out of memory even after modifying my node start script
"scripts": {
"debug": "node debug app.js",
"start": "node --max_old_space_size=5120 --optimize_for_size --max_executable_size=5120 --stack_size=5120 app.js",
"test": "node ./node_modules/mocha/bin/mocha test/bootstrap.test.js "
}
I tried using normal callback function and promises and still nothing.
Whole Function
var citiesInit = function(_setup) {
var connection = mysql.createConnection({
host : 'localhost',
user : 'root',
password : 'password',
database : 'WorldCities'
});
connection.connect();
connection.query('SELECT * FROM thecities', function(err, rows, fields){
if (err) {
console.log("The error: ", err);
}
var cityData = [];
rows.forEach(function(theCity){
var data = {
name: theCity.city,
state_code: theCity.region,
country_code: theCity.country,
population: theCity.population,
location: {
type: "Point",
coordinates: [parseFloat(parseFloat(theCity.longitude).toFixed(4)), parseFloat(parseFloat(theCity.latitude).toFixed(4))]
},
max_latitude: parseFloat(parseFloat(theCity.latitude).toFixed(4)),
max_longitude: parseFloat(parseFloat(theCity.longitude).toFixed(4)),
min_latitude: parseFloat(parseFloat(theCity.latitude).toFixed(4)),
min_longitude: parseFloat(parseFloat(theCity.longitude).toFixed(4))
}
var cityCreate = City.create(data);
cityData.push(cityCreate);
})
console.log('The Saved row is: ', cityData.length);
//With Promise
Promise.all([cityData]).spread(function (cityData){
if (cityData) {
console.log(cityData.length);
Setup.findOrCreate({slug: 'city', setup: true}, {name: 'City', slug: 'city', setup: true}).exec(function cb(err, setup){
console.log("Cities initialized successfully");
});
}
})
.catch(function (err){
console.log(err);
})
//Without Promise
City.create(cityData).exec(function cityCB(err, cities){
if (err) {
console.log(err);
}
if (cities.length) {
console.log(cities.length);
Setup.findOrCreate({slug: 'city', setup: true}, {name: 'City', slug: 'city', setup: true}).exec(function cb(err, setup){
console.log("Cities initialized successfully");
});
}
})
});
};
I tried using Streams
too
connection.query('SELECT * FROM thecities')
.stream()
.pipe(stream.Transform({
objectMode: true,
transform:function(data, encoding, callback){
var data = {
name: data.city,
state_code: data.region,
country_code: data.country,
population: data.population,
location: {
type: "Point",
coordinates: [parseFloat(parseFloat(data.longitude).toFixed(4)), parseFloat(parseFloat(data.latitude).toFixed(4))]
},
max_latitude: parseFloat(parseFloat(data.latitude).toFixed(4)),
max_longitude: parseFloat(parseFloat(data.longitude).toFixed(4)),
min_latitude: parseFloat(parseFloat(data.latitude).toFixed(4)),
min_longitude: parseFloat(parseFloat(data.longitude).toFixed(4))
}
City.create(data).exec(function cityCB(err, city){
if (err) {
console.log(err);
}
//if (city.state) { console.log(city.state) }
callback();
})
}
}))
.on('finish', function(){
connection.end();
Setup.findOrCreate({slug: 'city', setup: true}, {name: 'City', slug: 'city', setup: true}).exec(function cb(err, setup){
console.log("Cities initialized successfully");
organizationInit();
});
})
Any help on how to go about this? Thanks.
Upvotes: 2
Views: 69
Reputation: 468
MySQL supports cursors to fetch smaller amounts of a query in batches to save memory:
Please see How to loop through a table using a cursor in MySQL?
Upvotes: 1