Reputation: 726
I want to backup a MySQL database. Is it possible to do it in Node.js? If not, what is the best way to do so?
Upvotes: 13
Views: 27243
Reputation: 43
I prefer the answer given by Balaji because it is simple and clean. However, there are a few improvements to consider:
In my case, the file size reached up to 1 GB, even though the actual data was around 300 MB — presumably because the plain-text SQL dump (with its INSERT statements and escaping overhead) is much larger than the stored data.
A more effective approach is to use the following method:
const mysqldump = require('mysqldump');
const env = process.env.NODE_ENV || 'test';
const config = require(__dirname + '/../config/config.json')[env];
const logger = require('../middleware/log');
const fs = require('fs');
const zlib = require('zlib');
const path = require('path');
/**
 * Dumps the configured MySQL database to a timestamped .sql file under
 * ./db-backups, gzips it, and deletes the uncompressed original.
 * Errors are logged (not rethrown), matching the original contract.
 */
async function bkp() {
    try {
        // stream/promises requires Node 15+ — TODO confirm runtime version.
        const { pipeline } = require('stream/promises');

        const backupDir = path.join(__dirname, 'db-backups');
        // { recursive: true } is a no-op when the directory already exists,
        // so no fs.existsSync pre-check is needed.
        fs.mkdirSync(backupDir, { recursive: true });

        const filePath = path.join(backupDir, `${Date.now()}-dump.sql`);
        const compressedFilePath = `${filePath}.gz`;

        await mysqldump({
            connection: {
                host: config.host,
                user: config.username,
                password: config.password,
                database: config.database,
            },
            dumpToFile: filePath,
        });
        logger.info("DB backup created successfully, compressing...");

        // pipeline() wires read -> gzip -> write and rejects on an error from
        // ANY of the three streams, so failures reach the catch below. The
        // original `.pipe()` chain dropped read/gzip errors, and the function
        // returned before compression finished because the 'finish' callback
        // was never awaited.
        await pipeline(
            fs.createReadStream(filePath),
            zlib.createGzip(),
            fs.createWriteStream(compressedFilePath)
        );
        logger.info(`Backup compressed successfully: ${compressedFilePath}`);

        // Remove the uncompressed dump to save space — safe now, since the
        // compressed file is fully written at this point.
        fs.unlinkSync(filePath);
    } catch (error) {
        logger.error("Error in DB Backup", error);
    }
}
Upvotes: 0
Reputation: 1044
You can use the exec method as described by siavolt; it is simple, but it is not the "best way" as requested by the question.
The best and most efficient way — if you care about your system's memory, CPU and performance, or if you have a big database — is to use spawn
along with a stream. So here is the "best way", in my opinion:
var fs = require('fs');
var spawn = require('child_process').spawn;
var wstream = fs.createWriteStream('dumpfilename.sql');
var mysqldump = spawn('mysqldump', [
'-u',
'DB_USER',
'-pDB_PASSWORD',
'DB_NAME',
]);
mysqldump
.stdout
.pipe(wstream)
.on('finish', function () {
console.log('Completed')
})
.on('error', function (err) {
console.log(err)
});
If your database doesn't have a password, you can remove the '-pDB_PASSWORD' line.
Remember to replace DB_XXX
with actual values, i.e. 'root'
instead of 'DB_USER'
, or '-pXXXXXXXX'
instead of '-pDB_PASSWORD'
.
Upvotes: 12
Reputation: 1
const {exec} = require('child_process');
exec('mysqldump --user=root --password=yourPassword --result-file=D:/dbBackup.sql --databases database_name);
Upvotes: -2
Reputation: 11017
A simple and powerful way:
npm install mysqldump
Run :
import mysqldump from 'mysqldump';
// or const mysqldump = require('mysqldump')

// Dump the result straight to a file.
// mysqldump() returns a promise; without a .catch() any connection or
// write failure becomes an unhandled promise rejection.
mysqldump({
  connection: {
    host: 'localhost',
    user: 'root',
    password: '123456',
    database: 'my_database',
  },
  dumpToFile: './dump.sql',
}).catch((err) => {
  console.error('DB dump failed:', err);
});
File Path:
write absolute path example (check into G Disk Drive)
dumpToFile:'G:\dump.sql'
or Relative path
dumpToFile:'.\dump.sql' (root of project directory)
Upvotes: 11
Reputation: 196
You can back up MySQL databases using node-cron. I like this approach because it ensures your DB backup will always run regardless of the OS hosting your Node app.
npm install node-cron
npm install moment (only for file name formatting)
@app.js (the file that you serve your Node App)
const cron = require('node-cron')
const moment = require('moment')
const fs = require('fs')
const spawn = require('child_process').spawn
// You can adjust the backup frequency as you like, this case will run once a day
cron.schedule('0 0 * * *', () => {
// Use moment.js or any other way to dynamically generate file name
const fileName = `${process.env.DB_NAME}_${moment().format('YYYY_MM_DD')}.sql`
const wstream = fs.createWriteStream(`/Path/You/Want/To/Save/${fileName}`)
console.log('---------------------')
console.log('Running Database Backup Cron Job')
const mysqldump = spawn('mysqldump', [ '-u', process.env.DB_USER, `-p${process.env.DB_PASSWORD}`, process.env.DB_NAME ])
mysqldump
.stdout
.pipe(wstream)
.on('finish', () => {
console.log('DB Backup Completed!')
})
.on('error', (err) => {
console.log(err)
})
})
...
...
app.listen()
You could hardcode all your DB_USER & DB_PASSWORD but i find it is best practice to read from a .env file, so it can work in both Development & Production environment
ENVIRONMENT = development
LOCAL_API = http://api.url
GOD_TOKEN = godtokenauthmenow
DB_HOST = 127.0.0.1
DB_NAME = myDatabaseName
DB_USER = myUsername
DB_PASSWORD = myPassword
Upvotes: 6
Reputation: 31
var exec = require('child_process').exec(' mysqldump -u root -p dbname > fileName.sql');
This worked for me
Upvotes: 0
Reputation: 7077
simple way without libs:
var exec = require('child_process').exec;
var child = exec(' mysqldump -u root -p[root_password] [database_name] > dumpfilename.sql');
Upvotes: 14