Back

Scheduled Database Backup And Upload to Google Drive using Node.js

Last updated on 25 Jan, 2023

In this article I am going to explain how you can generate a scheduled database backup and upload it to Google Drive using Node.js and the Google APIs.

We will use MongoDB as the database to back up and will use Google Cloud to create a Service Account. I am assuming that you have some knowledge of the mongodump command and have configured a Google Cloud project in the past. If not, quickly go through the following links to get the basic idea.

Command to generate MongoDB backup 

How to create Google Service Account

If you are like me, the obvious question that comes to your mind is: why do we need a Google Service Account? We need it because we don't want any human interaction — the complete process should be automated, server to server. That's where a Google service account is useful. The default authentication method of the Google APIs requires a user account access token, so you would need to open an auth link, generate that token manually, and refresh it periodically — but that's all a manual process, and I want to avoid it.

If you have reached this point, I am assuming you have created a Google service account and obtained the JSON config file for it. You should also have a basic idea about the mongodump command and its parameters from the link above.

We will go by following steps...

Step 1: Create mongodb backup

To run command we will need to execute dump command and for this we will require node.js child_process, shown below.


exec = require("child_process").exec;
const config = require('./config.json');
...

//backup mongodb database
const cmd =
"mongodump --host " +
config.DB_OPTIONS.host +
" --db " +
config.DB_OPTIONS.database +
" --out " +
__dirname; // Command for mongodb dump process

console.log("DB backup started ... ");
console.log(cmd);
exec(cmd, function(error, stdout, stderr) {
//once successful or error 
});

Step 2: Zip Backup Folder

Step 1 will generate backup folder for mongodb database and now we will zip that folder using


zipFolder = require("zip-folder");
...

// `cmd` is the mongodump command built in Step 1; `empty` is a small
// helper (defined in the complete source below) that treats
// undefined/null/false/0/''/'0' and key-less objects as "empty".
exec(cmd, function(error, stdout, stderr) {
// NOTE(review): when `error` is non-empty nothing is logged — failures
// are silently swallowed here; the else branch is worth adding.
if (empty(error)) {
console.log("DB backup generated ... ");

//zip backup
// Compress the dump folder into a single <database>.zip next to this
// script so one file can be uploaded to Drive.
zipFolder(
    __dirname + "/" + config.DB_OPTIONS.database, //source
    __dirname + "/" + config.DB_OPTIONS.database + ".zip", //destination
function(err) {
    if (err) {
    console.log("Zip error ... ");
    console.log("oh no!", err);
    } else {
    console.log("Backup zipped successful");

    //upload on drive
    ...
    }
}
);
}
});     

Step 3: Upload to Google Drive

Once backup zip file is generated we can now upload it to google drive using following code 


...
/**
 * Uploads the generated backup zip to Google Drive using a Google service
 * account (server-to-server JWT auth, no user interaction required).
 * @param {function(string)} cb - invoked with the created Drive file id on success.
 */
function uploadFileToDrive(cb) {
const fs = require('fs');
const path = require('path');
const { google } = require('googleapis');
const key = require('./service.json') //JSON FILE FOR YOUR SERVICE ACCOUNT

// Full Drive access; a service account can only see files/folders that
// were explicitly shared with its e-mail address.
const SCOPES = ['https://www.googleapis.com/auth/drive'];

authorize(null, uploadFile);

// Authenticates as the service account via a signed JWT and hands the
// authorized client to `callback`.
function authorize(credentials, callback) {
const jwt = new google.auth.JWT(key.client_email, null, key.private_key, SCOPES)

jwt.authorize((err, response) => {
    if (err) console.log(err)
    if (response) callback(jwt);
})
}

// Streams the zip file into the configured Drive folder.
function uploadFile(auth) {
const fileName = config.DB_OPTIONS.database + ".zip";
const drive = google.drive({ version: 'v3', auth });
var fileMetadata = {
    'name': fileName,
    parents: [config.FOLDER_ID] //parameter to upload in any Google Drive folder
};
var media = {
    mimeType: "application/zip",
    // The zip is created next to this script, so resolve it against
    // __dirname rather than the process cwd.
    body: fs.createReadStream(path.join(__dirname, fileName))
};
drive.files.create({
    resource: fileMetadata,
    media: media,
    fields: 'id'
}, function (err, file) {
    if (err) {
        // Handle error
        console.error(err);
        console.log('Make sure you shared your drive folder with service email/user.')
    } else {
        // googleapis v3+ wraps API results: the file resource is on `.data`,
        // so `file.id` would be undefined here.
        console.log('File Id: ', file.data.id);
        if (cb) cb(file.data.id)
    }
});
}
}

In this step, notice that I have specified a folder id while creating the file on Google Drive. So, like me, if you want to create the file within a folder, make sure such a folder exists in your service account's drive, OR share another Google Drive folder with that service account user with edit/owner rights.

Step 4: Remove backup folder and zip file


rimraf.sync(config.DB_OPTIONS.database);
rimraf.sync(config.DB_OPTIONS.database + ".zip");

That's it!!

Complete Source Code

config.js

app.js


const exec = require("child_process").exec;
const zipFolder = require("zip-folder");
const rimraf = require("rimraf");
const config = require('./config.json');

/**
 * Dumps the configured MongoDB database next to this script, zips the dump
 * folder, uploads the zip to Google Drive and, on success, removes the
 * local dump folder and zip file.
 */
function takeMongoBackup() {
//remove any stale dump folder from a previous (possibly failed) run.
//Use an absolute path: the dump is created under __dirname, so cleaning a
//cwd-relative path would miss it when cwd differs from the script dir.
rimraf.sync(__dirname + "/" + config.DB_OPTIONS.database);

//backup mongo
const cmd =
"mongodump --host " +
config.DB_OPTIONS.host +
" --db " +
config.DB_OPTIONS.database +
" --out " +
__dirname; // Command for mongodb dump process

console.log("DB backup started ... ");
console.log(cmd);
exec(cmd, function(error, stdout, stderr) {
if (!empty(error)) {
    //previously failures were silently swallowed — surface them instead.
    console.log("DB backup failed ... ");
    console.error(error, stderr);
    return;
}
console.log("DB backup generated ... ");

//zip backup so a single file can be uploaded
zipFolder(
    __dirname + "/" + config.DB_OPTIONS.database, //source
    __dirname + "/" + config.DB_OPTIONS.database + ".zip", //destination
function(err) {
    if (err) {
    console.log("Zip error ... ");
    console.log("oh no!", err);
    return;
    }
    console.log("Backup zipped successful");

    //upload on drive, then clean up the local dump folder and zip
    uploadFileToDrive(()=>{
        rimraf.sync(__dirname + "/" + config.DB_OPTIONS.database);
        rimraf.sync(__dirname + "/" + config.DB_OPTIONS.database + ".zip");
    })
}
);
});
}


/**
 * Uploads the generated backup zip to Google Drive using a Google service
 * account (server-to-server JWT auth, no user interaction required).
 * @param {function(string)} cb - invoked with the created Drive file id on success.
 */
function uploadFileToDrive(cb) {
const fs = require('fs');
const path = require('path');
const { google } = require('googleapis');
const key = require('./service.json') // service-account credentials

authorize(null, uploadFile);

// Authenticates as the service account via a signed JWT and hands the
// authorized client to `callback`.
function authorize(credentials, callback) {
const scopes = 'https://www.googleapis.com/auth/drive'
const jwt = new google.auth.JWT(key.client_email, null, key.private_key, scopes)

jwt.authorize((err, response) => {
    if (err) console.log(err)
    if (response) callback(jwt);
})
}

// Streams the zip file into the configured Drive folder.
function uploadFile(auth) {
const fileName = config.DB_OPTIONS.database + ".zip";
const drive = google.drive({ version: 'v3', auth });
var fileMetadata = {
    'name': fileName,
    parents: [config.FOLDER_ID] // upload into this Drive folder
};
var media = {
    mimeType: "application/zip",
    // The zip is created next to this script, so resolve it against
    // __dirname rather than the process cwd.
    body: fs.createReadStream(path.join(__dirname, fileName))
};
drive.files.create({
    resource: fileMetadata,
    media: media,
    fields: 'id'
}, function (err, file) {
    if (err) {
        // Handle error
        console.error(err);
        console.log('Make sure you shared your drive folder with service email/user.')
    } else {
        // googleapis v3+ wraps API results: the file resource is on `.data`,
        // so `file.id` would be undefined here.
        console.log('File Id: ', file.data.id);
        if (cb) cb(file.data.id)
    }
});
}
}

/**
 * Returns true when `mixedVar` is "empty": undefined, null, false, 0, '',
 * the string '0', or an object/array with no own enumerable keys.
 * @param {*} mixedVar - value to inspect.
 * @returns {boolean}
 */
const empty = function (mixedVar) {
  const emptyValues = [undefined, null, false, 0, '', '0'];
  for (let i = 0, len = emptyValues.length; i < len; i++) {
    if (mixedVar === emptyValues[i]) {
      return true;
    }
  }
  if (typeof mixedVar === 'object') {
    // Any own/inherited enumerable key means "not empty".
    for (const key in mixedVar) {
      return false;
    }
    return true;
  }
  return false;
};

const cron = require('node-cron');

// Run the backup on the cron expression configured in config.json.
cron.schedule(config.SCHEDULE_TIME, () => {
  console.log("running scheduled backup task");
  takeMongoBackup();
});

takeMongoBackup(); //IF YOU WANT TO RUN BACKUP UPON RUN
What next? if you have similar requirement or completely new requirement then you can contact us at hello@3braintechnologies.com OR call us on +91 8866 133 870.
about author

Hitesh Agja

I am Hitesh Agja, and I have 12+ years of industry experience. I am always excited and passionate about learning new things, technical or not. Life is all about learning new things and making the world more progressive.

Let's talkhire -button