Artur
Artur

Reputation: 357

Mongoose bulk update/save

I read that Model.collection.insert is faster than Model.create.

I have 1,000,000 ("1kk") users in User.collection, and I want to update all of them every minute. This is my code:

const mongoose = require('mongoose');
const User = require('./models/user');
const async = require('async');

/**
 * Returns a uniformly distributed random integer in the inclusive range [min, max].
 *
 * @param {number} min - Lower bound (inclusive).
 * @param {number} max - Upper bound (inclusive).
 * @returns {number} An integer n with min <= n <= max.
 */
function getRandomArbitrary(min, max) {
    // Math.floor over a widened interval gives every integer equal weight.
    // The previous Math.round(Math.random() * (max - min) + min) form gave
    // the endpoints min and max only half the probability of interior values.
    return Math.floor(Math.random() * (max - min + 1)) + min;
}

// Fetch every user, then re-randomize and persist each one strictly in
// sequence: the next save starts only after the previous one finishes.
User.find({}, (err, docs) => {
    const saveOne = (user, done) => {
        user.value = getRandomArbitrary(0, 100);
        user.save((saveErr) => done(saveErr, user));
    };
    async.mapSeries(docs, saveOne, (seriesErr, updated) => {
        if (seriesErr) return seriesErr;
        console.log('completed');
    });
});

Is this a good way to accomplish this? It takes more than 1 minute — how can I increase performance?

Upvotes: 1

Views: 721

Answers (1)

rsp
rsp

Reputation: 111278

The way you are doing it now waits for one document to finish saving before starting the next. You can parallelize it (completely or partially) to make it finish faster.

You can use async.map instead of async.mapSeries to make it work in parallel.

Or you can use async.mapLimit to limit the number of operations taking place at the same time.

See:

Example with async.map:

User.find({}, (err, docs) => {
    // Bail out on a query error: otherwise `docs` is undefined and
    // async.map would throw instead of reporting the failure.
    if (err) return console.error(err);
    // Save every document in parallel; callback fires once per doc.
    async.map(docs, (doc, callback) => {
        doc.value = getRandomArbitrary(0,100);
        doc.save((saveErr) => {
            callback(saveErr, doc);
        });
    }, (mapErr, result) => {
        // Surface the first save error instead of silently discarding it.
        if (mapErr) return console.error(mapErr);
        console.log('completed');
    });
});

Example with async.mapLimit:

// Cap the number of in-flight save operations to avoid exhausting
// connections or memory with a fully parallel run.
const LIMIT = 10;
User.find({}, (err, docs) => {
    // Bail out on a query error: otherwise `docs` is undefined and
    // async.mapLimit would throw instead of reporting the failure.
    if (err) return console.error(err);
    async.mapLimit(docs, LIMIT, (doc, callback) => {
        doc.value = getRandomArbitrary(0,100);
        doc.save((saveErr) => {
            callback(saveErr, doc);
        });
    }, (mapErr, result) => {
        // Surface the first save error instead of silently discarding it.
        if (mapErr) return console.error(mapErr);
        console.log('completed');
    });
});

Just set LIMIT to whatever number you want.

Upvotes: 1

Related Questions