Bulk upsert in MongoDB using mongoose

Bulk upsert in MongoDB using mongoose


Not in "mongoose" specifically, or at least not yet as of writing. The MongoDB shell as of the 2.6 release actually uses the "Bulk operations API" "under the hood" as it were for all of the general helper methods. In its implementation, it tries to do this first, and if an older version server is detected then there is a "fallback" to the legacy implementation.

All of the mongoose methods "currently" use the "legacy" implementation of the write concern response and the basic legacy methods. But there is a .collection accessor on any given mongoose model that essentially accesses the "collection object" from the underlying "node native driver" on which mongoose itself is implemented:

 var mongoose = require('mongoose'),     Schema = mongoose.Schema; mongoose.connect('mongodb://localhost/test'); var sampleSchema  = new Schema({},{ "strict": false }); var Sample = mongoose.model( "Sample", sampleSchema, "sample" ); mongoose.connection.on("open", function(err,conn) {     var bulk = Sample.collection.initializeOrderedBulkOp();    var counter = 0;    // representing a long loop    for ( var x = 0; x < 100000; x++ ) {        bulk.find(/* some search */).upsert().updateOne(            /* update conditions */        });        counter++;        if ( counter % 1000 == 0 )            bulk.execute(function(err,result) {                             bulk = Sample.collection.initializeOrderedBulkOp();            });    }    if ( counter % 1000 != 0 )        bulk.execute(function(err,result) {           // maybe do something with result        }); });

The main catch there being that "mongoose methods" are actually aware that a connection may not actually be made yet and "queue" until this is complete. The native driver you are "digging into" does not make this distinction.

So you really have to be aware that the connection is established in some way or form. But you can use the native driver methods as long as you are careful with what you are doing.


You don't need to manage limit (1000) as @neil-lunn suggested. Mongoose does this already. I used his great answer as a basis for this complete Promise-based implementation & example:

var Promise = require('bluebird');
var mongoose = require('mongoose');

var Show = mongoose.model('Show', {
  "id": Number,
  "title": String,
  "provider": { 'type': String, 'default': 'eztv' }
});

/**
 * Atomic connect Promise - not sure if I need this, might be in mongoose already..
 * @return {Promise}
 */
// FIX: JSDoc typo — the original said "@return {Priomise}".
function connect(uri, options) {
  return new Promise(function (resolve, reject) {
    mongoose.connect(uri, options, function (err) {
      if (err) return reject(err);
      resolve(mongoose.connection);
    });
  });
}

/**
 * Bulk-upsert an array of records
 * @param  {Array}    records  List of records to update
 * @param  {Model}    Model    Mongoose model to update
 * @param  {Object}   match    Database field to match
 * @return {Promise}  always resolves a BulkWriteResult
 */
function save(records, Model, match) {
  match = match || 'id';
  return new Promise(function (resolve, reject) {
    var bulk = Model.collection.initializeUnorderedBulkOp();
    records.forEach(function (record) {
      var query = {};
      query[match] = record[match];
      bulk.find(query).upsert().updateOne(record);
    });
    bulk.execute(function (err, bulkres) {
      if (err) return reject(err);
      resolve(bulkres);
    });
  });
}

/**
 * Map function for EZTV-to-Show
 * @param  {Object} show EZTV show
 * @return {Object}      Mongoose Show object
 */
function mapEZ(show) {
  return {
    title: show.title,
    id: Number(show.id),
    provider: 'eztv'
  };
}

// if you are not using EZTV, put shows in here
var shows = []; // giant array of {id: X, title: "X"}

// var eztv = require('eztv');
// eztv.getShows({}, function(err, shows){
//   if(err) return console.log('EZ Error:', err);
//   var shows = shows.map(mapEZ);
  console.log('found', shows.length, 'shows.');
  connect('mongodb://localhost/tv', {}).then(function (db) {
    save(shows, Show).then(function (bulkRes) {
      console.log('Bulk complete.', bulkRes);
      db.close();
    }, function (err) {
      console.log('Bulk Error:', err);
      db.close();
    });
  }, function (err) {
    console.log('DB Error:', err);
  });
// });

This has the bonus of closing the connection when it's done, displaying any errors if you care, but ignoring them if not (error callbacks in Promises are optional.) It's also very fast. Just leaving this here to share my findings. You can uncomment the eztv stuff if you want to save all eztv shows to a database, as an example.


// One round-trip upsert of every doc, keyed by its `id` field.
// NOTE(review): some newer driver versions require atomic operators
// (e.g. { $set: doc }) in `update` — confirm against your driver version.
await Model.bulkWrite(
  docs.map((doc) => ({
    updateOne: {
      filter: { id: doc.id },
      update: doc,
      upsert: true,
    },
  }))
);

Or more verbose:

// Build the operations list first, then hand the whole thing to bulkWrite.
const bulkOps = docs.map((doc) => ({
  updateOne: {
    filter: { id: doc.id },
    update: doc,
    upsert: true,
  },
}));

Model.bulkWrite(bulkOps)
  .then((bulkWriteOpResult) => console.log('BULK update OK:', bulkWriteOpResult))
  .catch((err) => console.error('BULK update error:', err));

https://stackoverflow.com/a/60330161/5318303