You need the .aggregate() method, which provides access to the aggregation pipeline.
In the $project stage, use the $concat operator to combine your fields.
You can then use the aggregation result to update your collection with "bulk" operations for efficiency.
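Run on its own, the projection simply produces the concatenated strings; a minimal sketch, assuming the "date", "time", and "endtime" field names from the sample documents shown further down:

db.events.aggregate([
    { "$project": {
        "iso_start": { "$concat": [ "$date", " ", "$time" ] },
        "iso_end": { "$concat": [ "$date", " ", "$endtime" ] }
    }}
])

// For the first sample document this yields something like:
// { "_id": "aaaaaaaaaaaa",
//   "iso_start": "Thursday, November 12, 2015 9:30 AM",
//   "iso_end": "Thursday, November 12, 2015 11:30 AM" }

These strings can then be passed to new Date(...) on the client side.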
var bulk = db.events.initializeOrderedBulkOp();
var count = 0;

db.events.aggregate([
    { "$project": {
        "iso_start": { "$concat": [ "$date", " ", "$time" ] },
        "iso_end": { "$concat": [ "$date", " ", "$endtime" ] }
    }}
]).forEach(function(doc) {
    bulk.find({ "_id": doc._id }).updateOne({
        "$set": {
            "iso_start": new Date(doc.iso_start),
            "iso_end": new Date(doc.iso_end)
        }
    });
    count++;
    // Execute every 200 queued operations and re-initialize the builder
    if (count % 200 === 0) {
        bulk.execute();
        bulk = db.events.initializeOrderedBulkOp();
    }
});

// Flush any remaining queued operations
if (count % 200 !== 0) {
    bulk.execute();
}
After this operation, your documents look as follows:
{ "_id" : "aaaaaaaaaaaa", "title" : "Hello, World!", "date" : "Thursday, November 12, 2015", "time" : "9:30 AM", "endtime" : "11:30 AM", "iso_start" : ISODate("2015-11-12T06:30:00Z"), "iso_end" : ISODate("2015-11-12T08:30:00Z") } { "_id" : "bbbbbbbbbbbb", "title" : "To B or not to B", "date" : "Thursday, November 12, 2015", "time" : "10:30 AM", "endtime" : "11:00 AM", "iso_start" : ISODate("2015-11-12T07:30:00Z"), "iso_end" : ISODate("2015-11-12T08:00:00Z") }
This is not the end of the story: the "Bulk" API and its associated methods are deprecated in the upcoming release (version 3.2), so from that version onward you will need to use the db.collection.bulkWrite() method instead.
var operations = [];

db.events.aggregate([
    { "$project": {
        "iso_start": { "$concat": [ "$date", " ", "$time" ] },
        "iso_end": { "$concat": [ "$date", " ", "$endtime" ] }
    }}
]).forEach(function(doc) {
    operations.push({
        updateOne: {
            filter: { "_id": doc._id },
            update: {
                "$set": {
                    "iso_start": new Date(doc.iso_start),
                    "iso_end": new Date(doc.iso_end)
                }
            }
        }
    });
});

// Options go in bulkWrite's second argument, not in the operations array
db.events.bulkWrite(operations, {
    ordered: true,
    writeConcern: { w: "majority", wtimeout: 5000 }
});
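If the collection is large, you may not want to hold the entire operations array in memory. A sketch of the same idea with periodic flushing, mirroring the 200-operation batching above (the 1000-operation batch size here is an arbitrary choice):

var operations = [];
db.events.aggregate([
    { "$project": {
        "iso_start": { "$concat": [ "$date", " ", "$time" ] },
        "iso_end": { "$concat": [ "$date", " ", "$endtime" ] }
    }}
]).forEach(function(doc) {
    operations.push({
        updateOne: {
            filter: { "_id": doc._id },
            update: { "$set": {
                "iso_start": new Date(doc.iso_start),
                "iso_end": new Date(doc.iso_end)
            }}
        }
    });
    if (operations.length === 1000) {
        db.events.bulkWrite(operations);   // flush a full batch
        operations = [];
    }
});
if (operations.length > 0) {
    db.events.bulkWrite(operations);       // flush the remainder
}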