It is worth noting that although the other answer here is correct — the general approach is to $unset the matched array element to create a null value, and then $pull only null values from the array — there are better ways to implement this in modern versions of MongoDB.
Using bulkWrite()
As an alternative to sending two operations for sequential updates as separate requests, the modern MongoDB release supports bulk operations using the recommended bulkWrite() method, which allows you to send these multiple updates as a single request with a single response:
// Two sequential updates sent to the server as a single bulk request:
//   1) match "bird" and $unset the first matched array element (leaves a null),
//   2) $pull every null entry out of the array.
collection.bulkWrite([
  {
    updateOne: {
      filter: { array: "bird" },
      update: { $unset: { "array.$": "" } },
    },
  },
  {
    updateOne: {
      filter: { array: null },
      update: { $pull: { array: null } },
    },
  },
]);
This does the same thing as the other answer: there are still two update statements, but this time they are sent as just one request. This can save a lot of overhead when interacting with the server, so it is usually the better approach.
Using aggregation expressions
With the release of MongoDB 4.2, aggregation expressions are now allowed in the various MongoDB "update" operations. The update can be a pipeline of stages: $addFields, $set (an alias of $addFields, designed to make these update statements read more logically), $project, or $replaceRoot and its own alias $replaceWith. The $redact pipeline stage is also applicable to some extent. In principle, any pipeline stage that returns a "modified" document is allowed.
// Single update using an aggregation-pipeline expression (MongoDB 4.2+):
// rebuild the array as everything before the first "horse" concatenated
// with everything after it — i.e. drop only the first matched element.
collection.updateOne({ array: "horse" }, [
  {
    $set: {
      array: {
        $concatArrays: [
          // elements before the first match
          { $slice: ["$array", 0, { $indexOfArray: ["$array", "horse"] }] },
          // elements after the first match
          {
            $slice: [
              "$array",
              { $add: [{ $indexOfArray: ["$array", "horse"] }, 1] },
              { $size: "$array" },
            ],
          },
        ],
      },
    },
  },
]);
In this case, the $slice and $indexOfArray operators are used to essentially splice together a new array that "skips" the first matched element. These pieces are joined using the $concatArrays operator, returning a new array without that first matched element.
Now this is probably more efficient, since the operation — which is still a single request — is now also a single operation and will put a slightly lower load on the server.
Of course, the only catch is that this is not supported in any release of MongoDB prior to 4.2. bulkWrite(), on the other hand, may be a newer API method, but the underlying server calls apply back to MongoDB 2.6, which introduced the actual "Bulk API" calls, and the driver even falls back gracefully for earlier versions. Moreover, all the main drivers actually implement this method.
Demonstration
As a demonstration, a list of both approaches is provided:
// Demonstration: remove the first matching element from an array field using
// (1) bulkWrite with $unset + $pull, and (2) a single update driven by an
// aggregation-pipeline expression (MongoDB 4.2+).
const mongoose = require('mongoose');
const { Schema } = mongoose;

const uri = 'mongodb://localhost:27017/test';
const connectionOptions = { useNewUrlParser: true, useUnifiedTopology: true };

mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.set('useCreateIndex', true);
mongoose.set('useFindAndModify', false);

const arrayTestSchema = new Schema({ array: [String] });
const ArrayTest = mongoose.model('ArrayTest', arrayTestSchema);

const array = ["bird", "tiger", "horse", "bird", "horse"];

// Pretty-print helper for query results.
const log = data => console.log(JSON.stringify(data, undefined, 2));

(async function() {
  try {
    const conn = await mongoose.connect(uri, connectionOptions);

    // Start every run from a clean slate for all registered models.
    await Promise.all(
      Object.values(conn.models).map(m => m.deleteMany())
    );
    await ArrayTest.create({ array });

    // Approach 1: bulkWrite — two update statements, one request.
    await ArrayTest.bulkWrite([
      {
        "updateOne": {
          "filter": { "array": "bird" },
          "update": { "$unset": { "array.$": "" } }
        }
      },
      {
        "updateOne": {
          "filter": { "array": null },
          "update": { "$pull": { "array": null } }
        }
      }
    ]);
    log({ bulkWriteResult: (await ArrayTest.findOne()) });

    // Approach 2: aggregation expression — one request, one operation.
    // Uses .collection deliberately: the standard mongoose model methods
    // strip the pipeline form of the update (as of mongoose 5.7.1).
    await ArrayTest.collection.updateOne(
      { "array": "horse" },
      [
        {
          "$set": {
            "array": {
              "$concatArrays": [
                // elements before the first "horse"
                { "$slice": [ "$array", 0, { "$indexOfArray": [ "$array", "horse" ] } ] },
                // elements after the first "horse"
                {
                  "$slice": [
                    "$array",
                    { "$add": [{ "$indexOfArray": [ "$array", "horse" ] }, 1] },
                    { "$size": "$array" }
                  ]
                }
              ]
            }
          }
        }
      ]
    );
    log({ aggregateWriteResult: (await ArrayTest.findOne()) });
  } catch (e) {
    console.error(e);
  } finally {
    mongoose.disconnect();
  }
})();
And the conclusion:
Mongoose: arraytests.deleteMany({}, {}) Mongoose: arraytests.insertOne({ array: [ 'bird', 'tiger', 'horse', 'bird', 'horse' ], _id: ObjectId("5d8f509114b61a30519e81ab"), __v: 0 }, { session: null }) Mongoose: arraytests.bulkWrite([ { updateOne: { filter: { array: 'bird' }, update: { '$unset': { 'array.$': '' } } } }, { updateOne: { filter: { array: null }, update: { '$pull': { array: null } } } } ], {}) Mongoose: arraytests.findOne({}, { projection: {} }) { "bulkWriteResult": { "array": [ "tiger", "horse", "bird", "horse" ], "_id": "5d8f509114b61a30519e81ab", "__v": 0 } } Mongoose: arraytests.updateOne({ array: 'horse' }, [ { '$set': { array: { '$concatArrays': [ { '$slice': [ '$array', 0, { '$indexOfArray': [ '$array', 'horse' ] } ] }, { '$slice': [ '$array', { '$add': [ { '$indexOfArray': [ '$array', 'horse' ] }, 1 ] }, { '$size': '$array' } ] } ] } } } ]) Mongoose: arraytests.findOne({}, { projection: {} }) { "aggregateWriteResult": { "array": [ "tiger", "bird", "horse" ], "_id": "5d8f509114b61a30519e81ab", "__v": 0 } }
NOTE: mongoose is used in the example listing, partly because it was referenced in another answer given, and partly to demonstrate an important point about the generalized syntax. Please note that the code uses ArrayTest.collection.updateOne(), because in the current release of Mongoose (5.7.1 at the time of writing), the aggregation-pipeline syntax for such updates is stripped out by the standard mongoose model methods.
Thus, .collection can be used to retrieve the underlying Collection object from the core MongoDB node driver. This will be required until mongoose is fixed to allow this form of update expression through its model methods.