diff --git a/lib/helpers/model/castBulkWrite.js b/lib/helpers/model/castBulkWrite.js
index 5b882b6d5fe..27723faa6e7 100644
--- a/lib/helpers/model/castBulkWrite.js
+++ b/lib/helpers/model/castBulkWrite.js
@@ -20,7 +20,7 @@ module.exports = function castBulkWrite(originalModel, op, options) {
       const model = decideModelByObject(originalModel, op['insertOne']['document']);

       const doc = new model(op['insertOne']['document']);
-      if (model.schema.options.timestamps) {
+      if (model.schema.options.timestamps && options.timestamps !== false) {
         doc.initializeTimestamps();
       }
       if (options.session != null) {
diff --git a/lib/model.js b/lib/model.js
index 5a7a95a7e0e..26fb9a511a0 100644
--- a/lib/model.js
+++ b/lib/model.js
@@ -3616,41 +3616,50 @@ Model.bulkWrite = function(ops, options, callback) {
  *
  * @param {Array} documents
  * @param {Object} [options] options passed to the underlying `bulkWrite()`
+ * @param {Boolean} [options.timestamps] defaults to `null`. When set to `false`, mongoose will not add/update timestamps to the documents.
  * @param {ClientSession} [options.session=null] The session associated with this bulk write. See [transactions docs](/docs/transactions.html).
  * @param {String|number} [options.w=1] The [write concern](https://docs.mongodb.com/manual/reference/write-concern/). See [`Query#w()`](/docs/api.html#query_Query-w) for more information.
  * @param {number} [options.wtimeout=null] The [write concern timeout](https://docs.mongodb.com/manual/reference/write-concern/#wtimeout).
  * @param {Boolean} [options.j=true] If false, disable [journal acknowledgement](https://docs.mongodb.com/manual/reference/write-concern/#j-option)
  *
  */
-Model.bulkSave = function(documents, options) {
-  const preSavePromises = documents.map(buildPreSavePromise);
-
-  const writeOperations = this.buildBulkWriteOperations(documents, { skipValidation: true });
-
-  let bulkWriteResultPromise;
-  return Promise.all(preSavePromises)
-    .then(() => bulkWriteResultPromise = this.bulkWrite(writeOperations, options))
-    .then(() => documents.map(buildSuccessfulWriteHandlerPromise))
-    .then(() => bulkWriteResultPromise)
-    .catch((err) => {
-      if (!(err && err.writeErrors && err.writeErrors.length)) {
-        throw err;
-      }
-      return Promise.all(
-        documents.map((document) => {
-          const documentError = err.writeErrors.find(writeError => {
-            const writeErrorDocumentId = writeError.err.op._id || writeError.err.op.q._id;
-            return writeErrorDocumentId.toString() === document._id.toString();
-          });
+Model.bulkSave = async function(documents, options) {
+  options = options || {};

-          if (documentError == null) {
-            return buildSuccessfulWriteHandlerPromise(document);
-          }
-        })
-      ).then(() => {
-        throw err;
+  const writeOperations = this.buildBulkWriteOperations(documents, { skipValidation: true, timestamps: options.timestamps });
+
+  if (options.timestamps != null) {
+    for (const document of documents) {
+      document.$__.saveOptions = document.$__.saveOptions || {};
+      document.$__.saveOptions.timestamps = options.timestamps;
+    }
+  }
+
+  await Promise.all(documents.map(buildPreSavePromise));
+
+  const { bulkWriteResult, bulkWriteError } = await this.bulkWrite(writeOperations, options).then(
+    (res) => ({ bulkWriteResult: res, bulkWriteError: null }),
+    (err) => ({ bulkWriteResult: null, bulkWriteError: err })
+  );
+
+  await Promise.all(
+    documents.map(async(document) => {
+      const documentError = bulkWriteError && bulkWriteError.writeErrors.find(writeError => {
+        const writeErrorDocumentId = writeError.err.op._id || writeError.err.op.q._id;
+        return writeErrorDocumentId.toString() === document._id.toString();
       });
-    });
+
+      if (documentError == null) {
+        await handleSuccessfulWrite(document);
+      }
+    })
+  );
+
+  if (bulkWriteError && bulkWriteError.writeErrors && bulkWriteError.writeErrors.length) {
+    throw bulkWriteError;
+  }
+
+  return bulkWriteResult;
 };

 function buildPreSavePromise(document) {
@@ -3665,24 +3674,21 @@ function buildPreSavePromise(document) {
   });
 }

-function buildSuccessfulWriteHandlerPromise(document) {
+function handleSuccessfulWrite(document) {
   return new Promise((resolve, reject) => {
-    handleSuccessfulWrite(document, resolve, reject);
-  });
-}
+    if (document.$isNew) {
+      _setIsNew(document, false);
+    }

-function handleSuccessfulWrite(document, resolve, reject) {
-  if (document.$isNew) {
-    _setIsNew(document, false);
-  }
+    document.$__reset();
+    document.schema.s.hooks.execPost('save', document, {}, (err) => {
+      if (err) {
+        reject(err);
+        return;
+      }
+      resolve();
+    });

-  document.$__reset();
-  document.schema.s.hooks.execPost('save', document, {}, (err) => {
-    if (err) {
-      reject(err);
-      return;
-    }
-    resolve();
   });
 }

@@ -3692,6 +3698,7 @@ function handleSuccessfulWrite(document, resolve, reject) {
  * @param {Array} documents The array of documents to build write operations of
  * @param {Object} options
  * @param {Boolean} options.skipValidation defaults to `false`, when set to true, building the write operations will bypass validating the documents.
+ * @param {Boolean} options.timestamps defaults to `null`. When set to `false`, mongoose will not add/update timestamps to the documents.
  * @return {Array} Returns a array of all Promises the function executes to be awaited.
  * @api private
  */
@@ -3715,9 +3722,9 @@ Model.buildBulkWriteOperations = function buildBulkWriteOperations(documents, op
     const isANewDocument = document.isNew;

     if (isANewDocument) {
-      accumulator.push({
-        insertOne: { document }
-      });
+      const writeOperation = { insertOne: { document } };
+      utils.injectTimestampsOption(writeOperation.insertOne, options.timestamps);
+      accumulator.push(writeOperation);

       return accumulator;
     }
@@ -3732,13 +3739,9 @@ Model.buildBulkWriteOperations = function buildBulkWriteOperations(documents, op
     _applyCustomWhere(document, where);

     document.$__version(where, delta);
-
-    accumulator.push({
-      updateOne: {
-        filter: where,
-        update: changes
-      }
-    });
+    const writeOperation = { updateOne: { filter: where, update: changes } };
+    utils.injectTimestampsOption(writeOperation.updateOne, options.timestamps);
+    accumulator.push(writeOperation);

     return accumulator;
   }
@@ -3757,6 +3760,7 @@ Model.buildBulkWriteOperations = function buildBulkWriteOperations(documents, op
   }
 };

+
 /**
  * Shortcut for creating a new Document from existing raw data, pre-saved in the DB.
  * The document returned has no paths marked as modified initially.
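Taken together, the `lib/model.js` changes above let callers opt out of (or force) timestamp handling per `bulkSave()` call, with `buildBulkWriteOperations()` forwarding the flag onto each generated operation. A minimal usage sketch, assuming a schema compiled with `{ timestamps: true }`; the `Person` model and field names below are illustrative only, not part of the patch:

// Hypothetical model; any schema compiled with { timestamps: true } behaves the same.
const Person = mongoose.model('Person', new mongoose.Schema({ name: String }, { timestamps: true }));

const docs = await Person.find();
for (const doc of docs) {
  doc.name = doc.name.trim();
}

// Default behaviour: updatedAt is bumped on every changed document.
await Person.bulkSave(docs);

// With the new option, neither createdAt nor updatedAt is added or updated,
// matching the `timestamps: false` test added in test/model.test.js below.
await Person.bulkSave(docs, { timestamps: false });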
diff --git a/lib/utils.js b/lib/utils.js
index 50058acdc13..8ebd81d8835 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -972,3 +972,11 @@ exports.errorToPOJO = function errorToPOJO(error) {
 exports.warn = function warn(message) {
   return process.emitWarning(message, { code: 'MONGOOSE' });
 };
+
+
+exports.injectTimestampsOption = function injectTimestampsOption(writeOperation, timestampsOption) {
+  if (timestampsOption == null) {
+    return;
+  }
+  writeOperation.timestamps = timestampsOption;
+};
diff --git a/test/model.test.js b/test/model.test.js
index 39f3a5dfeeb..f47b864aadd 100644
--- a/test/model.test.js
+++ b/test/model.test.js
@@ -8117,6 +8117,73 @@ describe('Model', function() {

       assert.equal(writeOperations.length, 3);
     });
+
+    it('accepts `timestamps: false` (gh-12059)', async() => {
+      // Arrange
+      const userSchema = new Schema({
+        name: { type: String, minLength: 5 }
+      });
+
+      const User = db.model('User', userSchema);
+
+      const newUser = new User({ name: 'Hafez' });
+      const userToUpdate = await User.create({ name: 'Hafez' });
+      userToUpdate.name = 'John Doe';
+
+      // Act
+      const writeOperations = User.buildBulkWriteOperations([newUser, userToUpdate], { timestamps: false, skipValidation: true });
+
+      // Assert
+      const timestampsOptions = writeOperations.map(writeOperationContainer => {
+        const operationObject = writeOperationContainer.updateOne || writeOperationContainer.insertOne;
+        return operationObject.timestamps;
+      });
+      assert.deepEqual(timestampsOptions, [false, false]);
+    });
+    it('accepts `timestamps: true` (gh-12059)', async() => {
+      // Arrange
+      const userSchema = new Schema({
+        name: { type: String, minLength: 5 }
+      });
+
+      const User = db.model('User', userSchema);
+
+      const newUser = new User({ name: 'Hafez' });
+      const userToUpdate = await User.create({ name: 'Hafez' });
+      userToUpdate.name = 'John Doe';
+
+      // Act
+      const writeOperations = User.buildBulkWriteOperations([newUser, userToUpdate], { timestamps: true, skipValidation: true });
+
+      // Assert
+      const timestampsOptions = writeOperations.map(writeOperationContainer => {
+        const operationObject = writeOperationContainer.updateOne || writeOperationContainer.insertOne;
+        return operationObject.timestamps;
+      });
+      assert.deepEqual(timestampsOptions, [true, true]);
+    });
+    it('`timestamps` has `undefined` as default value (gh-12059)', async() => {
+      // Arrange
+      const userSchema = new Schema({
+        name: { type: String, minLength: 5 }
+      });
+
+      const User = db.model('User', userSchema);
+
+      const newUser = new User({ name: 'Hafez' });
+      const userToUpdate = await User.create({ name: 'Hafez' });
+      userToUpdate.name = 'John Doe';
+
+      // Act
+      const writeOperations = User.buildBulkWriteOperations([newUser, userToUpdate], { skipValidation: true });
+
+      // Assert
+      const timestampsOptions = writeOperations.map(writeOperationContainer => {
+        const operationObject = writeOperationContainer.updateOne || writeOperationContainer.insertOne;
+        return operationObject.timestamps;
+      });
+      assert.deepEqual(timestampsOptions, [undefined, undefined]);
+    });
   });

   describe('bulkSave() (gh-9673)', function() {
@@ -8205,7 +8272,6 @@ describe('Model', function() {
     });

     it('throws an error on failure', async() => {
-
       const userSchema = new Schema({
         name: { type: String, unique: true }
       });
@@ -8225,8 +8291,8 @@ describe('Model', function() {

       const err = await User.bulkSave(users).then(() => null, err => err);
       assert.ok(err);
-
     });
+

     it('changes document state from `isNew` `false` to `true`', async() => {
       const userSchema = new Schema({
@@ -8381,6 +8447,76 @@ describe('Model', function() {
       const res = await model.bulkSave(entries);
       assert.ok(res);
     });
+
+    it('accepts `timestamps: false` (gh-12059)', async() => {
+      // Arrange
+      const userSchema = new Schema({
+        name: { type: String }
+      }, { timestamps: true });
+
+      const User = db.model('User', userSchema);
+      const newUser = new User({ name: 'Sam' });
+
+      const userToUpdate = await User.create({ name: 'Hafez', createdAt: new Date('1994-12-04'), updatedAt: new Date('1994-12-04') });
+      userToUpdate.name = 'John Doe';
+
+      // Act
+      await User.bulkSave([newUser, userToUpdate], { timestamps: false });
+
+
+      // Assert
+      const createdUserPersistedInDB = await User.findOne({ _id: newUser._id });
+      assert.deepStrictEqual(newUser.createdAt, undefined);
+      assert.deepStrictEqual(newUser.updatedAt, undefined);
+
+      assert.deepStrictEqual(createdUserPersistedInDB.createdAt, undefined);
+      assert.deepStrictEqual(createdUserPersistedInDB.updatedAt, undefined);
+      assert.deepStrictEqual(userToUpdate.createdAt, new Date('1994-12-04'));
+      assert.deepStrictEqual(userToUpdate.updatedAt, new Date('1994-12-04'));
+    });
+
+    it('accepts `timestamps: true` (gh-12059)', async() => {
+      // Arrange
+      const userSchema = new Schema({
+        name: { type: String, minLength: 5 }
+      }, { timestamps: true });
+
+      const User = db.model('User', userSchema);
+
+      const newUser = new User({ name: 'Hafez' });
+      const userToUpdate = await User.create({ name: 'Hafez' });
+      userToUpdate.name = 'John Doe';
+
+      // Act
+      await User.bulkSave([newUser, userToUpdate], { timestamps: true });
+
+      // Assert
+      assert.ok(newUser.createdAt);
+      assert.ok(newUser.updatedAt);
+      assert.ok(userToUpdate.createdAt);
+      assert.ok(userToUpdate.updatedAt);
+    });
+    it('`timestamps` has `undefined` as default value (gh-12059)', async() => {
+      // Arrange
+      const userSchema = new Schema({
+        name: { type: String, minLength: 5 }
+      }, { timestamps: true });
+
+      const User = db.model('User', userSchema);
+
+      const newUser = new User({ name: 'Hafez' });
+      const userToUpdate = await User.create({ name: 'Hafez' });
+      userToUpdate.name = 'John Doe';
+
+      // Act
+      await User.bulkSave([newUser, userToUpdate]);
+
+      // Assert
+      assert.ok(newUser.createdAt);
+      assert.ok(newUser.updatedAt);
+      assert.ok(userToUpdate.createdAt);
+      assert.ok(userToUpdate.updatedAt);
+    });
   });

   describe('Setting the explain flag', function() {
diff --git a/test/types/models.test.ts b/test/types/models.test.ts
index 5d8e33bd1d2..193992db7fb 100644
--- a/test/types/models.test.ts
+++ b/test/types/models.test.ts
@@ -320,6 +320,24 @@ function gh11911() {
   });
 }

+
+function gh12059() {
+  interface IAnimal {
+    name?: string;
+  }
+
+  const animalSchema = new Schema<IAnimal>({
+    name: { type: String }
+  });
+
+  const Animal = model('Animal', animalSchema);
+  const animal = new Animal();
+
+  Animal.bulkSave([animal], { timestamps: false });
+  Animal.bulkSave([animal], { timestamps: true });
+  Animal.bulkSave([animal], {});
+}
+
 function gh12100() {
   const schema = new Schema();
diff --git a/types/models.d.ts b/types/models.d.ts
index d4a6f1a1797..6396a4bebf1 100644
--- a/types/models.d.ts
+++ b/types/models.d.ts
@@ -149,7 +149,7 @@ declare module 'mongoose' {
      * sending multiple `save()` calls because with `bulkSave()` there is only one
      * network round trip to the MongoDB server.
      */
-    bulkSave(documents: Array<Document>, options?: mongodb.BulkWriteOptions): Promise<Document>;
+    bulkSave(documents: Array<Document>, options?: mongodb.BulkWriteOptions & { timestamps?: boolean }): Promise<Document>;

     /** Collection the model uses. */
     collection: Collection;
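The `types/models.d.ts` change widens the options type by intersection, so native driver options stay available next to the new flag. A short sketch of the resulting call site, assuming a hypothetical `User` model and `docs` array (illustrative only, not part of the patch):

// `ordered` comes from the driver's BulkWriteOptions; `timestamps` is the
// mongoose-specific flag added by this patch.
await User.bulkSave(docs, { ordered: false, timestamps: false });

// Internally, buildBulkWriteOperations() copies the flag onto each generated
// operation via utils.injectTimestampsOption(), so the operations look like:
//   { insertOne: { document, timestamps: false } }
//   { updateOne: { filter, update, timestamps: false } }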