diff --git a/lib/cast/bigint.js b/lib/cast/bigint.js
index 20e01ae58b1..c046ba0f00a 100644
--- a/lib/cast/bigint.js
+++ b/lib/cast/bigint.js
@@ -1,6 +1,5 @@
 'use strict';

-const assert = require('assert');
 const { Long } = require('bson');

 /**
@@ -13,6 +12,10 @@ const { Long } = require('bson');
  * @api private
  */

+const MAX_BIGINT = 9223372036854775807n;
+const MIN_BIGINT = -9223372036854775808n;
+const ERROR_MESSAGE = `Mongoose only supports BigInts between ${MIN_BIGINT} and ${MAX_BIGINT} because MongoDB does not support arbitrary precision integers`;
+
 module.exports = function castBigInt(val) {
   if (val == null) {
     return val;
@@ -21,6 +24,9 @@ module.exports = function castBigInt(val) {
     return null;
   }
   if (typeof val === 'bigint') {
+    if (val > MAX_BIGINT || val < MIN_BIGINT) {
+      throw new Error(ERROR_MESSAGE);
+    }
     return val;
   }

@@ -29,8 +35,12 @@ module.exports = function castBigInt(val) {
   }

   if (typeof val === 'string' || typeof val === 'number') {
-    return BigInt(val);
+    val = BigInt(val);
+    if (val > MAX_BIGINT || val < MIN_BIGINT) {
+      throw new Error(ERROR_MESSAGE);
+    }
+    return val;
   }

-  assert.ok(false);
+  throw new Error(`Cannot convert value to BigInt: "${val}"`);
 };
diff --git a/lib/document.js b/lib/document.js
index aeb94d9bd44..e43c0e67157 100644
--- a/lib/document.js
+++ b/lib/document.js
@@ -3836,15 +3836,39 @@ Document.prototype.$toObject = function(options, json) {
   // Parent options should only bubble down for subdocuments, not populated docs
   options._parentOptions = this.$isSubdocument ? options : null;

-  // remember the root transform function
-  // to save it from being overwritten by sub-transform functions
-  // const originalTransform = options.transform;
+  const schemaFieldsOnly = options._calledWithOptions.schemaFieldsOnly
+    ?? options.schemaFieldsOnly
+    ?? defaultOptions.schemaFieldsOnly
+    ?? false;

   let ret;
   if (hasOnlyPrimitiveValues && !options.flattenObjectIds) {
     // Fast path: if we don't have any nested objects or arrays, we only need a
     // shallow clone.
-    ret = this.$__toObjectShallow();
+    ret = this.$__toObjectShallow(schemaFieldsOnly);
+  } else if (schemaFieldsOnly) {
+    ret = {};
+    for (const path of Object.keys(this.$__schema.paths)) {
+      const value = this.$__getValue(path);
+      if (value === undefined) {
+        continue;
+      }
+      let pathToSet = path;
+      let objToSet = ret;
+      if (path.indexOf('.') !== -1) {
+        const segments = path.split('.');
+        pathToSet = segments[segments.length - 1];
+        for (let i = 0; i < segments.length - 1; ++i) {
+          objToSet[segments[i]] = objToSet[segments[i]] ?? {};
+          objToSet = objToSet[segments[i]];
+        }
+      }
+      if (value === null) {
+        objToSet[pathToSet] = null;
+        continue;
+      }
+      objToSet[pathToSet] = clone(value, options);
+    }
   } else {
     ret = clone(this._doc, options) || {};
   }
@@ -3910,10 +3934,12 @@ Document.prototype.$toObject = function(options, json) {
  * Internal shallow clone alternative to `$toObject()`: much faster, no options processing
  */

-Document.prototype.$__toObjectShallow = function $__toObjectShallow() {
+Document.prototype.$__toObjectShallow = function $__toObjectShallow(schemaFieldsOnly) {
   const ret = {};
   if (this._doc != null) {
-    for (const key of Object.keys(this._doc)) {
+    const keys = schemaFieldsOnly ? Object.keys(this.$__schema.paths) : Object.keys(this._doc);
+    for (const key of keys) {
+      // Safe to do this even in the schemaFieldsOnly case because we assume there's no nested paths
       const value = this._doc[key];
       if (value instanceof Date) {
         ret[key] = new Date(value);
@@ -4066,6 +4092,7 @@ Document.prototype.$__toObjectShallow = function $__toObjectShallow() {
  * @param {Boolean} [options.flattenMaps=false] if true, convert Maps to POJOs. Useful if you want to `JSON.stringify()` the result of `toObject()`.
  * @param {Boolean} [options.flattenObjectIds=false] if true, convert any ObjectIds in the result to 24 character hex strings.
  * @param {Boolean} [options.useProjection=false] - If true, omits fields that are excluded in this document's projection. Unless you specified a projection, this will omit any field that has `select: false` in the schema.
+ * @param {Boolean} [options.schemaFieldsOnly=false] - If true, the resulting object will only have fields that are defined in the document's schema. By default, `toObject()` returns all fields in the underlying document from MongoDB, including ones that are not listed in the schema.
  * @return {Object} document as a plain old JavaScript object (POJO). This object may contain ObjectIds, Maps, Dates, mongodb.Binary, Buffers, and other non-POJO values.
  * @see mongodb.Binary https://mongodb.github.io/node-mongodb-native/4.9/classes/Binary.html
  * @api public
@@ -4336,6 +4363,7 @@ function omitDeselectedFields(self, json) {
  * @param {Object} options
  * @param {Boolean} [options.flattenMaps=true] if true, convert Maps to [POJOs](https://masteringjs.io/tutorials/fundamentals/pojo). Useful if you want to `JSON.stringify()` the result.
  * @param {Boolean} [options.flattenObjectIds=false] if true, convert any ObjectIds in the result to 24 character hex strings.
+ * @param {Boolean} [options.schemaFieldsOnly=false] - If true, the resulting object will only have fields that are defined in the document's schema. By default, `toJSON()` returns all fields in the underlying document from MongoDB, including ones that are not listed in the schema.
  * @return {Object}
  * @see Document#toObject https://mongoosejs.com/docs/api/document.html#Document.prototype.toObject()
  * @see JSON.stringify() in JavaScript https://thecodebarbarian.com/the-80-20-guide-to-json-stringify-in-javascript.html
@@ -4506,6 +4534,8 @@ Document.prototype.equals = function(doc) {
  * @param {Object|Function} [options.match=null] Add an additional filter to the populate query. Can be a filter object containing [MongoDB query syntax](https://www.mongodb.com/docs/manual/tutorial/query-documents/), or a function that returns a filter object.
  * @param {Function} [options.transform=null] Function that Mongoose will call on every populated document that allows you to transform the populated document.
  * @param {Object} [options.options=null] Additional options like `limit` and `lean`.
+ * @param {Boolean} [options.forceRepopulate=true] Set to `false` to prevent Mongoose from repopulating paths that are already populated
+ * @param {Boolean} [options.ordered=false] Set to `true` to execute any populate queries one at a time, as opposed to in parallel. We recommend setting this option to `true` if using transactions, especially if also populating multiple paths or paths with multiple models. MongoDB server does **not** support multiple operations in parallel on a single transaction.
  * @param {Function} [callback] Callback
  * @see population https://mongoosejs.com/docs/populate.html
  * @see Query#select https://mongoosejs.com/docs/api/query.html#Query.prototype.select()
@@ -4532,6 +4562,7 @@ Document.prototype.populate = async function populate() {
   }

   const paths = utils.object.vals(pop);
+
   let topLevelModel = this.constructor;
   if (this.$__isNested) {
     topLevelModel = this.$__[scopeSymbol].constructor;
diff --git a/lib/model.js b/lib/model.js
index f5aede7f591..7536609e4fa 100644
--- a/lib/model.js
+++ b/lib/model.js
@@ -3104,11 +3104,9 @@ Model.$__insertMany = function(arr, options, callback) {
       const res = {
         acknowledged: true,
         insertedCount: 0,
-        insertedIds: {},
-        mongoose: {
-          validationErrors: validationErrors
-        }
+        insertedIds: {}
       };
+      decorateBulkWriteResult(res, validationErrors, validationErrors);
       return callback(null, res);
     }
     callback(null, []);
@@ -3161,10 +3159,7 @@ Model.$__insertMany = function(arr, options, callback) {

         // Decorate with mongoose validation errors in case of unordered,
         // because then still do `insertMany()`
-        res.mongoose = {
-          validationErrors: validationErrors,
-          results: results
-        };
+        decorateBulkWriteResult(res, validationErrors, results);
       }
       return callback(null, res);
     }
@@ -3198,10 +3193,7 @@ Model.$__insertMany = function(arr, options, callback) {
     if (error.writeErrors != null) {
       for (let i = 0; i < error.writeErrors.length; ++i) {
         const originalIndex = validDocIndexToOriginalIndex.get(error.writeErrors[i].index);
-        error.writeErrors[i] = {
-          ...error.writeErrors[i],
-          index: originalIndex
-        };
+        error.writeErrors[i] = { ...error.writeErrors[i], index: originalIndex };
         if (!ordered) {
           results[originalIndex] = error.writeErrors[i];
         }
@@ -3245,10 +3237,7 @@ Model.$__insertMany = function(arr, options, callback) {
     });

     if (rawResult && ordered === false) {
-      error.mongoose = {
-        validationErrors: validationErrors,
-        results: results
-      };
+      decorateBulkWriteResult(error, validationErrors, results);
     }

     callback(error, null);
@@ -3486,8 +3475,14 @@ Model.bulkWrite = async function bulkWrite(ops, options) {
       then(res => ([res, null])).
       catch(error => ([null, error]));

+    const writeErrorsByIndex = {};
+    if (error?.writeErrors) {
+      for (const writeError of error.writeErrors) {
+        writeErrorsByIndex[writeError.err.index] = writeError;
+      }
+    }
     for (let i = 0; i < validOpIndexes.length; ++i) {
-      results[validOpIndexes[i]] = null;
+      results[validOpIndexes[i]] = writeErrorsByIndex[i] ?? null;
     }
     if (error) {
       if (validationErrors.length > 0) {
@@ -4386,6 +4381,7 @@ Model.validate = async function validate(obj, pathsOrOptions, context) {
  * @param {Object} [options.options=null] Additional options like `limit` and `lean`.
  * @param {Function} [options.transform=null] Function that Mongoose will call on every populated document that allows you to transform the populated document.
  * @param {Boolean} [options.forceRepopulate=true] Set to `false` to prevent Mongoose from repopulating paths that are already populated
+ * @param {Boolean} [options.ordered=false] Set to `true` to execute any populate queries one at a time, as opposed to in parallel. Set this option to `true` if populating multiple paths or paths with multiple models in transactions.
  * @return {Promise}
  * @api public
  */
@@ -4403,11 +4399,21 @@ Model.populate = async function populate(docs, paths) {
   }

   // each path has its own query options and must be executed separately
-  const promises = [];
-  for (const path of paths) {
-    promises.push(_populatePath(this, docs, path));
+  if (paths.find(p => p.ordered)) {
+    // Populate in series, primarily for transactions because MongoDB doesn't support multiple operations on
+    // one transaction in parallel.
+    // Note that if _any_ path has `ordered`, we make the top-level populate `ordered` as well.
+    for (const path of paths) {
+      await _populatePath(this, docs, path);
+    }
+  } else {
+    // By default, populate in parallel
+    const promises = [];
+    for (const path of paths) {
+      promises.push(_populatePath(this, docs, path));
+    }
+    await Promise.all(promises);
   }
-  await Promise.all(promises);

   return docs;
 };
@@ -4527,12 +4533,22 @@ async function _populatePath(model, docs, populateOptions) {
     return;
   }

-  const promises = [];
-  for (const arr of params) {
-    promises.push(_execPopulateQuery.apply(null, arr).then(valsFromDb => { vals = vals.concat(valsFromDb); }));
+  if (populateOptions.ordered) {
+    // Populate in series, primarily for transactions because MongoDB doesn't support multiple operations on
+    // one transaction in parallel.
+    for (const arr of params) {
+      await _execPopulateQuery.apply(null, arr).then(valsFromDb => { vals = vals.concat(valsFromDb); });
+    }
+  } else {
+    // By default, populate in parallel
+    const promises = [];
+    for (const arr of params) {
+      promises.push(_execPopulateQuery.apply(null, arr).then(valsFromDb => { vals = vals.concat(valsFromDb); }));
+    }
+
+    await Promise.all(promises);
   }
-  await Promise.all(promises);

   for (const arr of params) {
     const mod = arr[0];
diff --git a/lib/types/double.js b/lib/types/double.js
new file mode 100644
index 00000000000..6117173570b
--- /dev/null
+++ b/lib/types/double.js
@@ -0,0 +1,13 @@
+/**
+ * Double type constructor
+ *
+ * #### Example:
+ *
+ *     const pi = new mongoose.Types.Double(3.1415);
+ *
+ * @constructor Double
+ */
+
+'use strict';
+
+module.exports = require('bson').Double;
diff --git a/lib/types/index.js b/lib/types/index.js
index d234f6bb62a..8252aabfb21 100644
--- a/lib/types/index.js
+++ b/lib/types/index.js
@@ -12,6 +12,7 @@ exports.Document = // @deprecate
 exports.Embedded = require('./arraySubdocument');

 exports.DocumentArray = require('./documentArray');
+exports.Double = require('./double');
 exports.Decimal128 = require('./decimal128');
 exports.ObjectId = require('./objectid');

diff --git a/lib/utils.js b/lib/utils.js
index 6fc5c335ef0..e0cddc0ba6a 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -551,8 +551,8 @@ exports.populate = function populate(path, select, model, match, options, subPop
     };
   }

-  if (typeof obj.path !== 'string') {
-    throw new TypeError('utils.populate: invalid path. Expected string. Got typeof `' + typeof path + '`');
+  if (typeof obj.path !== 'string' && !(Array.isArray(obj.path) && obj.path.every(el => typeof el === 'string'))) {
+    throw new TypeError('utils.populate: invalid path. Expected string or array of strings. Got typeof `' + typeof path + '`');
   }

   return _populateObj(obj);
@@ -600,7 +600,11 @@ function _populateObj(obj) {
   }

   const ret = [];
-  const paths = oneSpaceRE.test(obj.path) ? obj.path.split(manySpaceRE) : [obj.path];
+  const paths = oneSpaceRE.test(obj.path)
+    ? obj.path.split(manySpaceRE)
+    : Array.isArray(obj.path)
+      ? obj.path
+      : [obj.path];
   if (obj.options != null) {
     obj.options = clone(obj.options);
   }
diff --git a/test/bigint.test.js b/test/bigint.test.js
index e3d00418e2c..c476039b0ef 100644
--- a/test/bigint.test.js
+++ b/test/bigint.test.js
@@ -106,22 +106,22 @@ describe('BigInt', function() {
   });

   it('is stored as a long in MongoDB', async function() {
-    await Test.create({ myBigInt: 42n });
+    await Test.create({ myBigInt: 9223372036854775807n });

     const doc = await Test.findOne({ myBigInt: { $type: 'long' } });
     assert.ok(doc);
-    assert.strictEqual(doc.myBigInt, 42n);
+    assert.strictEqual(doc.myBigInt, 9223372036854775807n);
   });

   it('becomes a bigint with lean using useBigInt64', async function() {
-    await Test.create({ myBigInt: 7n });
+    await Test.create({ myBigInt: 9223372036854775807n });

     const doc = await Test.
-      findOne({ myBigInt: 7n }).
+      findOne({ myBigInt: 9223372036854775807n }).
       setOptions({ useBigInt64: true }).
       lean();
     assert.ok(doc);
-    assert.strictEqual(doc.myBigInt, 7n);
+    assert.strictEqual(doc.myBigInt, 9223372036854775807n);
   });

   it('can query with comparison operators', async function() {
diff --git a/test/document.populate.test.js b/test/document.populate.test.js
index bbfbc1df99a..00a34ba0ce8 100644
--- a/test/document.populate.test.js
+++ b/test/document.populate.test.js
@@ -1075,4 +1075,44 @@ describe('document.populate', function() {
     assert.deepStrictEqual(codeUser.extras[0].config.paymentConfiguration.paymentMethods[0]._id, code._id);
     assert.strictEqual(codeUser.extras[0].config.paymentConfiguration.paymentMethods[0].code, 'test code');
   });
+
+  it('supports populate with ordered option (gh-15231)', async function() {
+    const docSchema = new Schema({
+      refA: { type: Schema.Types.ObjectId, ref: 'Test1' },
+      refB: { type: Schema.Types.ObjectId, ref: 'Test2' },
+      refC: { type: Schema.Types.ObjectId, ref: 'Test3' }
+    });
+
+    const doc1Schema = new Schema({ name: String });
+    const doc2Schema = new Schema({ title: String });
+    const doc3Schema = new Schema({ content: String });
+
+    const Doc = db.model('Test', docSchema);
+    const Doc1 = db.model('Test1', doc1Schema);
+    const Doc2 = db.model('Test2', doc2Schema);
+    const Doc3 = db.model('Test3', doc3Schema);
+
+    const doc1 = await Doc1.create({ name: 'test 1' });
+    const doc2 = await Doc2.create({ title: 'test 2' });
+    const doc3 = await Doc3.create({ content: 'test 3' });
+
+    const docD = await Doc.create({
+      refA: doc1._id,
+      refB: doc2._id,
+      refC: doc3._id
+    });
+
+    await docD.populate({
+      path: ['refA', 'refB', 'refC'],
+      ordered: true
+    });
+
+    assert.ok(docD.populated('refA'));
+    assert.ok(docD.populated('refB'));
+    assert.ok(docD.populated('refC'));
+
+    assert.equal(docD.refA.name, 'test 1');
+    assert.equal(docD.refB.title, 'test 2');
+    assert.equal(docD.refC.content, 'test 3');
+  });
 });
diff --git a/test/document.test.js b/test/document.test.js
index aff51d64093..755efc34e67 100644
--- a/test/document.test.js
+++ b/test/document.test.js
@@ -14298,6 +14298,90 @@ describe('document', function() {
     delete mongoose.Schema.Types.CustomType;
   });

+  it('supports schemaFieldsOnly option for toObject() (gh-15258)', async function() {
+    const schema = new Schema({ key: String }, { discriminatorKey: 'key' });
+    const subschema1 = new Schema({ field1: String });
+    const subschema2 = new Schema({ field2: String });
+
+    const Discriminator = db.model('Test', schema);
+    Discriminator.discriminator('type1', subschema1);
+    Discriminator.discriminator('type2', subschema2);
+
+    const doc = await Discriminator.create({
+      key: 'type1',
+      field1: 'test value'
+    });
+
+    await Discriminator.updateOne(
+      { _id: doc._id },
+      {
+        key: 'type2',
+        field2: 'test2'
+      },
+      { overwriteDiscriminatorKey: true }
+    );
+
+    const doc2 = await Discriminator.findById(doc).orFail();
+    assert.strictEqual(doc2.field2, 'test2');
+    assert.strictEqual(doc2.field1, undefined);
+
+    const obj = doc2.toObject();
+    assert.strictEqual(obj.field2, 'test2');
+    assert.strictEqual(obj.field1, 'test value');
+
+    const obj2 = doc2.toObject({ schemaFieldsOnly: true });
+    assert.strictEqual(obj.field2, 'test2');
+    assert.strictEqual(obj2.field1, undefined);
+  });
+
+  it('supports schemaFieldsOnly on nested paths, subdocuments, and arrays (gh-15258)', async function() {
+    const subSchema = new Schema({
+      title: String,
+      description: String
+    }, { _id: false });
+    const taskSchema = new Schema({
+      name: String,
+      details: {
+        dueDate: Date,
+        priority: Number
+      },
+      subtask: subSchema,
+      tasks: [subSchema]
+    });
+    const Task = db.model('Test', taskSchema);
+
+    const doc = await Task.create({
+      _id: '0'.repeat(24),
+      name: 'Test Task',
+      details: {
+        dueDate: new Date('2024-01-01'),
+        priority: 1
+      },
+      subtask: {
+        title: 'Subtask 1',
+        description: 'Test Description'
+      },
+      tasks: [{
+        title: 'Array Task 1',
+        description: 'Array Description 1'
+      }]
+    });
+
+    doc._doc.details.extraField = 'extra';
+    doc._doc.subtask.extraField = 'extra';
+    doc._doc.tasks[0].extraField = 'extra';
+
+    const obj = doc.toObject({ schemaFieldsOnly: true });
+    assert.deepStrictEqual(obj, {
+      name: 'Test Task',
+      details: { dueDate: new Date('2024-01-01T00:00:00.000Z'), priority: 1 },
+      subtask: { title: 'Subtask 1', description: 'Test Description' },
+      tasks: [{ title: 'Array Task 1', description: 'Array Description 1' }],
+      _id: new mongoose.Types.ObjectId('0'.repeat(24)),
+      __v: 0
+    });
+  });
+
   it('handles undoReset() on deep recursive subdocuments (gh-15255)', async function() {

     const RecursiveSchema = new mongoose.Schema({});
diff --git a/test/model.test.js b/test/model.test.js
index ceef2073b67..d4aa35ec686 100644
--- a/test/model.test.js
+++ b/test/model.test.js
@@ -6,6 +6,7 @@ const sinon = require('sinon');

 const start = require('./common');

+const CastError = require('../lib/error/cast');
 const assert = require('assert');
 const { once } = require('events');
 const random = require('./util').random;
@@ -4707,6 +4708,46 @@ describe('Model', function() {
       assert.equal(err.validationErrors[0].path, 'age');
       assert.equal(err.results[0].path, 'age');
     });
+
+    it('bulkWrite should return both write errors and validation errors in error.results (gh-15265)', async function() {
+      const userSchema = new Schema({ _id: Number, age: { type: Number } });
+      const User = db.model('User', userSchema);
+
+      const createdUser = await User.create({ _id: 1, name: 'Test' });
+
+      const err = await User.bulkWrite([
+        {
+          updateOne: {
+            filter: { _id: createdUser._id },
+            update: { $set: { age: 'NaN' } }
+          }
+        },
+        {
+          insertOne: {
+            document: { _id: 3, age: 14 }
+          }
+        },
+        {
+          insertOne: {
+            document: { _id: 1, age: 13 }
+          }
+        },
+        {
+          insertOne: {
+            document: { _id: 1, age: 14 }
+          }
+        }
+      ], { ordered: false, throwOnValidationError: true })
+        .then(() => null)
+        .catch(err => err);
+
+      assert.ok(err);
+      assert.strictEqual(err.mongoose.results.length, 4);
+      assert.ok(err.mongoose.results[0] instanceof CastError);
+      assert.strictEqual(err.mongoose.results[1], null);
+      assert.equal(err.mongoose.results[2].constructor.name, 'WriteError');
+      assert.equal(err.mongoose.results[3].constructor.name, 'WriteError');
+    });
   });

   it('deleteOne with cast error (gh-5323)', async function() {
@@ -7060,6 +7101,41 @@ describe('Model', function() {
     assert.deepStrictEqual(docs.map(doc => doc.age), [12, 12]);
   });

+  it('insertMany should return both write errors and validation errors in error.results (gh-15265)', async function() {
+    const userSchema = new Schema({ _id: Number, age: { type: Number } });
+    const User = db.model('User', userSchema);
+    await User.insertOne({ _id: 1, age: 12 });
+
+    const err = await User.insertMany([
+      { _id: 1, age: 'NaN' },
+      { _id: 3, age: 14 },
+      { _id: 1, age: 13 },
+      { _id: 1, age: 14 }
+    ], { ordered: false }).then(() => null).catch(err => err);
+
+    assert.ok(err);
+    assert.strictEqual(err.results.length, 4);
+    assert.ok(err.results[0] instanceof ValidationError);
+    assert.ok(err.results[1] instanceof User);
+    assert.ok(err.results[2].err);
+    assert.ok(err.results[3].err);
+  });
+
+  it('insertMany should return both write errors and validation errors in error.results with rawResult (gh-15265)', async function() {
+    const userSchema = new Schema({ _id: Number, age: { type: Number } });
+    const User = db.model('User', userSchema);
+
+    const res = await User.insertMany([
+      { _id: 1, age: 'NaN' },
+      { _id: 3, age: 14 }
+    ], { ordered: false, rawResult: true });
+
+    assert.ok(res);
+    assert.strictEqual(res.mongoose.results.length, 2);
+    assert.ok(res.mongoose.results[0] instanceof ValidationError);
+    assert.ok(res.mongoose.results[1] instanceof User);
+  });
+
   it('returns writeResult on success', async() => {

     const userSchema = new Schema({
diff --git a/test/types/schema.test.ts b/test/types/schema.test.ts
index b4035ceb651..9d1cb073f51 100644
--- a/test/types/schema.test.ts
+++ b/test/types/schema.test.ts
@@ -1738,3 +1738,11 @@ function gh15244() {
   const schema = new Schema({});
   schema.discriminator('Name', new Schema({}), { value: 'value' });
 }
+
+async function schemaDouble() {
+  const schema = new Schema({ balance: 'Double' });
+  const TestModel = model('Test', schema);
+
+  const doc = await TestModel.findOne().orFail();
+  expectType(doc.balance);
+}
diff --git a/types/index.d.ts b/types/index.d.ts
index 425ed49110f..1c3879d476f 100644
--- a/types/index.d.ts
+++ b/types/index.d.ts
@@ -204,30 +204,32 @@ declare module 'mongoose' {
   }

   export interface ToObjectOptions> {
-    /** apply all getters (path and virtual getters) */
-    getters?: boolean;
-    /** apply virtual getters (can override getters option) */
-    virtuals?: boolean | string[];
     /** if `options.virtuals = true`, you can set `options.aliases = false` to skip applying aliases. This option is a no-op if `options.virtuals = false`. */
     aliases?: boolean;
+    /** if true, replace any conventionally populated paths with the original id in the output. Has no affect on virtual populated paths. */
+    depopulate?: boolean;
+    /** if true, convert Maps to POJOs. Useful if you want to `JSON.stringify()` the result of `toObject()`. */
+    flattenMaps?: boolean;
+    /** if true, convert any ObjectIds in the result to 24 character hex strings. */
+    flattenObjectIds?: boolean;
+    /** apply all getters (path and virtual getters) */
+    getters?: boolean;
     /** remove empty objects (defaults to true) */
     minimize?: boolean;
+    /** If true, the resulting object will only have fields that are defined in the document's schema. By default, `toJSON()` & `toObject()` returns all fields in the underlying document from MongoDB, including ones that are not listed in the schema. */
+    schemaFieldsOnly?: boolean;
     /** if set, mongoose will call this function to allow you to transform the returned object */
     transform?: boolean | ((
       doc: THydratedDocumentType,
       ret: Record,
       options: ToObjectOptions
     ) => any);
-    /** if true, replace any conventionally populated paths with the original id in the output. Has no affect on virtual populated paths. */
-    depopulate?: boolean;
-    /** if false, exclude the version key (`__v` by default) from the output */
-    versionKey?: boolean;
-    /** if true, convert Maps to POJOs. Useful if you want to `JSON.stringify()` the result of `toObject()`. */
-    flattenMaps?: boolean;
-    /** if true, convert any ObjectIds in the result to 24 character hex strings. */
-    flattenObjectIds?: boolean;
     /** If true, omits fields that are excluded in this document's projection. Unless you specified a projection, this will omit any field that has `select: false` in the schema. */
     useProjection?: boolean;
+    /** if false, exclude the version key (`__v` by default) from the output */
+    versionKey?: boolean;
+    /** apply virtual getters (can override getters option) */
+    virtuals?: boolean | string[];
   }

   export type DiscriminatorModel = T extends Model
diff --git a/types/inferschematype.d.ts b/types/inferschematype.d.ts
index 061cfb48adc..dac99d09d6c 100644
--- a/types/inferschematype.d.ts
+++ b/types/inferschematype.d.ts
@@ -312,14 +312,15 @@ type ResolvePathType
   extends true ? bigint :
   PathValueType extends 'bigint' | 'BigInt' | typeof Schema.Types.BigInt | typeof BigInt ? bigint :
   PathValueType extends 'uuid' | 'UUID' | typeof Schema.Types.UUID ? Buffer :
-  IfEquals extends true ? Buffer :
-  PathValueType extends MapConstructor | 'Map' ? Map> :
-  IfEquals extends true ? Map> :
-  PathValueType extends ArrayConstructor ? any[] :
-  PathValueType extends typeof Schema.Types.Mixed ? any:
-  IfEquals extends true ? any:
-  IfEquals extends true ? any:
-  PathValueType extends typeof SchemaType ? PathValueType['prototype'] :
-  PathValueType extends Record ? ObtainDocumentType :
-  unknown,
+  PathValueType extends 'double' | 'Double' | typeof Schema.Types.Double ? Types.Double :
+  IfEquals extends true ? Buffer :
+  PathValueType extends MapConstructor | 'Map' ? Map> :
+  IfEquals extends true ? Map> :
+  PathValueType extends ArrayConstructor ? any[] :
+  PathValueType extends typeof Schema.Types.Mixed ? any:
+  IfEquals extends true ? any:
+  IfEquals extends true ? any:
+  PathValueType extends typeof SchemaType ? PathValueType['prototype'] :
+  PathValueType extends Record ? ObtainDocumentType :
+  unknown,
   TypeHint>;
diff --git a/types/models.d.ts b/types/models.d.ts
index 88cedbaa342..4ff5fe83ec3 100644
--- a/types/models.d.ts
+++ b/types/models.d.ts
@@ -308,7 +308,7 @@ declare module 'mongoose' {
     bulkWrite(
       writes: Array>,
       options: MongooseBulkWriteOptions & { ordered: false }
-    ): Promise } }>;
+    ): Promise } }>;
     bulkWrite(
       writes: Array>,
       options?: MongooseBulkWriteOptions
diff --git a/types/populate.d.ts b/types/populate.d.ts
index 8517c15865c..dac2a248217 100644
--- a/types/populate.d.ts
+++ b/types/populate.d.ts
@@ -39,6 +39,12 @@ declare module 'mongoose' {
     foreignField?: string;
     /** Set to `false` to prevent Mongoose from repopulating paths that are already populated */
     forceRepopulate?: boolean;
+    /**
+     * Set to `true` to execute any populate queries one at a time, as opposed to in parallel.
+     * We recommend setting this option to `true` if using transactions, especially if also populating multiple paths or paths with multiple models.
+     * MongoDB server does **not** support multiple operations in parallel on a single transaction.
+     */
+    ordered?: boolean;
   }

   interface PopulateOption {
diff --git a/types/types.d.ts b/types/types.d.ts
index 503a9b2c9f2..9c56959182e 100644
--- a/types/types.d.ts
+++ b/types/types.d.ts
@@ -104,5 +104,7 @@ declare module 'mongoose' {
     }

     class UUID extends bson.UUID {}
+
+    class Double extends bson.Double {}
   }
 }
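The sketch below is not part of the patch; it is a rough illustration of how the new `schemaFieldsOnly` and `ordered` options shown above fit together. The connection string and the Author/Post models are invented for the example.

'use strict';

const mongoose = require('mongoose');
const { Schema } = mongoose;

async function main() {
  // Assumed local connection; adjust for your environment.
  await mongoose.connect('mongodb://127.0.0.1:27017/example');

  const Author = mongoose.model('Author', new Schema({ name: String }));
  const Post = mongoose.model('Post', new Schema({
    title: String,
    author: { type: Schema.Types.ObjectId, ref: 'Author' }
  }));

  const author = await Author.create({ name: 'Ada' });
  const post = await Post.create({ title: 'Hello', author: author._id });

  // `ordered: true` runs the populate queries one at a time instead of in
  // parallel, which is what you want inside a transaction. An array of paths
  // is also accepted now, per the utils.populate() change above.
  await post.populate({ path: ['author'], ordered: true });

  // `schemaFieldsOnly: true` drops any stored fields that are not declared in
  // the schema (for example, leftovers from an older schema version).
  const pojo = post.toObject({ schemaFieldsOnly: true });
  console.log(pojo);

  await mongoose.disconnect();
}

main().catch(console.error);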
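A second sketch, also outside the patch, exercises the new `mongoose.Types.Double` export and the BigInt range check. It assumes the `'Double'` schema type string resolves as in the test/types/schema.test.ts change above; the Ledger model and its fields are invented.

'use strict';

const mongoose = require('mongoose');

const LedgerSchema = new mongoose.Schema({
  balance: 'Double', // stored as a BSON double, see lib/types/double.js above
  counter: BigInt    // stored as a 64-bit BSON long
});
const Ledger = mongoose.model('Ledger', LedgerSchema);

const entry = new Ledger({
  balance: new mongoose.Types.Double(3.1415),
  counter: 42n
});

// BigInts outside the signed 64-bit range are now rejected at cast time, so
// validation reports an error instead of handing MongoDB a value it cannot
// store as a long.
entry.counter = 9223372036854775808n; // MAX_BIGINT + 1n
const err = entry.validateSync();
console.log(err ? err.message : 'no error');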