From 4a79a887c5434ccaee9d890c60d9cdd0ddb43320 Mon Sep 17 00:00:00 2001 From: Daniel Diaz <39510674+IslandRhythms@users.noreply.github.com> Date: Mon, 12 Aug 2024 11:51:55 -0400 Subject: [PATCH 01/84] feat: `useConnection(connection)` function --- lib/model.js | 15 +++++++++++++++ test/model.test.js | 10 ++++++++++ 2 files changed, 25 insertions(+) diff --git a/lib/model.js b/lib/model.js index 7b9223d2408..38e9f0f458d 100644 --- a/lib/model.js +++ b/lib/model.js @@ -146,6 +146,21 @@ Model.prototype.$isMongooseModelPrototype = true; Model.prototype.db; +/** + * @api public + */ + +Model.useConnection = function useConnection(connection) { + if (!connection) { + throw new Error('Please provide a connection.'); + } + if (this.db) { + delete this.db.models[this.modelName]; + } + this.db = connection; + connection.models[this.modelName] = this; +}; + /** * The collection instance this model uses. * A Mongoose collection is a thin wrapper around a [MongoDB Node.js driver collection]([MongoDB Node.js driver collection](https://mongodb.github.io/node-mongodb-native/Next/classes/Collection.html)). 
diff --git a/test/model.test.js b/test/model.test.js index b73757e4721..71317696517 100644 --- a/test/model.test.js +++ b/test/model.test.js @@ -7575,6 +7575,16 @@ describe('Model', function() { assert.strictEqual(doc.__v, 0); }); + it('updates the model\'s db property to point to the provided connection instance and vice versa', async function() { + const schema = new mongoose.Schema({ + name: String + }); + const Model = db.model('Test', schema); + const connection = start(); + Model.useConnection(connection); + assert.equal(db.models[Model.modelName], undefined); + assert(connection.models[Model.modelName]); + }); }); From 3a9afe678a773d52c197fb036cbc2197a04cd894 Mon Sep 17 00:00:00 2001 From: Daniel Diaz <39510674+IslandRhythms@users.noreply.github.com> Date: Tue, 20 Aug 2024 16:55:54 -0400 Subject: [PATCH 02/84] more assertions --- lib/model.js | 16 +++++++++++++++- test/model.test.js | 19 ++++++++++++++++--- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/lib/model.js b/lib/model.js index 38e9f0f458d..bab6e06a7ed 100644 --- a/lib/model.js +++ b/lib/model.js @@ -150,14 +150,28 @@ Model.prototype.db; * @api public */ -Model.useConnection = function useConnection(connection) { +Model.useConnection = async function useConnection(connection) { if (!connection) { throw new Error('Please provide a connection.'); } if (this.db) { delete this.db.models[this.modelName]; + delete this.prototype.db; + delete this.prototype[modelDbSymbol] + delete this.prototype.collection; + delete this.prototype.$collection; + delete this.prototype[modelCollectionSymbol]; } + this.db = connection; + const collection = connection.collection(this.modelName, connection.options); + this.prototype.collection = collection; + this.prototype.$collection = collection; + this.prototype[modelCollectionSymbol] = collection; + this.prototype.db = connection; + this.prototype[modelDbSymbol] = connection; + this.collection = collection; + this.$__collection = collection; 
connection.models[this.modelName] = this; }; diff --git a/test/model.test.js b/test/model.test.js index 71317696517..f5e784cccea 100644 --- a/test/model.test.js +++ b/test/model.test.js @@ -7575,16 +7575,29 @@ describe('Model', function() { assert.strictEqual(doc.__v, 0); }); - it('updates the model\'s db property to point to the provided connection instance and vice versa', async function() { + it('updates the model\'s db property to point to the provided connection instance and vice versa asdf', async function() { const schema = new mongoose.Schema({ name: String }); const Model = db.model('Test', schema); - const connection = start(); - Model.useConnection(connection); + const connection = start({ uri: start.uri2 }); + const original = Model.find(); + assert.equal(original.model.collection.conn.name, 'mongoose_test'); + await Model.useConnection(connection); assert.equal(db.models[Model.modelName], undefined); assert(connection.models[Model.modelName]); + const res = Model.find(); + assert.equal(res.model.collection.conn.name, 'mongoose_test_2'); }); + it('should throw an error if no connection is passed', async function() { + const schema = new mongoose.Schema({ + name: String + }); + const Model = db.model('Test', schema); + assert.throws(() => { + Model.useConnection(); + }, { message: 'Please provide a connection.' 
}) + }) }); From e87d506342e46a03cd7126930a69b5d9ca5c9d69 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Sat, 26 Oct 2024 13:57:34 -0400 Subject: [PATCH 03/84] feat(model): complete useConnection() --- lib/model.js | 38 ++++++++++++++++++++---- test/model.test.js | 62 ++++++++++++++++++++++++++------------- test/types/models.test.ts | 11 +++++++ types/models.d.ts | 7 +++++ 4 files changed, 91 insertions(+), 27 deletions(-) diff --git a/lib/model.js b/lib/model.js index bab6e06a7ed..512d65dfffa 100644 --- a/lib/model.js +++ b/lib/model.js @@ -147,22 +147,47 @@ Model.prototype.$isMongooseModelPrototype = true; Model.prototype.db; /** + * Changes the Connection instance this model uses to make requests to MongoDB. + * This function is most useful for changing the Connection that a Model defined using `mongoose.model()` uses + * after initialization. + * + * #### Example: + * + * await mongoose.connect('mongodb://127.0.0.1:27017/db1'); + * const UserModel = mongoose.model('User', mongoose.Schema({ name: String })); + * UserModel.connection === mongoose.connection; // true + * + * const conn2 = await mongoose.createConnection('mongodb://127.0.0.1:27017/db2').asPromise(); + * UserModel.useConnection(conn2); // `UserModel` now stores documents in `db2`, not `db1` + * + * UserModel.connection === mongoose.connection; // false + * UserModel.connection === conn2; // true + * + * conn2.model('User') === UserModel; // true + * mongoose.model('User'); // Throws 'MissingSchemaError' + * + * Note: `useConnection()` does **not** apply any [connection-level plugins](https://mongoosejs.com/docs/api/connection.html#Connection.prototype.plugin()) from the new connection. + * If you use `useConnection()` to switch a model's connection, the model will still have the old connection's plugins. 
+ * + * @function useConnection + * @param [Connection] connection The new connection to use + * @return [Model] this * @api public */ -Model.useConnection = async function useConnection(connection) { +Model.useConnection = function useConnection(connection) { if (!connection) { throw new Error('Please provide a connection.'); } if (this.db) { delete this.db.models[this.modelName]; delete this.prototype.db; - delete this.prototype[modelDbSymbol] + delete this.prototype[modelDbSymbol]; delete this.prototype.collection; delete this.prototype.$collection; delete this.prototype[modelCollectionSymbol]; } - + this.db = connection; const collection = connection.collection(this.modelName, connection.options); this.prototype.collection = collection; @@ -173,6 +198,8 @@ Model.useConnection = async function useConnection(connection) { this.collection = collection; this.$__collection = collection; connection.models[this.modelName] = this; + + return this; }; /** @@ -2096,9 +2123,8 @@ Model.estimatedDocumentCount = function estimatedDocumentCount(options) { * * #### Example: * - * Adventure.countDocuments({ type: 'jungle' }, function (err, count) { - * console.log('there are %d jungle adventures', count); - * }); + * const count = await Adventure.countDocuments({ type: 'jungle' }); + * console.log('there are %d jungle adventures', count); * * If you want to count all documents in a large collection, * use the [`estimatedDocumentCount()` function](https://mongoosejs.com/docs/api/model.html#Model.estimatedDocumentCount()) diff --git a/test/model.test.js b/test/model.test.js index f5e784cccea..f1d31c8c00e 100644 --- a/test/model.test.js +++ b/test/model.test.js @@ -7575,29 +7575,49 @@ describe('Model', function() { assert.strictEqual(doc.__v, 0); }); - it('updates the model\'s db property to point to the provided connection instance and vice versa asdf', async function() { - const schema = new mongoose.Schema({ - name: String + + describe('Model.useConnection() (gh-14802)', 
function() { + it('updates the model\'s db property to point to the provided connection instance and vice versa (gh-14802))', async function() { + const schema = new mongoose.Schema({ + name: String + }); + const Model = db.model('Test', schema); + assert.equal(db.model('Test'), Model); + const original = Model.find(); + assert.equal(original.model.collection.conn.name, 'mongoose_test'); + await Model.create({ name: 'gh-14802 test' }); + let docs = await original; + assert.equal(docs.length, 1); + assert.strictEqual(docs[0].name, 'gh-14802 test'); + + const connection = start({ uri: start.uri2 }); + await connection.asPromise(); + await Model.useConnection(connection); + assert.equal(db.models[Model.modelName], undefined); + assert(connection.models[Model.modelName]); + const query = Model.find(); + assert.equal(query.model.collection.conn.name, 'mongoose_test_2'); + + await Model.deleteMany({}); + await Model.create({ name: 'gh-14802 test 2' }); + docs = await query; + assert.equal(docs.length, 1); + assert.strictEqual(docs[0].name, 'gh-14802 test 2'); + + assert.equal(connection.model('Test'), Model); + assert.throws(() => db.model('Test'), /MissingSchemaError/); + }); + + it('should throw an error if no connection is passed', async function() { + const schema = new mongoose.Schema({ + name: String + }); + const Model = db.model('Test', schema); + assert.throws(() => { + Model.useConnection(); + }, { message: 'Please provide a connection.' 
}); }); - const Model = db.model('Test', schema); - const connection = start({ uri: start.uri2 }); - const original = Model.find(); - assert.equal(original.model.collection.conn.name, 'mongoose_test'); - await Model.useConnection(connection); - assert.equal(db.models[Model.modelName], undefined); - assert(connection.models[Model.modelName]); - const res = Model.find(); - assert.equal(res.model.collection.conn.name, 'mongoose_test_2'); }); - it('should throw an error if no connection is passed', async function() { - const schema = new mongoose.Schema({ - name: String - }); - const Model = db.model('Test', schema); - assert.throws(() => { - Model.useConnection(); - }, { message: 'Please provide a connection.' }) - }) }); diff --git a/test/types/models.test.ts b/test/types/models.test.ts index 218c4c90569..0e4636da5d6 100644 --- a/test/types/models.test.ts +++ b/test/types/models.test.ts @@ -2,6 +2,7 @@ import mongoose, { Schema, Document, Model, + createConnection, connection, model, Types, @@ -977,3 +978,13 @@ function testWithLevel1NestedPaths() { 'foo.one': string | null | undefined }>({} as Test2); } + +async function gh14802() { + const schema = new mongoose.Schema({ + name: String + }); + const Model = model('Test', schema); + + const conn2 = mongoose.createConnection('mongodb://127.0.0.1:27017/mongoose_test'); + Model.useConnection(conn2); +} diff --git a/types/models.d.ts b/types/models.d.ts index c042305a828..60ecfd01d4c 100644 --- a/types/models.d.ts +++ b/types/models.d.ts @@ -600,6 +600,13 @@ declare module 'mongoose' { */ updateSearchIndex(name: string, definition: AnyObject): Promise; + /** + * Changes the Connection instance this model uses to make requests to MongoDB. + * This function is most useful for changing the Connection that a Model defined using `mongoose.model()` uses + * after initialization. 
+ */ + useConnection(connection: Connection): this; + /** Casts and validates the given object against this model's schema, passing the given `context` to custom validators. */ validate(): Promise; validate(obj: any): Promise; From 1ccf38ed9b604cc00a2beececf2ff11cc549ec20 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Tue, 19 Nov 2024 13:26:20 -0500 Subject: [PATCH 04/84] temp --- .github/workflows/encryption-tests.yml | 74 ++++++++++++++++++++++++++ test/encryption/encryption.test.js | 11 ++++ 2 files changed, 85 insertions(+) create mode 100644 .github/workflows/encryption-tests.yml create mode 100644 test/encryption/encryption.test.js diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml new file mode 100644 index 00000000000..87ffa37fcc4 --- /dev/null +++ b/.github/workflows/encryption-tests.yml @@ -0,0 +1,74 @@ +on: + push: + branches: ['main'] + workflow_dispatch: {} + +permissions: + contents: write + pull-requests: write + id-token: write + +name: encryption-tests + +jobs: + ssdlc: + permissions: + # required for all workflows + security-events: write + id-token: write + contents: write + environment: release + runs-on: ubuntu-latest + steps: + - name: Install mongodb-client-encryption + uses: mongodb-client-encryption + + - name: Set-up cluster + + + + - name: Run tests + uses: mongodb-labs/drivers-github-tools/node/get_version_info@v2 + with: + npm_package_name: mongodb + + - name: actions/compress_sign_and_upload + uses: mongodb-labs/drivers-github-tools/node/sign_node_package@v2 + with: + aws_role_arn: ${{ secrets.AWS_ROLE_ARN }} + aws_region_name: us-east-1 + aws_secret_id: ${{ secrets.AWS_SECRET_ID }} + npm_package_name: mongodb + dry_run: ${{ needs.release_please.outputs.release_created == '' }} + + - name: Copy sbom file to release assets + shell: bash + if: ${{ '' == '' }} + run: cp sbom.json ${{ env.S3_ASSETS }}/sbom.json + + # only used for mongodb-client-encryption + - name: Augment SBOM and copy to 
release assets + if: ${{ '' != '' }} + uses: mongodb-labs/drivers-github-tools/sbom@v2 + with: + silk_asset_group: '' + sbom_file_name: sbom.json + + - name: Generate authorized pub report + uses: mongodb-labs/drivers-github-tools/full-report@v2 + with: + release_version: ${{ env.package_version }} + product_name: mongodb + sarif_report_target_ref: 'main' + third_party_dependency_tool: n/a + dist_filenames: artifacts/* + token: ${{ github.token }} + sbom_file_name: sbom.json + evergreen_project: mongo-node-driver-next + evergreen_commit: ${{ env.commit }} + + - uses: mongodb-labs/drivers-github-tools/upload-s3-assets@v2 + with: + version: ${{ env.package_version }} + product_name: mongodb + dry_run: ${{ needs.release_please.outputs.release_created == '' }} \ No newline at end of file diff --git a/test/encryption/encryption.test.js b/test/encryption/encryption.test.js new file mode 100644 index 00000000000..cb9d3be2d66 --- /dev/null +++ b/test/encryption/encryption.test.js @@ -0,0 +1,11 @@ +'use strict'; + +const assert = require('assert'); +const EJSON = require('bson').EJSON; + +describe('setup check', () => { + it('environment variables are set', async function() { + const { local } = EJSON.parse(process.env.CSFLE_KMS_PROVIDERS || '{}'); + assert.ok(local); + }); +}); From aeda36be987adfaf29fa6ce9c5d3ca03aea13e11 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Wed, 20 Nov 2024 02:14:22 -0500 Subject: [PATCH 05/84] temp --- .github/workflows/encryption-tests.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 87ffa37fcc4..82214877e62 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -17,14 +17,15 @@ jobs: security-events: write id-token: write contents: write - environment: release runs-on: ubuntu-latest steps: - name: Install mongodb-client-encryption uses: mongodb-client-encryption - name: Set-up cluster 
- + - shell: bash + + - name: Run tests From e5d8cadbf86e924f8db30317cddd659366dad390 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Wed, 20 Nov 2024 16:24:58 -0500 Subject: [PATCH 06/84] temp 2 --- .github/scripts/run-kms-servers.sh | 11 +++ .github/scripts/run-orchestration.sh | 36 +++++++++ .github/workflows/encryption-tests.yml | 103 ++++++++++++++----------- 3 files changed, 103 insertions(+), 47 deletions(-) create mode 100644 .github/scripts/run-kms-servers.sh create mode 100644 .github/scripts/run-orchestration.sh diff --git a/.github/scripts/run-kms-servers.sh b/.github/scripts/run-kms-servers.sh new file mode 100644 index 00000000000..988b07bfef1 --- /dev/null +++ b/.github/scripts/run-kms-servers.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -o errexit # Exit the script with error if any of the commands fail + +cd ${DRIVERS_TOOLS}/.evergreen/csfle +. ./prepare-kmsvenv.sh + +echo "$PYTHON_EXEC" + +$PYTHON_EXEC -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/expired.pem --port 8000 & +$PYTHON_EXEC -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/wrong-host.pem --port 8001 & +$PYTHON_EXEC -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/server.pem --port 8002 --require_client_cert & \ No newline at end of file diff --git a/.github/scripts/run-orchestration.sh b/.github/scripts/run-orchestration.sh new file mode 100644 index 00000000000..3708c7bc33d --- /dev/null +++ b/.github/scripts/run-orchestration.sh @@ -0,0 +1,36 @@ +#! 
/usr/bin/env bash + +# This is a thin wrapper around drivers-tools run orchestration meant to print each of the configuration settings we make use of +# Additionally it ensures the downloaded binaries are in the PATH for the script to find (namely, the legacy shell for server set up) + +export MONGODB_VERSION=${VERSION} +echo "MONGODB_VERSION=${VERSION}" + +export TOPOLOGY=${TOPOLOGY} +echo "TOPOLOGY=${TOPOLOGY}" + +export AUTH=${AUTH} +echo "AUTH=${AUTH}" + +export SSL=${SSL} +echo "SSL=${SSL}" + +export ORCHESTRATION_FILE=${ORCHESTRATION_FILE} +echo "ORCHESTRATION_FILE=${ORCHESTRATION_FILE}" + +export REQUIRE_API_VERSION=${REQUIRE_API_VERSION} +echo "REQUIRE_API_VERSION=${REQUIRE_API_VERSION}" + +export LOAD_BALANCER=${LOAD_BALANCER} +echo "LOAD_BALANCER=${LOAD_BALANCER}" + +export COMPRESSOR=${COMPRESSOR} +echo "COMPRESSOR=${COMPRESSOR}" + +export PATH="$MONGODB_BINARIES:$PATH" +echo "MONGODB_BINARIES=${MONGODB_BINARIES}" + +export SKIP_LEGACY_SHELL="true" +echo "SKIP_LEGACY_SHELL=${SKIP_LEGACY_SHELL}" + +bash "${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh" \ No newline at end of file diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 82214877e62..e8afec2d560 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -11,7 +11,7 @@ permissions: name: encryption-tests jobs: - ssdlc: + set-up: permissions: # required for all workflows security-events: write @@ -24,52 +24,61 @@ jobs: - name: Set-up cluster - shell: bash - - - - - name: Run tests - uses: mongodb-labs/drivers-github-tools/node/get_version_info@v2 - with: - npm_package_name: mongodb - - - name: actions/compress_sign_and_upload - uses: mongodb-labs/drivers-github-tools/node/sign_node_package@v2 - with: - aws_role_arn: ${{ secrets.AWS_ROLE_ARN }} - aws_region_name: us-east-1 - aws_secret_id: ${{ secrets.AWS_SECRET_ID }} - npm_package_name: mongodb - dry_run: ${{ needs.release_please.outputs.release_created == '' 
}} - - - name: Copy sbom file to release assets - shell: bash - if: ${{ '' == '' }} - run: cp sbom.json ${{ env.S3_ASSETS }}/sbom.json - - # only used for mongodb-client-encryption - - name: Augment SBOM and copy to release assets - if: ${{ '' != '' }} - uses: mongodb-labs/drivers-github-tools/sbom@v2 - with: - silk_asset_group: '' - sbom_file_name: sbom.json - - - name: Generate authorized pub report - uses: mongodb-labs/drivers-github-tools/full-report@v2 + run-tests: + needs: + - set-up + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + node: [20] + os: [ubuntu-latest] + mongodb: [8.0.0] + #clone drivers-evergreen-tools (will delete itself once the run is over) + git clone --depth=1 https://github.com/mongodb-labs/drivers-evergreen-tools.git /data/mci/ead9f9a67ef900991e09bd9589dfee0e/drivers-tools + #set path + #calling run orchestration sets up a server on your computer + # it outputs relevants uri in expansions.yml + # put uri into an environment variable + - name: set-up-orchestration + - command: subprocess.exec + params: + binary: bash + add_expansions_to_env: true + env: + # The following is the settings for how we want to launch mongodb + MONGODB_VERSION: ${VERSION} + TOPOLOGY: ${TOPOLOGY} + AUTH: ${AUTH} + SSL: ${SSL} + ORCHESTRATION_FILE: ${ORCHESTRATION_FILE} + REQUIRE_API_VERSION: ${REQUIRE_API_VERSION} + LOAD_BALANCER: ${LOAD_BALANCER} + COMPRESSOR: ${COMPRESSOR} + # These are paths necessary for launching mongodb + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + MONGODB_BINARIES: ${MONGODB_BINARIES} + args: + - .github/scripts/run-orchestration.sh + name: Encryption tests + env: + FORCE_COLOR: true + steps: + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 + - name: Setup node + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: - release_version: ${{ env.package_version }} - product_name: mongodb - sarif_report_target_ref: 'main' - third_party_dependency_tool: n/a - dist_filenames: 
artifacts/* - token: ${{ github.token }} - sbom_file_name: sbom.json - evergreen_project: mongo-node-driver-next - evergreen_commit: ${{ env.commit }} - - - uses: mongodb-labs/drivers-github-tools/upload-s3-assets@v2 + node-version: latest + - name: Load MongoDB binary cache + id: cache-mongodb-binaries + uses: actions/cache@v4 with: - version: ${{ env.package_version }} - product_name: mongodb - dry_run: ${{ needs.release_please.outputs.release_created == '' }} \ No newline at end of file + path: ~/.cache/mongodb-binaries + key: ${{ matrix.os }}-${{ matrix.mongodb }} + - name: Install Dependencies + run: npm install + - name: Install drivers-evergreen-tools + + - name: Test + run: mocha --exit ./test/encryption/*.test.js \ No newline at end of file From a041206260d287d0746df5c4973f7735c9238418 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 21 Nov 2024 16:59:50 -0500 Subject: [PATCH 07/84] test --- .github/workflows/encryption-tests.yml | 62 ++++++++++++++++---------- 1 file changed, 39 insertions(+), 23 deletions(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index e8afec2d560..586cc450dae 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -10,6 +10,28 @@ permissions: name: encryption-tests +functions: + "bootstrap mongo-orchestration": + - command: subprocess.exec + params: + binary: bash + add_expansions_to_env: true + env: + # The following is the settings for how we want to launch mongodb + MONGODB_VERSION: ${VERSION} + TOPOLOGY: ${TOPOLOGY} + AUTH: ${AUTH} + SSL: ${SSL} + ORCHESTRATION_FILE: ${ORCHESTRATION_FILE} + REQUIRE_API_VERSION: ${REQUIRE_API_VERSION} + LOAD_BALANCER: ${LOAD_BALANCER} + COMPRESSOR: ${COMPRESSOR} + # These are paths necessary for launching mongodb + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + MONGODB_BINARIES: ${MONGODB_BINARIES} + args: + - ./workflows/run-orchestration.sh + jobs: set-up: permissions: @@ -21,7 +43,6 @@ jobs: steps: - name: 
Install mongodb-client-encryption uses: mongodb-client-encryption - - name: Set-up cluster - shell: bash @@ -36,31 +57,10 @@ jobs: os: [ubuntu-latest] mongodb: [8.0.0] #clone drivers-evergreen-tools (will delete itself once the run is over) - git clone --depth=1 https://github.com/mongodb-labs/drivers-evergreen-tools.git /data/mci/ead9f9a67ef900991e09bd9589dfee0e/drivers-tools #set path #calling run orchestration sets up a server on your computer # it outputs relevants uri in expansions.yml # put uri into an environment variable - - name: set-up-orchestration - - command: subprocess.exec - params: - binary: bash - add_expansions_to_env: true - env: - # The following is the settings for how we want to launch mongodb - MONGODB_VERSION: ${VERSION} - TOPOLOGY: ${TOPOLOGY} - AUTH: ${AUTH} - SSL: ${SSL} - ORCHESTRATION_FILE: ${ORCHESTRATION_FILE} - REQUIRE_API_VERSION: ${REQUIRE_API_VERSION} - LOAD_BALANCER: ${LOAD_BALANCER} - COMPRESSOR: ${COMPRESSOR} - # These are paths necessary for launching mongodb - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - MONGODB_BINARIES: ${MONGODB_BINARIES} - args: - - .github/scripts/run-orchestration.sh name: Encryption tests env: FORCE_COLOR: true @@ -79,6 +79,22 @@ jobs: - name: Install Dependencies run: npm install - name: Install drivers-evergreen-tools - + run: git clone --depth=1 https://github.com/mongodb-labs/drivers-evergreen-tools.git ~/.cache/drivers-evergreen-tools + - name: Set up cluster + + run: bash .github/workflows/run-orchestration.sh + with: + # The following is the settings for how we want to launch mongodb + MONGODB_VERSION: 8.0.0 + TOPOLOGY: sharded + AUTH: auth + # ORCHESTRATION_FILE: ${ORCHESTRATION_FILE} + # REQUIRE_API_VERSION: ${REQUIRE_API_VERSION} + # LOAD_BALANCER: ${LOAD_BALANCER} + # COMPRESSOR: ${COMPRESSOR} + # These are paths necessary for launching mongodb + DRIVERS_TOOLS: ~/.cache/drivers-evergreen-tools + MONGODB_BINARIES: ~/.cache/mongodb-binaries + - - name: Test run: mocha --exit ./test/encryption/*.test.js \ 
No newline at end of file From 0c978a283aa6bce08293ff3e82489b98f0d16990 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 21 Nov 2024 17:51:23 -0500 Subject: [PATCH 08/84] edit --- .github/workflows/encryption-tests.yml | 26 ++------------------------ 1 file changed, 2 insertions(+), 24 deletions(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 586cc450dae..69580c06f40 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -1,3 +1,5 @@ +name: encryption-tests + on: push: branches: ['main'] @@ -8,30 +10,6 @@ permissions: pull-requests: write id-token: write -name: encryption-tests - -functions: - "bootstrap mongo-orchestration": - - command: subprocess.exec - params: - binary: bash - add_expansions_to_env: true - env: - # The following is the settings for how we want to launch mongodb - MONGODB_VERSION: ${VERSION} - TOPOLOGY: ${TOPOLOGY} - AUTH: ${AUTH} - SSL: ${SSL} - ORCHESTRATION_FILE: ${ORCHESTRATION_FILE} - REQUIRE_API_VERSION: ${REQUIRE_API_VERSION} - LOAD_BALANCER: ${LOAD_BALANCER} - COMPRESSOR: ${COMPRESSOR} - # These are paths necessary for launching mongodb - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - MONGODB_BINARIES: ${MONGODB_BINARIES} - args: - - ./workflows/run-orchestration.sh - jobs: set-up: permissions: From f8cbb9aef63b9c8f261c81f5a86383ce828a027c Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 21 Nov 2024 18:13:16 -0500 Subject: [PATCH 09/84] removed errors --- .github/workflows/encryption-tests.yml | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 69580c06f40..a39ed25b083 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -1,4 +1,4 @@ -name: encryption-tests +name: Encryption Tests on: push: @@ -21,8 +21,6 @@ jobs: steps: - name: Install mongodb-client-encryption uses: 
mongodb-client-encryption - - name: Set-up cluster - - shell: bash run-tests: needs: @@ -59,9 +57,8 @@ jobs: - name: Install drivers-evergreen-tools run: git clone --depth=1 https://github.com/mongodb-labs/drivers-evergreen-tools.git ~/.cache/drivers-evergreen-tools - name: Set up cluster - - run: bash .github/workflows/run-orchestration.sh - with: + run: bash /.github/scripts/run-orchestration.sh + env: # The following is the settings for how we want to launch mongodb MONGODB_VERSION: 8.0.0 TOPOLOGY: sharded @@ -73,6 +70,5 @@ jobs: # These are paths necessary for launching mongodb DRIVERS_TOOLS: ~/.cache/drivers-evergreen-tools MONGODB_BINARIES: ~/.cache/mongodb-binaries - - - name: Test - run: mocha --exit ./test/encryption/*.test.js \ No newline at end of file + run: mocha --exit ./test/encryption/*.test.js From 2fa3417ce20c45f3398b7b91e2951d5240b1c005 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 21 Nov 2024 18:16:21 -0500 Subject: [PATCH 10/84] on push --- .github/workflows/encryption-tests.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index a39ed25b083..24c0fd23626 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -1,9 +1,8 @@ name: Encryption Tests on: - push: - branches: ['main'] - workflow_dispatch: {} + push + #workflow_dispatch: {} permissions: contents: write From c71d3c287fd94a21de683005ae97920c9b585e42 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 21 Nov 2024 18:21:34 -0500 Subject: [PATCH 11/84] test --- .github/workflows/encryption-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 24c0fd23626..6bb982c03b3 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: 
Install mongodb-client-encryption - uses: mongodb-client-encryption + uses: mongodb-client-encryption@v6.1 run-tests: needs: From e6facfe19ce58bc2ebba3e6334406f7ee77cceff Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 21 Nov 2024 18:27:21 -0500 Subject: [PATCH 12/84] path --- .github/workflows/encryption-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 6bb982c03b3..a5d87177559 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Install mongodb-client-encryption - uses: mongodb-client-encryption@v6.1 + uses: mongodb-js/mongodb-client-encryption@v6.1.0 run-tests: needs: From 06f9758cc2be1a62c196c1d09a05de613c518318 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 21 Nov 2024 18:33:59 -0500 Subject: [PATCH 13/84] path --- .github/workflows/encryption-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index a5d87177559..788a57d1aaa 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Install mongodb-client-encryption - uses: mongodb-js/mongodb-client-encryption@v6.1.0 + run: npm install mongodb-client-encryption run-tests: needs: From 86f7874c5898ff298167666e25c24b4b047f3f3f Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 21 Nov 2024 18:35:59 -0500 Subject: [PATCH 14/84] path --- .github/workflows/encryption-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 788a57d1aaa..90a10f1aa21 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -56,7 +56,7 @@ jobs: - name: Install 
drivers-evergreen-tools run: git clone --depth=1 https://github.com/mongodb-labs/drivers-evergreen-tools.git ~/.cache/drivers-evergreen-tools - name: Set up cluster - run: bash /.github/scripts/run-orchestration.sh + run: bash ../scripts/run-orchestration.sh env: # The following is the settings for how we want to launch mongodb MONGODB_VERSION: 8.0.0 From 7a30734ed0959f673b2046a5e19ba503f167a01a Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 21 Nov 2024 18:37:54 -0500 Subject: [PATCH 15/84] path --- .github/workflows/encryption-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 90a10f1aa21..00e3324072c 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -56,7 +56,7 @@ jobs: - name: Install drivers-evergreen-tools run: git clone --depth=1 https://github.com/mongodb-labs/drivers-evergreen-tools.git ~/.cache/drivers-evergreen-tools - name: Set up cluster - run: bash ../scripts/run-orchestration.sh + run: bash .github/scripts/run-orchestration.sh env: # The following is the settings for how we want to launch mongodb MONGODB_VERSION: 8.0.0 From b39c75442305b51598efa78966c3c16e7150046e Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Fri, 22 Nov 2024 14:34:08 -0500 Subject: [PATCH 16/84] fixed --- .github/workflows/encryption-tests.yml | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 00e3324072c..9234f2b5083 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -53,21 +53,15 @@ jobs: key: ${{ matrix.os }}-${{ matrix.mongodb }} - name: Install Dependencies run: npm install - - name: Install drivers-evergreen-tools - run: git clone --depth=1 https://github.com/mongodb-labs/drivers-evergreen-tools.git ~/.cache/drivers-evergreen-tools - 
name: Set up cluster - run: bash .github/scripts/run-orchestration.sh - env: - # The following is the settings for how we want to launch mongodb - MONGODB_VERSION: 8.0.0 - TOPOLOGY: sharded - AUTH: auth - # ORCHESTRATION_FILE: ${ORCHESTRATION_FILE} - # REQUIRE_API_VERSION: ${REQUIRE_API_VERSION} - # LOAD_BALANCER: ${LOAD_BALANCER} - # COMPRESSOR: ${COMPRESSOR} - # These are paths necessary for launching mongodb - DRIVERS_TOOLS: ~/.cache/drivers-evergreen-tools - MONGODB_BINARIES: ~/.cache/mongodb-binaries - - name: Test + id: setup-cluster + uses: mongodb-labs/drivers-evergreen-tools@master + with: + version: ${{ matrix.mongodb }} + topology: sharded + auth: auth + - name: Run Tests run: mocha --exit ./test/encryption/*.test.js + env: + MONGODB_URI: ${{ steps.setup-cluster.outputs.cluster-uri }} + CRYPT_SHARED_LIB_PATH: ${{ steps.setup-cluster.outputs.crypt-shared-lib-path }} From ccb726edbb9fcd6a253b2759967a5f3285ed6813 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Fri, 22 Nov 2024 14:47:20 -0500 Subject: [PATCH 17/84] typo --- .github/workflows/encryption-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 9234f2b5083..c83217958dd 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -58,7 +58,7 @@ jobs: uses: mongodb-labs/drivers-evergreen-tools@master with: version: ${{ matrix.mongodb }} - topology: sharded + topology: sharded_cluster auth: auth - name: Run Tests run: mocha --exit ./test/encryption/*.test.js From b0a1c3c08e186925d179dc05a952bd073e768c92 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Fri, 22 Nov 2024 14:51:24 -0500 Subject: [PATCH 18/84] install mocha --- .github/workflows/encryption-tests.yml | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 
c83217958dd..c4ee9e31727 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -10,20 +10,12 @@ permissions: id-token: write jobs: - set-up: + run-tests: permissions: # required for all workflows security-events: write id-token: write contents: write - runs-on: ubuntu-latest - steps: - - name: Install mongodb-client-encryption - run: npm install mongodb-client-encryption - - run-tests: - needs: - - set-up runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -40,17 +32,19 @@ jobs: env: FORCE_COLOR: true steps: + # - name: Install mongodb-client-encryption + # run: npm install mongodb-client-encryption - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Setup node uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: latest - - name: Load MongoDB binary cache - id: cache-mongodb-binaries - uses: actions/cache@v4 - with: - path: ~/.cache/mongodb-binaries - key: ${{ matrix.os }}-${{ matrix.mongodb }} + #- name: Load MongoDB binary cache + # id: cache-mongodb-binaries + # uses: actions/cache@v4 + # with: + # path: ~/.cache/mongodb-binaries + # key: ${{ matrix.os }}-${{ matrix.mongodb }} - name: Install Dependencies run: npm install - name: Set up cluster @@ -60,6 +54,8 @@ jobs: version: ${{ matrix.mongodb }} topology: sharded_cluster auth: auth + - name: Install mocha + run: brew install mocha - name: Run Tests run: mocha --exit ./test/encryption/*.test.js env: From 6198ade074d21b0d0d94df92d01930ca7ac9d995 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Fri, 22 Nov 2024 14:56:31 -0500 Subject: [PATCH 19/84] fixed? 
--- .github/scripts/run-orchestration.sh | 36 -------------------------- .github/workflows/encryption-tests.yml | 17 +----------- 2 files changed, 1 insertion(+), 52 deletions(-) delete mode 100644 .github/scripts/run-orchestration.sh diff --git a/.github/scripts/run-orchestration.sh b/.github/scripts/run-orchestration.sh deleted file mode 100644 index 3708c7bc33d..00000000000 --- a/.github/scripts/run-orchestration.sh +++ /dev/null @@ -1,36 +0,0 @@ -#! /usr/bin/env bash - -# This is a thin wrapper around drivers-tools run orchestration meant to print each of the configuration settings we make use of -# Additionally it ensures the downloaded binaries are in the PATH for the script to find (namely, the legacy shell for server set up) - -export MONGODB_VERSION=${VERSION} -echo "MONGODB_VERSION=${VERSION}" - -export TOPOLOGY=${TOPOLOGY} -echo "TOPOLOGY=${TOPOLOGY}" - -export AUTH=${AUTH} -echo "AUTH=${AUTH}" - -export SSL=${SSL} -echo "SSL=${SSL}" - -export ORCHESTRATION_FILE=${ORCHESTRATION_FILE} -echo "ORCHESTRATION_FILE=${ORCHESTRATION_FILE}" - -export REQUIRE_API_VERSION=${REQUIRE_API_VERSION} -echo "REQUIRE_API_VERSION=${REQUIRE_API_VERSION}" - -export LOAD_BALANCER=${LOAD_BALANCER} -echo "LOAD_BALANCER=${LOAD_BALANCER}" - -export COMPRESSOR=${COMPRESSOR} -echo "COMPRESSOR=${COMPRESSOR}" - -export PATH="$MONGODB_BINARIES:$PATH" -echo "MONGODB_BINARIES=${MONGODB_BINARIES}" - -export SKIP_LEGACY_SHELL="true" -echo "SKIP_LEGACY_SHELL=${SKIP_LEGACY_SHELL}" - -bash "${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh" \ No newline at end of file diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index c4ee9e31727..158cde2e10b 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -23,28 +23,15 @@ jobs: node: [20] os: [ubuntu-latest] mongodb: [8.0.0] - #clone drivers-evergreen-tools (will delete itself once the run is over) - #set path - #calling run orchestration sets up a server on 
your computer - # it outputs relevants uri in expansions.yml - # put uri into an environment variable name: Encryption tests env: FORCE_COLOR: true steps: - # - name: Install mongodb-client-encryption - # run: npm install mongodb-client-encryption - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Setup node uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: latest - #- name: Load MongoDB binary cache - # id: cache-mongodb-binaries - # uses: actions/cache@v4 - # with: - # path: ~/.cache/mongodb-binaries - # key: ${{ matrix.os }}-${{ matrix.mongodb }} - name: Install Dependencies run: npm install - name: Set up cluster @@ -54,10 +41,8 @@ jobs: version: ${{ matrix.mongodb }} topology: sharded_cluster auth: auth - - name: Install mocha - run: brew install mocha - name: Run Tests - run: mocha --exit ./test/encryption/*.test.js + run: npx mocha --exit ./test/encryption/*.test.js env: MONGODB_URI: ${{ steps.setup-cluster.outputs.cluster-uri }} CRYPT_SHARED_LIB_PATH: ${{ steps.setup-cluster.outputs.crypt-shared-lib-path }} From a135e79c93a1836acc0639c7dfd3304565c11eda Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Fri, 22 Nov 2024 15:02:53 -0500 Subject: [PATCH 20/84] fix tests --- test/encryption/encryption.test.js | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/test/encryption/encryption.test.js b/test/encryption/encryption.test.js index cb9d3be2d66..66088cf0c51 100644 --- a/test/encryption/encryption.test.js +++ b/test/encryption/encryption.test.js @@ -3,9 +3,14 @@ const assert = require('assert'); const EJSON = require('bson').EJSON; -describe('setup check', () => { - it('environment variables are set', async function() { - const { local } = EJSON.parse(process.env.CSFLE_KMS_PROVIDERS || '{}'); - assert.ok(local); +describe('environmental variables', () => { + it('MONGODB_URI is set', async function() { + const uri = process.env.MONGODB_URI; + assert.ok(uri); 
+ }); + + it('CRYPT_SHARED_LIB_PATH is set', async function() { + const shared_library_path = process.env.CRYPT_SHARED_LIB_PATH; + assert.ok(shared_library_path); }); }); From ca258684b1db84b5cfb13a9b8f8eb9b75dc95279 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Fri, 22 Nov 2024 15:59:31 -0500 Subject: [PATCH 21/84] run tests --- .github/workflows/encryption-tests.yml | 4 ++- mongocryptd.pid | 0 package.json | 1 + test/encryption/encryption.test.js | 35 +++++++++++++++++++++++--- 4 files changed, 36 insertions(+), 4 deletions(-) create mode 100644 mongocryptd.pid diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 158cde2e10b..09dc64df74b 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -34,6 +34,8 @@ jobs: node-version: latest - name: Install Dependencies run: npm install + - name: Install mongodb-client-encryption + run: npm install mongodb-client-encryption - name: Set up cluster id: setup-cluster uses: mongodb-labs/drivers-evergreen-tools@master @@ -44,5 +46,5 @@ jobs: - name: Run Tests run: npx mocha --exit ./test/encryption/*.test.js env: - MONGODB_URI: ${{ steps.setup-cluster.outputs.cluster-uri }} + MONGOOSE_TEST_URI: ${{ steps.setup-cluster.outputs.cluster-uri }} CRYPT_SHARED_LIB_PATH: ${{ steps.setup-cluster.outputs.crypt-shared-lib-path }} diff --git a/mongocryptd.pid b/mongocryptd.pid new file mode 100644 index 00000000000..e69de29bb2d diff --git a/package.json b/package.json index 085d0655a7a..c24f2444972 100644 --- a/package.json +++ b/package.json @@ -22,6 +22,7 @@ "bson": "^6.7.0", "kareem": "2.6.3", "mongodb": "~6.10.0", + "mongodb-client-encryption": "^6.1.0", "mpath": "0.9.0", "mquery": "5.0.0", "ms": "2.1.3", diff --git a/test/encryption/encryption.test.js b/test/encryption/encryption.test.js index 66088cf0c51..e6aa2f1d7b0 100644 --- a/test/encryption/encryption.test.js +++ b/test/encryption/encryption.test.js @@ -1,11 +1,11 @@ 'use strict'; 
const assert = require('assert'); -const EJSON = require('bson').EJSON; +const mdb = require('mongodb'); describe('environmental variables', () => { - it('MONGODB_URI is set', async function() { - const uri = process.env.MONGODB_URI; + it('MONGODB_TEST_URI is set', async function() { + const uri = process.env.MONGOOSE_TEST_URI; assert.ok(uri); }); @@ -14,3 +14,32 @@ describe('environmental variables', () => { assert.ok(shared_library_path); }); }); + +describe('basic integration', () => { + it('supports mongodb csfle auto-encryption integration', async() => { + // 1. Create a MongoClient configured with auto encryption (referred to as `client_encrypted`) + const client = new mdb.MongoClient( + process.env.MONGOOSE_TEST_URI, + { + autoEncryption: { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: Buffer.from( + 'Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk', + 'base64' + ) + } }, + extraOptions: { + cryptdSharedLibRequired: true, + cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH + } + } + } + ); + await client.connect(); + const insertResult = await client + .db('db') + .collection('coll') + .insertOne({ unencrypted: 'test' }); + assert.ok(insertResult.insertedId); + }); +}); From 270d151fcb14ef8bfff2be5b4420d9e69c368bb4 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Mon, 25 Nov 2024 16:54:17 -0500 Subject: [PATCH 22/84] ready for rereview --- .github/scripts/run-kms-servers.sh | 11 ---- .github/workflows/encryption-tests.yml | 17 ++---- package.json | 1 + test/encryption/encryption.test.js | 80 +++++++++++++++++++++----- 4 files changed, 72 insertions(+), 37 deletions(-) delete mode 100644 .github/scripts/run-kms-servers.sh diff --git a/.github/scripts/run-kms-servers.sh b/.github/scripts/run-kms-servers.sh deleted file mode 100644 index 988b07bfef1..00000000000 --- a/.github/scripts/run-kms-servers.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -set 
-o errexit # Exit the script with error if any of the commands fail - -cd ${DRIVERS_TOOLS}/.evergreen/csfle -. ./prepare-kmsvenv.sh - -echo "$PYTHON_EXEC" - -$PYTHON_EXEC -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/expired.pem --port 8000 & -$PYTHON_EXEC -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/wrong-host.pem --port 8001 & -$PYTHON_EXEC -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/server.pem --port 8002 --require_client_cert & \ No newline at end of file diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 09dc64df74b..2151b4eb3ea 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -1,8 +1,9 @@ name: Encryption Tests on: - push - #workflow_dispatch: {} + push: + branches: ['master'] + workflow_dispatch: {} permissions: contents: write @@ -16,13 +17,7 @@ jobs: security-events: write id-token: write contents: write - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - node: [20] - os: [ubuntu-latest] - mongodb: [8.0.0] + runs-on: ubuntu-latest name: Encryption tests env: FORCE_COLOR: true @@ -40,11 +35,11 @@ jobs: id: setup-cluster uses: mongodb-labs/drivers-evergreen-tools@master with: - version: ${{ matrix.mongodb }} + version: 8.0.0 topology: sharded_cluster auth: auth - name: Run Tests - run: npx mocha --exit ./test/encryption/*.test.js + run: npm run encryption-test env: MONGOOSE_TEST_URI: ${{ steps.setup-cluster.outputs.cluster-uri }} CRYPT_SHARED_LIB_PATH: ${{ steps.setup-cluster.outputs.crypt-shared-lib-path }} diff --git a/package.json b/package.json index c24f2444972..405af204ade 100644 --- a/package.json +++ b/package.json @@ -105,6 +105,7 @@ "test-deno": "deno run --allow-env --allow-read --allow-net --allow-run --allow-sys --allow-write ./test/deno.js", "test-rs": "START_REPLICA_SET=1 mocha --timeout 30000 --exit ./test/*.test.js", "test-tsd": "node 
./test/types/check-types-filename && tsd", + "test-encryption": "mocha --exit ./test/encryption/*.test.js", "tdd": "mocha ./test/*.test.js --inspect --watch --recursive --watch-files ./**/*.{js,ts}", "test-coverage": "nyc --reporter=html --reporter=text npm test", "ts-benchmark": "cd ./benchmarks/typescript/simple && npm install && npm run benchmark | node ../../../scripts/tsc-diagnostics-check" diff --git a/test/encryption/encryption.test.js b/test/encryption/encryption.test.js index e6aa2f1d7b0..a1b082433a7 100644 --- a/test/encryption/encryption.test.js +++ b/test/encryption/encryption.test.js @@ -2,9 +2,12 @@ const assert = require('assert'); const mdb = require('mongodb'); +const isBsonType = require('../../lib/helpers/isBsonType'); + +const LOCAL_KEY = Buffer.from('Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk', 'base64'); describe('environmental variables', () => { - it('MONGODB_TEST_URI is set', async function() { + it('MONGOOSE_TEST_URI is set', async function() { const uri = process.env.MONGOOSE_TEST_URI; assert.ok(uri); }); @@ -16,18 +19,44 @@ describe('environmental variables', () => { }); describe('basic integration', () => { - it('supports mongodb csfle auto-encryption integration', async() => { - // 1. 
Create a MongoClient configured with auto encryption (referred to as `client_encrypted`) - const client = new mdb.MongoClient( + let keyVaultClient; + let dataKey; + let encryptedClient; + let dummyClient; + + beforeEach(async function() { + keyVaultClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); + await keyVaultClient.connect(); + await keyVaultClient.db('keyvault').collection('datakeys'); + const clientEncryption = new mdb.ClientEncryption(keyVaultClient, { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } } + }); + dataKey = await clientEncryption.createDataKey('local'); + + encryptedClient = new mdb.MongoClient( process.env.MONGOOSE_TEST_URI, { autoEncryption: { keyVaultNamespace: 'keyvault.datakeys', - kmsProviders: { local: { key: Buffer.from( - 'Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk', - 'base64' - ) - } }, + kmsProviders: { local: { key: LOCAL_KEY } }, + schemaMap: { + 'db.coll': { + bsonType: 'object', + encryptMetadata: { + keyId: [new mdb.UUID(dataKey)] + }, + properties: { + a: { + encrypt: { + bsonType: 'int', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Random', + keyId: [new mdb.UUID(dataKey)] + } + } + } + } + }, extraOptions: { cryptdSharedLibRequired: true, cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH @@ -35,11 +64,32 @@ describe('basic integration', () => { } } ); - await client.connect(); - const insertResult = await client - .db('db') - .collection('coll') - .insertOne({ unencrypted: 'test' }); - assert.ok(insertResult.insertedId); + + dummyClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); + }); + + afterEach(async function() { + await keyVaultClient.close(); + await encryptedClient.close(); + await dummyClient.close(); + }); + + it('supports mongodb csfle auto-encryption integration', async() => { + await encryptedClient.connect(); + await 
encryptedClient.db('db').collection('coll').insertOne({ a: 1 }); + + // a dummyClient not configured with autoEncryption, returns a encrypted binary type, meaning that encryption succeeded + const encryptedCursor = await dummyClient.db('db').collection('coll').find(); + const encryptedResult = await encryptedCursor.next(); + assert.ok(encryptedResult); + assert.ok(encryptedResult.a); + assert.ok(isBsonType(encryptedResult.a, 'Binary')); + assert.ok(encryptedResult.a.sub_type === 6); + + // when the encryptedClient runs a find, the original unencrypted value is returned + const unencryptedCursor = await encryptedClient.db('db').collection('coll').find(); + const unencryptedResult = await unencryptedCursor.next(); + assert.ok(unencryptedResult); + assert.ok(unencryptedResult.a === 1); }); }); From 69dbda600aaaee77b07d497c43b9830f4a78f13b Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Mon, 25 Nov 2024 16:55:28 -0500 Subject: [PATCH 23/84] ready for rereview 2 --- .github/workflows/encryption-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 2151b4eb3ea..dc586d31118 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -2,7 +2,7 @@ name: Encryption Tests on: push: - branches: ['master'] + branches: ['master', 'NODE-6505/ci-setup'] workflow_dispatch: {} permissions: From 3ce14a41f8ba476bef63c0018d20e5fdd9258126 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Mon, 25 Nov 2024 16:58:55 -0500 Subject: [PATCH 24/84] typo --- .github/workflows/encryption-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index dc586d31118..17ffbe3ca22 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -39,7 +39,7 @@ jobs: topology: sharded_cluster auth: auth - name: Run Tests - run: 
npm run encryption-test + run: npm run test-encryption env: MONGOOSE_TEST_URI: ${{ steps.setup-cluster.outputs.cluster-uri }} CRYPT_SHARED_LIB_PATH: ${{ steps.setup-cluster.outputs.crypt-shared-lib-path }} From eac708f94c7318adcd86e9214e0630fe7b5a4222 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Tue, 10 Dec 2024 17:24:32 -0500 Subject: [PATCH 25/84] ready for review --- .github/workflows/encryption-tests.yml | 4 +- .gitignore | 2 + CONTRIBUTING.md | 1 + mongocryptd.pid | 0 package.json | 2 + scripts/encrypted-cluster.sh | 57 ++++++++++++++++++++++++++ test/encryption/encryption.test.js | 10 +++-- 7 files changed, 71 insertions(+), 5 deletions(-) delete mode 100644 mongocryptd.pid create mode 100755 scripts/encrypted-cluster.sh diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 17ffbe3ca22..e605778920b 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -2,7 +2,9 @@ name: Encryption Tests on: push: - branches: ['master', 'NODE-6505/ci-setup'] + branches: ['master'] + pull_request: + branches: [ 'master' ] workflow_dispatch: {} permissions: diff --git a/.gitignore b/.gitignore index 47c0742bb12..f88cd4f6ad9 100644 --- a/.gitignore +++ b/.gitignore @@ -67,3 +67,5 @@ examples/ecommerce-netlify-functions/.netlify/state.json notes.md list.out + +encrypted-cluster \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6ba098d3897..a6d1cfac102 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,6 +46,7 @@ If you have a question about Mongoose (not a bug report) please post it to eithe * execute `npm run test-tsd` to run the typescript tests * execute `npm run ts-benchmark` to run the typescript benchmark "performance test" for a single time. * execute `npm run ts-benchmark-watch` to run the typescript benchmark "performance test" while watching changes on types folder. Note: Make sure to commit all changes before executing this command. 
+* in order to run tests that require an encrypted cluster locally, run `npm run test-encryption-local`. Alternatively, you can start an encrypted cluster using the `scripts/encrypted-cluster.sh` file ## Documentation diff --git a/mongocryptd.pid b/mongocryptd.pid deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/package.json b/package.json index 405af204ade..660698caaab 100644 --- a/package.json +++ b/package.json @@ -20,6 +20,7 @@ "license": "MIT", "dependencies": { "bson": "^6.7.0", + "hugo": "^0.0.3", "kareem": "2.6.3", "mongodb": "~6.10.0", "mongodb-client-encryption": "^6.1.0", @@ -106,6 +107,7 @@ "test-rs": "START_REPLICA_SET=1 mocha --timeout 30000 --exit ./test/*.test.js", "test-tsd": "node ./test/types/check-types-filename && tsd", "test-encryption": "mocha --exit ./test/encryption/*.test.js", + "test-encryption-local": "chmod +x scripts/encrypted-cluster.sh && scripts/encrypted-cluster.sh", "tdd": "mocha ./test/*.test.js --inspect --watch --recursive --watch-files ./**/*.{js,ts}", "test-coverage": "nyc --reporter=html --reporter=text npm test", "ts-benchmark": "cd ./benchmarks/typescript/simple && npm install && npm run benchmark | node ../../../scripts/tsc-diagnostics-check" diff --git a/scripts/encrypted-cluster.sh b/scripts/encrypted-cluster.sh new file mode 100755 index 00000000000..e919f1870c6 --- /dev/null +++ b/scripts/encrypted-cluster.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash + +# sets up an encrypted mongodb cluster + +if [ -d "encrypted-cluster" ]; then + cd encrypted-cluster +else + mkdir encrypted-cluster + cd encrypted-cluster + + if [-d drivers-evergreen-tools]; then + git clone --depth=1 "https://github.com/mongodb-labs/drivers-evergreen-tools.git" + fi + + export DRIVERS_TOOLS=$(pwd)/drivers-evergreen-tools + export MONGODB_VERSION=8.0 + export AUTH=true + export MONGODB_BINARIES=$DRIVERS_TOOLS/mongodb/bin + export NODE_DRIVER=~/dev/node-mongodb-native + export MONGO_ORCHESTRATION_HOME=$DRIVERS_TOOLS/mo + export 
PROJECT_ORCHESTRATION_HOME=$DRIVERS_TOOLS/.evergreen/orchestration + export TOPOLOGY=sharded_cluster + export SSL=nossl + + cd $DRIVERS_TOOLS + rm -rf mongosh mongodb mo + mkdir mo + cd - + + rm expansions.sh 2> /dev/null + + bash $DRIVERS_TOOLS/.evergreen/run-orchestration.sh +fi + +# IMPORTANT: extracts mongodb-uri, and starts the cluster of servers, store the uri for GitHub output + +read -r -d '' SOURCE_SCRIPT << EOM +const fs = require('fs'); +const file = fs.readFileSync('mo-expansion.yml', { encoding: 'utf-8' }) + .trim().split('\\n'); +const regex = /^(?.*): "(?.*)"$/; +const variables = file.map( + (line) => regex.exec(line.trim()).groups +).map( + ({key, value}) => \`export \${key}='\${value}'\` +).join('\n'); + +process.stdout.write(variables); +process.stdout.write('\n'); +EOM + +node --eval "$SOURCE_SCRIPT" | tee expansions.sh +source expansions.sh + +export MONGOOSE_TEST_URI=$MONGODB_URI + +npm run test-encryption diff --git a/test/encryption/encryption.test.js b/test/encryption/encryption.test.js index a1b082433a7..ba29506965b 100644 --- a/test/encryption/encryption.test.js +++ b/test/encryption/encryption.test.js @@ -44,14 +44,14 @@ describe('basic integration', () => { 'db.coll': { bsonType: 'object', encryptMetadata: { - keyId: [new mdb.UUID(dataKey)] + keyId: [dataKey] }, properties: { a: { encrypt: { bsonType: 'int', algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Random', - keyId: [new mdb.UUID(dataKey)] + keyId: [dataKey] } } } @@ -78,9 +78,11 @@ describe('basic integration', () => { await encryptedClient.connect(); await encryptedClient.db('db').collection('coll').insertOne({ a: 1 }); + const { insertedId } = await encryptedClient.db('db').collection('coll').insertOne({ a: 1 }); + // a dummyClient not configured with autoEncryption, returns a encrypted binary type, meaning that encryption succeeded - const encryptedCursor = await dummyClient.db('db').collection('coll').find(); - const encryptedResult = await encryptedCursor.next(); + const 
encryptedResult = await dummyClient.db('db').collection('coll').findOne({ _id: insertedId }); + assert.ok(encryptedResult); assert.ok(encryptedResult.a); assert.ok(isBsonType(encryptedResult.a, 'Binary')); From f38366bd1ea3fcb9d8e3f0e1c0e07cb51159e17c Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Tue, 10 Dec 2024 17:41:46 -0500 Subject: [PATCH 26/84] change to two files for local testing --- .gitignore | 2 +- CONTRIBUTING.md | 2 +- package.json | 2 +- ...uster.sh => run-encryption-tests-local.sh} | 28 ++----------------- scripts/start-encrypted-cluster.sh | 27 ++++++++++++++++++ 5 files changed, 33 insertions(+), 28 deletions(-) rename scripts/{encrypted-cluster.sh => run-encryption-tests-local.sh} (51%) create mode 100644 scripts/start-encrypted-cluster.sh diff --git a/.gitignore b/.gitignore index f88cd4f6ad9..88f5a87efba 100644 --- a/.gitignore +++ b/.gitignore @@ -68,4 +68,4 @@ examples/ecommerce-netlify-functions/.netlify/state.json notes.md list.out -encrypted-cluster \ No newline at end of file +encrypted-cluster diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a6d1cfac102..086e2734a22 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,7 +46,7 @@ If you have a question about Mongoose (not a bug report) please post it to eithe * execute `npm run test-tsd` to run the typescript tests * execute `npm run ts-benchmark` to run the typescript benchmark "performance test" for a single time. * execute `npm run ts-benchmark-watch` to run the typescript benchmark "performance test" while watching changes on types folder. Note: Make sure to commit all changes before executing this command. -* in order to run tests that require an encrypted cluster locally, run `npm run test-encryption-local`. Alternatively, you can start an encrypted cluster using the `scripts/encrypted-cluster.sh` file +* in order to run tests that require an encrypted cluster locally, run `npm run test-encryption-local`. 
Alternatively, you can start an encrypted cluster using the `scripts/start-encrypted-cluster.sh` file ## Documentation diff --git a/package.json b/package.json index 660698caaab..06adb0af08b 100644 --- a/package.json +++ b/package.json @@ -107,7 +107,7 @@ "test-rs": "START_REPLICA_SET=1 mocha --timeout 30000 --exit ./test/*.test.js", "test-tsd": "node ./test/types/check-types-filename && tsd", "test-encryption": "mocha --exit ./test/encryption/*.test.js", - "test-encryption-local": "chmod +x scripts/encrypted-cluster.sh && scripts/encrypted-cluster.sh", + "test-encryption-local": "chmod +x scripts/run-encryption-tests-local.sh && scripts/run-encryption-tests-local.sh", "tdd": "mocha ./test/*.test.js --inspect --watch --recursive --watch-files ./**/*.{js,ts}", "test-coverage": "nyc --reporter=html --reporter=text npm test", "ts-benchmark": "cd ./benchmarks/typescript/simple && npm install && npm run benchmark | node ../../../scripts/tsc-diagnostics-check" diff --git a/scripts/encrypted-cluster.sh b/scripts/run-encryption-tests-local.sh similarity index 51% rename from scripts/encrypted-cluster.sh rename to scripts/run-encryption-tests-local.sh index e919f1870c6..dd29eb98800 100755 --- a/scripts/encrypted-cluster.sh +++ b/scripts/run-encryption-tests-local.sh @@ -2,34 +2,12 @@ # sets up an encrypted mongodb cluster +export CWD=$(pwd); + if [ -d "encrypted-cluster" ]; then cd encrypted-cluster else - mkdir encrypted-cluster - cd encrypted-cluster - - if [-d drivers-evergreen-tools]; then - git clone --depth=1 "https://github.com/mongodb-labs/drivers-evergreen-tools.git" - fi - - export DRIVERS_TOOLS=$(pwd)/drivers-evergreen-tools - export MONGODB_VERSION=8.0 - export AUTH=true - export MONGODB_BINARIES=$DRIVERS_TOOLS/mongodb/bin - export NODE_DRIVER=~/dev/node-mongodb-native - export MONGO_ORCHESTRATION_HOME=$DRIVERS_TOOLS/mo - export PROJECT_ORCHESTRATION_HOME=$DRIVERS_TOOLS/.evergreen/orchestration - export TOPOLOGY=sharded_cluster - export SSL=nossl - - cd 
$DRIVERS_TOOLS - rm -rf mongosh mongodb mo - mkdir mo - cd - - - rm expansions.sh 2> /dev/null - - bash $DRIVERS_TOOLS/.evergreen/run-orchestration.sh + source $CWD/scripts/start-encrypted-cluster.sh fi # IMPORTANT: extracts mongodb-uri, and starts the cluster of servers, store the uri for GitHub output diff --git a/scripts/start-encrypted-cluster.sh b/scripts/start-encrypted-cluster.sh new file mode 100644 index 00000000000..b04c4b655c1 --- /dev/null +++ b/scripts/start-encrypted-cluster.sh @@ -0,0 +1,27 @@ + +export CWD=$(pwd); +mkdir encrypted-cluster +cd encrypted-cluster + +if [ ! -d "drivers-evergreen-tools/" ]; then + git clone --depth=1 "https://github.com/mongodb-labs/drivers-evergreen-tools.git" +fi + +export DRIVERS_TOOLS=$CWD/encrypted-cluster/drivers-evergreen-tools +export MONGODB_VERSION=8.0 +export AUTH=true +export MONGODB_BINARIES=$DRIVERS_TOOLS/mongodb/bin +export NODE_DRIVER=~/dev/node-mongodb-native +export MONGO_ORCHESTRATION_HOME=$DRIVERS_TOOLS/mo +export PROJECT_ORCHESTRATION_HOME=$DRIVERS_TOOLS/.evergreen/orchestration +export TOPOLOGY=sharded_cluster +export SSL=nossl + +cd $DRIVERS_TOOLS +rm -rf mongosh mongodb mo +mkdir mo +cd - + +rm expansions.sh 2> /dev/null + +bash $DRIVERS_TOOLS/.evergreen/run-orchestration.sh \ No newline at end of file From 62d18d830a74d24218b24cef26253317c38a2086 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Tue, 10 Dec 2024 17:43:43 -0500 Subject: [PATCH 27/84] fixed deps --- CONTRIBUTING.md | 2 +- package.json | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 086e2734a22..97f084447dd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,7 +46,7 @@ If you have a question about Mongoose (not a bug report) please post it to eithe * execute `npm run test-tsd` to run the typescript tests * execute `npm run ts-benchmark` to run the typescript benchmark "performance test" for a single time. 
* execute `npm run ts-benchmark-watch` to run the typescript benchmark "performance test" while watching changes on types folder. Note: Make sure to commit all changes before executing this command. -* in order to run tests that require an encrypted cluster locally, run `npm run test-encryption-local`. Alternatively, you can start an encrypted cluster using the `scripts/start-encrypted-cluster.sh` file +* in order to run tests that require an encrypted cluster locally, run `npm run test-encryption-local`. Alternatively, you can start an encrypted cluster using the `scripts/start-encrypted-cluster.sh` file. ## Documentation diff --git a/package.json b/package.json index 06adb0af08b..0d0fe6f99c3 100644 --- a/package.json +++ b/package.json @@ -20,10 +20,8 @@ "license": "MIT", "dependencies": { "bson": "^6.7.0", - "hugo": "^0.0.3", "kareem": "2.6.3", "mongodb": "~6.10.0", - "mongodb-client-encryption": "^6.1.0", "mpath": "0.9.0", "mquery": "5.0.0", "ms": "2.1.3", From 40858d490b529dac1eec8b3e4d69fcb98e9e6b49 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Wed, 11 Dec 2024 14:25:03 -0500 Subject: [PATCH 28/84] requested changes --- .eslintrc.js | 3 +- mongocryptd.pid | 0 package.json | 2 +- scripts/run-encryption-tests-local.sh | 8 +- scripts/start-encrypted-cluster.sh | 13 ++- test/encryption/encryption.test.js | 139 +++++++++++++------------- 6 files changed, 88 insertions(+), 77 deletions(-) create mode 100644 mongocryptd.pid diff --git a/.eslintrc.js b/.eslintrc.js index b4d7d1652d9..7670926d241 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -14,7 +14,8 @@ module.exports = { '**/docs/js/native.js', '!.*', 'node_modules', - '.git' + '.git', + 'encrypted-cluster' ], overrides: [ { diff --git a/mongocryptd.pid b/mongocryptd.pid new file mode 100644 index 00000000000..e69de29bb2d diff --git a/package.json b/package.json index 0d0fe6f99c3..5c726e144c4 100644 --- a/package.json +++ b/package.json @@ -105,7 +105,7 @@ "test-rs": "START_REPLICA_SET=1 mocha --timeout 30000 
--exit ./test/*.test.js", "test-tsd": "node ./test/types/check-types-filename && tsd", "test-encryption": "mocha --exit ./test/encryption/*.test.js", - "test-encryption-local": "chmod +x scripts/run-encryption-tests-local.sh && scripts/run-encryption-tests-local.sh", + "test-encryption-local": "bash scripts/run-encryption-tests-local.sh", "tdd": "mocha ./test/*.test.js --inspect --watch --recursive --watch-files ./**/*.{js,ts}", "test-coverage": "nyc --reporter=html --reporter=text npm test", "ts-benchmark": "cd ./benchmarks/typescript/simple && npm install && npm run benchmark | node ../../../scripts/tsc-diagnostics-check" diff --git a/scripts/run-encryption-tests-local.sh b/scripts/run-encryption-tests-local.sh index dd29eb98800..ff4332370bd 100755 --- a/scripts/run-encryption-tests-local.sh +++ b/scripts/run-encryption-tests-local.sh @@ -1,17 +1,18 @@ #!/usr/bin/env bash -# sets up an encrypted mongodb cluster +# sets up an encrypted mongodb cluster, adds relevant variables to the environment, and runs encryption tests export CWD=$(pwd); +# set up encrypted mongodb cluster if the encrypted-cluster folder does not exist +# note: for tooling, cluster set-up and configuration look into the 'scripts/start-encrypted-cluster.sh' script if [ -d "encrypted-cluster" ]; then cd encrypted-cluster else source $CWD/scripts/start-encrypted-cluster.sh fi -# IMPORTANT: extracts mongodb-uri, and starts the cluster of servers, store the uri for GitHub output - +# extracts MONGOOSE_TEST_URI and CRYPT_SHARED_LIB_PATH from .yml file into environment variables for this test run read -r -d '' SOURCE_SCRIPT << EOM const fs = require('fs'); const file = fs.readFileSync('mo-expansion.yml', { encoding: 'utf-8' }) @@ -32,4 +33,5 @@ source expansions.sh export MONGOOSE_TEST_URI=$MONGODB_URI +# run encryption tests npm run test-encryption diff --git a/scripts/start-encrypted-cluster.sh b/scripts/start-encrypted-cluster.sh index b04c4b655c1..1fb8a4aa22c 100644 --- 
a/scripts/start-encrypted-cluster.sh +++ b/scripts/start-encrypted-cluster.sh @@ -1,17 +1,25 @@ +# creates a encrypted cluster (sharded on 8.0 server) export CWD=$(pwd); mkdir encrypted-cluster cd encrypted-cluster +# note: + # we're using drivers-evergreen-tools which is a repo that handles cluster set-up for us. + # if you'd like to make changes to the cluster settings, edit the exported variables below. + # for configuration options for the exported variables, see here: https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-orchestration.sh + # after this script is run, the encrypted-cluster/ folder will notably contain the following: + # 'mo-expansion.yml' file which contains for your cluster URI and crypt shared library path + # 'drivers-evergreen-tools/mongodb/bin' which contain executables for other mongodb libraries such as mongocryptd, mongosh, and mongod if [ ! -d "drivers-evergreen-tools/" ]; then git clone --depth=1 "https://github.com/mongodb-labs/drivers-evergreen-tools.git" fi +# configure cluster settings export DRIVERS_TOOLS=$CWD/encrypted-cluster/drivers-evergreen-tools export MONGODB_VERSION=8.0 export AUTH=true export MONGODB_BINARIES=$DRIVERS_TOOLS/mongodb/bin -export NODE_DRIVER=~/dev/node-mongodb-native export MONGO_ORCHESTRATION_HOME=$DRIVERS_TOOLS/mo export PROJECT_ORCHESTRATION_HOME=$DRIVERS_TOOLS/.evergreen/orchestration export TOPOLOGY=sharded_cluster @@ -24,4 +32,5 @@ cd - rm expansions.sh 2> /dev/null -bash $DRIVERS_TOOLS/.evergreen/run-orchestration.sh \ No newline at end of file +# start cluster +bash $DRIVERS_TOOLS/.evergreen/run-orchestration.sh diff --git a/test/encryption/encryption.test.js b/test/encryption/encryption.test.js index ba29506965b..14e18306d94 100644 --- a/test/encryption/encryption.test.js +++ b/test/encryption/encryption.test.js @@ -6,92 +6,91 @@ const isBsonType = require('../../lib/helpers/isBsonType'); const LOCAL_KEY = 
Buffer.from('Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk', 'base64'); -describe('environmental variables', () => { - it('MONGOOSE_TEST_URI is set', async function() { - const uri = process.env.MONGOOSE_TEST_URI; - assert.ok(uri); - }); +describe('ci', () => { + describe('environmental variables', () => { + it('MONGOOSE_TEST_URI is set', async function() { + const uri = process.env.MONGOOSE_TEST_URI; + assert.ok(uri); + }); - it('CRYPT_SHARED_LIB_PATH is set', async function() { - const shared_library_path = process.env.CRYPT_SHARED_LIB_PATH; - assert.ok(shared_library_path); + it('CRYPT_SHARED_LIB_PATH is set', async function() { + const shared_library_path = process.env.CRYPT_SHARED_LIB_PATH; + assert.ok(shared_library_path); + }); }); -}); -describe('basic integration', () => { - let keyVaultClient; - let dataKey; - let encryptedClient; - let dummyClient; + describe('basic integration', () => { + let keyVaultClient; + let dataKey; + let encryptedClient; + let unencryptedClient; - beforeEach(async function() { - keyVaultClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); - await keyVaultClient.connect(); - await keyVaultClient.db('keyvault').collection('datakeys'); - const clientEncryption = new mdb.ClientEncryption(keyVaultClient, { - keyVaultNamespace: 'keyvault.datakeys', - kmsProviders: { local: { key: LOCAL_KEY } } - }); - dataKey = await clientEncryption.createDataKey('local'); + beforeEach(async function() { + keyVaultClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); + await keyVaultClient.connect(); + await keyVaultClient.db('keyvault').collection('datakeys'); + const clientEncryption = new mdb.ClientEncryption(keyVaultClient, { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } } + }); + dataKey = await clientEncryption.createDataKey('local'); - encryptedClient = new mdb.MongoClient( - process.env.MONGOOSE_TEST_URI, - 
{ - autoEncryption: { - keyVaultNamespace: 'keyvault.datakeys', - kmsProviders: { local: { key: LOCAL_KEY } }, - schemaMap: { - 'db.coll': { - bsonType: 'object', - encryptMetadata: { - keyId: [dataKey] - }, - properties: { - a: { - encrypt: { - bsonType: 'int', - algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Random', - keyId: [dataKey] + encryptedClient = new mdb.MongoClient( + process.env.MONGOOSE_TEST_URI, + { + autoEncryption: { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } }, + schemaMap: { + 'db.coll': { + bsonType: 'object', + encryptMetadata: { + keyId: [dataKey] + }, + properties: { + a: { + encrypt: { + bsonType: 'int', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Random', + keyId: [dataKey] + } } } } + }, + extraOptions: { + cryptdSharedLibRequired: true, + cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH } - }, - extraOptions: { - cryptdSharedLibRequired: true, - cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH } } - } - ); + ); - dummyClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); - }); - - afterEach(async function() { - await keyVaultClient.close(); - await encryptedClient.close(); - await dummyClient.close(); - }); + unencryptedClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); + }); - it('supports mongodb csfle auto-encryption integration', async() => { - await encryptedClient.connect(); - await encryptedClient.db('db').collection('coll').insertOne({ a: 1 }); + afterEach(async function() { + await keyVaultClient.close(); + await encryptedClient.close(); + await unencryptedClient.close(); + }); - const { insertedId } = await encryptedClient.db('db').collection('coll').insertOne({ a: 1 }); + it('ci set-up should support basic mongodb auto-encryption integration', async() => { + await encryptedClient.connect(); + const { insertedId } = await encryptedClient.db('db').collection('coll').insertOne({ a: 1 }); - // a dummyClient not configured with autoEncryption, returns a encrypted 
binary type, meaning that encryption succeeded - const encryptedResult = await dummyClient.db('db').collection('coll').findOne({ _id: insertedId }); + // client not configured with autoEncryption, returns a encrypted binary type, meaning that encryption succeeded + const encryptedResult = await unencryptedClient.db('db').collection('coll').findOne({ _id: insertedId }); - assert.ok(encryptedResult); - assert.ok(encryptedResult.a); - assert.ok(isBsonType(encryptedResult.a, 'Binary')); - assert.ok(encryptedResult.a.sub_type === 6); + assert.ok(encryptedResult); + assert.ok(encryptedResult.a); + assert.ok(isBsonType(encryptedResult.a, 'Binary')); + assert.ok(encryptedResult.a.sub_type === 6); - // when the encryptedClient runs a find, the original unencrypted value is returned - const unencryptedCursor = await encryptedClient.db('db').collection('coll').find(); - const unencryptedResult = await unencryptedCursor.next(); - assert.ok(unencryptedResult); - assert.ok(unencryptedResult.a === 1); + // when the encryptedClient runs a find, the original unencrypted value is returned + const unencryptedResult = await encryptedClient.db('db').collection('coll').findOne({ _id: insertedId }); + assert.ok(unencryptedResult); + assert.ok(unencryptedResult.a === 1); + }); }); }); From d6044e79358fd574a1b7190b593f7452d614069b Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Wed, 11 Dec 2024 15:28:33 -0500 Subject: [PATCH 29/84] fix --- mongocryptd.pid | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 mongocryptd.pid diff --git a/mongocryptd.pid b/mongocryptd.pid deleted file mode 100644 index e69de29bb2d..00000000000 From 03407f69de8ae2c7f5b506ff10c93fca6caff1f8 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Sun, 15 Dec 2024 14:18:12 -0500 Subject: [PATCH 30/84] types(document): make sure `toObject()` and `toJSON()` apply versionKey __v Fix #15077 --- test/types/document.test.ts | 36 ++++++++++++++++++++++++++++++++++++ types/document.d.ts | 26 
+++++++++++++------------- 2 files changed, 49 insertions(+), 13 deletions(-) diff --git a/test/types/document.test.ts b/test/types/document.test.ts index 19ca1083296..4080248ebab 100644 --- a/test/types/document.test.ts +++ b/test/types/document.test.ts @@ -423,3 +423,39 @@ async function gh14876() { expectType(populatedCar.owner); expectType(depopulatedCar.owner); } + +async function gh15077() { + type Foo = { + state: 'on' | 'off'; + }; + + const fooSchema = new Schema( + { + state: { + type: String, + enum: ['on', 'off'] + } + }, + { timestamps: true } + ); + + const fooModel = model('foo', fooSchema); + + let foundFoo = await fooModel + .findOne({ + state: 'on' + }) + .lean() + .exec(); + + if (!foundFoo) { + const newFoo = { + state: 'on' + // extra props but irrelevant + }; + + const createdFoo = await fooModel.create(newFoo); + + foundFoo = createdFoo.toObject(); // this errors on 8.8.3 + } +} diff --git a/types/document.d.ts b/types/document.d.ts index e52169dfca4..8787e3cb218 100644 --- a/types/document.d.ts +++ b/types/document.d.ts @@ -256,21 +256,21 @@ declare module 'mongoose' { set(value: string | Record): this; /** The return value of this method is used in calls to JSON.stringify(doc). 
*/ - toJSON(options?: ToObjectOptions & { flattenMaps?: true, flattenObjectIds?: false }): FlattenMaps>; - toJSON(options: ToObjectOptions & { flattenObjectIds: false }): FlattenMaps>; - toJSON(options: ToObjectOptions & { flattenObjectIds: true }): ObjectIdToString>>; - toJSON(options: ToObjectOptions & { flattenMaps: false }): Require_id; - toJSON(options: ToObjectOptions & { flattenMaps: false; flattenObjectIds: true }): ObjectIdToString>; - - toJSON>(options?: ToObjectOptions & { flattenMaps?: true, flattenObjectIds?: false }): FlattenMaps; - toJSON>(options: ToObjectOptions & { flattenObjectIds: false }): FlattenMaps; - toJSON>(options: ToObjectOptions & { flattenObjectIds: true }): ObjectIdToString>; - toJSON>(options: ToObjectOptions & { flattenMaps: false }): T; - toJSON>(options: ToObjectOptions & { flattenMaps: false; flattenObjectIds: true }): ObjectIdToString; + toJSON(options?: ToObjectOptions & { flattenMaps?: true, flattenObjectIds?: false }): FlattenMaps>>; + toJSON(options: ToObjectOptions & { flattenObjectIds: false }): FlattenMaps>>; + toJSON(options: ToObjectOptions & { flattenObjectIds: true }): ObjectIdToString>>>; + toJSON(options: ToObjectOptions & { flattenMaps: false }): Default__v>; + toJSON(options: ToObjectOptions & { flattenMaps: false; flattenObjectIds: true }): ObjectIdToString>>; + + toJSON>>(options?: ToObjectOptions & { flattenMaps?: true, flattenObjectIds?: false }): FlattenMaps; + toJSON>>(options: ToObjectOptions & { flattenObjectIds: false }): FlattenMaps; + toJSON>>(options: ToObjectOptions & { flattenObjectIds: true }): ObjectIdToString>; + toJSON>>(options: ToObjectOptions & { flattenMaps: false }): T; + toJSON>>(options: ToObjectOptions & { flattenMaps: false; flattenObjectIds: true }): ObjectIdToString; /** Converts this document into a plain-old JavaScript object ([POJO](https://masteringjs.io/tutorials/fundamentals/pojo)). 
*/ - toObject(options?: ToObjectOptions): Require_id; - toObject(options?: ToObjectOptions): Require_id; + toObject(options?: ToObjectOptions): Default__v>; + toObject(options?: ToObjectOptions): Default__v>; /** Clears the modified state on the specified path. */ unmarkModified(path: T): void; From 8cd67ef14fc1043b9784a91120ea7a947178cc53 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Sun, 15 Dec 2024 16:58:21 -0500 Subject: [PATCH 31/84] fix(model): throw error if calling `create()` with multiple docs in a transaction unless `ordered: true` Fix #15091 --- lib/model.js | 4 ++++ test/docs/transactions.test.js | 40 ++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+) diff --git a/lib/model.js b/lib/model.js index 9f347ac41c4..7a7277de9be 100644 --- a/lib/model.js +++ b/lib/model.js @@ -2619,6 +2619,10 @@ Model.create = async function create(doc, options) { delete options.aggregateErrors; // dont pass on the option to "$save" + if (options.session && !options.ordered && args.length > 1) { + throw new MongooseError('Cannot call `create()` with a session and multiple documents unless `ordered: true` is set'); + } + if (options.ordered) { for (let i = 0; i < args.length; i++) { try { diff --git a/test/docs/transactions.test.js b/test/docs/transactions.test.js index 2a63f07a984..e478ee1ec33 100644 --- a/test/docs/transactions.test.js +++ b/test/docs/transactions.test.js @@ -660,4 +660,44 @@ describe('transactions', function() { const { name } = await Test.findById(_id); assert.strictEqual(name, 'bar'); }); + + it('throws error if using `create()` with multiple docs in a transaction (gh-15091)', async function() { + const BookingSchema = new Schema({ + user: mongoose.Types.ObjectId, + slot: mongoose.Types.ObjectId, + bookingFor: String, + moreInfo: String + }); + + // Create models + const Booking = db.model('Test', BookingSchema); + + // Define a sample payload + const user = { userId: new mongoose.Types.ObjectId() }; + const payload = { + slotId: new 
mongoose.Types.ObjectId(), + data: [ + { bookingFor: 'Person A', moreInfo: 'Some info' }, + { bookingFor: 'Person B', moreInfo: 'Other info' } + ] + }; + + const session = await mongoose.startSession(); + session.startTransaction(); + + const bookingData = payload.data.map((obj) => ({ + user: user.userId, + slot: payload.slotId, + bookingFor: obj.bookingFor, + moreInfo: obj.moreInfo + })); + + await assert.rejects( + Booking.create(bookingData, { session }), + /Cannot call `create\(\)` with a session and multiple documents unless `ordered: true` is set/ + ); + + const bookings = await Booking.create(bookingData, { session, ordered: true }); + assert.equal(bookings.length, 2); + }); }); From 9a6aeff2f359caeed85e3e3712db74593975bed6 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 16 Dec 2024 11:38:44 -0500 Subject: [PATCH 32/84] Update test/types/document.test.ts Co-authored-by: hasezoey --- test/types/document.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/types/document.test.ts b/test/types/document.test.ts index 4080248ebab..a8451bf6c47 100644 --- a/test/types/document.test.ts +++ b/test/types/document.test.ts @@ -456,6 +456,6 @@ async function gh15077() { const createdFoo = await fooModel.create(newFoo); - foundFoo = createdFoo.toObject(); // this errors on 8.8.3 + foundFoo = createdFoo.toObject(); } } From 7a6f744831a4513fa7431c76d1f123f4e7422f18 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 16 Dec 2024 11:45:52 -0500 Subject: [PATCH 33/84] fix tests --- test/docs/transactions.test.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/docs/transactions.test.js b/test/docs/transactions.test.js index e478ee1ec33..6b5d9abfda7 100644 --- a/test/docs/transactions.test.js +++ b/test/docs/transactions.test.js @@ -344,7 +344,10 @@ describe('transactions', function() { const session = await db.startSession(); session.startTransaction(); - await Character.create([{ name: 'Will Riker', rank: 
'Commander' }, { name: 'Jean-Luc Picard', rank: 'Captain' }], { session }); + await Character.create([ + { name: 'Will Riker', rank: 'Commander' }, + { name: 'Jean-Luc Picard', rank: 'Captain' } + ], { session, ordered: true }); let names = await Character.distinct('name', {}, { session }); assert.deepStrictEqual(names.sort(), ['Jean-Luc Picard', 'Will Riker']); @@ -670,6 +673,7 @@ describe('transactions', function() { }); // Create models + db.deleteModel(/Test/); const Booking = db.model('Test', BookingSchema); // Define a sample payload From eebe0ff34a55fbcc95ccd8a104ce12d07003e6b4 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 16 Dec 2024 11:55:23 -0500 Subject: [PATCH 34/84] fix tests --- test/docs/transactions.test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/docs/transactions.test.js b/test/docs/transactions.test.js index 6b5d9abfda7..307e40d5bd6 100644 --- a/test/docs/transactions.test.js +++ b/test/docs/transactions.test.js @@ -686,7 +686,7 @@ describe('transactions', function() { ] }; - const session = await mongoose.startSession(); + const session = await db.startSession(); session.startTransaction(); const bookingData = payload.data.map((obj) => ({ From 2d51c10aa98cc4663697687f3ded1f4753c23416 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 16 Dec 2024 12:13:48 -0500 Subject: [PATCH 35/84] fix tests --- test/docs/transactions.test.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/test/docs/transactions.test.js b/test/docs/transactions.test.js index 307e40d5bd6..5558d9a103a 100644 --- a/test/docs/transactions.test.js +++ b/test/docs/transactions.test.js @@ -696,12 +696,15 @@ describe('transactions', function() { moreInfo: obj.moreInfo })); + const bookings = await Booking.create(bookingData, { session, ordered: true }); + assert.equal(bookings.length, 2); + await assert.rejects( Booking.create(bookingData, { session }), /Cannot call `create\(\)` with a session and multiple documents 
unless `ordered: true` is set/ ); - const bookings = await Booking.create(bookingData, { session, ordered: true }); - assert.equal(bookings.length, 2); + await session.abortTransaction(); + await session.endSession(); }); }); From 08f4c2345a89a9d70dbe089b05f56a38a809cd40 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Wed, 18 Dec 2024 10:36:05 -0500 Subject: [PATCH 36/84] fix wording --- .gitignore | 1 + CONTRIBUTING.md | 2 +- ...ed-cluster.sh => configure-cluster-with-encryption.sh} | 8 ++++++-- scripts/run-encryption-tests-local.sh | 8 ++++---- 4 files changed, 12 insertions(+), 7 deletions(-) rename scripts/{start-encrypted-cluster.sh => configure-cluster-with-encryption.sh} (76%) diff --git a/.gitignore b/.gitignore index 88f5a87efba..e78311260a1 100644 --- a/.gitignore +++ b/.gitignore @@ -69,3 +69,4 @@ notes.md list.out encrypted-cluster +*.pid \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 97f084447dd..a94baf554bd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,7 +46,7 @@ If you have a question about Mongoose (not a bug report) please post it to eithe * execute `npm run test-tsd` to run the typescript tests * execute `npm run ts-benchmark` to run the typescript benchmark "performance test" for a single time. * execute `npm run ts-benchmark-watch` to run the typescript benchmark "performance test" while watching changes on types folder. Note: Make sure to commit all changes before executing this command. -* in order to run tests that require an encrypted cluster locally, run `npm run test-encryption-local`. Alternatively, you can start an encrypted cluster using the `scripts/start-encrypted-cluster.sh` file. +* in order to run tests that require an cluster with encryption locally, run `npm run test-encryption-local`. Alternatively, you can start an encrypted cluster using the `scripts/start-cluster-with-encryption.sh` file. 
## Documentation diff --git a/scripts/start-encrypted-cluster.sh b/scripts/configure-cluster-with-encryption.sh similarity index 76% rename from scripts/start-encrypted-cluster.sh rename to scripts/configure-cluster-with-encryption.sh index 1fb8a4aa22c..1a77feee78f 100644 --- a/scripts/start-encrypted-cluster.sh +++ b/scripts/configure-cluster-with-encryption.sh @@ -1,11 +1,15 @@ -# creates a encrypted cluster (sharded on 8.0 server) +# note: in order to use FLE with mongodb, we must +# have mongocryptd or the shared library downloaded +# have an enterprise server >= 4.2 + +# this script downloads all tools required to use FLE with mongodb, then starts a cluster of the provided configuration (sharded on 8.0 server) export CWD=$(pwd); mkdir encrypted-cluster cd encrypted-cluster # note: - # we're using drivers-evergreen-tools which is a repo that handles cluster set-up for us. + # we're using drivers-evergreen-tools which is a repo used by MongoDB drivers to start clusters for testing. # if you'd like to make changes to the cluster settings, edit the exported variables below. 
# for configuration options for the exported variables, see here: https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-orchestration.sh # after this script is run, the encrypted-cluster/ folder will notably contain the following: diff --git a/scripts/run-encryption-tests-local.sh b/scripts/run-encryption-tests-local.sh index ff4332370bd..1b83c324033 100755 --- a/scripts/run-encryption-tests-local.sh +++ b/scripts/run-encryption-tests-local.sh @@ -1,15 +1,15 @@ #!/usr/bin/env bash -# sets up an encrypted mongodb cluster, adds relevant variables to the environment, and runs encryption tests +# sets up mongodb cluster and encryption configuration, adds relevant variables to the environment, and runs encryption tests export CWD=$(pwd); -# set up encrypted mongodb cluster if the encrypted-cluster folder does not exist -# note: for tooling, cluster set-up and configuration look into the 'scripts/start-encrypted-cluster.sh' script +# set up mongodb cluster and encryption configuration if the encrypted-cluster folder does not exist +# note: for tooling, cluster set-up and configuration look into the 'scripts/start-cluster-with-encryption.sh' script if [ -d "encrypted-cluster" ]; then cd encrypted-cluster else - source $CWD/scripts/start-encrypted-cluster.sh + source $CWD/scripts/start-cluster-with-encryption.sh fi # extracts MONGOOSE_TEST_URI and CRYPT_SHARED_LIB_PATH from .yml file into environment variables for this test run From da4eb754528665fae8c1766ef2ef1f8f4d8eadc4 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 23 Dec 2024 15:41:17 -0500 Subject: [PATCH 37/84] fix(populate): handle hydrating deeply nested populated docs underneath virtual populate re: #15110 --- lib/document.js | 2 +- lib/model.js | 2 +- lib/schema.js | 9 +++++- test/model.hydrate.test.js | 57 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 67 insertions(+), 3 deletions(-) diff --git a/lib/document.js b/lib/document.js index 14a33ef323d..87c8e91b107 
100644 --- a/lib/document.js +++ b/lib/document.js @@ -806,7 +806,7 @@ function init(self, obj, doc, opts, prefix) { reason: e })); } - } else if (opts.hydratedPopulatedDocs) { + } else if (schemaType && opts.hydratedPopulatedDocs) { doc[i] = schemaType.cast(value, self, true); if (doc[i] && doc[i].$__ && doc[i].$__.wasPopulated) { diff --git a/lib/model.js b/lib/model.js index 7edbce6ce28..b50ea3b09c3 100644 --- a/lib/model.js +++ b/lib/model.js @@ -3688,7 +3688,7 @@ Model.castObject = function castObject(obj, options) { } if (schemaType.$isMongooseDocumentArray) { - const castNonArraysOption = schemaType.options?.castNonArrays ??schemaType.constructor.options.castNonArrays; + const castNonArraysOption = schemaType.options?.castNonArrays ?? schemaType.constructor.options.castNonArrays; if (!Array.isArray(val)) { if (!castNonArraysOption) { if (!options.ignoreCastErrors) { diff --git a/lib/schema.js b/lib/schema.js index ddde4659294..5c9cb2e9443 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -25,6 +25,7 @@ const setPopulatedVirtualValue = require('./helpers/populate/setPopulatedVirtual const setupTimestamps = require('./helpers/timestamps/setupTimestamps'); const utils = require('./utils'); const validateRef = require('./helpers/populate/validateRef'); +const { populateModelSymbol } = require('./helpers/symbols'); const hasNumericSubpathRegex = /\.\d+(\.|$)/; @@ -2382,9 +2383,15 @@ Schema.prototype.virtual = function(name, options) { const PopulateModel = this.db.model(modelNames[0]); for (let i = 0; i < populatedVal.length; ++i) { if (!populatedVal[i].$__) { - populatedVal[i] = PopulateModel.hydrate(populatedVal[i]); + populatedVal[i] = PopulateModel.hydrate(populatedVal[i], null, { hydratedPopulatedDocs: true }); } } + const foreignField = options.foreignField; + this.$populated( + name, + populatedVal.map(doc => doc == null ? doc : doc.get(typeof foreignField === 'function' ? 
foreignField.call(doc, doc) : foreignField)), + { populateModelSymbol: PopulateModel } + ); } } diff --git a/test/model.hydrate.test.js b/test/model.hydrate.test.js index 447cc2be85b..0f24d6887bd 100644 --- a/test/model.hydrate.test.js +++ b/test/model.hydrate.test.js @@ -198,5 +198,62 @@ describe('model', function() { assert.ok(c.populated('users')); assert.ok(c.users[0] instanceof User); }); + + it('marks deeply nested docs as hydrated (gh-15110)', async function() { + const ArticleSchema = new Schema({ title: String }); + + const StorySchema = new Schema({ + title: String, + userId: Schema.Types.ObjectId, + article: { + type: Schema.Types.ObjectId, + ref: 'Article' + } + }); + + const UserSchema = new Schema({ + name: String + }); + + UserSchema.virtual('stories', { + ref: 'Story', + localField: '_id', + foreignField: 'userId' + }); + + const User = db.model('User', UserSchema); + const Story = db.model('Story', StorySchema); + const Article = db.model('Article', ArticleSchema); + await Promise.all([ + User.deleteMany({}), + Story.deleteMany({}), + Article.deleteMany({}) + ]); + + const article = await Article.create({ title: 'Cinema' }); + const user = await User.create({ name: 'Alex' }); + await Story.create({ title: 'Ticket 1', userId: user._id, article }); + await Story.create({ title: 'Ticket 2', userId: user._id }); + + const populated = await User.findOne({ name: 'Alex' }).populate({ + path: 'stories', + populate: ['article'] + }).lean(); + + const hydrated = User.hydrate( + JSON.parse(JSON.stringify(populated)), + null, + { hydratedPopulatedDocs: true } + ); + + assert.ok(hydrated.populated('stories')); + assert.ok(hydrated.stories[0].populated('article')); + assert.equal(hydrated.stories[0].article._id.toString(), article._id.toString()); + assert.ok(typeof hydrated.stories[0].article._id === 'object'); + assert.ok(hydrated.stories[0].article._id instanceof mongoose.Types.ObjectId); + assert.equal(hydrated.stories[0].article.title, 'Cinema'); + + 
assert.ok(!hydrated.stories[1].article); + }); }); }); From 1098636943915c39c2f381aa69d071b565fd85e2 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 26 Dec 2024 13:00:42 -0500 Subject: [PATCH 38/84] change all occurences of encrypted-cluster to data --- .eslintrc.js | 2 +- .gitignore | 2 +- scripts/configure-cluster-with-encryption.sh | 8 ++++---- scripts/run-encryption-tests-local.sh | 11 ++++++----- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index 7670926d241..4b4f2f312cc 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -15,7 +15,7 @@ module.exports = { '!.*', 'node_modules', '.git', - 'encrypted-cluster' + 'data' ], overrides: [ { diff --git a/.gitignore b/.gitignore index e78311260a1..c66816fcd42 100644 --- a/.gitignore +++ b/.gitignore @@ -68,5 +68,5 @@ examples/ecommerce-netlify-functions/.netlify/state.json notes.md list.out -encrypted-cluster +data *.pid \ No newline at end of file diff --git a/scripts/configure-cluster-with-encryption.sh b/scripts/configure-cluster-with-encryption.sh index 1a77feee78f..4584920ed40 100644 --- a/scripts/configure-cluster-with-encryption.sh +++ b/scripts/configure-cluster-with-encryption.sh @@ -5,14 +5,14 @@ # this script downloads all tools required to use FLE with mongodb, then starts a cluster of the provided configuration (sharded on 8.0 server) export CWD=$(pwd); -mkdir encrypted-cluster -cd encrypted-cluster +mkdir data +cd data # note: # we're using drivers-evergreen-tools which is a repo used by MongoDB drivers to start clusters for testing. # if you'd like to make changes to the cluster settings, edit the exported variables below. 
# for configuration options for the exported variables, see here: https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-orchestration.sh - # after this script is run, the encrypted-cluster/ folder will notably contain the following: + # after this script is run, the data/ folder will notably contain the following: # 'mo-expansion.yml' file which contains for your cluster URI and crypt shared library path # 'drivers-evergreen-tools/mongodb/bin' which contain executables for other mongodb libraries such as mongocryptd, mongosh, and mongod if [ ! -d "drivers-evergreen-tools/" ]; then @@ -20,7 +20,7 @@ if [ ! -d "drivers-evergreen-tools/" ]; then fi # configure cluster settings -export DRIVERS_TOOLS=$CWD/encrypted-cluster/drivers-evergreen-tools +export DRIVERS_TOOLS=$CWD/data/drivers-evergreen-tools export MONGODB_VERSION=8.0 export AUTH=true export MONGODB_BINARIES=$DRIVERS_TOOLS/mongodb/bin diff --git a/scripts/run-encryption-tests-local.sh b/scripts/run-encryption-tests-local.sh index 1b83c324033..244c1254591 100755 --- a/scripts/run-encryption-tests-local.sh +++ b/scripts/run-encryption-tests-local.sh @@ -4,12 +4,13 @@ export CWD=$(pwd); -# set up mongodb cluster and encryption configuration if the encrypted-cluster folder does not exist -# note: for tooling, cluster set-up and configuration look into the 'scripts/start-cluster-with-encryption.sh' script -if [ -d "encrypted-cluster" ]; then - cd encrypted-cluster +# set up mongodb cluster and encryption configuration if the data/ folder does not exist +# note: for tooling, cluster set-up and configuration look into the 'scripts/configure-cluster-with-encryption.sh' script + +if [ -d "data" ]; then + cd data else - source $CWD/scripts/start-cluster-with-encryption.sh + source $CWD/scripts/configure-cluster-with-encryption.sh fi # extracts MONGOOSE_TEST_URI and CRYPT_SHARED_LIB_PATH from .yml file into environment variables for this test run From 5a04290f4b5911046f3f2774873a136e785cde29 
Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Thu, 26 Dec 2024 14:02:48 -0500 Subject: [PATCH 39/84] fix(model): handle nested conventional populate with hydratedPopulatedDocs option for hydrate() --- lib/document.js | 2 +- lib/model.js | 2 +- lib/schema.js | 11 +++--- lib/schema/array.js | 3 ++ lib/schema/buffer.js | 4 +-- lib/schema/decimal128.js | 4 +-- lib/schema/number.js | 4 +-- lib/schema/objectId.js | 4 +-- lib/schema/string.js | 4 +-- lib/schema/uuid.js | 4 +-- lib/schemaType.js | 4 +-- test/model.hydrate.test.js | 69 +++++++++++++++++++++++++++++++++++++- 12 files changed, 91 insertions(+), 24 deletions(-) diff --git a/lib/document.js b/lib/document.js index 87c8e91b107..ed9e347adc0 100644 --- a/lib/document.js +++ b/lib/document.js @@ -807,7 +807,7 @@ function init(self, obj, doc, opts, prefix) { })); } } else if (schemaType && opts.hydratedPopulatedDocs) { - doc[i] = schemaType.cast(value, self, true); + doc[i] = schemaType.cast(value, self, true, undefined, { hydratedPopulatedDocs: true }); if (doc[i] && doc[i].$__ && doc[i].$__.wasPopulated) { self.$populated(path, doc[i].$__.wasPopulated.value, doc[i].$__.wasPopulated.options); diff --git a/lib/model.js b/lib/model.js index b50ea3b09c3..bd8e60ec97c 100644 --- a/lib/model.js +++ b/lib/model.js @@ -1461,7 +1461,7 @@ function getIndexesToDrop(schema, schemaIndexes, dbIndexes) { * @param {Object} [options] * @param {Array} [options.toDrop] if specified, contains a list of index names to drop * @param {Boolean} [options.hideIndexes=false] set to `true` to hide indexes instead of dropping. 
Requires MongoDB server 4.4 or higher - * @return {Promise} list of dropped or hidden index names + * @return {Promise>} list of dropped or hidden index names * @api public */ diff --git a/lib/schema.js b/lib/schema.js index 5c9cb2e9443..60124f68e3d 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -25,7 +25,6 @@ const setPopulatedVirtualValue = require('./helpers/populate/setPopulatedVirtual const setupTimestamps = require('./helpers/timestamps/setupTimestamps'); const utils = require('./utils'); const validateRef = require('./helpers/populate/validateRef'); -const { populateModelSymbol } = require('./helpers/symbols'); const hasNumericSubpathRegex = /\.\d+(\.|$)/; @@ -1940,13 +1939,11 @@ Schema.prototype.pre = function(name) { * const Model = mongoose.model('Model', schema); * * const m = new Model(..); - * m.save(function(err) { - * console.log('this fires after the `post` hook'); - * }); + * await m.save(); + * console.log('this fires after the `post` hook'); * - * m.find(function(err, docs) { - * console.log('this fires after the post find hook'); - * }); + * await m.find(); + * console.log('this fires after the post find hook'); * * @param {String|RegExp|String[]} methodName The method name or regular expression to match method name * @param {Object} [options] diff --git a/lib/schema/array.js b/lib/schema/array.js index e424731e4d6..a555c308cc3 100644 --- a/lib/schema/array.js +++ b/lib/schema/array.js @@ -403,6 +403,9 @@ SchemaArray.prototype.cast = function(value, doc, init, prev, options) { opts.arrayPathIndex = i; } } + if (options.hydratedPopulatedDocs) { + opts.hydratedPopulatedDocs = options.hydratedPopulatedDocs; + } rawValue[i] = caster.applySetters(rawValue[i], doc, init, void 0, opts); } } catch (e) { diff --git a/lib/schema/buffer.js b/lib/schema/buffer.js index e5cec2e0158..4d5c1af7d57 100644 --- a/lib/schema/buffer.js +++ b/lib/schema/buffer.js @@ -140,7 +140,7 @@ SchemaBuffer.prototype.checkRequired = function(value, doc) { * @api private */ 
-SchemaBuffer.prototype.cast = function(value, doc, init) { +SchemaBuffer.prototype.cast = function(value, doc, init, prev, options) { let ret; if (SchemaType._isRef(this, value, doc, init)) { if (value && value.isMongooseBuffer) { @@ -167,7 +167,7 @@ SchemaBuffer.prototype.cast = function(value, doc, init) { } if (value == null || utils.isNonBuiltinObject(value)) { - return this._castRef(value, doc, init); + return this._castRef(value, doc, init, options); } } diff --git a/lib/schema/decimal128.js b/lib/schema/decimal128.js index 70fcfecc607..136529ec04b 100644 --- a/lib/schema/decimal128.js +++ b/lib/schema/decimal128.js @@ -180,13 +180,13 @@ SchemaDecimal128.prototype.checkRequired = function checkRequired(value, doc) { * @api private */ -SchemaDecimal128.prototype.cast = function(value, doc, init) { +SchemaDecimal128.prototype.cast = function(value, doc, init, prev, options) { if (SchemaType._isRef(this, value, doc, init)) { if (isBsonType(value, 'Decimal128')) { return value; } - return this._castRef(value, doc, init); + return this._castRef(value, doc, init, options); } let castDecimal128; diff --git a/lib/schema/number.js b/lib/schema/number.js index d89ab7d63c0..a5188a81cc2 100644 --- a/lib/schema/number.js +++ b/lib/schema/number.js @@ -354,10 +354,10 @@ SchemaNumber.prototype.enum = function(values, message) { * @api private */ -SchemaNumber.prototype.cast = function(value, doc, init) { +SchemaNumber.prototype.cast = function(value, doc, init, prev, options) { if (typeof value !== 'number' && SchemaType._isRef(this, value, doc, init)) { if (value == null || utils.isNonBuiltinObject(value)) { - return this._castRef(value, doc, init); + return this._castRef(value, doc, init, options); } } diff --git a/lib/schema/objectId.js b/lib/schema/objectId.js index cad05198ea8..927a168df46 100644 --- a/lib/schema/objectId.js +++ b/lib/schema/objectId.js @@ -223,7 +223,7 @@ SchemaObjectId.prototype.checkRequired = function checkRequired(value, doc) { * @api private */ 
-SchemaObjectId.prototype.cast = function(value, doc, init) { +SchemaObjectId.prototype.cast = function(value, doc, init, prev, options) { if (!(isBsonType(value, 'ObjectId')) && SchemaType._isRef(this, value, doc, init)) { // wait! we may need to cast this to a document if ((getConstructorName(value) || '').toLowerCase() === 'objectid') { @@ -231,7 +231,7 @@ SchemaObjectId.prototype.cast = function(value, doc, init) { } if (value == null || utils.isNonBuiltinObject(value)) { - return this._castRef(value, doc, init); + return this._castRef(value, doc, init, options); } } diff --git a/lib/schema/string.js b/lib/schema/string.js index d62e233765b..b832dbd9884 100644 --- a/lib/schema/string.js +++ b/lib/schema/string.js @@ -586,9 +586,9 @@ SchemaString.prototype.checkRequired = function checkRequired(value, doc) { * @api private */ -SchemaString.prototype.cast = function(value, doc, init) { +SchemaString.prototype.cast = function(value, doc, init, prev, options) { if (typeof value !== 'string' && SchemaType._isRef(this, value, doc, init)) { - return this._castRef(value, doc, init); + return this._castRef(value, doc, init, options); } let castString; diff --git a/lib/schema/uuid.js b/lib/schema/uuid.js index 1fbfc38654d..6eb5d2f5ae0 100644 --- a/lib/schema/uuid.js +++ b/lib/schema/uuid.js @@ -268,10 +268,10 @@ SchemaUUID.prototype.checkRequired = function checkRequired(value) { * @api private */ -SchemaUUID.prototype.cast = function(value, doc, init) { +SchemaUUID.prototype.cast = function(value, doc, init, prev, options) { if (utils.isNonBuiltinObject(value) && SchemaType._isRef(this, value, doc, init)) { - return this._castRef(value, doc, init); + return this._castRef(value, doc, init, options); } let castFn; diff --git a/lib/schemaType.js b/lib/schemaType.js index ed63c47bbc7..d57cc775e60 100644 --- a/lib/schemaType.js +++ b/lib/schemaType.js @@ -1555,7 +1555,7 @@ SchemaType._isRef = function(self, value, doc, init) { * ignore */ -SchemaType.prototype._castRef = 
function _castRef(value, doc, init) { +SchemaType.prototype._castRef = function _castRef(value, doc, init, options) { if (value == null) { return value; } @@ -1587,7 +1587,7 @@ SchemaType.prototype._castRef = function _castRef(value, doc, init) { !doc.$__.populated[path].options.options || !doc.$__.populated[path].options.options.lean) { const PopulatedModel = pop ? pop.options[populateModelSymbol] : doc.constructor.db.model(this.options.ref); - ret = new PopulatedModel(value); + ret = PopulatedModel.hydrate(value, null, options); ret.$__.wasPopulated = { value: ret._doc._id, options: { [populateModelSymbol]: PopulatedModel } }; } diff --git a/test/model.hydrate.test.js b/test/model.hydrate.test.js index 0f24d6887bd..0ce7ebf5380 100644 --- a/test/model.hydrate.test.js +++ b/test/model.hydrate.test.js @@ -199,7 +199,7 @@ describe('model', function() { assert.ok(c.users[0] instanceof User); }); - it('marks deeply nested docs as hydrated (gh-15110)', async function() { + it('marks deeply nested docs as hydrated underneath virtuals (gh-15110)', async function() { const ArticleSchema = new Schema({ title: String }); const StorySchema = new Schema({ @@ -221,6 +221,9 @@ describe('model', function() { foreignField: 'userId' }); + db.deleteModel(/User/); + db.deleteModel(/Story/); + db.deleteModel(/Article/); const User = db.model('User', UserSchema); const Story = db.model('Story', StorySchema); const Article = db.model('Article', ArticleSchema); @@ -255,5 +258,69 @@ describe('model', function() { assert.ok(!hydrated.stories[1].article); }); + + it('marks deeply nested docs as hydrated underneath conventional (gh-15110)', async function() { + const ArticleSchema = new Schema({ + title: { + type: String + } + }); + + const StorySchema = new Schema({ + title: { + type: String + }, + article: { + type: Schema.Types.ObjectId, + ref: 'Article' + } + }); + + const UserSchema = new Schema({ + name: String, + stories: [{ + type: Schema.Types.ObjectId, + ref: 'Story' + }] + }); + + 
db.deleteModel(/User/); + db.deleteModel(/Story/); + db.deleteModel(/Article/); + const User = db.model('User', UserSchema); + const Story = db.model('Story', StorySchema); + const Article = db.model('Article', ArticleSchema); + await Promise.all([ + User.deleteMany({}), + Story.deleteMany({}), + Article.deleteMany({}) + ]); + + const article = await Article.create({ title: 'Cinema' }); + const story1 = await Story.create({ title: 'Ticket 1', article }); + const story2 = await Story.create({ title: 'Ticket 2' }); + + await User.create({ name: 'Alex', stories: [story1, story2] }); + + const populated = await User.findOne({ name: 'Alex' }).populate({ + path: 'stories', + populate: ['article'] + }).lean(); + + const hydrated = User.hydrate( + JSON.parse(JSON.stringify(populated)), + null, + { hydratedPopulatedDocs: true } + ); + + assert.ok(hydrated.populated('stories')); + assert.ok(hydrated.stories[0].populated('article')); + assert.equal(hydrated.stories[0].article._id.toString(), article._id.toString()); + assert.ok(typeof hydrated.stories[0].article._id === 'object'); + assert.ok(hydrated.stories[0].article._id instanceof mongoose.Types.ObjectId); + assert.equal(hydrated.stories[0].article.title, 'Cinema'); + + assert.ok(!hydrated.stories[1].article); + }); }); }); From 955cedf05b0db8cff1f72165c540a4c353d4efa0 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Fri, 27 Dec 2024 13:22:00 -0500 Subject: [PATCH 40/84] remove extra gha call - use local script instead lint typo lint --- .github/workflows/encryption-tests.yml | 10 ---------- .gitignore | 2 +- CONTRIBUTING.md | 2 +- package.json | 3 +-- ...cryption-tests-local.sh => run-encryption-tests.sh} | 3 ++- 5 files changed, 5 insertions(+), 15 deletions(-) rename scripts/{run-encryption-tests-local.sh => run-encryption-tests.sh} (95%) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index e605778920b..263ebaedc11 100644 --- a/.github/workflows/encryption-tests.yml +++ 
b/.github/workflows/encryption-tests.yml @@ -33,15 +33,5 @@ jobs: run: npm install - name: Install mongodb-client-encryption run: npm install mongodb-client-encryption - - name: Set up cluster - id: setup-cluster - uses: mongodb-labs/drivers-evergreen-tools@master - with: - version: 8.0.0 - topology: sharded_cluster - auth: auth - name: Run Tests run: npm run test-encryption - env: - MONGOOSE_TEST_URI: ${{ steps.setup-cluster.outputs.cluster-uri }} - CRYPT_SHARED_LIB_PATH: ${{ steps.setup-cluster.outputs.crypt-shared-lib-path }} diff --git a/.gitignore b/.gitignore index c66816fcd42..9a52110981e 100644 --- a/.gitignore +++ b/.gitignore @@ -69,4 +69,4 @@ notes.md list.out data -*.pid \ No newline at end of file +*.pid diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a94baf554bd..06073758d97 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,7 +46,7 @@ If you have a question about Mongoose (not a bug report) please post it to eithe * execute `npm run test-tsd` to run the typescript tests * execute `npm run ts-benchmark` to run the typescript benchmark "performance test" for a single time. * execute `npm run ts-benchmark-watch` to run the typescript benchmark "performance test" while watching changes on types folder. Note: Make sure to commit all changes before executing this command. -* in order to run tests that require an cluster with encryption locally, run `npm run test-encryption-local`. Alternatively, you can start an encrypted cluster using the `scripts/start-cluster-with-encryption.sh` file. +* in order to run tests that require an cluster with encryption locally, run `npm run test-encryption`. Alternatively, you can start an encrypted cluster using the `scripts/configure-cluster-with-encryption.sh` file. 
## Documentation diff --git a/package.json b/package.json index 5c726e144c4..39d4451eeb7 100644 --- a/package.json +++ b/package.json @@ -104,8 +104,7 @@ "test-deno": "deno run --allow-env --allow-read --allow-net --allow-run --allow-sys --allow-write ./test/deno.js", "test-rs": "START_REPLICA_SET=1 mocha --timeout 30000 --exit ./test/*.test.js", "test-tsd": "node ./test/types/check-types-filename && tsd", - "test-encryption": "mocha --exit ./test/encryption/*.test.js", - "test-encryption-local": "bash scripts/run-encryption-tests-local.sh", + "test-encryption": "bash scripts/run-encryption-tests.sh", "tdd": "mocha ./test/*.test.js --inspect --watch --recursive --watch-files ./**/*.{js,ts}", "test-coverage": "nyc --reporter=html --reporter=text npm test", "ts-benchmark": "cd ./benchmarks/typescript/simple && npm install && npm run benchmark | node ../../../scripts/tsc-diagnostics-check" diff --git a/scripts/run-encryption-tests-local.sh b/scripts/run-encryption-tests.sh similarity index 95% rename from scripts/run-encryption-tests-local.sh rename to scripts/run-encryption-tests.sh index 244c1254591..0209292168d 100755 --- a/scripts/run-encryption-tests-local.sh +++ b/scripts/run-encryption-tests.sh @@ -35,4 +35,5 @@ source expansions.sh export MONGOOSE_TEST_URI=$MONGODB_URI # run encryption tests -npm run test-encryption +cd .. 
+npx mocha --exit ./test/encryption/*.test.js From acacc7aeac07b88503a87133ff4c1d6daa5c4c57 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 30 Dec 2024 15:10:51 -0500 Subject: [PATCH 41/84] Update test/model.hydrate.test.js Co-authored-by: hasezoey --- test/model.hydrate.test.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/model.hydrate.test.js b/test/model.hydrate.test.js index 0ce7ebf5380..9dcfaba2975 100644 --- a/test/model.hydrate.test.js +++ b/test/model.hydrate.test.js @@ -261,9 +261,7 @@ describe('model', function() { it('marks deeply nested docs as hydrated underneath conventional (gh-15110)', async function() { const ArticleSchema = new Schema({ - title: { - type: String - } + title: String }); const StorySchema = new Schema({ From 79de0ff04be6bcc6ad75c6838c934bd3a5b43113 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 30 Dec 2024 15:11:03 -0500 Subject: [PATCH 42/84] Update test/model.hydrate.test.js Co-authored-by: hasezoey --- test/model.hydrate.test.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/model.hydrate.test.js b/test/model.hydrate.test.js index 9dcfaba2975..98ca46f0f7f 100644 --- a/test/model.hydrate.test.js +++ b/test/model.hydrate.test.js @@ -265,9 +265,7 @@ describe('model', function() { }); const StorySchema = new Schema({ - title: { - type: String - }, + title: String, article: { type: Schema.Types.ObjectId, ref: 'Article' From 29b393680e7ef61dbddc5ef5fc65ab32abadae0b Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Thu, 2 Jan 2025 14:40:38 -0500 Subject: [PATCH 43/84] feat(connection): support Connection.prototype.aggregate() for db-level aggregations Fix #15118 --- lib/aggregate.js | 26 ++++++++++--- lib/connection.js | 12 ++++++ lib/cursor/aggregationCursor.js | 8 +++- lib/drivers/node-mongodb-native/connection.js | 12 ++++++ test/connection.test.js | 39 +++++++++++++++++++ types/connection.d.ts | 2 + 6 files changed, 93 insertions(+), 6 deletions(-) diff --git 
a/lib/aggregate.js b/lib/aggregate.js index 1d4a0d18e22..7234a3bd34b 100644 --- a/lib/aggregate.js +++ b/lib/aggregate.js @@ -13,6 +13,7 @@ const getConstructorName = require('./helpers/getConstructorName'); const prepareDiscriminatorPipeline = require('./helpers/aggregate/prepareDiscriminatorPipeline'); const stringifyFunctionOperators = require('./helpers/aggregate/stringifyFunctionOperators'); const utils = require('./utils'); +const Connection = require('./connection'); const read = Query.prototype.read; const readConcern = Query.prototype.readConcern; @@ -46,13 +47,17 @@ const validRedactStringValues = new Set(['$$DESCEND', '$$PRUNE', '$$KEEP']); * @see MongoDB https://www.mongodb.com/docs/manual/applications/aggregation/ * @see driver https://mongodb.github.io/node-mongodb-native/4.9/classes/Collection.html#aggregate * @param {Array} [pipeline] aggregation pipeline as an array of objects - * @param {Model} [model] the model to use with this aggregate. + * @param {Model|Connection} [modelOrConn] the model or connection to use with this aggregate. 
* @api public */ -function Aggregate(pipeline, model) { +function Aggregate(pipeline, modelOrConn) { this._pipeline = []; - this._model = model; + if (modelOrConn instanceof Connection) { + this._connection = modelOrConn; + } else { + this._model = modelOrConn; + } this.options = {}; if (arguments.length === 1 && Array.isArray(pipeline)) { @@ -1029,19 +1034,30 @@ Aggregate.prototype.pipeline = function() { */ Aggregate.prototype.exec = async function exec() { - if (!this._model) { + if (!this._model && !this._connection) { throw new Error('Aggregate not bound to any Model'); } if (typeof arguments[0] === 'function') { throw new MongooseError('Aggregate.prototype.exec() no longer accepts a callback'); } + + if (this._connection) { + if (!this._pipeline.length) { + throw new MongooseError('Aggregate has empty pipeline'); + } + + this._optionsForExec(); + + const cursor = await this._connection.client.db().aggregate(this._pipeline, this.options); + return await cursor.toArray(); + } + const model = this._model; const collection = this._model.collection; applyGlobalMaxTimeMS(this.options, model.db.options, model.base.options); applyGlobalDiskUse(this.options, model.db.options, model.base.options); - this._optionsForExec(); if (this.options && this.options.cursor) { return new AggregationCursor(this); diff --git a/lib/connection.js b/lib/connection.js index dbb10b1ef78..b747460083c 100644 --- a/lib/connection.js +++ b/lib/connection.js @@ -1742,6 +1742,18 @@ Connection.prototype.syncIndexes = async function syncIndexes(options = {}) { * @api public */ +/** + * Runs a [db-level aggregate()](https://www.mongodb.com/docs/manual/reference/method/db.aggregate/) on this connection's underlying `db` + * + * @method aggregate + * @memberOf Connection + * @param {Array} pipeline + * @param {Object} [options] + * @param {Boolean} [options.cursor=false] If true, make the Aggregate resolve to a Mongoose AggregationCursor rather than an array + * @return {Aggregate} Aggregation 
wrapper + * @api public + */ + /** * Removes the database connection with the given name created with with `useDb()`. * diff --git a/lib/cursor/aggregationCursor.js b/lib/cursor/aggregationCursor.js index 2cff8bb8e1a..a49ec426ed7 100644 --- a/lib/cursor/aggregationCursor.js +++ b/lib/cursor/aggregationCursor.js @@ -41,11 +41,17 @@ function AggregationCursor(agg) { this.cursor = null; this.agg = agg; this._transforms = []; + const connection = agg._connection; const model = agg._model; delete agg.options.cursor.useMongooseAggCursor; this._mongooseOptions = {}; - _init(model, this, agg); + if (connection) { + this.cursor = connection.db.aggregate(agg._pipeline, agg.options || {}); + setImmediate(() => this.emit('cursor', this.cursor)); + } else { + _init(model, this, agg); + } } util.inherits(AggregationCursor, Readable); diff --git a/lib/drivers/node-mongodb-native/connection.js b/lib/drivers/node-mongodb-native/connection.js index e626cb09d82..6f0bf08c278 100644 --- a/lib/drivers/node-mongodb-native/connection.js +++ b/lib/drivers/node-mongodb-native/connection.js @@ -4,6 +4,7 @@ 'use strict'; +const Aggregate = require('../../aggregate'); const MongooseConnection = require('../../connection'); const MongooseError = require('../../error/index'); const STATES = require('../../connectionState'); @@ -132,6 +133,17 @@ NativeConnection.prototype.useDb = function(name, options) { return newConn; }; +/** + * Runs a [db-level aggregate()](https://www.mongodb.com/docs/manual/reference/method/db.aggregate/) on this connection's underlying `db` + * + * @param {Array} pipeline + * @param {Object} [options] + */ + +NativeConnection.prototype.aggregate = function aggregate(pipeline, options) { + return new Aggregate(null, this).append(pipeline).option(options ?? {}); +}; + /** * Removes the database connection with the given name created with `useDb()`. 
* diff --git a/test/connection.test.js b/test/connection.test.js index 2243886f6be..d73508565fe 100644 --- a/test/connection.test.js +++ b/test/connection.test.js @@ -1786,4 +1786,43 @@ describe('connections:', function() { assert.ok(res.mongoose.results[1] instanceof CastError); assert.ok(res.mongoose.results[1].message.includes('not a number')); }); + + it('supports db-level aggregate on connection (gh-15118)', async function() { + const db = start(); + + const version = await start.mongodVersion(); + if (version[0] < 6) { + this.skip(); + return; + } + + const result = await db.aggregate([ + { $documents: [{ x: 10 }, { x: 2 }, { x: 5 }] }, + { $bucketAuto: { groupBy: '$x', buckets: 4 } } + ]); + assert.deepStrictEqual(result, [ + { _id: { min: 2, max: 5 }, count: 1 }, + { _id: { min: 5, max: 10 }, count: 1 }, + { _id: { min: 10, max: 10 }, count: 1 } + ]); + + const cursor = await db.aggregate([ + { $documents: [{ x: 10 }, { x: 2 }, { x: 5 }] }, + { $bucketAuto: { groupBy: '$x', buckets: 4 } } + ]).cursor(); + const cursorResult = []; + while (true) { + const doc = await cursor.next(); + if (doc == null) { + break; + } else { + cursorResult.push(doc); + } + } + assert.deepStrictEqual(cursorResult, [ + { _id: { min: 2, max: 5 }, count: 1 }, + { _id: { min: 5, max: 10 }, count: 1 }, + { _id: { min: 10, max: 10 }, count: 1 } + ]); + }); }); diff --git a/types/connection.d.ts b/types/connection.d.ts index e37914bdb4f..0e5380aec04 100644 --- a/types/connection.d.ts +++ b/types/connection.d.ts @@ -59,6 +59,8 @@ declare module 'mongoose' { } class Connection extends events.EventEmitter implements SessionStarter { + aggregate(pipeline?: PipelineStage[] | null, options?: AggregateOptions): Aggregate>; + /** Returns a promise that resolves when this connection successfully connects to MongoDB */ asPromise(): Promise; From 4c6e3240e812a2bce8d138278f2e88503f1d5cb4 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Thu, 2 Jan 2025 14:57:17 -0500 Subject: [PATCH 44/84] fix 
tests --- lib/aggregate.js | 8 ++++---- lib/drivers/node-mongodb-native/connection.js | 5 ++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/lib/aggregate.js b/lib/aggregate.js index 7234a3bd34b..bf3b13058e2 100644 --- a/lib/aggregate.js +++ b/lib/aggregate.js @@ -13,7 +13,7 @@ const getConstructorName = require('./helpers/getConstructorName'); const prepareDiscriminatorPipeline = require('./helpers/aggregate/prepareDiscriminatorPipeline'); const stringifyFunctionOperators = require('./helpers/aggregate/stringifyFunctionOperators'); const utils = require('./utils'); -const Connection = require('./connection'); +const { populateModelSymbol } = require('./helpers/symbols'); const read = Query.prototype.read; const readConcern = Query.prototype.readConcern; @@ -53,10 +53,10 @@ const validRedactStringValues = new Set(['$$DESCEND', '$$PRUNE', '$$KEEP']); function Aggregate(pipeline, modelOrConn) { this._pipeline = []; - if (modelOrConn instanceof Connection) { - this._connection = modelOrConn; - } else { + if (modelOrConn == null || modelOrConn[populateModelSymbol]) { this._model = modelOrConn; + } else { + this._connection = modelOrConn; } this.options = {}; diff --git a/lib/drivers/node-mongodb-native/connection.js b/lib/drivers/node-mongodb-native/connection.js index 6f0bf08c278..d7af8d9e40c 100644 --- a/lib/drivers/node-mongodb-native/connection.js +++ b/lib/drivers/node-mongodb-native/connection.js @@ -4,7 +4,6 @@ 'use strict'; -const Aggregate = require('../../aggregate'); const MongooseConnection = require('../../connection'); const MongooseError = require('../../error/index'); const STATES = require('../../connectionState'); @@ -14,6 +13,8 @@ const processConnectionOptions = require('../../helpers/processConnectionOptions const setTimeout = require('../../helpers/timers').setTimeout; const utils = require('../../utils'); +let Aggregate; + /** * A [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) connection implementation. 
* @@ -141,6 +142,8 @@ NativeConnection.prototype.useDb = function(name, options) { */ NativeConnection.prototype.aggregate = function aggregate(pipeline, options) { + Aggregate = Aggregate || require('../../aggregate'); + return new Aggregate(null, this).append(pipeline).option(options ?? {}); }; From 2ebcc9eb94b2f97aa88eb264c5b57b067e40ca2e Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Thu, 2 Jan 2025 15:02:39 -0500 Subject: [PATCH 45/84] refactor: remove need for aggregate() import --- lib/drivers/node-mongodb-native/connection.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/drivers/node-mongodb-native/connection.js b/lib/drivers/node-mongodb-native/connection.js index d7af8d9e40c..cdb4b18ddd3 100644 --- a/lib/drivers/node-mongodb-native/connection.js +++ b/lib/drivers/node-mongodb-native/connection.js @@ -142,9 +142,7 @@ NativeConnection.prototype.useDb = function(name, options) { */ NativeConnection.prototype.aggregate = function aggregate(pipeline, options) { - Aggregate = Aggregate || require('../../aggregate'); - - return new Aggregate(null, this).append(pipeline).option(options ?? {}); + return new this.base.Aggregate(null, this).append(pipeline).option(options ?? 
{}); }; /** From 563fe5d6008ac25250fc7e85a19a025ba7b5b57c Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Thu, 2 Jan 2025 15:06:55 -0500 Subject: [PATCH 46/84] fix tests and lint --- lib/aggregate.js | 4 ++-- lib/drivers/node-mongodb-native/connection.js | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/aggregate.js b/lib/aggregate.js index bf3b13058e2..89ef19e8e0e 100644 --- a/lib/aggregate.js +++ b/lib/aggregate.js @@ -13,7 +13,7 @@ const getConstructorName = require('./helpers/getConstructorName'); const prepareDiscriminatorPipeline = require('./helpers/aggregate/prepareDiscriminatorPipeline'); const stringifyFunctionOperators = require('./helpers/aggregate/stringifyFunctionOperators'); const utils = require('./utils'); -const { populateModelSymbol } = require('./helpers/symbols'); +const { modelSymbol } = require('./helpers/symbols'); const read = Query.prototype.read; const readConcern = Query.prototype.readConcern; @@ -53,7 +53,7 @@ const validRedactStringValues = new Set(['$$DESCEND', '$$PRUNE', '$$KEEP']); function Aggregate(pipeline, modelOrConn) { this._pipeline = []; - if (modelOrConn == null || modelOrConn[populateModelSymbol]) { + if (modelOrConn == null || modelOrConn[modelSymbol]) { this._model = modelOrConn; } else { this._connection = modelOrConn; diff --git a/lib/drivers/node-mongodb-native/connection.js b/lib/drivers/node-mongodb-native/connection.js index cdb4b18ddd3..0659ac4e647 100644 --- a/lib/drivers/node-mongodb-native/connection.js +++ b/lib/drivers/node-mongodb-native/connection.js @@ -13,8 +13,6 @@ const processConnectionOptions = require('../../helpers/processConnectionOptions const setTimeout = require('../../helpers/timers').setTimeout; const utils = require('../../utils'); -let Aggregate; - /** * A [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) connection implementation. 
* From 4d4bd00f6c91c79ade43035616586b833f4965a7 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Fri, 3 Jan 2025 10:25:37 -0500 Subject: [PATCH 47/84] fix(model): skip createCollection() in syncIndexes() if autoCreate: false --- lib/model.js | 20 +++++++++++--------- test/model.test.js | 22 ++++++++++++++++++++++ 2 files changed, 33 insertions(+), 9 deletions(-) diff --git a/lib/model.js b/lib/model.js index 67ccfb83a61..f4516a49678 100644 --- a/lib/model.js +++ b/lib/model.js @@ -1246,19 +1246,21 @@ Model.syncIndexes = async function syncIndexes(options) { throw new MongooseError('Model.syncIndexes() no longer accepts a callback'); } - const model = this; + const autoCreate = options?.autoCreate ?? this.schema.options?.autoCreate ?? this.db.config.autoCreate ?? true; - try { - await model.createCollection(); - } catch (err) { - if (err != null && (err.name !== 'MongoServerError' || err.code !== 48)) { - throw err; + if (autoCreate) { + try { + await this.createCollection(); + } catch (err) { + if (err != null && (err.name !== 'MongoServerError' || err.code !== 48)) { + throw err; + } } } - const diffIndexesResult = await model.diffIndexes(); - const dropped = await model.cleanIndexes({ ...options, toDrop: diffIndexesResult.toDrop }); - await model.createIndexes({ ...options, toCreate: diffIndexesResult.toCreate }); + const diffIndexesResult = await this.diffIndexes(); + const dropped = await this.cleanIndexes({ ...options, toDrop: diffIndexesResult.toDrop }); + await this.createIndexes({ ...options, toCreate: diffIndexesResult.toCreate }); return dropped; }; diff --git a/test/model.test.js b/test/model.test.js index e6a1ef9cd3e..f011b22d2ea 100644 --- a/test/model.test.js +++ b/test/model.test.js @@ -4885,6 +4885,28 @@ describe('Model', function() { assert.deepStrictEqual(indexes.map(index => index.name), ['_id_', 'name_1']); }); + it('avoids creating collection if autoCreate: false', async() => { + const collectionName = generateRandomCollectionName(); + const 
userSchema = new Schema( + { name: { type: String, index: true } }, + { autoIndex: false, autoCreate: false, collation: { locale: 'en_US', strength: 2 } } + ); + const User = db.model('User', userSchema, collectionName); + + // Act + await User.syncIndexes(); + + // Assert + const indexes = await User.listIndexes(); + assert.deepStrictEqual(indexes.map(index => index.name), ['_id_', 'name_1']); + + const collections = await User.db.listCollections(); + const collection = collections.find(c => c.name === collectionName); + assert.ok(collection); + // Collation was not applied because autoCreate was false, so Mongoose did not send `createCollection()` + assert.ok(!collection.options.collation); + }); + it('drops indexes that are not present in schema', async() => { // Arrange const collectionName = generateRandomCollectionName(); From 306fcedcaad633971fd5e413912ddd541d8c89eb Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Fri, 3 Jan 2025 16:56:33 -0500 Subject: [PATCH 48/84] fix tests --- lib/aggregate.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/aggregate.js b/lib/aggregate.js index 89ef19e8e0e..be7ae9b7fa2 100644 --- a/lib/aggregate.js +++ b/lib/aggregate.js @@ -1058,6 +1058,7 @@ Aggregate.prototype.exec = async function exec() { applyGlobalMaxTimeMS(this.options, model.db.options, model.base.options); applyGlobalDiskUse(this.options, model.db.options, model.base.options); + this._optionsForExec(); if (this.options && this.options.cursor) { return new AggregationCursor(this); From 3203fe1d57f45aab6a10f1929e0eac032156c90b Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 6 Jan 2025 15:25:12 -0500 Subject: [PATCH 49/84] feat(model): add insertOne() function to insert a single doc Fix #14843 --- lib/model.js | 43 +++++++++++++++++++++++++++++++++++++++++++ test/model.test.js | 31 +++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+) diff --git a/lib/model.js b/lib/model.js index e400536eb94..b0b7a89566e 100644 --- a/lib/model.js +++ 
b/lib/model.js @@ -2774,6 +2774,49 @@ Model.create = async function create(doc, options) { return res; }; +/** + * Shortcut for saving one document to the database. + * `MyModel.insertOne(obj, options)` is almost equivalent to `new MyModel(obj).save(options)`. + * The difference is that `insertOne()` checks if `obj` is already a document, and checks for discriminators. + * + * This function triggers the following middleware. + * + * - `save()` + * + * #### Example: + * + * // Insert one new `Character` document + * await Character.insertOne({ name: 'Jean-Luc Picard' }); + * + * // Create a new character within a transaction. + * await Character.insertOne({ name: 'Jean-Luc Picard' }, { session }); + * + * @param {Object|Document} docs Document to insert, as a POJO or Mongoose document + * @param {Object} [options] Options passed down to `save()`. + * @return {Promise} + * @api public + */ + +Model.insertOne = async function insertOne(doc, options) { + _checkContext(this, 'insertOne'); + + const discriminatorKey = this.schema.options.discriminatorKey; + const Model = this.discriminators && doc[discriminatorKey] != null ? 
+ this.discriminators[doc[discriminatorKey]] || getDiscriminatorByValue(this.discriminators, doc[discriminatorKey]) : + this; + if (Model == null) { + throw new MongooseError( + `Discriminator "${doc[discriminatorKey]}" not found for model "${this.modelName}"` + ); + } + let toSave = doc; + if (!(toSave instanceof Model)) { + toSave = new Model(toSave); + } + + return await toSave.$save(options); +}; + /** * _Requires a replica set running MongoDB >= 3.6.0._ Watches the * underlying collection for changes using diff --git a/test/model.test.js b/test/model.test.js index e7bd52447a6..be87e5a4388 100644 --- a/test/model.test.js +++ b/test/model.test.js @@ -8461,6 +8461,37 @@ describe('Model', function() { assert.deepStrictEqual(toDrop, []); }); }); + + describe('insertOne() (gh-14843)', function() { + it('should insert a new document', async function() { + const userSchema = new Schema({ + name: String + }); + const User = db.model('User', userSchema); + + const res = await User.insertOne({ name: 'John' }); + assert.ok(res instanceof User); + + const doc = await User.findOne({ _id: res._id }); + assert.equal(doc.name, 'John'); + }); + + it('should support validateBeforeSave: false option', async function() { + const userSchema = new Schema({ + name: { + type: String, + required: true + } + }); + const User = db.model('User', userSchema); + + const res = await User.insertOne({}, { validateBeforeSave: false }); + assert.ok(res instanceof User); + + const doc = await User.findOne({ _id: res._id }); + assert.equal(doc.name, undefined); + }); + }); }); From e519ac3c9392f52bfc1481e1ba0c7c9e6194a632 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 6 Jan 2025 15:27:47 -0500 Subject: [PATCH 50/84] types: add Model.insertOne() to types Re: #14843 --- test/types/models.test.ts | 10 ++++++++++ types/models.d.ts | 2 ++ 2 files changed, 12 insertions(+) diff --git a/test/types/models.test.ts b/test/types/models.test.ts index d448712a2de..1c7e1012a02 100644 --- 
a/test/types/models.test.ts +++ b/test/types/models.test.ts @@ -988,3 +988,13 @@ async function gh14802() { const conn2 = mongoose.createConnection('mongodb://127.0.0.1:27017/mongoose_test'); Model.useConnection(conn2); } + +async function gh14843() { + const schema = new mongoose.Schema({ + name: String + }); + const Model = model('Test', schema); + + const doc = await Model.insertOne({ name: 'taco' }); + expectType>(doc); +} diff --git a/types/models.d.ts b/types/models.d.ts index 3bf2c88dc9b..7d67072a20f 100644 --- a/types/models.d.ts +++ b/types/models.d.ts @@ -576,6 +576,8 @@ declare module 'mongoose' { Array>> >; + insertOne>(doc: DocContents | TRawDocType, options?: SaveOptions): Promise; + /** * List all [Atlas search indexes](https://www.mongodb.com/docs/atlas/atlas-search/create-index/) on this model's collection. * This function only works when connected to MongoDB Atlas. From fc0ee9f2e7fd376858b315c2c3d5158551f3f881 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 6 Jan 2025 16:39:00 -0500 Subject: [PATCH 51/84] feat(document): support schematype-level transform option Fix #15084 --- lib/document.js | 13 +++++++------ test/document.test.js | 41 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 6 deletions(-) diff --git a/lib/document.js b/lib/document.js index 52f39f43288..55b916893ea 100644 --- a/lib/document.js +++ b/lib/document.js @@ -4256,24 +4256,25 @@ function applySchemaTypeTransforms(self, json) { for (const path of paths) { const schematype = schema.paths[path]; - if (typeof schematype.options.transform === 'function') { + const topLevelTransformFunction = schematype.options.transform ?? schematype.constructor?.defaultOptions?.transform; + const embeddedSchemaTypeTransformFunction = schematype.$embeddedSchemaType?.options?.transform + ?? 
schematype.$embeddedSchemaType?.constructor?.defaultOptions?.transform; + if (typeof topLevelTransformFunction === 'function') { const val = self.$get(path); if (val === undefined) { continue; } - const transformedValue = schematype.options.transform.call(self, val); + const transformedValue = topLevelTransformFunction.call(self, val); throwErrorIfPromise(path, transformedValue); utils.setValue(path, transformedValue, json); - } else if (schematype.$embeddedSchemaType != null && - typeof schematype.$embeddedSchemaType.options.transform === 'function') { + } else if (typeof embeddedSchemaTypeTransformFunction === 'function') { const val = self.$get(path); if (val === undefined) { continue; } const vals = [].concat(val); - const transform = schematype.$embeddedSchemaType.options.transform; for (let i = 0; i < vals.length; ++i) { - const transformedValue = transform.call(self, vals[i]); + const transformedValue = embeddedSchemaTypeTransformFunction.call(self, vals[i]); vals[i] = transformedValue; throwErrorIfPromise(path, transformedValue); } diff --git a/test/document.test.js b/test/document.test.js index 4317795223e..7a8a9ef65b3 100644 --- a/test/document.test.js +++ b/test/document.test.js @@ -14225,6 +14225,47 @@ describe('document', function() { assert.strictEqual(duplicateKeyError.message, 'Email must be unique'); assert.strictEqual(duplicateKeyError.cause.code, 11000); }); + + it('supports global transforms per schematype (gh-15084)', async function () { + class SchemaCustomType extends mongoose.SchemaType { + static schemaName = 'CustomType'; + + constructor(key, options) { + super(key, options, 'CustomType'); + } + + cast(value) { + if (value === null) return null; + return new CustomType(value); + } + } + + class CustomType { + constructor(value) { + this.value = value; + } + } + + mongoose.Schema.Types.CustomType = SchemaCustomType; + + const Model = db.model( + 'Test', + new mongoose.Schema({ + value: { type: mongoose.Schema.Types.CustomType }, + }), + ); 
+ + const _id = new mongoose.Types.ObjectId('0'.repeat(24)); + const doc = new Model({ _id }); + doc.value = 1; + + mongoose.Schema.Types.CustomType.set('transform', v => v == null ? v : v.value); + + assert.deepStrictEqual(doc.toJSON(), { _id, value: 1 }); + assert.deepStrictEqual(doc.toObject(), { _id, value: 1 }); + + delete mongoose.Schema.Types.CustomType; + }); }); describe('Check if instance function that is supplied in schema option is available', function() { From 97adaac2f9db54f18a0988876108480e42e7ed51 Mon Sep 17 00:00:00 2001 From: hasezoey Date: Wed, 8 Jan 2025 14:10:22 +0100 Subject: [PATCH 52/84] chore(eslintrc): set "ecmaVersion: 2022" to support syntax of the nodejs version we target --- .eslintrc.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.eslintrc.js b/.eslintrc.js index b4d7d1652d9..a2bfcaa8ee6 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -104,7 +104,7 @@ module.exports = { // 'markdown' ], parserOptions: { - ecmaVersion: 2020 + ecmaVersion: 2022 }, env: { node: true, From 43cb76b6a3266cdc69a259b8c9b0eaa3b95a114f Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Wed, 8 Jan 2025 14:32:25 -0500 Subject: [PATCH 53/84] Update lib/model.js Co-authored-by: hasezoey --- lib/model.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/model.js b/lib/model.js index b0b7a89566e..d60a9e6a777 100644 --- a/lib/model.js +++ b/lib/model.js @@ -2791,7 +2791,7 @@ Model.create = async function create(doc, options) { * // Create a new character within a transaction. * await Character.insertOne({ name: 'Jean-Luc Picard' }, { session }); * - * @param {Object|Document} docs Document to insert, as a POJO or Mongoose document + * @param {Object|Document} doc Document to insert, as a POJO or Mongoose document * @param {Object} [options] Options passed down to `save()`. 
* @return {Promise} * @api public From 439ec4d97c2cd963ab6f0ca8b14c35539c435c0b Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Wed, 8 Jan 2025 14:41:05 -0500 Subject: [PATCH 54/84] style: improve docs and var naming re: code review comments --- lib/model.js | 12 ++++++------ types/models.d.ts | 5 +++++ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/lib/model.js b/lib/model.js index d60a9e6a777..b45bcecb5ca 100644 --- a/lib/model.js +++ b/lib/model.js @@ -2786,14 +2786,15 @@ Model.create = async function create(doc, options) { * #### Example: * * // Insert one new `Character` document - * await Character.insertOne({ name: 'Jean-Luc Picard' }); + * const character = await Character.insertOne({ name: 'Jean-Luc Picard' }); + * character.name; // 'Jean-Luc Picard' * * // Create a new character within a transaction. * await Character.insertOne({ name: 'Jean-Luc Picard' }, { session }); * * @param {Object|Document} doc Document to insert, as a POJO or Mongoose document * @param {Object} [options] Options passed down to `save()`. - * @return {Promise} + * @return {Promise} resolves to the saved document * @api public */ @@ -2809,12 +2810,11 @@ Model.insertOne = async function insertOne(doc, options) { `Discriminator "${doc[discriminatorKey]}" not found for model "${this.modelName}"` ); } - let toSave = doc; - if (!(toSave instanceof Model)) { - toSave = new Model(toSave); + if (!(doc instanceof Model)) { + doc = new Model(doc); } - return await toSave.$save(options); + return await doc.$save(options); }; /** diff --git a/types/models.d.ts b/types/models.d.ts index 7d67072a20f..835bb585622 100644 --- a/types/models.d.ts +++ b/types/models.d.ts @@ -576,6 +576,11 @@ declare module 'mongoose' { Array>> >; + /** + * Shortcut for saving one document to the database. + * `MyModel.insertOne(obj, options)` is almost equivalent to `new MyModel(obj).save(options)`. 
+ * The difference is that `insertOne()` checks if `obj` is already a document, and checks for discriminators. + */ insertOne>(doc: DocContents | TRawDocType, options?: SaveOptions): Promise; /** From 08113efa08ccec25265523a92a8075a48aabf057 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Wed, 8 Jan 2025 15:10:29 -0500 Subject: [PATCH 55/84] style: fix lint and code review comments --- test/document.test.js | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/test/document.test.js b/test/document.test.js index 7a8a9ef65b3..e0889ec1d29 100644 --- a/test/document.test.js +++ b/test/document.test.js @@ -14226,10 +14226,8 @@ describe('document', function() { assert.strictEqual(duplicateKeyError.cause.code, 11000); }); - it('supports global transforms per schematype (gh-15084)', async function () { + it('supports global transforms per schematype (gh-15084)', async function() { class SchemaCustomType extends mongoose.SchemaType { - static schemaName = 'CustomType'; - constructor(key, options) { super(key, options, 'CustomType'); } @@ -14239,6 +14237,7 @@ describe('document', function() { return new CustomType(value); } } + SchemaCustomType.schemaName = 'CustomType'; class CustomType { constructor(value) { @@ -14251,8 +14250,8 @@ describe('document', function() { const Model = db.model( 'Test', new mongoose.Schema({ - value: { type: mongoose.Schema.Types.CustomType }, - }), + value: { type: mongoose.Schema.Types.CustomType } + }) ); const _id = new mongoose.Types.ObjectId('0'.repeat(24)); From 933a283fef9e82acc0957c9f4724343c5045b954 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Wed, 8 Jan 2025 15:13:04 -0500 Subject: [PATCH 56/84] chore: pin exact versions of typescript-eslint to hopefully fix tests --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index da673231045..1f11693d60f 100644 --- a/package.json +++ b/package.json @@ -30,8 +30,8 @@ "devDependencies": { "@babel/core": 
"7.26.0", "@babel/preset-env": "7.26.0", - "@typescript-eslint/eslint-plugin": "^8.18.0", - "@typescript-eslint/parser": "^8.18.0", + "@typescript-eslint/eslint-plugin": "8.18.0", + "@typescript-eslint/parser": "8.18.0", "acquit": "1.3.0", "acquit-ignore": "0.2.1", "acquit-require": "0.1.1", From 9800153d07e31940f9c66759cd059b4d4a0bdd93 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Thu, 9 Jan 2025 11:22:30 -0500 Subject: [PATCH 57/84] feat(schema): introduce basic jsonSchema() method to convert Mongoose schema to JSONSchema re: #11162 --- lib/schema.js | 81 ++++++++++++++++++++++++++++++ test/schema.test.js | 119 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 200 insertions(+) diff --git a/lib/schema.js b/lib/schema.js index 319d0791e8c..d116639a281 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -2886,6 +2886,87 @@ Schema.prototype._preCompile = function _preCompile() { this.plugin(idGetter, { deduplicate: true }); }; +/** + * @param {Object} [options] + * @param [Boolean] [options.useBsonType=false] if true, specify each path's type using `bsonType` rather than `type` for MongoDB $jsonSchema support + */ + +Schema.prototype.jsonSchema = function jsonSchema(options) { + const useBsonType = options?.useBsonType ?? false; + const result = { required: [], properties: {} }; + for (const path of Object.keys(this.paths)) { + const schemaType = this.paths[path]; + + // Nested paths are stored as `nested.path` in the schema type, so create nested paths in the json schema + // when necessary. 
+ const isNested = schemaType._presplitPath.length > 1; + let jsonSchemaForPath = result; + if (isNested) { + for (let i = 0; i < schemaType._presplitPath.length - 1; ++i) { + const subpath = schemaType._presplitPath[i]; + if (jsonSchemaForPath.properties[subpath] == null) { + jsonSchemaForPath.properties[subpath] = { + bsonType: ['object', 'null'], + required: [], + properties: {} + }; + } + jsonSchemaForPath = jsonSchemaForPath.properties[subpath]; + } + } + + const lastSubpath = schemaType._presplitPath[schemaType._presplitPath.length - 1]; + let isRequired = false; + if (path === '_id') { + jsonSchemaForPath.required.push('_id'); + isRequired = true; + } else if (schemaType.options.required && typeof schemaType.options.required !== 'function') { + // Only `required: true` paths are required, conditional required is not required + jsonSchemaForPath.required.push(lastSubpath); + isRequired = true; + } + let bsonType = undefined; + let type = undefined; + + if (schemaType.instance === 'Number') { + bsonType = ['number']; + type = ['number']; + } else if (schemaType.instance === 'String') { + bsonType = ['string']; + type = ['string']; + } else if (schemaType.instance === 'Boolean') { + bsonType = ['bool']; + type = ['boolean']; + } else if (schemaType.instance === 'Date') { + bsonType = ['date']; + type = ['string']; + } else if (schemaType.instance === 'ObjectId') { + bsonType = ['objectId']; + type = ['string']; + } else if (schemaType.instance === 'Decimal128') { + bsonType = ['decimal']; + type = ['string']; + } + + if (bsonType) { + if (!isRequired) { + bsonType = [...bsonType, 'null']; + type = [...type, 'null']; + } + jsonSchemaForPath.properties[lastSubpath] = useBsonType + ? { bsonType: bsonType.length === 1 ? bsonType[0] : bsonType } + : { type: type.length === 1 ? type[0] : type }; + if (schemaType.options.enum) { + jsonSchemaForPath.properties[lastSubpath].enum = isRequired + ? 
schemaType.options.enum + : [...schemaType.options.enum, null]; + } + } + } + + return result; +}; + /*! * Module exports. */ diff --git a/test/schema.test.js b/test/schema.test.js index b8f4daf21a3..8f0a94aa3bc 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3320,4 +3320,123 @@ describe('schema', function() { sinon.restore(); } }); + + describe('jsonSchema() (gh-11162)', function() { + it('handles basic example with only top-level keys', async function() { + const schema = new Schema({ + name: { type: String, required: true }, + age: Number, + ageSource: { + type: String, + required: function() { return this.age != null; }, + enum: ['document', 'self-reported'] + } + }, { autoCreate: false, autoIndex: false }); + + assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + required: ['name', '_id'], + properties: { + _id: { + bsonType: 'objectId' + }, + name: { + bsonType: 'string' + }, + age: { + bsonType: ['number', 'null'] + }, + ageSource: { + bsonType: ['string', 'null'], + enum: ['document', 'self-reported', null] + } + } + }); + + assert.deepStrictEqual(schema.jsonSchema(), { + required: ['name', '_id'], + properties: { + _id: { + type: 'string' + }, + name: { + type: 'string' + }, + age: { + type: ['number', 'null'] + }, + ageSource: { + type: ['string', 'null'], + enum: ['document', 'self-reported', null] + } + } + }); + + const collectionName = 'gh11162'; + try { + await db.createCollection(collectionName, { + validator: { + $jsonSchema: schema.jsonSchema({ useBsonType: true }) + } + }); + const Test = db.model('Test', schema, collectionName); + + const doc1 = await Test.create({ name: 'Taco' }); + assert.equal(doc1.name, 'Taco'); + + const doc2 = await Test.create({ name: 'Billy', age: null, ageSource: null }); + assert.equal(doc2.name, 'Billy'); + assert.strictEqual(doc2.age, null); + assert.strictEqual(doc2.ageSource, null); + + const doc3 = await Test.create({ name: 'John', age: 30, ageSource: 'document' }); + 
assert.equal(doc3.name, 'John'); + assert.equal(doc3.age, 30); + assert.equal(doc3.ageSource, 'document'); + + await assert.rejects( + Test.create([{ name: 'Foobar', age: null, ageSource: 'something else' }], { validateBeforeSave: false }), + /MongoServerError: Document failed validation/ + ); + + await assert.rejects( + Test.create([{}], { validateBeforeSave: false }), + /MongoServerError: Document failed validation/ + ); + } finally { + await db.dropCollection(collectionName); + } + }); + + it('handles nested paths, subdocuments, and document arrays', async function() { + const schema = new Schema({ + name: { + first: String, + last: { type: String, required: true } + }, + /* subdoc: new Schema({ + prop: Number + }), + docArr: [{ field: Date }] */ + }); + + assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + required: ['_id'], + properties: { + name: { + bsonType: ['object', 'null'], + required: ['last'], + properties: { + first: { + bsonType: ['string', 'null'] + }, + last: { + bsonType: 'string' + } + } + }, + _id: { bsonType: 'objectId' } + } + }); + }); + }); }); From 464107c7ccf8d7dbf111ce1a795e011a42f27898 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 9 Jan 2025 17:08:37 -0500 Subject: [PATCH 58/84] most of Val's comments addressed --- .github/workflows/encryption-tests.yml | 2 -- CONTRIBUTING.md | 3 +- scripts/configure-cluster-with-encryption.sh | 3 ++ scripts/run-encryption-tests.sh | 28 ++++--------------- test/encryption/encryption.test.js | 29 ++++++++++++++++---- 5 files changed, 35 insertions(+), 30 deletions(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 263ebaedc11..521aae66abb 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -31,7 +31,5 @@ jobs: node-version: latest - name: Install Dependencies run: npm install - - name: Install mongodb-client-encryption - run: npm install mongodb-client-encryption - name: Run Tests 
run: npm run test-encryption diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 06073758d97..103d03a6efa 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,7 +46,8 @@ If you have a question about Mongoose (not a bug report) please post it to eithe * execute `npm run test-tsd` to run the typescript tests * execute `npm run ts-benchmark` to run the typescript benchmark "performance test" for a single time. * execute `npm run ts-benchmark-watch` to run the typescript benchmark "performance test" while watching changes on types folder. Note: Make sure to commit all changes before executing this command. -* in order to run tests that require an cluster with encryption locally, run `npm run test-encryption`. Alternatively, you can start an encrypted cluster using the `scripts/configure-cluster-with-encryption.sh` file. +* in order to run tests that require an cluster with encryption locally, run `npm run test-encryption`. Alternatively, you can start an encrypted cluster using the `scripts/configure-cluster-with-encryption.sh` file. +* These scripts can take a few minutes to run. If a encryption script is exited prematurely, restart the shell and delete the `data/` directory to ensure clean-up. ## Documentation diff --git a/scripts/configure-cluster-with-encryption.sh b/scripts/configure-cluster-with-encryption.sh index 4584920ed40..9c4adf330b9 100644 --- a/scripts/configure-cluster-with-encryption.sh +++ b/scripts/configure-cluster-with-encryption.sh @@ -8,6 +8,9 @@ export CWD=$(pwd); mkdir data cd data +# install encryption dependency +npm install mongodb-client-encryption > /dev/null + # note: # we're using drivers-evergreen-tools which is a repo used by MongoDB drivers to start clusters for testing. # if you'd like to make changes to the cluster settings, edit the exported variables below. 
diff --git a/scripts/run-encryption-tests.sh b/scripts/run-encryption-tests.sh index 0209292168d..60b7dfae245 100755 --- a/scripts/run-encryption-tests.sh +++ b/scripts/run-encryption-tests.sh @@ -4,36 +4,20 @@ export CWD=$(pwd); +# install encryption dependency +npm install mongodb-client-encryption > /dev/null + # set up mongodb cluster and encryption configuration if the data/ folder does not exist # note: for tooling, cluster set-up and configuration look into the 'scripts/configure-cluster-with-encryption.sh' script - if [ -d "data" ]; then cd data else source $CWD/scripts/configure-cluster-with-encryption.sh fi -# extracts MONGOOSE_TEST_URI and CRYPT_SHARED_LIB_PATH from .yml file into environment variables for this test run -read -r -d '' SOURCE_SCRIPT << EOM -const fs = require('fs'); -const file = fs.readFileSync('mo-expansion.yml', { encoding: 'utf-8' }) - .trim().split('\\n'); -const regex = /^(?<key>.*): "(?<value>.*)"$/; -const variables = file.map( - (line) => regex.exec(line.trim()).groups -).map( - ({key, value}) => \`export \${key}='\${value}'\` -).join('\n'); - -process.stdout.write(variables); -process.stdout.write('\n'); -EOM - -node --eval "$SOURCE_SCRIPT" | tee expansions.sh -source expansions.sh - -export MONGOOSE_TEST_URI=$MONGODB_URI - # run encryption tests cd .. 
npx mocha --exit ./test/encryption/*.test.js + +# uninstall encryption dependency +npm uninstall mongodb-client-encryption > /dev/null \ No newline at end of file diff --git a/test/encryption/encryption.test.js b/test/encryption/encryption.test.js index 14e18306d94..a3b562e80aa 100644 --- a/test/encryption/encryption.test.js +++ b/test/encryption/encryption.test.js @@ -1,12 +1,31 @@ 'use strict'; const assert = require('assert'); -const mdb = require('mongodb'); +const mongodb = require('mongodb'); +const fs = require('fs'); const isBsonType = require('../../lib/helpers/isBsonType'); const LOCAL_KEY = Buffer.from('Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk', 'base64'); describe('ci', () => { + + const cachedUri = process.env.MONGOOSE_TEST_URI; + const cachedLib = process.env.CRYPT_SHARED_LIB_PATH; + + before(function() { + const cwd = process.cwd(); + const file = fs.readFileSync(cwd + '/data/mo-expansion.yml', { encoding: 'utf-8' }).trim().split('\n'); + const regex = /^(?<key>.*): "(?<value>.*)"$/; + const variables = file.map((line) => regex.exec(line.trim()).groups).reduce((acc, { key, value }) => ({ ...acc, [key]: value }), {}); + process.env.CRYPT_SHARED_LIB_PATH = variables.CRYPT_SHARED_LIB_PATH; + process.env.MONGOOSE_TEST_URI = variables.MONGODB_URI; + }); + + after(function() { + process.env.CRYPT_SHARED_LIB_PATH = cachedLib; + process.env.MONGOOSE_TEST_URI = cachedUri; + }); + describe('environmental variables', () => { it('MONGOOSE_TEST_URI is set', async function() { const uri = process.env.MONGOOSE_TEST_URI; @@ -26,16 +45,16 @@ describe('ci', () => { let unencryptedClient; beforeEach(async function() { - keyVaultClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); + keyVaultClient = new mongodb.MongoClient(process.env.MONGOOSE_TEST_URI); await keyVaultClient.connect(); await keyVaultClient.db('keyvault').collection('datakeys'); - const clientEncryption = new 
mdb.ClientEncryption(keyVaultClient, { + const clientEncryption = new mongodb.ClientEncryption(keyVaultClient, { keyVaultNamespace: 'keyvault.datakeys', kmsProviders: { local: { key: LOCAL_KEY } } }); dataKey = await clientEncryption.createDataKey('local'); - encryptedClient = new mdb.MongoClient( + encryptedClient = new mongodb.MongoClient( process.env.MONGOOSE_TEST_URI, { autoEncryption: { @@ -66,7 +85,7 @@ describe('ci', () => { } ); - unencryptedClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); + unencryptedClient = new mongodb.MongoClient(process.env.MONGOOSE_TEST_URI); }); afterEach(async function() { From 06ffdead0036e6648bbe8d425f0fe73f671a6cd2 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Thu, 9 Jan 2025 17:29:47 -0500 Subject: [PATCH 59/84] comments addressed readd dependency readd dependency lint readd dependency readd dependency fix dep fix dep2 fix dep2 fix dep3 fix dep4 fix dep5 fix dep6 --- .github/workflows/encryption-tests.yml | 2 + CONTRIBUTING.md | 9 ++- package.json | 2 +- scripts/configure-cluster-with-encryption.sh | 83 +++++++++++--------- scripts/run-encryption-tests.sh | 23 ------ 5 files changed, 57 insertions(+), 62 deletions(-) delete mode 100755 scripts/run-encryption-tests.sh diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 521aae66abb..263ebaedc11 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -31,5 +31,7 @@ jobs: node-version: latest - name: Install Dependencies run: npm install + - name: Install mongodb-client-encryption + run: npm install mongodb-client-encryption - name: Run Tests run: npm run test-encryption diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 103d03a6efa..18e99b6657b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,8 +46,13 @@ If you have a question about Mongoose (not a bug report) please post it to eithe * execute `npm run test-tsd` to run the typescript tests * execute `npm run 
ts-benchmark` to run the typescript benchmark "performance test" for a single time. * execute `npm run ts-benchmark-watch` to run the typescript benchmark "performance test" while watching changes on types folder. Note: Make sure to commit all changes before executing this command. -* in order to run tests that require an cluster with encryption locally, run `npm run test-encryption`. Alternatively, you can start an encrypted cluster using the `scripts/configure-cluster-with-encryption.sh` file. -* These scripts can take a few minutes to run. If a encryption script is exited prematurely, restart the shell and delete the `data/` directory to ensure clean-up. +* in order to run tests that require a cluster with encryption locally, run `npm run test-encryption`. Alternatively, you can start an encrypted cluster using the `scripts/configure-cluster-with-encryption.sh` file. + * These scripts can take a few minutes to run. + * To change an encryption configuration, it is recommended to follow these steps: + * Edit the variables in `scripts/configure-cluster-with-encryption.sh` with your desired configuration. + * Restart your shell. + * Delete the `data/` directory if it exists. + * Finally, run the configuration script. 
## Documentation diff --git a/package.json b/package.json index 39d4451eeb7..cdf2556c7c0 100644 --- a/package.json +++ b/package.json @@ -104,7 +104,7 @@ "test-deno": "deno run --allow-env --allow-read --allow-net --allow-run --allow-sys --allow-write ./test/deno.js", "test-rs": "START_REPLICA_SET=1 mocha --timeout 30000 --exit ./test/*.test.js", "test-tsd": "node ./test/types/check-types-filename && tsd", - "test-encryption": "bash scripts/run-encryption-tests.sh", + "test-encryption": "bash scripts/configure-cluster-with-encryption.sh && mocha --exit ./test/encryption/*.test.js && npm uninstall mongodb-client-encryption > /dev/null", "tdd": "mocha ./test/*.test.js --inspect --watch --recursive --watch-files ./**/*.{js,ts}", "test-coverage": "nyc --reporter=html --reporter=text npm test", "ts-benchmark": "cd ./benchmarks/typescript/simple && npm install && npm run benchmark | node ../../../scripts/tsc-diagnostics-check" diff --git a/scripts/configure-cluster-with-encryption.sh b/scripts/configure-cluster-with-encryption.sh index 9c4adf330b9..8f366bc4bbc 100644 --- a/scripts/configure-cluster-with-encryption.sh +++ b/scripts/configure-cluster-with-encryption.sh @@ -5,39 +5,50 @@ # this script downloads all tools required to use FLE with mongodb, then starts a cluster of the provided configuration (sharded on 8.0 server) export CWD=$(pwd); -mkdir data -cd data - -# install encryption dependency -npm install mongodb-client-encryption > /dev/null - -# note: - # we're using drivers-evergreen-tools which is a repo used by MongoDB drivers to start clusters for testing. - # if you'd like to make changes to the cluster settings, edit the exported variables below. 
- # for configuration options for the exported variables, see here: https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-orchestration.sh - # after this script is run, the data/ folder will notably contain the following: - # 'mo-expansion.yml' file which contains for your cluster URI and crypt shared library path - # 'drivers-evergreen-tools/mongodb/bin' which contain executables for other mongodb libraries such as mongocryptd, mongosh, and mongod -if [ ! -d "drivers-evergreen-tools/" ]; then - git clone --depth=1 "https://github.com/mongodb-labs/drivers-evergreen-tools.git" -fi - -# configure cluster settings -export DRIVERS_TOOLS=$CWD/data/drivers-evergreen-tools -export MONGODB_VERSION=8.0 -export AUTH=true -export MONGODB_BINARIES=$DRIVERS_TOOLS/mongodb/bin -export MONGO_ORCHESTRATION_HOME=$DRIVERS_TOOLS/mo -export PROJECT_ORCHESTRATION_HOME=$DRIVERS_TOOLS/.evergreen/orchestration -export TOPOLOGY=sharded_cluster -export SSL=nossl - -cd $DRIVERS_TOOLS -rm -rf mongosh mongodb mo -mkdir mo -cd - - -rm expansions.sh 2> /dev/null - -# start cluster -bash $DRIVERS_TOOLS/.evergreen/run-orchestration.sh + +# install extra dependency +npm install mongodb-client-encryption + +# set up mongodb cluster and encryption configuration if the data/ folder does not exist +if [ ! -d "data" ]; then + + mkdir data + cd data + + # note: + # we're using drivers-evergreen-tools which is a repo used by MongoDB drivers to start clusters for testing. + # if you'd like to make changes to the cluster settings, edit the exported variables below. 
+ # for configuration options for the exported variables, see here: https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-orchestration.sh + # after this script is run, the data/ folder will notably contain the following: + # 'mo-expansion.yml' file which contains your cluster URI and crypt shared library path + # 'drivers-evergreen-tools/mongodb/bin' which contains executables for other mongodb libraries such as mongocryptd, mongosh, and mongod + if [ ! -d "drivers-evergreen-tools/" ]; then + git clone --depth=1 "https://github.com/mongodb-labs/drivers-evergreen-tools.git" + fi + + # configure cluster settings + export DRIVERS_TOOLS=$CWD/data/drivers-evergreen-tools + export MONGODB_VERSION=8.0 + export AUTH=true + export MONGODB_BINARIES=$DRIVERS_TOOLS/mongodb/bin + export MONGO_ORCHESTRATION_HOME=$DRIVERS_TOOLS/mo + export PROJECT_ORCHESTRATION_HOME=$DRIVERS_TOOLS/.evergreen/orchestration + export TOPOLOGY=sharded_cluster + export SSL=nossl + + cd $DRIVERS_TOOLS + rm -rf mongosh mongodb mo + mkdir mo + cd - + + rm expansions.sh 2> /dev/null + + echo 'Configuring Cluster...' + + # start cluster + (bash $DRIVERS_TOOLS/.evergreen/run-orchestration.sh) 1> /dev/null 2> /dev/null + + echo 'Cluster Configuration Finished!' + + cd .. 
+fi \ No newline at end of file diff --git a/scripts/run-encryption-tests.sh b/scripts/run-encryption-tests.sh deleted file mode 100755 index 60b7dfae245..00000000000 --- a/scripts/run-encryption-tests.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash - -# sets up mongodb cluster and encryption configuration, adds relevant variables to the environment, and runs encryption tests - -export CWD=$(pwd); - -# install encryption dependency -npm install mongodb-client-encryption > /dev/null - -# set up mongodb cluster and encryption configuration if the data/ folder does not exist -# note: for tooling, cluster set-up and configuration look into the 'scripts/configure-cluster-with-encryption.sh' script -if [ -d "data" ]; then - cd data -else - source $CWD/scripts/configure-cluster-with-encryption.sh -fi - -# run encryption tests -cd .. -npx mocha --exit ./test/encryption/*.test.js - -# uninstall encryption dependency -npm uninstall mongodb-client-encryption > /dev/null \ No newline at end of file From 7abbbcc2fbbc518da406cbde0bb7badb1eb76b00 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Fri, 10 Jan 2025 10:05:42 -0500 Subject: [PATCH 60/84] change version to avert N-API bug --- .github/workflows/encryption-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index 263ebaedc11..d4903209246 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -28,7 +28,7 @@ jobs: - name: Setup node uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: - node-version: latest + node-version: 22 - name: Install Dependencies run: npm install - name: Install mongodb-client-encryption From 45b20f08d230242120cfa0311a48bf49b161c04b Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Fri, 10 Jan 2025 17:29:25 -0500 Subject: [PATCH 61/84] feat(model): make `syncIndexes()` not call `createIndex()` on indexes that already 
exist --- lib/model.js | 26 +++++++++++++------------- test/model.test.js | 20 ++++++++++++++++++++ 2 files changed, 33 insertions(+), 13 deletions(-) diff --git a/lib/model.js b/lib/model.js index 8d38d9ee083..9738db0f68c 100644 --- a/lib/model.js +++ b/lib/model.js @@ -1256,7 +1256,7 @@ Model.syncIndexes = async function syncIndexes(options) { } } - const diffIndexesResult = await model.diffIndexes(); + const diffIndexesResult = await model.diffIndexes({ indexOptionsToCreate: true }); const dropped = await model.cleanIndexes({ ...options, toDrop: diffIndexesResult.toDrop }); await model.createIndexes({ ...options, toCreate: diffIndexesResult.toCreate }); @@ -1361,13 +1361,14 @@ Model.listSearchIndexes = async function listSearchIndexes(options) { * * const { toDrop, toCreate } = await Model.diffIndexes(); * toDrop; // Array of strings containing names of indexes that `syncIndexes()` will drop - * toCreate; // Array of strings containing names of indexes that `syncIndexes()` will create + * toCreate; // Array of index specs containing the keys of indexes that `syncIndexes()` will create * * @param {Object} [options] + * @param {Boolean} [options.indexOptionsToCreate=false] If true, `toCreate` will include both the index spec and the index options, not just the index spec * @return {Promise} contains the indexes that would be dropped in MongoDB and indexes that would be created in MongoDB as `{ toDrop: string[], toCreate: string[] }`. 
*/ -Model.diffIndexes = async function diffIndexes() { +Model.diffIndexes = async function diffIndexes(options) { if (typeof arguments[0] === 'function' || typeof arguments[1] === 'function') { throw new MongooseError('Model.syncIndexes() no longer accepts a callback'); } @@ -1389,13 +1390,14 @@ Model.diffIndexes = async function diffIndexes() { const schemaIndexes = getRelatedSchemaIndexes(model, schema.indexes()); const toDrop = getIndexesToDrop(schema, schemaIndexes, dbIndexes); - const toCreate = getIndexesToCreate(schema, schemaIndexes, dbIndexes, toDrop); + const toCreate = getIndexesToCreate(schema, schemaIndexes, dbIndexes, toDrop, options); return { toDrop, toCreate }; }; -function getIndexesToCreate(schema, schemaIndexes, dbIndexes, toDrop) { +function getIndexesToCreate(schema, schemaIndexes, dbIndexes, toDrop, options) { const toCreate = []; + const indexOptionsToCreate = options?.indexOptionsToCreate ?? false; for (const [schemaIndexKeysObject, schemaIndexOptions] of schemaIndexes) { let found = false; @@ -1416,7 +1418,11 @@ function getIndexesToCreate(schema, schemaIndexes, dbIndexes, toDrop) { } if (!found) { - toCreate.push(schemaIndexKeysObject); + if (indexOptionsToCreate) { + toCreate.push([schemaIndexKeysObject, schemaIndexOptions]); + } else { + toCreate.push(schemaIndexKeysObject); + } } } @@ -1597,7 +1603,7 @@ Model.createIndexes = async function createIndexes(options) { */ function _ensureIndexes(model, options, callback) { - const indexes = model.schema.indexes(); + const indexes = Array.isArray(options?.toCreate) ? 
options.toCreate : model.schema.indexes(); let indexError; options = options || {}; @@ -1681,12 +1687,6 @@ function _ensureIndexes(model, options, callback) { indexOptions.background = options.background; } - if ('toCreate' in options) { - if (options.toCreate.length === 0) { - return done(); - } - } - // Just in case `createIndex()` throws a sync error let promise = null; try { diff --git a/test/model.test.js b/test/model.test.js index af518fe7340..aa3a8bff98e 100644 --- a/test/model.test.js +++ b/test/model.test.js @@ -5056,6 +5056,26 @@ describe('Model', function() { assert.strictEqual(indexes[1].background, false); }); + it('syncIndexes() does not call createIndex for indexes that already exist', async function() { + const opts = { autoIndex: false }; + const schema = new Schema({ name: String }, opts); + schema.index({ name: 1 }, { background: true }); + + const M = db.model('Test', schema); + await M.syncIndexes(); + + const indexes = await M.listIndexes(); + assert.deepEqual(indexes[1].key, { name: 1 }); + + sinon.stub(M.collection, 'createIndex').callsFake(() => Promise.resolve()); + try { + await M.syncIndexes(); + assert.equal(M.collection.createIndex.getCalls().length, 0); + } finally { + sinon.restore(); + } + }); + it('syncIndexes() supports hideIndexes (gh-14868)', async function() { const opts = { autoIndex: false }; const schema = new Schema({ name: String }, opts); From 942911c2e8b5136588318d7dd7960da39d43d466 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 13 Jan 2025 10:49:02 -0500 Subject: [PATCH 62/84] fix(schema): WIP array and document array support for jsonSchema() --- lib/schema.js | 29 +++++++++++++++++++++++++ test/schema.test.js | 52 +++++++++++++++++++++++++++++++++++++-------- 2 files changed, 72 insertions(+), 9 deletions(-) diff --git a/lib/schema.js b/lib/schema.js index d116639a281..f0dc518e6f7 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -2887,6 +2887,16 @@ Schema.prototype._preCompile = function _preCompile() { }; 
/** + * Returns a JSON schema representation of this Schema. + * + * In addition to types, `jsonSchema()` supports the following Mongoose validators: + * - `enum` for strings and numbers + * + * #### Example: + * const schema = new Schema({ name: String }); + * schema.jsonSchema(); // { } + * schema.jsonSchema({ useBsonType: true }); // + * * @param {Object} [options] * @param [Boolean] [options.useBsonType=false] if true, specify each path's type using `bsonType` rather than `type` for MongoDB $jsonSchema support */ @@ -2927,6 +2937,7 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { } let bsonType = undefined; let type = undefined; + let additionalProperties = {}; if (schemaType.instance === 'Number') { bsonType = ['number']; @@ -2946,6 +2957,23 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { } else if (schemaType.instance === 'Decimal128') { bsonType = ['decimal']; type = ['string']; + } else if (schemaType.instance === 'Embedded') { + bsonType = ['object'], + type = ['object']; + additionalProperties = schemaType.schema.jsonSchema(options); + } else if (schemaType.instance === 'Array') { + bsonType = ['array']; + type = ['array']; + if (schemaType.schema) { + // DocumentArray + if (useBsonType) { + additionalProperties.items = { bsonType: ['object', 'null'], ...schemaType.schema.jsonSchema(options) }; + } else { + additionalProperties.items = { type: ['object', 'null'], ...schemaType.schema.jsonSchema(options) }; + } + } else { + // Primitive array + } } if (bsonType) { @@ -2961,6 +2989,7 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { ? 
schemaType.options.enum : [...schemaType.options.enum, null]; } + Object.assign(jsonSchemaForPath.properties[lastSubpath], additionalProperties); } } diff --git a/test/schema.test.js b/test/schema.test.js index 8f0a94aa3bc..288be0baa23 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3407,16 +3407,45 @@ describe('schema', function() { } }); - it('handles nested paths, subdocuments, and document arrays', async function() { + it('handles arrays and document arrays', async function() { + const schema = new Schema({ + tags: [String], + docArr: [new Schema({ field: Date }, { _id: false })] + }); + + assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + required: ['_id'], + properties: { + tags: { + bsonType: ['array', 'null'], + items: { + bsonType: ['string', 'null'] + } + }, + docArr: { + bsonType: ['array', 'null'], + items: { + bsonType: ['object', 'null'], + required: [], + properties: { + field: { bsonType: ['date', 'null'] } + } + } + }, + _id: { bsonType: 'objectId' } + } + }); + }); + + it('handles nested paths and subdocuments', async function() { const schema = new Schema({ name: { first: String, last: { type: String, required: true } }, - /* subdoc: new Schema({ + subdoc: new Schema({ prop: Number - }), - docArr: [{ field: Date }] */ + }, { _id: false }) }); assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { @@ -3426,11 +3455,16 @@ describe('schema', function() { bsonType: ['object', 'null'], required: ['last'], properties: { - first: { - bsonType: ['string', 'null'] - }, - last: { - bsonType: 'string' + first: { bsonType: ['string', 'null'] }, + last: { bsonType: 'string' } + } + }, + subdoc: { + bsonType: ['object', 'null'], + required: [], + properties: { + prop: { + bsonType: ['number', 'null'] } } }, From cf1f3b3f5fbf5e2c67871d0800abc98a29b2f79e Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 13 Jan 2025 12:02:56 -0500 Subject: [PATCH 63/84] fix(schema): support primitive arrays and arrays of arrays 
in JSONschema --- lib/schema.js | 116 +++++++++++++++++++++++++++++--------------- test/schema.test.js | 93 +++++++++++++++++++++++------------ 2 files changed, 138 insertions(+), 71 deletions(-) diff --git a/lib/schema.js b/lib/schema.js index f0dc518e6f7..7f83d8b9cba 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -2935,46 +2935,9 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { jsonSchemaForPath.required.push(lastSubpath); isRequired = true; } - let bsonType = undefined; - let type = undefined; - let additionalProperties = {}; - - if (schemaType.instance === 'Number') { - bsonType = ['number']; - type = ['number']; - } else if (schemaType.instance === 'String') { - bsonType = ['string']; - type = ['string']; - } else if (schemaType.instance === 'Boolean') { - bsonType = ['bool']; - type = ['boolean']; - } else if (schemaType.instance === 'Date') { - bsonType = ['date']; - type = ['string']; - } else if (schemaType.instance === 'ObjectId') { - bsonType = ['objectId']; - type = ['string']; - } else if (schemaType.instance === 'Decimal128') { - bsonType = ['decimal']; - type = ['string']; - } else if (schemaType.instance === 'Embedded') { - bsonType = ['object'], - type = ['object']; - additionalProperties = schemaType.schema.jsonSchema(options); - } else if (schemaType.instance === 'Array') { - bsonType = ['array']; - type = ['array']; - if (schemaType.schema) { - // DocumentArray - if (useBsonType) { - additionalProperties.items = { bsonType: ['object', 'null'], ...schemaType.schema.jsonSchema(options) }; - } else { - additionalProperties.items = { type: ['object', 'null'], ...schemaType.schema.jsonSchema(options) }; - } - } else { - // Primitive array - } - } + const convertedSchemaType = _schemaTypeToJSONSchema(schemaType, isRequired, options); + const { additionalProperties } = convertedSchemaType; + let { bsonType, type } = convertedSchemaType; if (bsonType) { if (!isRequired) { @@ -2993,9 +2956,82 @@ Schema.prototype.jsonSchema = function 
jsonSchema(options) { } } + // Otherwise MongoDB errors with "$jsonSchema keyword 'required' cannot be an empty array" + if (result.required.length === 0) { + delete result.required; + } return result; }; +/*! + * Internal helper for converting an individual schematype to JSON schema properties. Recursively called for + * arrays. + * + * @param {SchemaType} schemaType + * @param {Boolean} isRequired + * @param {Object} options + */ + +function _schemaTypeToJSONSchema(schemaType, isRequired, options) { + const useBsonType = options?.useBsonType ?? false; + let bsonType = undefined; + let type = undefined; + let additionalProperties = {}; + + if (schemaType.instance === 'Number') { + bsonType = ['number']; + type = ['number']; + } else if (schemaType.instance === 'String') { + bsonType = ['string']; + type = ['string']; + } else if (schemaType.instance === 'Boolean') { + bsonType = ['bool']; + type = ['boolean']; + } else if (schemaType.instance === 'Date') { + bsonType = ['date']; + type = ['string']; + } else if (schemaType.instance === 'ObjectId') { + bsonType = ['objectId']; + type = ['string']; + } else if (schemaType.instance === 'Decimal128') { + bsonType = ['decimal']; + type = ['string']; + } else if (schemaType.instance === 'Embedded') { + bsonType = ['object'], + type = ['object']; + additionalProperties = schemaType.schema.jsonSchema(options); + } else if (schemaType.instance === 'Array') { + bsonType = ['array']; + type = ['array']; + if (schemaType.schema) { + // DocumentArray + if (useBsonType) { + additionalProperties.items = { bsonType: ['object', 'null'], ...schemaType.schema.jsonSchema(options) }; + } else { + additionalProperties.items = { type: ['object', 'null'], ...schemaType.schema.jsonSchema(options) }; + } + } else { + // Primitive array + const embeddedSchemaType = schemaType.getEmbeddedSchemaType(); + const isRequired = embeddedSchemaType.options.required && typeof embeddedSchemaType.options.required !== 'function'; + const 
convertedSchemaType = _schemaTypeToJSONSchema(embeddedSchemaType, isRequired, options); + let bsonType = convertedSchemaType.bsonType; + let type = convertedSchemaType.type; + if (!isRequired) { + bsonType = [...bsonType, 'null']; + type = [...type, 'null']; + } + if (useBsonType) { + additionalProperties.items = { bsonType, ...convertedSchemaType.additionalProperties }; + } else { + additionalProperties.items = { type, ...convertedSchemaType.additionalProperties }; + } + } + } + + return { bsonType, type, additionalProperties }; +} + /*! * Module exports. */ diff --git a/test/schema.test.js b/test/schema.test.js index 288be0baa23..804496ceb33 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3322,6 +3322,12 @@ describe('schema', function() { }); describe('jsonSchema() (gh-11162)', function() { + const collectionName = 'gh11162'; + + afterEach(async function() { + await db.dropCollection(collectionName); + }); + it('handles basic example with only top-level keys', async function() { const schema = new Schema({ name: { type: String, required: true }, @@ -3371,45 +3377,41 @@ describe('schema', function() { } }); - const collectionName = 'gh11162'; - try { - await db.createCollection(collectionName, { - validator: { - $jsonSchema: schema.jsonSchema({ useBsonType: true }) - } - }); - const Test = db.model('Test', schema, collectionName); + await db.createCollection(collectionName, { + validator: { + $jsonSchema: schema.jsonSchema({ useBsonType: true }) + } + }); + const Test = db.model('Test', schema, collectionName); - const doc1 = await Test.create({ name: 'Taco' }); - assert.equal(doc1.name, 'Taco'); + const doc1 = await Test.create({ name: 'Taco' }); + assert.equal(doc1.name, 'Taco'); - const doc2 = await Test.create({ name: 'Billy', age: null, ageSource: null }); - assert.equal(doc2.name, 'Billy'); - assert.strictEqual(doc2.age, null); - assert.strictEqual(doc2.ageSource, null); + const doc2 = await Test.create({ name: 'Billy', age: null, ageSource: 
null }); + assert.equal(doc2.name, 'Billy'); + assert.strictEqual(doc2.age, null); + assert.strictEqual(doc2.ageSource, null); - const doc3 = await Test.create({ name: 'John', age: 30, ageSource: 'document' }); - assert.equal(doc3.name, 'John'); - assert.equal(doc3.age, 30); - assert.equal(doc3.ageSource, 'document'); + const doc3 = await Test.create({ name: 'John', age: 30, ageSource: 'document' }); + assert.equal(doc3.name, 'John'); + assert.equal(doc3.age, 30); + assert.equal(doc3.ageSource, 'document'); - await assert.rejects( - Test.create([{ name: 'Foobar', age: null, ageSource: 'something else' }], { validateBeforeSave: false }), - /MongoServerError: Document failed validation/ - ); + await assert.rejects( + Test.create([{ name: 'Foobar', age: null, ageSource: 'something else' }], { validateBeforeSave: false }), + /MongoServerError: Document failed validation/ + ); - await assert.rejects( - Test.create([{}], { validateBeforeSave: false }), - /MongoServerError: Document failed validation/ - ); - } finally { - await db.dropCollection(collectionName); - } + await assert.rejects( + Test.create([{}], { validateBeforeSave: false }), + /MongoServerError: Document failed validation/ + ); }); it('handles arrays and document arrays', async function() { const schema = new Schema({ tags: [String], + coordinates: [[{ type: Number, required: true }]], docArr: [new Schema({ field: Date }, { _id: false })] }); @@ -3422,11 +3424,19 @@ describe('schema', function() { bsonType: ['string', 'null'] } }, + coordinates: { + bsonType: ['array', 'null'], + items: { + bsonType: ['array', 'null'], + items: { + bsonType: ['number'] + } + } + }, docArr: { bsonType: ['array', 'null'], items: { bsonType: ['object', 'null'], - required: [], properties: { field: { bsonType: ['date', 'null'] } } @@ -3435,6 +3445,18 @@ describe('schema', function() { _id: { bsonType: 'objectId' } } }); + + await db.createCollection(collectionName, { + validator: { + $jsonSchema: schema.jsonSchema({ 
useBsonType: true }) + } + }); + const Test = db.model('Test', schema, collectionName); + + const now = new Date(); + await Test.create({ tags: ['javascript'], coordinates: [[0, 0]], docArr: [{ field: now }] }); + + await Test.create({ tags: 'javascript', coordinates: [[0, 0]], docArr: [{}] }); }); it('handles nested paths and subdocuments', async function() { @@ -3461,7 +3483,6 @@ describe('schema', function() { }, subdoc: { bsonType: ['object', 'null'], - required: [], properties: { prop: { bsonType: ['number', 'null'] @@ -3471,6 +3492,16 @@ describe('schema', function() { _id: { bsonType: 'objectId' } } }); + + await db.createCollection(collectionName, { + validator: { + $jsonSchema: schema.jsonSchema({ useBsonType: true }) + } + }); + const Test = db.model('Test', schema, collectionName); + + await Test.create({ name: { last: 'James' }, subdoc: {} }); + await Test.create({ name: { first: 'Mike', last: 'James' }, subdoc: { prop: 42 } }); }); }); }); From d0e8d57fdd0a9a4f377a2f598a749f743a7dd995 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 13 Jan 2025 12:26:09 -0500 Subject: [PATCH 64/84] fix(schema): test jsonSchema() output with AJV --- lib/schema.js | 31 +++++++++++++++++++++++-------- package.json | 1 + test/schema.test.js | 23 +++++++++++++++++++++++ 3 files changed, 47 insertions(+), 8 deletions(-) diff --git a/lib/schema.js b/lib/schema.js index 7f83d8b9cba..a94cda42a4d 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -2889,13 +2889,20 @@ Schema.prototype._preCompile = function _preCompile() { /** * Returns a JSON schema representation of this Schema. * + * By default, returns normal JSON schema representation, which is not typically what you want to use with + * [MongoDB's `$jsonSchema` collection option](https://www.mongodb.com/docs/manual/core/schema-validation/specify-json-schema/). + * Use the `useBsonType: true` option to return MongoDB `$jsonSchema` syntax instead. 
+ * * In addition to types, `jsonSchema()` supports the following Mongoose validators: * - `enum` for strings and numbers * * #### Example: * const schema = new Schema({ name: String }); - * schema.jsonSchema(); // { } - * schema.jsonSchema({ useBsonType: true }); // + * // // { required: ['_id'], properties: { name: { type: ['string', 'null'] }, _id: { type: 'string' } } } + * schema.jsonSchema(); + * + * // { required: ['_id'], properties: { name: { bsonType: ['string', 'null'] }, _id: { bsonType: 'objectId' } } } + * schema.jsonSchema({ useBsonType: true }); * * @param {Object} [options] * @param [Boolean] [options.useBsonType=false] if true, specify each path's type using `bsonType` rather than `type` for MongoDB $jsonSchema support @@ -2903,7 +2910,7 @@ Schema.prototype._preCompile = function _preCompile() { Schema.prototype.jsonSchema = function jsonSchema(options) { const useBsonType = options?.useBsonType ?? false; - const result = { required: [], properties: {} }; + const result = useBsonType ? { required: [], properties: {} } : { type: 'object', required: [], properties: {} }; for (const path of Object.keys(this.paths)) { const schemaType = this.paths[path]; @@ -2915,11 +2922,17 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { for (let i = 0; i < schemaType._presplitPath.length - 1; ++i) { const subpath = schemaType._presplitPath[i]; if (jsonSchemaForPath.properties[subpath] == null) { - jsonSchemaForPath.properties[subpath] = { - bsonType: ['object', 'null'], - required: [], - properties: {} - }; + jsonSchemaForPath.properties[subpath] = useBsonType + ? 
{ + bsonType: ['object', 'null'], + required: [], + properties: {} + } + : { + type: ['object', 'null'], + required: [], + properties: {} + }; } jsonSchemaForPath = jsonSchemaForPath.properties[subpath]; } @@ -3027,6 +3040,8 @@ function _schemaTypeToJSONSchema(schemaType, isRequired, options) { additionalProperties.items = { type, ...convertedSchemaType.additionalProperties }; } } + } else { + throw new Error(`Cannot convert schema to JSON schema: unsupported schematype ${schemaType.instance}`); } return { bsonType, type, additionalProperties }; diff --git a/package.json b/package.json index 453705f05eb..7690e1dca54 100644 --- a/package.json +++ b/package.json @@ -35,6 +35,7 @@ "acquit": "1.3.0", "acquit-ignore": "0.2.1", "acquit-require": "0.1.1", + "ajv": "8.17.1", "assert-browserify": "2.0.0", "babel-loader": "8.2.5", "broken-link-checker": "^0.7.8", diff --git a/test/schema.test.js b/test/schema.test.js index 804496ceb33..04d05979ed8 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -6,6 +6,7 @@ const start = require('./common'); +const Ajv = require('ajv'); const mongoose = start.mongoose; const assert = require('assert'); const sinon = require('sinon'); @@ -3359,6 +3360,7 @@ describe('schema', function() { }); assert.deepStrictEqual(schema.jsonSchema(), { + type: 'object', required: ['name', '_id'], properties: { _id: { @@ -3406,6 +3408,15 @@ describe('schema', function() { Test.create([{}], { validateBeforeSave: false }), /MongoServerError: Document failed validation/ ); + + const ajv = new Ajv(); + const validate = ajv.compile(schema.jsonSchema()); + + assert.ok(validate({ _id: 'test', name: 'Taco' })); + assert.ok(validate({ _id: 'test', name: 'Billy', age: null, ageSource: null })); + assert.ok(validate({ _id: 'test', name: 'John', age: 30, ageSource: 'document' })); + assert.ok(!validate({ _id: 'test', name: 'Foobar', age: null, ageSource: 'something else' })); + assert.ok(!validate({})); }); it('handles arrays and document arrays', async 
function() { @@ -3457,6 +3468,12 @@ describe('schema', function() { await Test.create({ tags: ['javascript'], coordinates: [[0, 0]], docArr: [{ field: now }] }); await Test.create({ tags: 'javascript', coordinates: [[0, 0]], docArr: [{}] }); + + const ajv = new Ajv(); + const validate = ajv.compile(schema.jsonSchema()); + + assert.ok(validate({ _id: 'test', tags: ['javascript'], coordinates: [[0, 0]], docArr: [{ field: '2023-07-16' }] })); + assert.ok(validate({ _id: 'test', tags: ['javascript'], coordinates: [[0, 0]], docArr: [{}] })); }); it('handles nested paths and subdocuments', async function() { @@ -3502,6 +3519,12 @@ describe('schema', function() { await Test.create({ name: { last: 'James' }, subdoc: {} }); await Test.create({ name: { first: 'Mike', last: 'James' }, subdoc: { prop: 42 } }); + + const ajv = new Ajv(); + const validate = ajv.compile(schema.jsonSchema()); + + assert.ok(validate({ _id: 'test', name: { last: 'James' }, subdoc: {} })); + assert.ok(validate({ _id: 'test', name: { first: 'Mike', last: 'James' }, subdoc: { prop: 42 } })); }); }); }); From e8a35c3a039096de324081d0b0c210b2fa5d44dd Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 13 Jan 2025 15:30:10 -0500 Subject: [PATCH 65/84] fix(schema): map support for jsonSchema() --- lib/schema.js | 49 +++++++++++++++--- lib/schema/map.js | 7 +++ test/schema.test.js | 122 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 171 insertions(+), 7 deletions(-) diff --git a/lib/schema.js b/lib/schema.js index a94cda42a4d..f44135707b4 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -2914,6 +2914,11 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { for (const path of Object.keys(this.paths)) { const schemaType = this.paths[path]; + // Skip Map embedded paths, maps will be handled seperately. 
+ if (schemaType._presplitPath.indexOf('$*') !== -1) { + continue; + } + // Nested paths are stored as `nested.path` in the schema type, so create nested paths in the json schema // when necessary. const isNested = schemaType._presplitPath.length > 1; @@ -2925,12 +2930,10 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { jsonSchemaForPath.properties[subpath] = useBsonType ? { bsonType: ['object', 'null'], - required: [], properties: {} } : { type: ['object', 'null'], - required: [], properties: {} }; } @@ -2941,9 +2944,15 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { const lastSubpath = schemaType._presplitPath[schemaType._presplitPath.length - 1]; let isRequired = false; if (path === '_id') { + if (!jsonSchemaForPath.required) { + jsonSchemaForPath.required = []; + } jsonSchemaForPath.required.push('_id'); isRequired = true; } else if (schemaType.options.required && typeof schemaType.options.required !== 'function') { + if (!jsonSchemaForPath.required) { + jsonSchemaForPath.required = []; + } // Only `required: true` paths are required, conditional required is not required jsonSchemaForPath.required.push(lastSubpath); isRequired = true; @@ -3019,9 +3028,9 @@ function _schemaTypeToJSONSchema(schemaType, isRequired, options) { if (schemaType.schema) { // DocumentArray if (useBsonType) { - additionalProperties.items = { bsonType: ['object', 'null'], ...schemaType.schema.jsonSchema(options) }; + additionalProperties.items = { ...schemaType.schema.jsonSchema(options), bsonType: ['object', 'null'] }; } else { - additionalProperties.items = { type: ['object', 'null'], ...schemaType.schema.jsonSchema(options) }; + additionalProperties.items = { ...schemaType.schema.jsonSchema(options), type: ['object', 'null'] }; } } else { // Primitive array @@ -3035,13 +3044,39 @@ function _schemaTypeToJSONSchema(schemaType, isRequired, options) { type = [...type, 'null']; } if (useBsonType) { - additionalProperties.items = { bsonType, 
...convertedSchemaType.additionalProperties }; + additionalProperties.items = { ...convertedSchemaType.additionalProperties, bsonType }; + } else { + additionalProperties.items = { ...convertedSchemaType.additionalProperties, type }; + } + } + } else if (schemaType.instance === 'Map') { + bsonType = ['object']; + type = ['object']; + const embeddedSchemaType = schemaType.getEmbeddedSchemaType(); + const isRequired = embeddedSchemaType.options.required && typeof embeddedSchemaType.options.required !== 'function'; + if (embeddedSchemaType.schema) { + // Map of objects + additionalProperties.additionalProperties = useBsonType + ? { ...embeddedSchemaType.schema.jsonSchema(options), bsonType: ['object', 'null'] } + : { ...embeddedSchemaType.schema.jsonSchema(options), type: ['object', 'null'] }; + + } else { + // Map of primitives + const convertedSchemaType = _schemaTypeToJSONSchema(embeddedSchemaType, isRequired, options); + let bsonType = convertedSchemaType.bsonType; + let type = convertedSchemaType.type; + if (!isRequired) { + bsonType = [...bsonType, 'null']; + type = [...type, 'null']; + } + if (useBsonType) { + additionalProperties.additionalProperties = { bsonType: bsonType.length === 1 ? bsonType[0] : bsonType }; } else { - additionalProperties.items = { type, ...convertedSchemaType.additionalProperties }; + additionalProperties.additionalProperties = { type: type.length === 1 ? type[0] : type }; } } } else { - throw new Error(`Cannot convert schema to JSON schema: unsupported schematype ${schemaType.instance}`); + throw new Error(`Cannot convert schema to JSON schema: unsupported schematype "${schemaType.instance}"`); } return { bsonType, type, additionalProperties }; diff --git a/lib/schema/map.js b/lib/schema/map.js index 1c7c41ae900..976245a198e 100644 --- a/lib/schema/map.js +++ b/lib/schema/map.js @@ -67,6 +67,13 @@ class SchemaMap extends SchemaType { } return schematype; } + + /** + * Returns the embedded schema type (i.e. 
the `.$*` path) + */ + getEmbeddedSchemaType() { + return this.$__schemaType; + } } /** diff --git a/test/schema.test.js b/test/schema.test.js index 04d05979ed8..e73b11f2369 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3526,5 +3526,127 @@ describe('schema', function() { assert.ok(validate({ _id: 'test', name: { last: 'James' }, subdoc: {} })); assert.ok(validate({ _id: 'test', name: { first: 'Mike', last: 'James' }, subdoc: { prop: 42 } })); }); + + it('handles maps', async function() { + const schema = new Schema({ + props: { + type: Map, + of: String, + required: true + }, + subdocs: { + type: Map, + of: new Schema({ + name: String, + age: { type: Number, required: true } + }, { _id: false }) + }, + nested: { + myMap: { + type: Map, + of: Number + } + } + }); + + assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + required: ['props', '_id'], + properties: { + props: { + bsonType: 'object', + additionalProperties: { + bsonType: ['string', 'null'] + } + }, + subdocs: { + bsonType: ['object', 'null'], + additionalProperties: { + bsonType: ['object', 'null'], + required: ['age'], + properties: { + name: { bsonType: ['string', 'null'] }, + age: { bsonType: 'number' } + } + } + }, + nested: { + bsonType: ['object', 'null'], + properties: { + myMap: { + bsonType: ['object', 'null'], + additionalProperties: { + bsonType: ['number', 'null'] + } + } + } + }, + _id: { bsonType: 'objectId' } + } + }); + + await db.createCollection(collectionName, { + validator: { + $jsonSchema: schema.jsonSchema({ useBsonType: true }) + } + }); + const Test = db.model('Test', schema, collectionName); + + await Test.create({ + props: new Map([['key', 'value']]), + subdocs: { + captain: { + name: 'Jean-Luc Picard', + age: 59 + } + }, + nested: { + myMap: { + answer: 42 + } + } + }); + + await assert.rejects( + Test.create([{ + props: new Map([['key', 'value']]), + subdocs: { + captain: {} + } + }], { validateBeforeSave: false }), + /MongoServerError: Document 
failed validation/ + ); + + const ajv = new Ajv(); + const validate = ajv.compile(schema.jsonSchema()); + + assert.ok(validate({ + _id: 'test', + props: { someKey: 'someValue' }, + subdocs: { + captain: { + name: 'Jean-Luc Picard', + age: 59 + } + }, + nested: { + myMap: { + answer: 42 + } + } + })); + assert.ok(!validate({ + props: { key: 'value' }, + subdocs: { + captain: {} + } + })); + assert.ok(!validate({ + nested: { + myMap: { + answer: 'not a number' + } + } + })); + }); }); }); From eff58ce2360775244bc8274ffbf06ae6787d24ca Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Wed, 15 Jan 2025 08:57:47 -0500 Subject: [PATCH 66/84] test: add test case for map of arrays, implementation still WIP --- test/schema.test.js | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/test/schema.test.js b/test/schema.test.js index e73b11f2369..0020361b430 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3546,6 +3546,10 @@ describe('schema', function() { type: Map, of: Number } + }, + arrs: { + type: Map, + of: [String] } }); @@ -3580,6 +3584,18 @@ describe('schema', function() { } } }, + arrs: { + bsonType: ['object', 'null'], + additionalProperties: { + bsonType: ['array', 'null'], + items: { + bsonType: ['object', 'null'], + additionalProperties: { + bsonType: ['string', 'null'] + } + } + } + }, _id: { bsonType: 'objectId' } } }); From da6d7e1334648dc5ad1edbfa73f90f75d0e93f17 Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Wed, 15 Jan 2025 10:50:33 -0500 Subject: [PATCH 67/84] add separate setup npm command --- .github/workflows/encryption-tests.yml | 2 ++ CONTRIBUTING.md | 2 +- package.json | 4 +++- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml index d4903209246..3c0fbc65b8a 100644 --- a/.github/workflows/encryption-tests.yml +++ b/.github/workflows/encryption-tests.yml @@ -33,5 +33,7 @@ jobs: run: npm install - name: Install 
mongodb-client-encryption run: npm install mongodb-client-encryption + - name: Setup Tests + run: npm run setup-test-encryption - name: Run Tests run: npm run test-encryption diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 18e99b6657b..6efea5d26e4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,7 +46,7 @@ If you have a question about Mongoose (not a bug report) please post it to eithe * execute `npm run test-tsd` to run the typescript tests * execute `npm run ts-benchmark` to run the typescript benchmark "performance test" for a single time. * execute `npm run ts-benchmark-watch` to run the typescript benchmark "performance test" while watching changes on types folder. Note: Make sure to commit all changes before executing this command. -* in order to run tests that require an cluster with encryption locally, run `npm run test-encryption`/ Alternatively, you can start an encrypted cluster using the `scripts/configure-cluster-with-encryption.sh` file. +* in order to run tests that require an cluster with encryption locally, run `npm run setup-test-encryption` followed by `npm run test-encryption`. Alternatively, you can start an encrypted cluster using the `scripts/configure-cluster-with-encryption.sh` file. * These scripts can take a few minutes to run. * To change an encryption configuration, it is recommended to follow these steps: * Edit the variables in `scripts/configure-cluster-with-encryption.sh` with your desired configuration. 
diff --git a/package.json b/package.json index cdf2556c7c0..d8a1d01c4a9 100644 --- a/package.json +++ b/package.json @@ -22,6 +22,7 @@ "bson": "^6.7.0", "kareem": "2.6.3", "mongodb": "~6.10.0", + "mongodb-client-encryption": "^6.1.1", "mpath": "0.9.0", "mquery": "5.0.0", "ms": "2.1.3", @@ -104,7 +105,8 @@ "test-deno": "deno run --allow-env --allow-read --allow-net --allow-run --allow-sys --allow-write ./test/deno.js", "test-rs": "START_REPLICA_SET=1 mocha --timeout 30000 --exit ./test/*.test.js", "test-tsd": "node ./test/types/check-types-filename && tsd", - "test-encryption": "bash scripts/configure-cluster-with-encryption.sh && mocha --exit ./test/encryption/*.test.js && npm uninstall mongodb-client-encryption > /dev/null", + "setup-test-encryption": "bash scripts/configure-cluster-with-encryption.sh", + "test-encryption": "mocha --exit ./test/encryption/*.test.js", "tdd": "mocha ./test/*.test.js --inspect --watch --recursive --watch-files ./**/*.{js,ts}", "test-coverage": "nyc --reporter=html --reporter=text npm test", "ts-benchmark": "cd ./benchmarks/typescript/simple && npm install && npm run benchmark | node ../../../scripts/tsc-diagnostics-check" From 47759f86405c67139c51be9f5fdf64546442eeaa Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Wed, 15 Jan 2025 11:55:30 -0500 Subject: [PATCH 68/84] chore: use Deno 2.1 in tests --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f17b792e2b4..deda9be94f5 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -108,7 +108,7 @@ jobs: - name: Setup Deno uses: denoland/setup-deno@v2 with: - deno-version: v1.37.x + deno-version: v2.1.x - run: deno --version - run: npm install - name: Run Deno tests From 556ea45871f6ce33f2f289398a69efbb58edb9a9 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Wed, 15 Jan 2025 14:47:54 -0500 Subject: [PATCH 69/84] explicitly set type: commonjs for Deno --- 
package.json | 1 + test/deno.js | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/package.json b/package.json index 663a81830ea..57033a96612 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,7 @@ "orm", "db" ], + "type": "commonjs", "license": "MIT", "dependencies": { "bson": "^6.10.1", diff --git a/test/deno.js b/test/deno.js index 63f5762118b..fe6ab6232ca 100644 --- a/test/deno.js +++ b/test/deno.js @@ -16,8 +16,6 @@ const fixtures = require('./mocha-fixtures.js') await fixtures.mochaGlobalSetup(); const child_args = [ - // args is required to be set manually, because there is currently no way to get all arguments from deno - '--allow-env', '--allow-read', '--allow-net', '--allow-run', '--allow-sys', '--allow-write', ...Deno.args, resolve(fileURLToPath(import.meta.url), '../deno_mocha.js') ]; From cf34a9f8d64c5377a31f16920fddd3205f4352b5 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Wed, 15 Jan 2025 16:15:21 -0500 Subject: [PATCH 70/84] fix: handle maps of arrays --- lib/schema.js | 20 +++++++++++++------- test/schema.test.js | 25 +++++++++++++++++++++++-- 2 files changed, 36 insertions(+), 9 deletions(-) diff --git a/lib/schema.js b/lib/schema.js index f44135707b4..360f0747a50 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -3055,11 +3055,17 @@ function _schemaTypeToJSONSchema(schemaType, isRequired, options) { const embeddedSchemaType = schemaType.getEmbeddedSchemaType(); const isRequired = embeddedSchemaType.options.required && typeof embeddedSchemaType.options.required !== 'function'; if (embeddedSchemaType.schema) { - // Map of objects - additionalProperties.additionalProperties = useBsonType - ? { ...embeddedSchemaType.schema.jsonSchema(options), bsonType: ['object', 'null'] } - : { ...embeddedSchemaType.schema.jsonSchema(options), type: ['object', 'null'] }; - + if (embeddedSchemaType.instance === 'Array') { + // Map of document arrays + additionalProperties.additionalProperties = useBsonType + ? 
{ bsonType: ['array', 'null'], items: { bsonType: ['object', 'null'], ...embeddedSchemaType.schema.jsonSchema(options) } } + : { type: ['array', 'null'], items: { type: ['object', 'null'], ...embeddedSchemaType.schema.jsonSchema(options) } }; + } else { + // Map of objects + additionalProperties.additionalProperties = useBsonType + ? { ...embeddedSchemaType.schema.jsonSchema(options), bsonType: ['object', 'null'] } + : { ...embeddedSchemaType.schema.jsonSchema(options), type: ['object', 'null'] }; + } } else { // Map of primitives const convertedSchemaType = _schemaTypeToJSONSchema(embeddedSchemaType, isRequired, options); @@ -3070,9 +3076,9 @@ function _schemaTypeToJSONSchema(schemaType, isRequired, options) { type = [...type, 'null']; } if (useBsonType) { - additionalProperties.additionalProperties = { bsonType: bsonType.length === 1 ? bsonType[0] : bsonType }; + additionalProperties.additionalProperties = { bsonType: bsonType.length === 1 ? bsonType[0] : bsonType, ...convertedSchemaType.additionalProperties }; } else { - additionalProperties.additionalProperties = { type: type.length === 1 ? type[0] : type }; + additionalProperties.additionalProperties = { type: type.length === 1 ? 
type[0] : type, ...convertedSchemaType.additionalProperties }; } } } else { diff --git a/test/schema.test.js b/test/schema.test.js index 0020361b430..d89695ac412 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3550,6 +3550,10 @@ describe('schema', function() { arrs: { type: Map, of: [String] + }, + docArrs: { + type: Map, + of: [new Schema({ name: String }, { _id: false })] } }); @@ -3585,13 +3589,24 @@ describe('schema', function() { } }, arrs: { + bsonType: ['object', 'null'], + additionalProperties: { + bsonType: ['array', 'null'], + items: { + bsonType: ['string', 'null'] + } + } + }, + docArrs: { bsonType: ['object', 'null'], additionalProperties: { bsonType: ['array', 'null'], items: { bsonType: ['object', 'null'], - additionalProperties: { - bsonType: ['string', 'null'] + properties: { + name: { + bsonType: ['string', 'null'] + } } } } @@ -3619,6 +3634,12 @@ describe('schema', function() { myMap: { answer: 42 } + }, + arrs: { + key: ['value'] + }, + docArrs: { + otherKey: [{ name: 'otherValue' }] } }); From 553b29c0a146364b4a7dea8d2a16d822ad5a4846 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Wed, 15 Jan 2025 16:16:06 -0500 Subject: [PATCH 71/84] test: expand map jsonSchema tests --- test/schema.test.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/test/schema.test.js b/test/schema.test.js index d89695ac412..4fe3199d88a 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3669,6 +3669,12 @@ describe('schema', function() { myMap: { answer: 42 } + }, + arrs: { + key: ['value'] + }, + docArrs: { + otherKey: [{ name: 'otherValue' }] } })); assert.ok(!validate({ From 9a89b46555c2c19c72f1e30b4d5dd212a77c947c Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Thu, 16 Jan 2025 16:08:53 -0500 Subject: [PATCH 72/84] Update lib/schema.js Co-authored-by: hasezoey --- lib/schema.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/schema.js b/lib/schema.js index 360f0747a50..5086019c739 100644 --- 
a/lib/schema.js +++ b/lib/schema.js @@ -2898,7 +2898,7 @@ Schema.prototype._preCompile = function _preCompile() { * * #### Example: * const schema = new Schema({ name: String }); - * // // { required: ['_id'], properties: { name: { type: ['string', 'null'] }, _id: { type: 'string' } } } + * // { required: ['_id'], properties: { name: { type: ['string', 'null'] }, _id: { type: 'string' } } } * schema.jsonSchema(); * * // { required: ['_id'], properties: { name: { bsonType: ['string', 'null'] }, _id: { bsonType: 'objectId' } } } From cc9f288fb777e8b0e12aa5d4d9322388128e3de1 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Fri, 17 Jan 2025 14:17:13 -0500 Subject: [PATCH 73/84] fix(collection): avoid buffering if creating a collection during a connection interruption --- lib/collection.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/collection.js b/lib/collection.js index 117a8c69551..e6c365c9a13 100644 --- a/lib/collection.js +++ b/lib/collection.js @@ -29,7 +29,7 @@ function Collection(name, conn, opts) { this.collectionName = name; this.conn = conn; this.queue = []; - this.buffer = true; + this.buffer = !conn?._hasOpened; this.emitter = new EventEmitter(); if (STATES.connected === this.conn.readyState) { From 402bbecb15867e531f950809df7896874887beac Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Fri, 17 Jan 2025 15:18:05 -0500 Subject: [PATCH 74/84] add Buffer and UUID, add link to JSON schema docs, add additional test coverage --- lib/schema.js | 8 ++- test/schema.test.js | 148 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 155 insertions(+), 1 deletion(-) diff --git a/lib/schema.js b/lib/schema.js index 5086019c739..92fbc50b8dd 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -2889,7 +2889,7 @@ Schema.prototype._preCompile = function _preCompile() { /** * Returns a JSON schema representation of this Schema. 
* - * By default, returns normal JSON schema representation, which is not typically what you want to use with + * By default, returns normal [JSON schema representation](https://json-schema.org/learn/getting-started-step-by-step), which is not typically what you want to use with * [MongoDB's `$jsonSchema` collection option](https://www.mongodb.com/docs/manual/core/schema-validation/specify-json-schema/). * Use the `useBsonType: true` option to return MongoDB `$jsonSchema` syntax instead. * @@ -3018,6 +3018,12 @@ function _schemaTypeToJSONSchema(schemaType, isRequired, options) { } else if (schemaType.instance === 'Decimal128') { bsonType = ['decimal']; type = ['string']; + } else if (schemaType.instance === 'Buffer') { + bsonType = ['binData']; + type = ['string']; + } else if (schemaType.instance === 'UUID') { + bsonType = ['binData']; + type = ['string']; } else if (schemaType.instance === 'Embedded') { bsonType = ['object'], type = ['object']; diff --git a/test/schema.test.js b/test/schema.test.js index 4fe3199d88a..3cee617a1a8 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3419,6 +3419,86 @@ describe('schema', function() { assert.ok(!validate({})); }); + it('handles all primitive data types', async function() { + const schema = new Schema({ + num: Number, + str: String, + bool: Boolean, + date: Date, + id: mongoose.ObjectId, + decimal: mongoose.Types.Decimal128, + buf: Buffer, + uuid: 'UUID' + }); + + assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + required: ['_id'], + properties: { + num: { + bsonType: ['number', 'null'] + }, + str: { + bsonType: ['string', 'null'] + }, + bool: { + bsonType: ['bool', 'null'] + }, + date: { + bsonType: ['date', 'null'] + }, + id: { + bsonType: ['objectId', 'null'] + }, + decimal: { + bsonType: ['decimal', 'null'] + }, + buf: { + bsonType: ['binData', 'null'] + }, + uuid: { + bsonType: ['binData', 'null'] + }, + _id: { + bsonType: 'objectId' + } + } + }); + + 
assert.deepStrictEqual(schema.jsonSchema(), { + type: 'object', + required: ['_id'], + properties: { + num: { + type: ['number', 'null'] + }, + str: { + type: ['string', 'null'] + }, + bool: { + type: ['boolean', 'null'] + }, + date: { + type: ['string', 'null'] + }, + id: { + type: ['string', 'null'] + }, + decimal: { + type: ['string', 'null'] + }, + buf: { + type: ['string', 'null'] + }, + uuid: { + type: ['string', 'null'] + }, + _id: { + type: 'string' + } + } + }); + }); + it('handles arrays and document arrays', async function() { const schema = new Schema({ tags: [String], @@ -3691,5 +3771,73 @@ describe('schema', function() { } })); }); + + it('handles map with required element', async function() { + const schema = new Schema({ + props: { + type: Map, + of: { type: String, required: true } + } + }); + + assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + required: ['_id'], + properties: { + props: { + bsonType: ['object', 'null'], + additionalProperties: { + bsonType: 'string' + } + }, + _id: { + bsonType: 'objectId' + } + } + }); + + assert.deepStrictEqual(schema.jsonSchema(), { + type: 'object', + required: ['_id'], + properties: { + props: { + type: ['object', 'null'], + additionalProperties: { + type: 'string' + } + }, + _id: { + type: 'string' + } + } + }); + }) + + it('handles required enums', function() { + const RacoonSchema = new Schema({ + name: { type: String, enum: ['Edwald', 'Tobi'], required: true } + }); + + assert.deepStrictEqual(RacoonSchema.jsonSchema({ useBsonType: true }), { + required: ['name', '_id'], + properties: { + name: { + bsonType: 'string', + enum: ['Edwald', 'Tobi'] + }, + _id: { + bsonType: 'objectId' + } + } + }); + }); + + it('throws error on mixed type', function() { + const schema = new Schema({ + mixed: mongoose.Mixed + }); + + assert.throws(() => schema.jsonSchema({ useBsonType: true }), /unsupported schematype "Mixed"/); + assert.throws(() => schema.jsonSchema(), /unsupported schematype "Mixed"/); + 
}); }); }); From fd6022ba801297678d9b5ea6dde18ccbca4e6820 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Fri, 17 Jan 2025 15:21:17 -0500 Subject: [PATCH 75/84] style: fix lint --- test/schema.test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/schema.test.js b/test/schema.test.js index 3cee617a1a8..05f0a23ac86 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3810,7 +3810,7 @@ describe('schema', function() { } } }); - }) + }); it('handles required enums', function() { const RacoonSchema = new Schema({ From 879443ddca525770a35e24aee9ef2f6fcd3e40fd Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Fri, 17 Jan 2025 15:38:51 -0500 Subject: [PATCH 76/84] fix tests --- test/schema.test.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/schema.test.js b/test/schema.test.js index 05f0a23ac86..02cc4fecdc2 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3326,7 +3326,11 @@ describe('schema', function() { const collectionName = 'gh11162'; afterEach(async function() { - await db.dropCollection(collectionName); + await db.dropCollection(collectionName).catch(err => { + if (err.message !== 'ns not found') { + throw err; + } + }); }); it('handles basic example with only top-level keys', async function() { From 00a2778c3f2d52829340cac91f92123ec732a7c0 Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Sun, 19 Jan 2025 19:48:06 -0500 Subject: [PATCH 77/84] refactor(schema): move logic for converting SchemaType to JSON schema into the individual SchemaType classes Re: #11162 --- lib/helpers/createJSONSchemaTypeDefinition.js | 24 ++++ lib/schema.js | 132 +----------------- lib/schema/array.js | 18 +++ lib/schema/bigint.js | 14 ++ lib/schema/boolean.js | 14 ++ lib/schema/buffer.js | 14 ++ lib/schema/date.js | 14 ++ lib/schema/decimal128.js | 14 ++ lib/schema/documentArray.js | 18 +++ lib/schema/double.js | 13 ++ lib/schema/int32.js | 14 ++ lib/schema/map.js | 28 ++++ lib/schema/number.js | 14 ++ 
lib/schema/objectId.js | 14 ++ lib/schema/string.js | 14 ++ lib/schema/subdocument.js | 17 +++ lib/schema/uuid.js | 14 ++ lib/schemaType.js | 12 ++ test/schema.test.js | 53 ++++++- 19 files changed, 324 insertions(+), 131 deletions(-) create mode 100644 lib/helpers/createJSONSchemaTypeDefinition.js diff --git a/lib/helpers/createJSONSchemaTypeDefinition.js b/lib/helpers/createJSONSchemaTypeDefinition.js new file mode 100644 index 00000000000..40e108262da --- /dev/null +++ b/lib/helpers/createJSONSchemaTypeDefinition.js @@ -0,0 +1,24 @@ +'use strict'; + +/** + * Handles creating `{ type: 'object' }` vs `{ bsonType: 'object' }` vs `{ bsonType: ['object', 'null'] }` + * + * @param {String} type + * @param {String} bsonType + * @param {Boolean} useBsonType + * @param {Boolean} isRequired + */ + +module.exports = function createJSONSchemaTypeArray(type, bsonType, useBsonType, isRequired) { + if (useBsonType) { + if (isRequired) { + return { bsonType }; + } + return { bsonType: [bsonType, 'null'] }; + } else { + if (isRequired) { + return { type }; + } + return { type: [type, 'null'] }; + } +}; diff --git a/lib/schema.js b/lib/schema.js index 92fbc50b8dd..e89cd49c24d 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -2957,24 +2957,11 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { jsonSchemaForPath.required.push(lastSubpath); isRequired = true; } - const convertedSchemaType = _schemaTypeToJSONSchema(schemaType, isRequired, options); - const { additionalProperties } = convertedSchemaType; - let { bsonType, type } = convertedSchemaType; - - if (bsonType) { - if (!isRequired) { - bsonType = [...bsonType, 'null']; - type = [...type, 'null']; - } - jsonSchemaForPath.properties[lastSubpath] = useBsonType - ? { bsonType: bsonType.length === 1 ? bsonType[0] : bsonType } - : { type: type.length === 1 ? type[0] : type }; - if (schemaType.options.enum) { - jsonSchemaForPath.properties[lastSubpath].enum = isRequired - ? 
schemaType.options.enum - : [...schemaType.options.enum, null]; - } - Object.assign(jsonSchemaForPath.properties[lastSubpath], additionalProperties); + jsonSchemaForPath.properties[lastSubpath] = schemaType.toJSONSchema(options); + if (schemaType.options.enum) { + jsonSchemaForPath.properties[lastSubpath].enum = isRequired + ? schemaType.options.enum + : [...schemaType.options.enum, null]; } } @@ -2985,115 +2972,6 @@ Schema.prototype.jsonSchema = function jsonSchema(options) { return result; }; -/*! - * Internal helper for converting an individual schematype to JSON schema properties. Recursively called for - * arrays. - * - * @param {SchemaType} schemaType - * @param {Boolean} isRequired - * @param {Object} options - */ - -function _schemaTypeToJSONSchema(schemaType, isRequired, options) { - const useBsonType = options?.useBsonType ?? false; - let bsonType = undefined; - let type = undefined; - let additionalProperties = {}; - - if (schemaType.instance === 'Number') { - bsonType = ['number']; - type = ['number']; - } else if (schemaType.instance === 'String') { - bsonType = ['string']; - type = ['string']; - } else if (schemaType.instance === 'Boolean') { - bsonType = ['bool']; - type = ['boolean']; - } else if (schemaType.instance === 'Date') { - bsonType = ['date']; - type = ['string']; - } else if (schemaType.instance === 'ObjectId') { - bsonType = ['objectId']; - type = ['string']; - } else if (schemaType.instance === 'Decimal128') { - bsonType = ['decimal']; - type = ['string']; - } else if (schemaType.instance === 'Buffer') { - bsonType = ['binData']; - type = ['string']; - } else if (schemaType.instance === 'UUID') { - bsonType = ['binData']; - type = ['string']; - } else if (schemaType.instance === 'Embedded') { - bsonType = ['object'], - type = ['object']; - additionalProperties = schemaType.schema.jsonSchema(options); - } else if (schemaType.instance === 'Array') { - bsonType = ['array']; - type = ['array']; - if (schemaType.schema) { - // DocumentArray 
- if (useBsonType) { - additionalProperties.items = { ...schemaType.schema.jsonSchema(options), bsonType: ['object', 'null'] }; - } else { - additionalProperties.items = { ...schemaType.schema.jsonSchema(options), type: ['object', 'null'] }; - } - } else { - // Primitive array - const embeddedSchemaType = schemaType.getEmbeddedSchemaType(); - const isRequired = embeddedSchemaType.options.required && typeof embeddedSchemaType.options.required !== 'function'; - const convertedSchemaType = _schemaTypeToJSONSchema(embeddedSchemaType, isRequired, options); - let bsonType = convertedSchemaType.bsonType; - let type = convertedSchemaType.type; - if (!isRequired) { - bsonType = [...bsonType, 'null']; - type = [...type, 'null']; - } - if (useBsonType) { - additionalProperties.items = { ...convertedSchemaType.additionalProperties, bsonType }; - } else { - additionalProperties.items = { ...convertedSchemaType.additionalProperties, type }; - } - } - } else if (schemaType.instance === 'Map') { - bsonType = ['object']; - type = ['object']; - const embeddedSchemaType = schemaType.getEmbeddedSchemaType(); - const isRequired = embeddedSchemaType.options.required && typeof embeddedSchemaType.options.required !== 'function'; - if (embeddedSchemaType.schema) { - if (embeddedSchemaType.instance === 'Array') { - // Map of document arrays - additionalProperties.additionalProperties = useBsonType - ? { bsonType: ['array', 'null'], items: { bsonType: ['object', 'null'], ...embeddedSchemaType.schema.jsonSchema(options) } } - : { type: ['array', 'null'], items: { type: ['object', 'null'], ...embeddedSchemaType.schema.jsonSchema(options) } }; - } else { - // Map of objects - additionalProperties.additionalProperties = useBsonType - ? 
{ ...embeddedSchemaType.schema.jsonSchema(options), bsonType: ['object', 'null'] } - : { ...embeddedSchemaType.schema.jsonSchema(options), type: ['object', 'null'] }; - } - } else { - // Map of primitives - const convertedSchemaType = _schemaTypeToJSONSchema(embeddedSchemaType, isRequired, options); - let bsonType = convertedSchemaType.bsonType; - let type = convertedSchemaType.type; - if (!isRequired) { - bsonType = [...bsonType, 'null']; - type = [...type, 'null']; - } - if (useBsonType) { - additionalProperties.additionalProperties = { bsonType: bsonType.length === 1 ? bsonType[0] : bsonType, ...convertedSchemaType.additionalProperties }; - } else { - additionalProperties.additionalProperties = { type: type.length === 1 ? type[0] : type, ...convertedSchemaType.additionalProperties }; - } - } - } else { - throw new Error(`Cannot convert schema to JSON schema: unsupported schematype "${schemaType.instance}"`); - } - - return { bsonType, type, additionalProperties }; -} - /*! * Module exports. */ diff --git a/lib/schema/array.js b/lib/schema/array.js index a555c308cc3..06b1e988cb8 100644 --- a/lib/schema/array.js +++ b/lib/schema/array.js @@ -21,6 +21,7 @@ const isOperator = require('../helpers/query/isOperator'); const util = require('util'); const utils = require('../utils'); const castToNumber = require('./operators/helpers').castToNumber; +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const geospatial = require('./operators/geospatial'); const getDiscriminatorByValue = require('../helpers/discriminator/getDiscriminatorByValue'); @@ -700,6 +701,23 @@ handle.$ne = SchemaArray.prototype._castForQuery; handle.$nin = SchemaType.prototype.$conditionalHandlers.$nin; handle.$in = SchemaType.prototype.$conditionalHandlers.$in; +/** + * Returns this schema type's representation in a JSON schema. 
+ * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaArray.prototype.toJSONSchema = function toJSONSchema(options) { + const embeddedSchemaType = this.getEmbeddedSchemaType(); + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return { + ...createJSONSchemaTypeDefinition('array', 'array', options?.useBsonType, isRequired), + items: embeddedSchemaType.toJSONSchema(options) + }; +}; + /*! * Module exports. */ diff --git a/lib/schema/bigint.js b/lib/schema/bigint.js index 4dcebcbd41d..474d77461fd 100644 --- a/lib/schema/bigint.js +++ b/lib/schema/bigint.js @@ -7,6 +7,7 @@ const CastError = require('../error/cast'); const SchemaType = require('../schemaType'); const castBigInt = require('../cast/bigint'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); /** * BigInt SchemaType constructor. @@ -240,6 +241,19 @@ SchemaBigInt.prototype._castNullish = function _castNullish(v) { return v; }; +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaBigInt.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return createJSONSchemaTypeDefinition('string', 'long', options?.useBsonType, isRequired); +}; + /*! * Module exports. 
*/ diff --git a/lib/schema/boolean.js b/lib/schema/boolean.js index 1cbade08c6d..b11162621f0 100644 --- a/lib/schema/boolean.js +++ b/lib/schema/boolean.js @@ -7,6 +7,7 @@ const CastError = require('../error/cast'); const SchemaType = require('../schemaType'); const castBoolean = require('../cast/boolean'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); /** * Boolean SchemaType constructor. @@ -290,6 +291,19 @@ SchemaBoolean.prototype._castNullish = function _castNullish(v) { return v; }; +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaBoolean.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return createJSONSchemaTypeDefinition('boolean', 'bool', options?.useBsonType, isRequired); +}; + /*! * Module exports. */ diff --git a/lib/schema/buffer.js b/lib/schema/buffer.js index 4d5c1af7d57..8111956fb95 100644 --- a/lib/schema/buffer.js +++ b/lib/schema/buffer.js @@ -7,6 +7,7 @@ const MongooseBuffer = require('../types/buffer'); const SchemaBufferOptions = require('../options/schemaBufferOptions'); const SchemaType = require('../schemaType'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const handleBitwiseOperator = require('./operators/bitwise'); const utils = require('../utils'); @@ -300,6 +301,19 @@ SchemaBuffer.prototype.castForQuery = function($conditional, val, context) { return casted ? casted.toObject({ transform: false, virtuals: false }) : casted; }; +/** + * Returns this schema type's representation in a JSON schema. 
+ * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaBuffer.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return createJSONSchemaTypeDefinition('string', 'binData', options?.useBsonType, isRequired); +}; + /*! * Module exports. */ diff --git a/lib/schema/date.js b/lib/schema/date.js index 6cbfee83865..6d671f51e50 100644 --- a/lib/schema/date.js +++ b/lib/schema/date.js @@ -8,6 +8,7 @@ const MongooseError = require('../error/index'); const SchemaDateOptions = require('../options/schemaDateOptions'); const SchemaType = require('../schemaType'); const castDate = require('../cast/date'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const getConstructorName = require('../helpers/getConstructorName'); const utils = require('../utils'); @@ -426,6 +427,19 @@ SchemaDate.prototype.castForQuery = function($conditional, val, context) { return handler.call(this, val); }; +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaDate.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return createJSONSchemaTypeDefinition('string', 'date', options?.useBsonType, isRequired); +}; + /*! * Module exports. 
*/ diff --git a/lib/schema/decimal128.js b/lib/schema/decimal128.js index 136529ec04b..3c7f3e28ca3 100644 --- a/lib/schema/decimal128.js +++ b/lib/schema/decimal128.js @@ -7,6 +7,7 @@ const SchemaType = require('../schemaType'); const CastError = SchemaType.CastError; const castDecimal128 = require('../cast/decimal128'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const isBsonType = require('../helpers/isBsonType'); /** @@ -221,6 +222,19 @@ SchemaDecimal128.prototype.$conditionalHandlers = { $lte: handleSingle }; +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaDecimal128.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return createJSONSchemaTypeDefinition('string', 'decimal', options?.useBsonType, isRequired); +}; + /*! * Module exports. */ diff --git a/lib/schema/documentArray.js b/lib/schema/documentArray.js index 413dc4a8fbc..58ecf920cc0 100644 --- a/lib/schema/documentArray.js +++ b/lib/schema/documentArray.js @@ -12,6 +12,7 @@ const SchemaDocumentArrayOptions = require('../options/schemaDocumentArrayOptions'); const SchemaType = require('../schemaType'); const cast = require('../cast'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const discriminator = require('../helpers/model/discriminator'); const handleIdOption = require('../helpers/schema/handleIdOption'); const handleSpreadDoc = require('../helpers/document/handleSpreadDoc'); @@ -651,6 +652,23 @@ function cast$elemMatch(val, context) { return cast(schema, val, null, this && this.$$context); } +/** + * Returns this schema type's representation in a JSON schema. 
+ * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaDocumentArray.prototype.toJSONSchema = function toJSONSchema(options) { + const itemsTypeDefinition = createJSONSchemaTypeDefinition('object', 'object', options?.useBsonType, false); + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return { + ...createJSONSchemaTypeDefinition('array', 'array', options?.useBsonType, isRequired), + items: { ...itemsTypeDefinition, ...this.schema.jsonSchema(options) } + }; +}; + /*! * Module exports. */ diff --git a/lib/schema/double.js b/lib/schema/double.js index 79c94752184..23b1f33b38d 100644 --- a/lib/schema/double.js +++ b/lib/schema/double.js @@ -7,6 +7,7 @@ const CastError = require('../error/cast'); const SchemaType = require('../schemaType'); const castDouble = require('../cast/double'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); /** * Double SchemaType constructor. @@ -204,6 +205,18 @@ SchemaDouble.prototype.$conditionalHandlers = { $lte: handleSingle }; +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaDouble.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return createJSONSchemaTypeDefinition('number', 'double', options?.useBsonType, isRequired); +}; /*! * Module exports. 
diff --git a/lib/schema/int32.js b/lib/schema/int32.js index 6838d22f2bb..7cf2c364dc5 100644 --- a/lib/schema/int32.js +++ b/lib/schema/int32.js @@ -7,6 +7,7 @@ const CastError = require('../error/cast'); const SchemaType = require('../schemaType'); const castInt32 = require('../cast/int32'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const handleBitwiseOperator = require('./operators/bitwise'); /** @@ -246,6 +247,19 @@ SchemaInt32.prototype.castForQuery = function($conditional, val, context) { } }; +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaInt32.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return createJSONSchemaTypeDefinition('number', 'int', options?.useBsonType, isRequired); +}; + /*! * Module exports. diff --git a/lib/schema/map.js b/lib/schema/map.js index 976245a198e..c65f21b931b 100644 --- a/lib/schema/map.js +++ b/lib/schema/map.js @@ -7,6 +7,8 @@ const MongooseMap = require('../types/map'); const SchemaMapOptions = require('../options/schemaMapOptions'); const SchemaType = require('../schemaType'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); + /*! * ignore */ @@ -74,6 +76,32 @@ class SchemaMap extends SchemaType { getEmbeddedSchemaType() { return this.$__schemaType; } + + /** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. 
+ * @returns {Object} JSON schema properties + */ + + toJSONSchema(options) { + const useBsonType = options?.useBsonType; + const embeddedSchemaType = this.getEmbeddedSchemaType(); + + const isRequired = this.options.required && typeof this.options.required !== 'function'; + const result = createJSONSchemaTypeDefinition('object', 'object', useBsonType, isRequired); + + if (embeddedSchemaType.schema) { + result.additionalProperties = useBsonType + ? { ...embeddedSchemaType.toJSONSchema(options) } + : { ...embeddedSchemaType.toJSONSchema(options) }; + } else { + result.additionalProperties = embeddedSchemaType.toJSONSchema(options); + } + + return result; + } } /** diff --git a/lib/schema/number.js b/lib/schema/number.js index a5188a81cc2..728dfe570b8 100644 --- a/lib/schema/number.js +++ b/lib/schema/number.js @@ -8,6 +8,7 @@ const MongooseError = require('../error/index'); const SchemaNumberOptions = require('../options/schemaNumberOptions'); const SchemaType = require('../schemaType'); const castNumber = require('../cast/number'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const handleBitwiseOperator = require('./operators/bitwise'); const utils = require('../utils'); @@ -442,6 +443,19 @@ SchemaNumber.prototype.castForQuery = function($conditional, val, context) { return val; }; +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaNumber.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = (this.options.required && typeof this.options.required !== 'function') || this.path === '_id'; + return createJSONSchemaTypeDefinition('number', 'number', options?.useBsonType, isRequired); +}; + /*! * Module exports. 
*/ diff --git a/lib/schema/objectId.js b/lib/schema/objectId.js index 927a168df46..6eb0fbed08f 100644 --- a/lib/schema/objectId.js +++ b/lib/schema/objectId.js @@ -7,6 +7,7 @@ const SchemaObjectIdOptions = require('../options/schemaObjectIdOptions'); const SchemaType = require('../schemaType'); const castObjectId = require('../cast/objectid'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const getConstructorName = require('../helpers/getConstructorName'); const oid = require('../types/objectid'); const isBsonType = require('../helpers/isBsonType'); @@ -290,6 +291,19 @@ function resetId(v) { return v; } +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaObjectId.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = (this.options.required && typeof this.options.required !== 'function') || this.path === '_id'; + return createJSONSchemaTypeDefinition('string', 'objectId', options?.useBsonType, isRequired); +}; + /*! * Module exports. */ diff --git a/lib/schema/string.js b/lib/schema/string.js index b832dbd9884..1e84cac6271 100644 --- a/lib/schema/string.js +++ b/lib/schema/string.js @@ -8,6 +8,7 @@ const SchemaType = require('../schemaType'); const MongooseError = require('../error/index'); const SchemaStringOptions = require('../options/schemaStringOptions'); const castString = require('../cast/string'); +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const utils = require('../utils'); const isBsonType = require('../helpers/isBsonType'); @@ -698,6 +699,19 @@ SchemaString.prototype.castForQuery = function($conditional, val, context) { } }; +/** + * Returns this schema type's representation in a JSON schema. 
+ * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaString.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return createJSONSchemaTypeDefinition('string', 'string', options?.useBsonType, isRequired); +}; + /*! * Module exports. */ diff --git a/lib/schema/subdocument.js b/lib/schema/subdocument.js index 9a77d82c879..14d2a548347 100644 --- a/lib/schema/subdocument.js +++ b/lib/schema/subdocument.js @@ -12,6 +12,7 @@ const SchemaType = require('../schemaType'); const applyDefaults = require('../helpers/document/applyDefaults'); const $exists = require('./operators/exists'); const castToNumber = require('./operators/helpers').castToNumber; +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const discriminator = require('../helpers/model/discriminator'); const geospatial = require('./operators/geospatial'); const getConstructor = require('../helpers/discriminator/getConstructor'); @@ -396,3 +397,19 @@ SchemaSubdocument.prototype.clone = function() { schematype._appliedDiscriminators = this._appliedDiscriminators; return schematype; }; + +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. 
+ * @returns {Object} JSON schema properties + */ + +SchemaSubdocument.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return { + ...this.schema.jsonSchema(options), + ...createJSONSchemaTypeDefinition('object', 'object', options?.useBsonType, isRequired) + }; +}; diff --git a/lib/schema/uuid.js b/lib/schema/uuid.js index 6eb5d2f5ae0..bb264159487 100644 --- a/lib/schema/uuid.js +++ b/lib/schema/uuid.js @@ -7,6 +7,7 @@ const MongooseBuffer = require('../types/buffer'); const SchemaType = require('../schemaType'); const CastError = SchemaType.CastError; +const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition'); const utils = require('../utils'); const handleBitwiseOperator = require('./operators/bitwise'); @@ -351,6 +352,19 @@ SchemaUUID.prototype.castForQuery = function($conditional, val, context) { } }; +/** + * Returns this schema type's representation in a JSON schema. + * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaUUID.prototype.toJSONSchema = function toJSONSchema(options) { + const isRequired = this.options.required && typeof this.options.required !== 'function'; + return createJSONSchemaTypeDefinition('string', 'binData', options?.useBsonType, isRequired); +}; + /*! * Module exports. */ diff --git a/lib/schemaType.js b/lib/schemaType.js index d57cc775e60..22c9edbd473 100644 --- a/lib/schemaType.js +++ b/lib/schemaType.js @@ -1771,6 +1771,18 @@ SchemaType.prototype.getEmbeddedSchemaType = function getEmbeddedSchemaType() { SchemaType.prototype._duplicateKeyErrorMessage = null; +/** + * Returns this schema type's representation in a JSON schema. 
+ * + * @param [options] + * @param [options.useBsonType=false] If true, return a representation with `bsonType` for use with MongoDB's `$jsonSchema`. + * @returns {Object} JSON schema properties + */ + +SchemaType.prototype.toJSONSchema = function toJSONSchema() { + throw new Error('Converting unsupported SchemaType to JSON Schema: ' + this.instance); +}; + /*! * Module exports. */ diff --git a/test/schema.test.js b/test/schema.test.js index 02cc4fecdc2..6e4daf866ce 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -3432,7 +3432,10 @@ describe('schema', function() { id: mongoose.ObjectId, decimal: mongoose.Types.Decimal128, buf: Buffer, - uuid: 'UUID' + uuid: 'UUID', + bigint: BigInt, + double: 'Double', + int32: 'Int32' }); assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { @@ -3462,6 +3465,15 @@ describe('schema', function() { uuid: { bsonType: ['binData', 'null'] }, + bigint: { + bsonType: ['long', 'null'] + }, + double: { + bsonType: ['double', 'null'] + }, + int32: { + bsonType: ['int', 'null'] + }, _id: { bsonType: 'objectId' } @@ -3496,6 +3508,15 @@ describe('schema', function() { uuid: { type: ['string', 'null'] }, + bigint: { + type: ['string', 'null'] + }, + double: { + type: ['number', 'null'] + }, + int32: { + type: ['number', 'null'] + }, _id: { type: 'string' } @@ -3524,7 +3545,7 @@ describe('schema', function() { items: { bsonType: ['array', 'null'], items: { - bsonType: ['number'] + bsonType: 'number' } } }, @@ -3594,6 +3615,30 @@ describe('schema', function() { } }); + assert.deepStrictEqual(schema.jsonSchema(), { + required: ['_id'], + type: 'object', + properties: { + name: { + type: ['object', 'null'], + required: ['last'], + properties: { + first: { type: ['string', 'null'] }, + last: { type: 'string' } + } + }, + subdoc: { + type: ['object', 'null'], + properties: { + prop: { + type: ['number', 'null'] + } + } + }, + _id: { type: 'string' } + } + }); + await db.createCollection(collectionName, { validator: { 
$jsonSchema: schema.jsonSchema({ useBsonType: true }) @@ -3840,8 +3885,8 @@ describe('schema', function() { mixed: mongoose.Mixed }); - assert.throws(() => schema.jsonSchema({ useBsonType: true }), /unsupported schematype "Mixed"/); - assert.throws(() => schema.jsonSchema(), /unsupported schematype "Mixed"/); + assert.throws(() => schema.jsonSchema({ useBsonType: true }), /unsupported SchemaType to JSON Schema: Mixed/); + assert.throws(() => schema.jsonSchema(), /unsupported SchemaType to JSON Schema: Mixed/); }); }); }); From e89339949624ab74e78ad44bdafc4db48aad08ec Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 20 Jan 2025 16:02:33 -0500 Subject: [PATCH 78/84] fix: rename jsonSchema -> toJSONSchema for consistency --- lib/schema.js | 6 ++--- lib/schema/documentArray.js | 2 +- lib/schema/subdocument.js | 2 +- test/schema.test.js | 44 ++++++++++++++++++------------------- 4 files changed, 27 insertions(+), 27 deletions(-) diff --git a/lib/schema.js b/lib/schema.js index e89cd49c24d..0204c6cc9c4 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -2899,16 +2899,16 @@ Schema.prototype._preCompile = function _preCompile() { * #### Example: * const schema = new Schema({ name: String }); * // { required: ['_id'], properties: { name: { type: ['string', 'null'] }, _id: { type: 'string' } } } - * schema.jsonSchema(); + * schema.toJSONSchema(); * * // { required: ['_id'], properties: { name: { bsonType: ['string', 'null'] }, _id: { bsonType: 'objectId' } } } - * schema.jsonSchema({ useBsonType: true }); + * schema.toJSONSchema({ useBsonType: true }); * * @param {Object} [options] * @param [Boolean] [options.useBsonType=false] if true, specify each path's type using `bsonType` rather than `type` for MongoDB $jsonSchema support */ -Schema.prototype.jsonSchema = function jsonSchema(options) { +Schema.prototype.toJSONSchema = function toJSONSchema(options) { const useBsonType = options?.useBsonType ?? false; const result = useBsonType ? 
{ required: [], properties: {} } : { type: 'object', required: [], properties: {} }; for (const path of Object.keys(this.paths)) { diff --git a/lib/schema/documentArray.js b/lib/schema/documentArray.js index 58ecf920cc0..77b78fa860e 100644 --- a/lib/schema/documentArray.js +++ b/lib/schema/documentArray.js @@ -665,7 +665,7 @@ SchemaDocumentArray.prototype.toJSONSchema = function toJSONSchema(options) { const isRequired = this.options.required && typeof this.options.required !== 'function'; return { ...createJSONSchemaTypeDefinition('array', 'array', options?.useBsonType, isRequired), - items: { ...itemsTypeDefinition, ...this.schema.jsonSchema(options) } + items: { ...itemsTypeDefinition, ...this.schema.toJSONSchema(options) } }; }; diff --git a/lib/schema/subdocument.js b/lib/schema/subdocument.js index 14d2a548347..3afdb8ee281 100644 --- a/lib/schema/subdocument.js +++ b/lib/schema/subdocument.js @@ -409,7 +409,7 @@ SchemaSubdocument.prototype.clone = function() { SchemaSubdocument.prototype.toJSONSchema = function toJSONSchema(options) { const isRequired = this.options.required && typeof this.options.required !== 'function'; return { - ...this.schema.jsonSchema(options), + ...this.schema.toJSONSchema(options), ...createJSONSchemaTypeDefinition('object', 'object', options?.useBsonType, isRequired) }; }; diff --git a/test/schema.test.js b/test/schema.test.js index 6e4daf866ce..416502b03a5 100644 --- a/test/schema.test.js +++ b/test/schema.test.js @@ -2173,7 +2173,7 @@ describe('schema', function() { const keys = Object.keys(SchemaStringOptions.prototype). filter(key => key !== 'constructor' && key !== 'populate'); const functions = Object.keys(Schema.Types.String.prototype). 
- filter(key => ['constructor', 'cast', 'castForQuery', 'checkRequired'].indexOf(key) === -1); + filter(key => ['constructor', 'cast', 'castForQuery', 'checkRequired', 'toJSONSchema'].indexOf(key) === -1); assert.deepEqual(keys.sort(), functions.sort()); }); @@ -3344,7 +3344,7 @@ describe('schema', function() { } }, { autoCreate: false, autoIndex: false }); - assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + assert.deepStrictEqual(schema.toJSONSchema({ useBsonType: true }), { required: ['name', '_id'], properties: { _id: { @@ -3363,7 +3363,7 @@ describe('schema', function() { } }); - assert.deepStrictEqual(schema.jsonSchema(), { + assert.deepStrictEqual(schema.toJSONSchema(), { type: 'object', required: ['name', '_id'], properties: { @@ -3385,7 +3385,7 @@ describe('schema', function() { await db.createCollection(collectionName, { validator: { - $jsonSchema: schema.jsonSchema({ useBsonType: true }) + $jsonSchema: schema.toJSONSchema({ useBsonType: true }) } }); const Test = db.model('Test', schema, collectionName); @@ -3414,7 +3414,7 @@ describe('schema', function() { ); const ajv = new Ajv(); - const validate = ajv.compile(schema.jsonSchema()); + const validate = ajv.compile(schema.toJSONSchema()); assert.ok(validate({ _id: 'test', name: 'Taco' })); assert.ok(validate({ _id: 'test', name: 'Billy', age: null, ageSource: null })); @@ -3438,7 +3438,7 @@ describe('schema', function() { int32: 'Int32' }); - assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + assert.deepStrictEqual(schema.toJSONSchema({ useBsonType: true }), { required: ['_id'], properties: { num: { @@ -3480,7 +3480,7 @@ describe('schema', function() { } }); - assert.deepStrictEqual(schema.jsonSchema(), { + assert.deepStrictEqual(schema.toJSONSchema(), { type: 'object', required: ['_id'], properties: { @@ -3531,7 +3531,7 @@ describe('schema', function() { docArr: [new Schema({ field: Date }, { _id: false })] }); - assert.deepStrictEqual(schema.jsonSchema({ useBsonType: 
true }), { + assert.deepStrictEqual(schema.toJSONSchema({ useBsonType: true }), { required: ['_id'], properties: { tags: { @@ -3564,7 +3564,7 @@ describe('schema', function() { await db.createCollection(collectionName, { validator: { - $jsonSchema: schema.jsonSchema({ useBsonType: true }) + $jsonSchema: schema.toJSONSchema({ useBsonType: true }) } }); const Test = db.model('Test', schema, collectionName); @@ -3575,7 +3575,7 @@ describe('schema', function() { await Test.create({ tags: 'javascript', coordinates: [[0, 0]], docArr: [{}] }); const ajv = new Ajv(); - const validate = ajv.compile(schema.jsonSchema()); + const validate = ajv.compile(schema.toJSONSchema()); assert.ok(validate({ _id: 'test', tags: ['javascript'], coordinates: [[0, 0]], docArr: [{ field: '2023-07-16' }] })); assert.ok(validate({ _id: 'test', tags: ['javascript'], coordinates: [[0, 0]], docArr: [{}] })); @@ -3592,7 +3592,7 @@ describe('schema', function() { }, { _id: false }) }); - assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + assert.deepStrictEqual(schema.toJSONSchema({ useBsonType: true }), { required: ['_id'], properties: { name: { @@ -3615,7 +3615,7 @@ describe('schema', function() { } }); - assert.deepStrictEqual(schema.jsonSchema(), { + assert.deepStrictEqual(schema.toJSONSchema(), { required: ['_id'], type: 'object', properties: { @@ -3641,7 +3641,7 @@ describe('schema', function() { await db.createCollection(collectionName, { validator: { - $jsonSchema: schema.jsonSchema({ useBsonType: true }) + $jsonSchema: schema.toJSONSchema({ useBsonType: true }) } }); const Test = db.model('Test', schema, collectionName); @@ -3650,7 +3650,7 @@ describe('schema', function() { await Test.create({ name: { first: 'Mike', last: 'James' }, subdoc: { prop: 42 } }); const ajv = new Ajv(); - const validate = ajv.compile(schema.jsonSchema()); + const validate = ajv.compile(schema.toJSONSchema()); assert.ok(validate({ _id: 'test', name: { last: 'James' }, subdoc: {} })); 
assert.ok(validate({ _id: 'test', name: { first: 'Mike', last: 'James' }, subdoc: { prop: 42 } })); @@ -3686,7 +3686,7 @@ describe('schema', function() { } }); - assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + assert.deepStrictEqual(schema.toJSONSchema({ useBsonType: true }), { required: ['props', '_id'], properties: { props: { @@ -3746,7 +3746,7 @@ describe('schema', function() { await db.createCollection(collectionName, { validator: { - $jsonSchema: schema.jsonSchema({ useBsonType: true }) + $jsonSchema: schema.toJSONSchema({ useBsonType: true }) } }); const Test = db.model('Test', schema, collectionName); @@ -3783,7 +3783,7 @@ describe('schema', function() { ); const ajv = new Ajv(); - const validate = ajv.compile(schema.jsonSchema()); + const validate = ajv.compile(schema.toJSONSchema()); assert.ok(validate({ _id: 'test', @@ -3829,7 +3829,7 @@ describe('schema', function() { } }); - assert.deepStrictEqual(schema.jsonSchema({ useBsonType: true }), { + assert.deepStrictEqual(schema.toJSONSchema({ useBsonType: true }), { required: ['_id'], properties: { props: { @@ -3844,7 +3844,7 @@ describe('schema', function() { } }); - assert.deepStrictEqual(schema.jsonSchema(), { + assert.deepStrictEqual(schema.toJSONSchema(), { type: 'object', required: ['_id'], properties: { @@ -3866,7 +3866,7 @@ describe('schema', function() { name: { type: String, enum: ['Edwald', 'Tobi'], required: true } }); - assert.deepStrictEqual(RacoonSchema.jsonSchema({ useBsonType: true }), { + assert.deepStrictEqual(RacoonSchema.toJSONSchema({ useBsonType: true }), { required: ['name', '_id'], properties: { name: { @@ -3885,8 +3885,8 @@ describe('schema', function() { mixed: mongoose.Mixed }); - assert.throws(() => schema.jsonSchema({ useBsonType: true }), /unsupported SchemaType to JSON Schema: Mixed/); - assert.throws(() => schema.jsonSchema(), /unsupported SchemaType to JSON Schema: Mixed/); + assert.throws(() => schema.toJSONSchema({ useBsonType: true }), /unsupported 
SchemaType to JSON Schema: Mixed/); + assert.throws(() => schema.toJSONSchema(), /unsupported SchemaType to JSON Schema: Mixed/); }); }); }); From 57b48f9426d39d8683fc4392939ae2d761574beb Mon Sep 17 00:00:00 2001 From: Valeri Karpov Date: Mon, 20 Jan 2025 16:08:21 -0500 Subject: [PATCH 79/84] types: add toJSONSchema to typescript types --- types/index.d.ts | 2 ++ types/schematypes.d.ts | 2 ++ 2 files changed, 4 insertions(+) diff --git a/types/index.d.ts b/types/index.d.ts index f1a6b22bad9..32554a048b2 100644 --- a/types/index.d.ts +++ b/types/index.d.ts @@ -508,6 +508,8 @@ declare module 'mongoose' { statics: { [F in keyof TStaticMethods]: TStaticMethods[F] } & { [name: string]: (this: TModelType, ...args: any[]) => unknown }; + toJSONSchema(options?: { useBsonType?: boolean }): Record; + /** Creates a virtual type with the given name. */ virtual>( name: keyof TVirtuals | string, diff --git a/types/schematypes.d.ts b/types/schematypes.d.ts index d5d81c7d560..5f364f0cea4 100644 --- a/types/schematypes.d.ts +++ b/types/schematypes.d.ts @@ -300,6 +300,8 @@ declare module 'mongoose' { /** Declares a full text index. */ text(bool: boolean): this; + toJSONSchema(options?: { useBsonType?: boolean }): Record; + /** Defines a custom function for transforming this path when converting a document to JSON. 
*/ transform(fn: (value: any) => any): this; From d02dfc474d8a8dd2d0544a57cf69c70887dbb31c Mon Sep 17 00:00:00 2001 From: bailey Date: Thu, 23 Jan 2025 08:40:21 -0700 Subject: [PATCH 80/84] changes --- .eslintrc.js | 3 +- docs/field-level-encryption.md | 38 +++ lib/encryptionUtils.js | 72 +++++ lib/schema.js | 167 ++++++++-- test/encrypted_schema.test.js | 538 +++++++++++++++++++++++++++++++++ 5 files changed, 784 insertions(+), 34 deletions(-) create mode 100644 lib/encryptionUtils.js create mode 100644 test/encrypted_schema.test.js diff --git a/.eslintrc.js b/.eslintrc.js index 002d1e7b8b9..91b38166932 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -15,7 +15,8 @@ module.exports = { '!.*', 'node_modules', '.git', - 'data' + 'data', + '.config' ], overrides: [ { diff --git a/docs/field-level-encryption.md b/docs/field-level-encryption.md index 3531fca0218..13daef15dfe 100644 --- a/docs/field-level-encryption.md +++ b/docs/field-level-encryption.md @@ -112,3 +112,41 @@ With the above connection, if you create a model named 'Test' that uses the 'tes const Model = mongoose.model('Test', mongoose.Schema({ name: String })); await Model.create({ name: 'super secret' }); ``` + +## Automatic FLE in Mongoose + +Mongoose supports the declaration of encrypted schemas - schemas that, when connected to a model, utilize MongoDB's Client Side +Field Level Encryption or Queryable Encryption under the hood. Mongoose automatically generates either an `encryptedFieldsMap` or a +`schemaMap` when instantiating a MongoClient and encrypts fields on write and decrypts fields on reads. + +### Encryption types + +MongoDB has two different automatic encryption implementations: client side field level encryption (CSFLE) and queryable encryption (QE). +See [choosing an in-use encryption approach](https://www.mongodb.com/docs/v7.3/core/queryable-encryption/about-qe-csfle/#choosing-an-in-use-encryption-approach). 
+ +### Declaring Encrypted Schemas + +The following schema declares two properties, `name` and `ssn`. `ssn` is encrypted using queryable encryption, and +is configured for equality queries: + +```javascript +const encryptedUserSchema = new Schema({ + name: String, + ssn: { + type: String, + // 1 + encrypt: { + keyId: '', + queries: 'equality' + } + } + // 2 +}, { encryptionType: 'queryableEncryption' }); +``` + +To declare a field as encrypted, you must: + +1. Annotate the field with encryption metadata in the schema definition +2. Choose an encryption type for the schema and configure the schema for the encryption type + +Not all schematypes are supported for CSFLE and QE. For an overview of valid schema types, refer to MongoDB's documentation. diff --git a/lib/encryptionUtils.js b/lib/encryptionUtils.js new file mode 100644 index 00000000000..916ef17a5cd --- /dev/null +++ b/lib/encryptionUtils.js @@ -0,0 +1,72 @@ +'use strict'; + +const schemaTypes = require('./schema/index.js'); +const SchemaBigInt = require('./schema/bigint'); +const SchemaBoolean = require('./schema/boolean'); +const SchemaBuffer = require('./schema/buffer'); +const SchemaDate = require('./schema/date'); +const SchemaDecimal128 = require('./schema/decimal128'); +const SchemaDouble = require('./schema/double'); +const SchemaInt32 = require('./schema/int32'); +const SchemaObjectId = require('./schema/objectId'); +const SchemaString = require('./schema/string'); + +/** + * Given a schema and a path to a field in the schema, this returns the + * BSON type of the field, if it can be determined. This method specifically + * **only** handles BSON types that are used for CSFLE and QE - any other + * BSON types will return `null`. (example: MinKey and MaxKey). 
+ * + * @param {import('.').Schema} schema + * @param {string} path + * @returns + */ +function inferBSONType(schema, path) { + const type = schema.path(path); + + if (type instanceof SchemaString) { + return 'string'; + } + + if (type instanceof SchemaInt32) { + return 'int'; + } + + if (type instanceof SchemaBigInt) { + return 'long'; + } + + if (type instanceof SchemaBoolean) { + return 'bool'; + } + + if (type instanceof SchemaDate) { + return 'date'; + } + + if (type instanceof SchemaBuffer) { + return 'binData'; + } + + if (type instanceof SchemaObjectId) { + return 'objectId'; + } + + if (type instanceof SchemaDecimal128) { + return 'decimal'; + } + + if (type instanceof SchemaDouble) { + return 'double'; + } + + if (type instanceof schemaTypes.Array) { + return 'array'; + } + + return null; +} + +module.exports = { + inferBSONType +}; diff --git a/lib/schema.js b/lib/schema.js index 0204c6cc9c4..62053ed2511 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -25,6 +25,7 @@ const setPopulatedVirtualValue = require('./helpers/populate/setPopulatedVirtual const setupTimestamps = require('./helpers/timestamps/setupTimestamps'); const utils = require('./utils'); const validateRef = require('./helpers/populate/validateRef'); +const { inferBSONType } = require('./encryptionUtils'); const hasNumericSubpathRegex = /\.\d+(\.|$)/; @@ -86,6 +87,7 @@ const numberRE = /^\d+$/; * - [pluginTags](https://mongoosejs.com/docs/guide.html#pluginTags): array of strings - defaults to `undefined`. If set and plugin called with `tags` option, will only apply that plugin to schemas with a matching tag. 
* - [virtuals](https://mongoosejs.com/docs/tutorials/virtuals.html#virtuals-via-schema-options): object - virtuals to define, alias for [`.virtual`](https://mongoosejs.com/docs/api/schema.html#Schema.prototype.virtual()) * - [collectionOptions]: object with options passed to [`createCollection()`](https://www.mongodb.com/docs/manual/reference/method/db.createCollection/) when calling `Model.createCollection()` or `autoCreate` set to true. + * - [encryptionType]: the encryption type for the schema. Valid options are `csfle` or `queryableEncryption`. See https://mongoosejs.com/docs/field-level-encryption. * * #### Options for Nested Schemas: * @@ -128,6 +130,7 @@ function Schema(obj, options) { // For internal debugging. Do not use this to try to save a schema in MDB. this.$id = ++id; this.mapPaths = []; + this.encryptedFields = {}; this.s = { hooks: new Kareem() @@ -166,7 +169,7 @@ function Schema(obj, options) { // ensure the documents get an auto _id unless disabled const auto_id = !this.paths['_id'] && - (this.options._id) && !_idSubDoc; + (this.options._id) && !_idSubDoc; if (auto_id) { addAutoId(this); @@ -463,6 +466,8 @@ Schema.prototype._clone = function _clone(Constructor) { s.aliases = Object.assign({}, this.aliases); + s.encryptedFields = clone(this.encryptedFields); + return s; }; @@ -495,7 +500,17 @@ Schema.prototype.pick = function(paths, options) { } for (const path of paths) { - if (this.nested[path]) { + if (path in this.encryptedFields) { + const encrypt = this.encryptedFields[path]; + const schemaType = this.path(path); + newSchema.add({ + [path]: { + encrypt, + [this.options.typeKey]: schemaType + } + }); + } + else if (this.nested[path]) { newSchema.add({ [path]: get(this.tree, path) }); } else { const schematype = this.path(path); @@ -506,6 +521,10 @@ Schema.prototype.pick = function(paths, options) { } } + if (!this._hasEncryptedFields()) { + newSchema.options.encryptionType = null; + } + return newSchema; }; @@ -534,9 +553,9 @@ 
Schema.prototype.omit = function(paths, options) { if (!Array.isArray(paths)) { throw new MongooseError( 'Schema#omit() only accepts an array argument, ' + - 'got "' + - typeof paths + - '"' + 'got "' + + typeof paths + + '"' ); } @@ -667,6 +686,20 @@ Schema.prototype._defaultToObjectOptions = function(json) { return defaultOptions; }; +/** + * Sets the encryption type of the schema, if a value is provided, otherwise + * returns the encryption type. + * + * @param {'csfle' | 'queryableEncryption' | undefined} encryptionType plain object with paths to add, or another schema + */ +Schema.prototype.encryptionType = function encryptionType(encryptionType) { + if (typeof encryptionType === 'string' || encryptionType === null) { + this.options.encryptionType = encryptionType; + } else { + return this.options.encryptionType; + } +}; + /** * Adds key path / schema type pairs to this schema. * @@ -735,7 +768,7 @@ Schema.prototype.add = function add(obj, prefix) { if ( key !== '_id' && ((typeof val !== 'object' && typeof val !== 'function' && !isMongooseTypeString) || - val == null) + val == null) ) { throw new TypeError(`Invalid schema configuration: \`${val}\` is not ` + `a valid type at path \`${key}\`. See ` + @@ -818,15 +851,71 @@ Schema.prototype.add = function add(obj, prefix) { } } } + + if (val.instanceOfSchema && val.encryptionType() != null) { + // schema.add({ field: }) + if (this.encryptionType() != val.encryptionType()) { + throw new Error('encryptionType of a nested schema must match the encryption type of the parent schema.'); + } + + for (const [encryptedField, encryptedFieldConfig] of Object.entries(val.encryptedFields)) { + const path = fullPath + '.' + encryptedField; + this._addEncryptedField(path, encryptedFieldConfig); + } + } + else if (typeof val === 'object' && 'encrypt' in val) { + // schema.add({ field: { type: , encrypt: { ... 
}}}) + const { encrypt } = val; + + if (this.encryptionType() == null) { + throw new Error('encryptionType must be provided'); + } + + this._addEncryptedField(fullPath, encrypt); + } else { + // if the field was already encrypted and we re-configure it to be unencrypted, remove + // the encrypted field configuration + this._removeEncryptedField(fullPath); + } } const aliasObj = Object.fromEntries( Object.entries(obj).map(([key]) => ([prefix + key, null])) ); aliasFields(this, aliasObj); + return this; }; +/** + * @param {string} path + * @param {object} fieldConfig + * + * @api private + */ +Schema.prototype._addEncryptedField = function _addEncryptedField(path, fieldConfig) { + const type = inferBSONType(this, path); + if (type == null) { + throw new Error('unable to determine bson type for field `' + path + '`'); + } + + this.encryptedFields[path] = clone(fieldConfig); +}; + +/** + * @api private + */ +Schema.prototype._removeEncryptedField = function _removeEncryptedField(path) { + delete this.encryptedFields[path]; +}; + +/** + * @api private + */ +Schema.prototype._hasEncryptedFields = function _hasEncryptedFields() { + return Object.keys(this.encryptedFields).length > 0; +}; + /** * Add an alias for `path`. This means getting or setting the `alias` * is equivalent to getting or setting the `path`. 
@@ -1008,23 +1097,23 @@ Schema.prototype.reserved = Schema.reserved; const reserved = Schema.reserved; // Core object reserved['prototype'] = -// EventEmitter -reserved.emit = -reserved.listeners = -reserved.removeListener = - -// document properties and functions -reserved.collection = -reserved.errors = -reserved.get = -reserved.init = -reserved.isModified = -reserved.isNew = -reserved.populated = -reserved.remove = -reserved.save = -reserved.toObject = -reserved.validate = 1; + // EventEmitter + reserved.emit = + reserved.listeners = + reserved.removeListener = + + // document properties and functions + reserved.collection = + reserved.errors = + reserved.get = + reserved.init = + reserved.isModified = + reserved.isNew = + reserved.populated = + reserved.remove = + reserved.save = + reserved.toObject = + reserved.validate = 1; reserved.collection = 1; /** @@ -1104,10 +1193,10 @@ Schema.prototype.path = function(path, obj) { } if (typeof branch[sub] !== 'object') { const msg = 'Cannot set nested path `' + path + '`. ' - + 'Parent path `' - + fullPath - + '` already set to type ' + branch[sub].name - + '.'; + + 'Parent path `' + + fullPath + + '` already set to type ' + branch[sub].name + + '.'; throw new Error(msg); } branch = branch[sub]; @@ -1375,6 +1464,16 @@ Schema.prototype.interpretAsType = function(path, obj, options) { let type = obj[options.typeKey] && (obj[options.typeKey] instanceof Function || options.typeKey !== 'type' || !obj.type.type) ? 
obj[options.typeKey] : {}; + + if (type instanceof SchemaType) { + if (type.path === path) { + return type; + } + const clone = type.clone(); + clone.path = path; + return clone; + } + let name; if (utils.isPOJO(type) || type === 'mixed') { @@ -1404,8 +1503,8 @@ Schema.prototype.interpretAsType = function(path, obj, options) { return new MongooseTypes.DocumentArray(path, cast, obj); } if (cast && - cast[options.typeKey] && - cast[options.typeKey].instanceOfSchema) { + cast[options.typeKey] && + cast[options.typeKey].instanceOfSchema) { if (!(cast[options.typeKey] instanceof Schema)) { if (this.options._isMerging) { cast[options.typeKey] = new Schema(cast[options.typeKey]); @@ -1739,7 +1838,7 @@ Schema.prototype.hasMixedParent = function(path) { for (let i = 0; i < subpaths.length; ++i) { path = i > 0 ? path + '.' + subpaths[i] : subpaths[i]; if (this.paths.hasOwnProperty(path) && - this.paths[path] instanceof MongooseTypes.Mixed) { + this.paths[path] instanceof MongooseTypes.Mixed) { return this.paths[path]; } } @@ -2520,6 +2619,8 @@ Schema.prototype.remove = function(path) { delete this.paths[name]; _deletePath(this, name); + + this._removeEncryptedField(name); }, this); } return this; @@ -2615,9 +2716,9 @@ Schema.prototype.removeVirtual = function(path) { Schema.prototype.loadClass = function(model, virtualsOnly) { // Stop copying when hit certain base classes if (model === Object.prototype || - model === Function.prototype || - model.prototype.hasOwnProperty('$isMongooseModelPrototype') || - model.prototype.hasOwnProperty('$isMongooseDocumentPrototype')) { + model === Function.prototype || + model.prototype.hasOwnProperty('$isMongooseModelPrototype') || + model.prototype.hasOwnProperty('$isMongooseDocumentPrototype')) { return this; } diff --git a/test/encrypted_schema.test.js b/test/encrypted_schema.test.js new file mode 100644 index 00000000000..8f35dc97b73 --- /dev/null +++ b/test/encrypted_schema.test.js @@ -0,0 +1,538 @@ + +'use strict'; + +const assert = 
require('assert'); +const start = require('./common'); +const { ObjectId, Decimal128 } = require('../lib/types'); +const { Double, Int32, UUID } = require('bson'); + +const mongoose = start.mongoose; +const Schema = mongoose.Schema; + +/** + * + * @param {import('../lib').Schema} object + * @param {Array | string} path + * @returns + */ +function schemaHasEncryptedProperty(schema, path) { + path = [path].flat(); + path = path.join('.'); + + return path in schema.encryptedFields; +} + +const KEY_ID = new UUID(); +const algorithm = 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'; + +describe('encrypted schema declaration', function() { + describe('Tests that fields of valid schema types can be declared as encrypted schemas', function() { + const basicSchemaTypes = [ + { type: String, name: 'string' }, + { type: Schema.Types.Boolean, name: 'boolean' }, + { type: Schema.Types.Buffer, name: 'buffer' }, + { type: Date, name: 'date' }, + { type: ObjectId, name: 'objectid' }, + { type: BigInt, name: 'bigint' }, + { type: Decimal128, name: 'Decimal128' }, + { type: Int32, name: 'int32' }, + { type: Double, name: 'double' } + ]; + + for (const { type, name } of basicSchemaTypes) { + describe(`When a schema is instantiated with an encrypted field of type ${name}`, function() { + let schema; + beforeEach(function() { + schema = new Schema({ + field: { + type, encrypt: { keyId: KEY_ID, algorithm } + } + }, { + encryptionType: 'csfle' + }); + }); + + it(`Then the schema has an encrypted property of type ${name}`, function() { + assert.ok(schemaHasEncryptedProperty(schema, 'field')); + }); + }); + } + + describe('when a schema is instantiated with a nested encrypted schema', function() { + let schema; + beforeEach(function() { + const encryptedSchema = new Schema({ + encrypted: { + type: String, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + schema = new Schema({ + field: encryptedSchema + }, { encryptionType: 'csfle' }); + }); + + + it('then the 
schema has a nested property that is encrypted', function() { + assert.ok(schemaHasEncryptedProperty(schema, ['field', 'encrypted'])); + }); + }); + + describe('when a schema is instantiated with a nested schema object', function() { + let schema; + beforeEach(function() { + schema = new Schema({ + field: { + encrypted: { + type: String, encrypt: { keyId: KEY_ID, algorithm } + } + } + }, { encryptionType: 'csfle' }); + }); + + it('then the schema has a nested property that is encrypted', function() { + assert.ok(schemaHasEncryptedProperty(schema, ['field', 'encrypted'])); + }); + }); + + describe('when a schema is instantiated as an Array', function() { + let schema; + beforeEach(function() { + schema = new Schema({ + encrypted: { + type: [Number], + encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }); + + it('then the schema has a nested property that is encrypted', function() { + assert.ok(schemaHasEncryptedProperty(schema, 'encrypted')); + }); + }); + + }); + + describe('invalid schema types for encrypted schemas', function() { + describe('When a schema is instantiated with an encrypted field of type Number', function() { + it('Then an error is thrown', function() { + assert.throws(() => { + new Schema({ + field: { + type: Number, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, /unable to determine bson type for field `field`/); + }); + }); + + describe('When a schema is instantiated with an encrypted field of type Mixed', function() { + it('Then an error is thrown', function() { + assert.throws(() => { + new Schema({ + field: { + type: Schema.Types.Mixed, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, /unable to determine bson type for field `field`/); + }); + }); + + describe('When a schema is instantiated with a custom schema type plugin', function() { + class Int8 extends mongoose.SchemaType { + constructor(key, options) { + super(key, options, 'Int8'); + } + } 
+ + beforeEach(function() { + // Don't forget to add `Int8` to the type registry + mongoose.Schema.Types.Int8 = Int8; + }); + afterEach(function() { + delete mongoose.Schema.Types.Int8; + }); + + it('Then an error is thrown', function() { + assert.throws(() => { + new Schema({ + field: { + type: Int8, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, /unable to determine bson type for field `field`/); + }); + }); + + }); + + describe('options.encryptionType', function() { + describe('when an encrypted schema is instantiated and an encryptionType is not provided', function() { + it('an error is thrown', function() { + assert.throws( + () => { + new Schema({ + field: { + type: String, + encrypt: { keyId: KEY_ID, algorithm } + } + }); + }, /encryptionType must be provided/ + ); + + + }); + }); + + describe('when a nested encrypted schema is provided to schema constructor and the encryption types are different', function() { + it('then an error is thrown', function() { + const innerSchema = new Schema({ + field1: { + type: String, encrypt: { + keyId: KEY_ID, + queries: { type: 'equality' } + } + } + }, { encryptionType: 'csfle' }); + assert.throws(() => { + new Schema({ + field1: innerSchema + }, { encryptionType: 'queryableEncryption' }); + }, /encryptionType of a nested schema must match the encryption type of the parent schema/); + }); + }); + }); + + describe('tests for schema mutation methods', function() { + describe('Schema.prototype.add()', function() { + describe('Given a schema with no encrypted fields', function() { + describe('When an encrypted field is added', function() { + it('Then the encrypted field is added to the encrypted fields for the schema', function() { + const schema = new Schema({ + field1: Number + }); + schema.encryptionType('csfle'); + schema.add( + { name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } } + ); + assert.ok(schemaHasEncryptedProperty(schema, ['name'])); + }); + }); + }); + + 
describe('Given a schema with an encrypted field', function() { + describe('when an encrypted field is added', function() { + describe('and the encryption type matches the existing encryption type', function() { + it('Then the encrypted field is added to the encrypted fields for the schema', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } } + ); + assert.ok(schemaHasEncryptedProperty(schema, ['name'])); + }); + }); + }); + }); + + describe('Given a schema with an encrypted field', function() { + describe('when an encrypted field is added with different encryption settings for the same field', function() { + it('The encryption settings for the field are overridden', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { name: { type: String, encrypt: { keyId: new UUID(), algorithm } } } + ); + assert.notEqual(schema.encryptedFields['name'].keyId, KEY_ID); + }); + + }); + + describe('When an unencrypted field is added for the same field', function() { + it('The field on the schema is overridden', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { field1: String } + ); + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + }); + + }); + }); + + describe('Given a schema', function() { + describe('When multiple encrypted fields are added to the schema in one call to add()', function() { + it('Then all the encrypted fields are added to the schema', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { + name: { type: String, encrypt: { keyId: KEY_ID, algorithm 
} }, + age: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + } + ); + + assert.ok(schemaHasEncryptedProperty(schema, ['name'])); + assert.ok(schemaHasEncryptedProperty(schema, ['age'])); + }); + }); + }); + }); + + describe('Schema.prototype.remove()', function() { + describe('Given a schema with one encrypted field', function() { + describe('When the encrypted field is removed', function() { + it('Then the encrypted fields on the schema does not contain the removed field', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.remove('field1'); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + }); + }); + }); + + describe('Given a schema with multiple encrypted fields', function() { + describe('When one encrypted field is removed', function() { + it('The encrypted fields on the schema does not contain the removed field', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.remove(['field1']); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + assert.equal(schemaHasEncryptedProperty(schema, ['name']), true); + assert.equal(schemaHasEncryptedProperty(schema, ['age']), true); + }); + }); + + describe('When all encrypted fields are removed', function() { + it('The encrypted fields on the schema does not contain the removed field', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.remove(['field1', 'name', 'age']); + + 
assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + assert.equal(schemaHasEncryptedProperty(schema, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema, ['age']), false); + }); + }); + }); + + describe('when a nested encrypted property is removed', function() { + it('the encrypted field is removed from the schema', function() { + const schema = new Schema({ + field1: { name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } } + }, { encryptionType: 'csfle' }); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1.name']), true); + + schema.remove(['field1.name']); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1.name']), false); + }); + }); + }); + }); + + describe('tests for schema copying methods', function() { + describe('Schema.prototype.clone()', function() { + describe('Given a schema with encrypted fields', function() { + describe('When the schema is cloned', function() { + it('The resultant schema contains all the same encrypted fields as the original schema', function() { + const schema1 = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } }, { encryptionType: 'csfle' }); + const schema2 = schema1.clone(); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), true); + }); + it('The encryption type of the cloned schema is the same as the original', function() { + const schema1 = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } }, { encryptionType: 'csfle' }); + const schema2 = schema1.clone(); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + describe('When the cloned schema is modified', function() { + it('The original is not modified', function() { + const schema1 = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } }, { encryptionType: 'csfle' }); + const schema2 = schema1.clone(); + schema2.remove('name'); + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), false); + 
assert.equal(schemaHasEncryptedProperty(schema1, ['name']), true); + }); + }); + }); + }); + }); + + describe('Schema.prototype.pick()', function() { + describe('When pick() is used with only unencrypted fields', function() { + it('Then the resultant schema has none of the original schema’s encrypted fields', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name1', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), false); + }); + it('Then the encryption type is set to the cloned schemas encryptionType', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name1', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When pick() is used with some unencrypted fields', function() { + it('Then the resultant schema has the encrypted fields of the original schema that were specified to pick().', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), true); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), false); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { 
type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When pick() is used with nested paths', function() { + it('Then the resultant schema has the encrypted fields of the original schema that were specified to pick().', function() { + const originalSchema = new Schema({ + name: { + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name.name', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name', 'name']), true); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), false); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + }); + + describe('Schema.prototype.omit()', function() { + describe('When omit() is used with only unencrypted fields', function() { + it('Then the resultant schema has all the original schema’s encrypted fields', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name1', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, 
['name']), true); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), true); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name1', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When omit() is used with some unencrypted fields', function() { + it('Then the resultant schema has the encrypted fields of the original schema that were specified to omit()', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), true); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When omit() is used with some all the encrypted fields', function() { + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + 
name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name', 'age']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + }); + }); +}); From 4402f80ed8447b5cecce0dafd2abd891df7477db Mon Sep 17 00:00:00 2001 From: Bailey Pearson Date: Thu, 23 Jan 2025 08:43:48 -0700 Subject: [PATCH 81/84] Update lib/encryptionUtils.js Co-authored-by: Aditi Khare <106987683+aditi-khare-mongoDB@users.noreply.github.com> --- lib/encryptionUtils.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/encryptionUtils.js b/lib/encryptionUtils.js index 916ef17a5cd..f0c46dee716 100644 --- a/lib/encryptionUtils.js +++ b/lib/encryptionUtils.js @@ -19,7 +19,7 @@ const SchemaString = require('./schema/string'); * * @param {import('.').Schema} schema * @param {string} path - * @returns + * @returns {string} */ function inferBSONType(schema, path) { const type = schema.path(path); From a6a51bd56a75136127989d7586785e470ae30083 Mon Sep 17 00:00:00 2001 From: Bailey Pearson Date: Thu, 23 Jan 2025 08:44:21 -0700 Subject: [PATCH 82/84] Update test/encrypted_schema.test.js Co-authored-by: Aditi Khare <106987683+aditi-khare-mongoDB@users.noreply.github.com> --- test/encrypted_schema.test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/encrypted_schema.test.js b/test/encrypted_schema.test.js index 8f35dc97b73..529ffe42ac0 100644 --- a/test/encrypted_schema.test.js +++ b/test/encrypted_schema.test.js @@ -519,7 +519,7 @@ describe('encrypted schema declaration', function() { }); }); - describe('When omit() is used with some all the encrypted fields', function() { + describe('When omit() is used with all the encrypted fields', function() { it('Then the encryption type is the same as the original schema’s encryption type', function() { const originalSchema = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, From 7fa66160c40759f5af86bee18c2308eb5098a0d3 Mon Sep 17 00:00:00 
2001 From: Aditi Khare Date: Wed, 29 Jan 2025 16:30:54 -0500 Subject: [PATCH 83/84] src code changes --- types/schemaoptions.d.ts | 2 ++ types/schematypes.d.ts | 26 ++++++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/types/schemaoptions.d.ts b/types/schemaoptions.d.ts index 4df87a806ea..d9bb080878d 100644 --- a/types/schemaoptions.d.ts +++ b/types/schemaoptions.d.ts @@ -258,6 +258,8 @@ declare module 'mongoose' { * @default false */ overwriteModels?: boolean; + + encryptionType?: 'csfle' | 'queryableEncryption'; } interface DefaultSchemaOptions { diff --git a/types/schematypes.d.ts b/types/schematypes.d.ts index 5f364f0cea4..22e2992f6e0 100644 --- a/types/schematypes.d.ts +++ b/types/schematypes.d.ts @@ -1,3 +1,5 @@ +import * as BSON from 'bson'; + declare module 'mongoose' { /** The Mongoose Date [SchemaType](/docs/schematypes.html). */ @@ -207,6 +209,8 @@ declare module 'mongoose' { maxlength?: number | [number, string] | readonly [number, string]; [other: string]: any; + + encrypt?: EncryptSchemaTypeOptions; } interface Validator { @@ -218,6 +222,28 @@ declare module 'mongoose' { type ValidatorFunction = (this: DocType, value: any, validatorProperties?: Validator) => any; + export interface EncryptSchemaTypeOptions { + /** The id of the dataKey to use for encryption */ + keyId: BSON.UUID; + + /** + * Specifies the type of queries that the field can be queried on for Queryable Encryption. + * Required when `SchemaOptions.encryptionType` is 'queryableEncryption' + */ + queries?: 'equality' | 'range'; + + /** + * The algorithm to use for encryption. 
+ * Required when `SchemaOptions.encryptionType` is 'csfle' + */ + algorithm?: + | 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + | 'AEAD_AES_256_CBC_HMAC_SHA_512-Random' + | 'Indexed' + | 'Unindexed' + | 'Range'; + } + class SchemaType { /** SchemaType constructor */ constructor(path: string, options?: AnyObject, instance?: string); From 616ce14defb00b811e5ca4b4dd4ad8a097608cff Mon Sep 17 00:00:00 2001 From: Aditi Khare Date: Wed, 29 Jan 2025 17:30:41 -0500 Subject: [PATCH 84/84] tests added --- test/types/schema.test.ts | 12 ++++++++- test/types/schemaTypeOptions.test.ts | 37 ++++++++++++++++++++++++++ types/schemaoptions.d.ts | 3 +++ types/schematypes.d.ts | 39 ++++++++++++---------------- 4 files changed, 67 insertions(+), 24 deletions(-) diff --git a/test/types/schema.test.ts b/test/types/schema.test.ts index 13408eaf293..00e0445878f 100644 --- a/test/types/schema.test.ts +++ b/test/types/schema.test.ts @@ -24,7 +24,7 @@ import { ValidateOpts, BufferToBinary } from 'mongoose'; -import { Binary } from 'mongodb'; +import { Binary, BSON } from 'mongodb'; import { IsPathRequired } from '../../types/inferschematype'; import { expectType, expectError, expectAssignable } from 'tsd'; import { ObtainDocumentPathType, ResolvePathType } from '../../types/inferschematype'; @@ -591,6 +591,16 @@ const batchSchema2 = new Schema({ name: String }, { discriminatorKey: 'kind', st } } }); batchSchema2.discriminator('event', eventSchema2); + +function encryptionType() { + const keyId = new BSON.UUID(); + expectError(new Schema({ name: { type: String, encrypt: { keyId } } }, { encryptionType: 'newFakeEncryptionType' })); + expectError(new Schema({ name: { type: String, encrypt: { keyId } } }, { encryptionType: 1 })); + + expectType(new Schema({ name: { type: String, encrypt: { keyId } } }, { encryptionType: 'queryableEncryption' })); + expectType(new Schema({ name: { type: String, encrypt: { keyId } } }, { encryptionType: 'csfle' })); +} + function gh11828() { interface IUser { 
name: string; diff --git a/test/types/schemaTypeOptions.test.ts b/test/types/schemaTypeOptions.test.ts index 3514b01d7e9..4f38ceab909 100644 --- a/test/types/schemaTypeOptions.test.ts +++ b/test/types/schemaTypeOptions.test.ts @@ -1,3 +1,4 @@ +import { BSON } from 'mongodb'; import { AnyArray, Schema, @@ -74,3 +75,39 @@ function defaultOptions() { expectType>(new Schema.Types.Subdocument('none').defaultOptions); expectType>(new Schema.Types.UUID('none').defaultOptions); } + +function encrypt() { + const keyId = new BSON.UUID(); + + new SchemaTypeOptions()['encrypt'] = { keyId, algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' }; + new SchemaTypeOptions()['encrypt'] = { keyId, algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' }; + new SchemaTypeOptions()['encrypt'] = { keyId, algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Random' }; + new SchemaTypeOptions()['encrypt'] = { keyId, algorithm: 'Indexed' }; + new SchemaTypeOptions()['encrypt'] = { keyId, algorithm: 'Unindexed' }; + new SchemaTypeOptions()['encrypt'] = { keyId, algorithm: 'Range' }; + new SchemaTypeOptions()['encrypt'] = { keyId, algorithm: undefined }; + + // qe + valid queries + new SchemaTypeOptions()['encrypt'] = { keyId, queries: 'equality' }; + new SchemaTypeOptions()['encrypt'] = { keyId, queries: 'range' }; + new SchemaTypeOptions()['encrypt'] = { keyId, queries: undefined }; + + // empty object + expectError['encrypt']>({}); + + // invalid keyId + expectError['encrypt']>({ keyId: 'fakeId' }); + + // missing keyId + expectError['encrypt']>({ queries: 'equality' }); + expectError['encrypt']>({ algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' }); + + // invalid algorithm + expectError['encrypt']>({ keyId, algorithm: 'SHA_FAKE_ALG' }); + + // invalid queries + expectError['encrypt']>({ keyId, queries: 'fakeQueryOption' }); + + // invalid input option + expectError['encrypt']>({ keyId, invalidKey: 'fakeKeyOption' }); +} diff --git a/types/schemaoptions.d.ts b/types/schemaoptions.d.ts 
index d9bb080878d..f661e1643de 100644 --- a/types/schemaoptions.d.ts +++ b/types/schemaoptions.d.ts @@ -259,6 +259,9 @@ declare module 'mongoose' { */ overwriteModels?: boolean; + /** + * Required when the schema is encrypted. + */ encryptionType?: 'csfle' | 'queryableEncryption'; } diff --git a/types/schematypes.d.ts b/types/schematypes.d.ts index 22e2992f6e0..a59f8c46668 100644 --- a/types/schematypes.d.ts +++ b/types/schematypes.d.ts @@ -210,7 +210,22 @@ declare module 'mongoose' { [other: string]: any; - encrypt?: EncryptSchemaTypeOptions; + encrypt?: { + /** The id of the dataKey to use for encryption */ + keyId: BSON.UUID; + + /** + * Specifies the type of queries that the field can be queried on for Queryable Encryption. + * Required when `SchemaOptions.encryptionType` is 'queryableEncryption' + */ + queries?: 'equality' | 'range'; + + /** + * The algorithm to use for encryption. + * Required when `SchemaOptions.encryptionType` is 'csfle' + */ + algorithm?: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' | 'AEAD_AES_256_CBC_HMAC_SHA_512-Random' | 'Indexed' | 'Unindexed' | 'Range'; + }; } interface Validator { @@ -222,28 +237,6 @@ declare module 'mongoose' { type ValidatorFunction = (this: DocType, value: any, validatorProperties?: Validator) => any; - export interface EncryptSchemaTypeOptions { - /** The id of the dataKey to use for encryption */ - keyId: BSON.UUID; - - /** - * Specifies the type of queries that the field can be queried on for Queryable Encryption. - * Required when `SchemaOptions.encryptionType` is 'queryableEncryption' - */ - queries?: 'equality' | 'range'; - - /** - * The algorithm to use for encryption. - * Required when `SchemaOptions.encryptionType` is 'csfle' - */ - algorithm?: - | 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' - | 'AEAD_AES_256_CBC_HMAC_SHA_512-Random' - | 'Indexed' - | 'Unindexed' - | 'Range'; - } - class SchemaType { /** SchemaType constructor */ constructor(path: string, options?: AnyObject, instance?: string);