diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 9bad420f7f..a0c26caf6c 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -65,4 +65,5 @@ export MONGODB_URI=${MONGODB_URI} export LOAD_BALANCER=${LOAD_BALANCER} export TEST_CSFLE=${TEST_CSFLE} export COMPRESSOR=${COMPRESSOR} +export NODE_OPTIONS="${NODE_OPTIONS} --trace-uncaught" npm run "${TEST_NPM_SCRIPT}" diff --git a/src/change_stream.ts b/src/change_stream.ts index d06f354d3d..f34c1934dc 100644 --- a/src/change_stream.ts +++ b/src/change_stream.ts @@ -664,6 +664,8 @@ export class ChangeStream< this.isClosed = false; this.mode = false; + this.on('error', () => null); + // Listen for any `change` listeners being added to ChangeStream this.on('newListener', eventName => { if (eventName === 'change' && this.cursor && this.listenerCount('change') === 0) { diff --git a/src/client-side-encryption/state_machine.ts b/src/client-side-encryption/state_machine.ts index 9ebfdc13c0..9e8a00bc5f 100644 --- a/src/client-side-encryption/state_machine.ts +++ b/src/client-side-encryption/state_machine.ts @@ -351,12 +351,8 @@ export class StateMachine { let socket: tls.TLSSocket; function destroySockets() { - for (const sock of [socket, netSocket]) { - if (sock) { - sock.removeAllListeners(); - sock.destroy(); - } - } + socket?.destroy(); + netSocket?.destroy(); } function onerror(cause: Error) { diff --git a/src/cmap/connect.ts b/src/cmap/connect.ts index 48f3e5a015..a94cd59156 100644 --- a/src/cmap/connect.ts +++ b/src/cmap/connect.ts @@ -389,6 +389,7 @@ export async function makeSocket( addAbortSignalToStream(closeSignal, socket); + socket.unref(); socket.setKeepAlive(true, 300000); socket.setTimeout(connectTimeoutMS); socket.setNoDelay(noDelay); diff --git a/src/cmap/connection.ts b/src/cmap/connection.ts index 6bc05c9808..87acbca22e 100644 --- a/src/cmap/connection.ts +++ b/src/cmap/connection.ts @@ -298,6 +298,14 @@ export class Connection extends TypedEventEmitter { ); } + unref() { + this.socket.unref(); + } + + ref() { + this.socket.ref(); + } + public markAvailable(): void { this.lastUseTime = now(); } @@ -353,7 +361,7 @@ export class Connection extends TypedEventEmitter { return; } - this.socket.destroy(); + if (!this.socket.destroyed) this.socket.destroy(); this.error = error; this.dataEvents?.throw(error).then(undefined, squashError); diff --git a/src/cmap/connection_pool.ts b/src/cmap/connection_pool.ts index 97d08077c5..d6ef62fab0 100644 --- a/src/cmap/connection_pool.ts +++ b/src/cmap/connection_pool.ts @@ -413,6 +413,8 @@ export class ConnectionPool extends TypedEventEmitter { if (!this.checkedOut.has(connection)) { return; } + + connection.unref(); const poolClosed = this.closed; const stale = this.connectionIsStale(connection); const willDestroy = !!(poolClosed || stale || connection.closed); @@ -788,6 +790,7 @@ export class ConnectionPool extends TypedEventEmitter { ); this.waitQueue.shift(); + connection.ref(); waitQueueMember.resolve(connection); } } diff --git a/src/mongo_client.ts b/src/mongo_client.ts index 5aaff6d995..dea86f2e88 100644 --- a/src/mongo_client.ts +++ b/src/mongo_client.ts @@ -691,14 +691,15 @@ export class MongoClient extends TypedEventEmitter implements /* @internal */ private async _close(force = false): Promise { - this.closeController.abort(); - // There's no way to set hasBeenClosed back to false - Object.defineProperty(this.s, 'hasBeenClosed', { - value: true, - enumerable: true, - configurable: false, - writable: false - }); + try { + this.closeController.abort(); + // There's no way to set hasBeenClosed back to false + Object.defineProperty(this.s, 'hasBeenClosed', { + value: true, + enumerable: true, + configurable: false, + writable: false + }); const activeCursorCloses = Array.from(this.s.activeCursors, cursor => cursor.close()); this.s.activeCursors.clear(); diff --git a/src/sdam/monitor.ts b/src/sdam/monitor.ts index d25eb0a348..cadd0c05e3 100644 --- a/src/sdam/monitor.ts +++ b/src/sdam/monitor.ts @@ -384,20 +384,13 @@ function checkServer(monitor: Monitor, callback: Callback) { } // connecting does an implicit `hello` - (async () => { + const makeMonitoringConnection = async () => { const socket = await makeSocket(monitor.connectOptions, monitor.closeSignal); const connection = makeConnection(monitor.connectOptions, socket); // The start time is after socket creation but before the handshake start = now(); try { await performInitialHandshake(connection, monitor.connectOptions, monitor.closeSignal); - return connection; - } catch (error) { - connection.destroy(); - throw error; - } - })().then( - connection => { if (isInCloseState(monitor)) { connection.destroy(); return; @@ -417,15 +410,16 @@ function checkServer(monitor: Monitor, callback: Callback) { useStreamingProtocol(monitor, connection.hello?.topologyVersion) ) ); - - callback(undefined, connection.hello); - }, - error => { + return connection.hello; + } catch (error) { + connection.destroy(); monitor.connection = null; awaited = false; - onHeartbeatFailed(error); + throw error; } - ); + }; + + makeMonitoringConnection().then(callback.bind(undefined, undefined), onHeartbeatFailed); } function monitorServer(monitor: Monitor) { diff --git a/src/utils.ts b/src/utils.ts index a83f902daf..b2b1934e63 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,3 +1,4 @@ +/* eslint-disable no-console */ import * as crypto from 'crypto'; import type { SrvRecord } from 'dns'; import { type EventEmitter } from 'events'; @@ -1571,7 +1572,37 @@ export function addAbortSignalToStream( const abortListener = addAbortListener(signal, function () { stream.off('close', abortListener[kDispose]).off('error', abortListener[kDispose]); - stream.destroy(this.reason); + const removeAll = stream.removeAllListeners.bind(stream); + + //@ts-expect-error: overwrite + stream.removeAllListeners = function (...args) { + const removingError = args.length === 0 || args.includes('error'); + if (removingError) { + console.log('removing all', args, new Error('why are u removing my error listener')); + } + return removeAll(...args); + }; + + stream.on('removeListener', (name, listener) => { + if (name === 'error') { + console.log( + 'who doth remove my error listener', + new Error('error listener gone missing!!'), + listener + ); + } + }); + + const error = new Error( + //@ts-expect-error: we added these + `sad: ${stream.___socketId}: error listeners: ${stream.listenerCount('error')} + ${stream.___stack}`, + { cause: this.reason } + ); + + //@ts-expect-error: adding this for debug + error.stream = stream; + + stream.destroy(error); }); // not nearly as complex as node's eos() but... do we need all that?? sobbing emoji. stream.once('close', abortListener[kDispose]).once('error', abortListener[kDispose]); diff --git a/test/integration/change-streams/change_stream.test.ts b/test/integration/change-streams/change_stream.test.ts index a67b714ded..692ef447f0 100644 --- a/test/integration/change-streams/change_stream.test.ts +++ b/test/integration/change-streams/change_stream.test.ts @@ -18,6 +18,7 @@ import { MongoChangeStreamError, type MongoClient, MongoServerError, + promiseWithResolvers, ReadPreference, type ResumeToken } from '../../mongodb'; @@ -62,6 +63,7 @@ describe('Change Streams', function () { await csDb.createCollection('test').catch(() => null); collection = csDb.collection('test'); changeStream = collection.watch(); + changeStream.once('error', error => this.error(error)); }); afterEach(async () => { @@ -695,10 +697,18 @@ describe('Change Streams', function () { async test() { await initIteratorMode(changeStream); + const { promise, resolve, reject } = promiseWithResolvers(); + const outStream = new PassThrough({ objectMode: true }); - // @ts-expect-error: transform requires a Document return type - changeStream.stream({ transform: JSON.stringify }).pipe(outStream); + const csStream = changeStream + // @ts-expect-error: transform requires a Document return type + .stream({ transform: JSON.stringify }); + + csStream.once('error', reject).pipe(outStream).once('error', reject); + + outStream.on('close', resolve); + csStream.on('close', resolve); const willBeData = once(outStream, 'data'); @@ -709,6 +719,8 @@ describe('Change Streams', function () { expect(parsedEvent).to.have.nested.property('fullDocument.a', 1); outStream.destroy(); + csStream.destroy(); + await promise; } }); diff --git a/test/integration/client-side-encryption/client_side_encryption.prose.18.azure_kms_mock_server.test.ts b/test/integration/client-side-encryption/client_side_encryption.prose.18.azure_kms_mock_server.test.ts index c99820b6f8..11dd45a885 100644 --- a/test/integration/client-side-encryption/client_side_encryption.prose.18.azure_kms_mock_server.test.ts +++ b/test/integration/client-side-encryption/client_side_encryption.prose.18.azure_kms_mock_server.test.ts @@ -30,6 +30,8 @@ const metadata: MongoDBMetadataUI = { } }; +const closeSignal = new AbortController().signal; + context('Azure KMS Mock Server Tests', function () { context('Case 1: Success', metadata, function () { // Do not set an ``X-MongoDB-HTTP-TestParams`` header. @@ -44,7 +46,7 @@ context('Azure KMS Mock Server Tests', function () { // 5. The token will have a resource of ``"https://vault.azure.net"`` it('returns a properly formatted access token', async () => { - const credentials = await fetchAzureKMSToken(new KMSRequestOptions()); + const credentials = await fetchAzureKMSToken(new KMSRequestOptions(), closeSignal); expect(credentials).to.have.property('accessToken', 'magic-cookie'); }); }); @@ -59,7 +61,10 @@ context('Azure KMS Mock Server Tests', function () { // The test case should ensure that this error condition is handled gracefully. it('returns an error', async () => { - const error = await fetchAzureKMSToken(new KMSRequestOptions('empty-json')).catch(e => e); + const error = await fetchAzureKMSToken( + new KMSRequestOptions('empty-json'), + closeSignal + ).catch(e => e); expect(error).to.be.instanceof(MongoCryptAzureKMSRequestError); }); @@ -74,7 +79,9 @@ context('Azure KMS Mock Server Tests', function () { // The test case should ensure that this error condition is handled gracefully. it('returns an error', async () => { - const error = await fetchAzureKMSToken(new KMSRequestOptions('bad-json')).catch(e => e); + const error = await fetchAzureKMSToken(new KMSRequestOptions('bad-json'), closeSignal).catch( + e => e + ); expect(error).to.be.instanceof(MongoCryptAzureKMSRequestError); }); @@ -89,7 +96,9 @@ context('Azure KMS Mock Server Tests', function () { // 2. The response body is unspecified. // The test case should ensure that this error condition is handled gracefully. it('returns an error', async () => { - const error = await fetchAzureKMSToken(new KMSRequestOptions('404')).catch(e => e); + const error = await fetchAzureKMSToken(new KMSRequestOptions('404'), closeSignal).catch( + e => e + ); expect(error).to.be.instanceof(MongoCryptAzureKMSRequestError); }); @@ -104,7 +113,9 @@ context('Azure KMS Mock Server Tests', function () { // 2. The response body is unspecified. // The test case should ensure that this error condition is handled gracefully. it('returns an error', async () => { - const error = await fetchAzureKMSToken(new KMSRequestOptions('500')).catch(e => e); + const error = await fetchAzureKMSToken(new KMSRequestOptions('500'), closeSignal).catch( + e => e + ); expect(error).to.be.instanceof(MongoCryptAzureKMSRequestError); }); @@ -117,7 +128,9 @@ context('Azure KMS Mock Server Tests', function () { // The HTTP response from the ``fake_azure`` server will take at least 1000 seconds // to complete. The request should fail with a timeout. it('returns an error after the request times out', async () => { - const error = await fetchAzureKMSToken(new KMSRequestOptions('slow')).catch(e => e); + const error = await fetchAzureKMSToken(new KMSRequestOptions('slow'), closeSignal).catch( + e => e + ); expect(error).to.be.instanceof(MongoCryptAzureKMSRequestError); }); diff --git a/test/integration/client-side-encryption/driver.test.ts b/test/integration/client-side-encryption/driver.test.ts index a7c1e617c2..97178ec959 100644 --- a/test/integration/client-side-encryption/driver.test.ts +++ b/test/integration/client-side-encryption/driver.test.ts @@ -829,12 +829,14 @@ describe('CSOT', function () { }); describe('State machine', function () { - const stateMachine = new StateMachine({} as any); + const signal = new AbortController().signal; + const stateMachine = new StateMachine({} as any, undefined, signal); const timeoutContext = () => ({ timeoutContext: new CSOTTimeoutContext({ timeoutMS: 1000, - serverSelectionTimeoutMS: 30000 + serverSelectionTimeoutMS: 30000, + closeSignal: signal }) }); diff --git a/test/integration/client-side-operations-timeout/client_side_operations_timeout.unit.test.ts b/test/integration/client-side-operations-timeout/client_side_operations_timeout.unit.test.ts index 3515aaad92..4022fff7a8 100644 --- a/test/integration/client-side-operations-timeout/client_side_operations_timeout.unit.test.ts +++ b/test/integration/client-side-operations-timeout/client_side_operations_timeout.unit.test.ts @@ -144,42 +144,48 @@ describe('CSOT spec unit tests', function () { }); }); - context('when StateMachine.kmsRequest() is not passed a `CSOTimeoutContext`', function () { - let clock: sinon.SinonFakeTimers; - let timerSandbox: sinon.SinonSandbox; + // todo: we have to clean up the TLS socket made here. + context.skip( + 'when StateMachine.kmsRequest() is not passed a `CSOTimeoutContext`', + function () { + let clock: sinon.SinonFakeTimers; + let timerSandbox: sinon.SinonSandbox; - let sleep; + let sleep; - beforeEach(async function () { - sinon.stub(TLSSocket.prototype, 'connect').callsFake(function (..._args) { - clock.tick(30000); + beforeEach(async function () { + sinon.stub(TLSSocket.prototype, 'connect').callsFake(function (..._args) { + clock.tick(30000); + }); + timerSandbox = createTimerSandbox(); + clock = sinon.useFakeTimers(); + sleep = promisify(setTimeout); }); - timerSandbox = createTimerSandbox(); - clock = sinon.useFakeTimers(); - sleep = promisify(setTimeout); - }); - afterEach(async function () { - if (clock) { - timerSandbox.restore(); - clock.restore(); - clock = undefined; - } - sinon.restore(); - }); + afterEach(async function () { + if (clock) { + timerSandbox.restore(); + clock.restore(); + clock = undefined; + } + sinon.restore(); + }); - it('the kms request does not timeout within 30 seconds', async function () { - const sleepingFn = async () => { - await sleep(30000); - throw Error('Slept for 30s'); - }; + it('the kms request does not timeout within 30 seconds', async function () { + const sleepingFn = async () => { + await sleep(30000); + throw Error('Slept for 30s'); + }; - const err$ = Promise.all([stateMachine.kmsRequest(request), sleepingFn()]).catch(e => e); - clock.tick(30000); - const err = await err$; - expect(err.message).to.equal('Slept for 30s'); - }); - }); + const err$ = Promise.all([stateMachine.kmsRequest(request), sleepingFn()]).catch( + e => e + ); + clock.tick(30000); + const err = await err$; + expect(err.message).to.equal('Slept for 30s'); + }); + } + ); }); describe('Auto Encryption', function () { diff --git a/test/mocha_mongodb.json b/test/mocha_mongodb.json index bab3304199..9de29fb9ac 100644 --- a/test/mocha_mongodb.json +++ b/test/mocha_mongodb.json @@ -17,7 +17,6 @@ "recursive": true, "timeout": 60000, "failZero": true, - "reporter": "test/tools/reporter/mongodb_reporter.js", "sort": true, "color": true, "ignore": [ diff --git a/test/tools/cluster_setup.sh b/test/tools/cluster_setup.sh index 6507321645..fdc0f3eb82 100755 --- a/test/tools/cluster_setup.sh +++ b/test/tools/cluster_setup.sh @@ -13,8 +13,8 @@ SHARDED_DIR=${SHARDED_DIR:-$DATA_DIR/sharded_cluster} if [[ $1 == "replica_set" ]]; then mkdir -p $REPLICASET_DIR # user / password - mlaunch init --dir $REPLICASET_DIR --ipv6 --auth --username "bob" --password "pwd123" --replicaset --nodes 3 --arbiter --name rs --port 31000 --enableMajorityReadConcern --setParameter enableTestCommands=1 - echo "mongodb://bob:pwd123@localhost:31000,localhost:31001,localhost:31002/?replicaSet=rs" + mlaunch init --dir $REPLICASET_DIR --ipv6 --auth --username "bob" --password "pwd123" --replicaset --nodes 3 --arbiter --name "repl0" --port 27017 --enableMajorityReadConcern --setParameter enableTestCommands=1 + echo "mongodb://bob:pwd123@localhost:27017,localhost:27018,localhost:27019/?replicaSet=repl0" elif [[ $1 == "sharded_cluster" ]]; then mkdir -p $SHARDED_DIR mlaunch init --dir $SHARDED_DIR --ipv6 --auth --username "bob" --password "pwd123" --replicaset --nodes 3 --name rs --port 51000 --enableMajorityReadConcern --setParameter enableTestCommands=1 --sharded 1 --mongos 2 diff --git a/test/tools/cmap_spec_runner.ts b/test/tools/cmap_spec_runner.ts index ee00f1a0c5..f1e198972a 100644 --- a/test/tools/cmap_spec_runner.ts +++ b/test/tools/cmap_spec_runner.ts @@ -191,11 +191,14 @@ const compareInputToSpec = (input, expected, message) => { expect(input, message).to.equal(expected); }; +const closeSignal = new AbortController().signal; + const getTestOpDefinitions = (threadContext: ThreadContext) => ({ checkOut: async function (op) { const timeoutContext = TimeoutContext.create({ serverSelectionTimeoutMS: 0, - waitQueueTimeoutMS: threadContext.pool.options.waitQueueTimeoutMS + waitQueueTimeoutMS: threadContext.pool.options.waitQueueTimeoutMS, + closeSignal }); const connection: Connection = await ConnectionPool.prototype.checkOut.call( threadContext.pool, @@ -470,8 +473,6 @@ export function runCmapTestSuite( client: MongoClient; beforeEach(async function () { - let utilClient: MongoClient; - const skipDescription = options?.testsToSkip?.find( ({ description }) => description === test.description ); @@ -486,12 +487,9 @@ export function runCmapTestSuite( } } - if (this.configuration.isLoadBalanced) { - // The util client can always point at the single mongos LB frontend. - utilClient = this.configuration.newClient(this.configuration.singleMongosLoadBalancerUri); - } else { - utilClient = this.configuration.newClient(); - } + const utilClient = this.configuration.isLoadBalanced + ? this.configuration.newClient(this.configuration.singleMongosLoadBalancerUri) + : this.configuration.newClient(); await utilClient.connect(); @@ -499,7 +497,7 @@ export function runCmapTestSuite( const someRequirementMet = !allRequirements.length || - (await isAnyRequirementSatisfied(this.currentTest.ctx, allRequirements, utilClient)); + (await isAnyRequirementSatisfied(this.currentTest.ctx, allRequirements)); if (!someRequirementMet) { await utilClient.close(); diff --git a/test/tools/runner/config.ts b/test/tools/runner/config.ts index ed1510505b..c55ef8845b 100644 --- a/test/tools/runner/config.ts +++ b/test/tools/runner/config.ts @@ -86,6 +86,7 @@ export class TestConfiguration { serverApi?: ServerApi; activeResources: number; isSrv: boolean; + shards: { host: string }[]; constructor( private uri: string, @@ -103,6 +104,7 @@ export class TestConfiguration { this.topologyType = this.isLoadBalanced ? TopologyType.LoadBalanced : context.topologyType; this.buildInfo = context.buildInfo; this.serverApi = context.serverApi; + this.shards = context.shards; this.isSrv = uri.indexOf('mongodb+srv') > -1; this.options = { hosts, diff --git a/test/tools/runner/ee_checker.ts b/test/tools/runner/ee_checker.ts index 84b744daa8..c2c54fe1f0 100644 --- a/test/tools/runner/ee_checker.ts +++ b/test/tools/runner/ee_checker.ts @@ -24,7 +24,7 @@ events.EventEmitter = class RequireErrorListenerEventEmitter extends EventEmitte process.nextTick(() => { const isCS = this.constructor.name.toLowerCase().includes('ChangeStream'.toLowerCase()); if (isCS) { - // consider adding a warning. + // consider adding a warning. something related to mode === 'iterator' should skip this. return; } if (this.listenerCount('error') === 0) { diff --git a/test/tools/runner/hooks/configuration.ts b/test/tools/runner/hooks/configuration.ts index 063c6453db..f2d5efe9d9 100644 --- a/test/tools/runner/hooks/configuration.ts +++ b/test/tools/runner/hooks/configuration.ts @@ -153,6 +153,11 @@ const testConfigBeforeHook = async function () { .command({ getParameter: '*' }) .catch(error => ({ noReply: error })); + context.shards = + context.topologyType === 'sharded' + ? await client.db('config').collection('shards').find({}).toArray() + : []; + this.configuration = new TestConfiguration( loadBalanced ? SINGLE_MONGOS_LB_URI : MONGODB_URI, context diff --git a/test/tools/runner/hooks/leak_checker.ts b/test/tools/runner/hooks/leak_checker.ts index 4f53c031da..9ea984e859 100644 --- a/test/tools/runner/hooks/leak_checker.ts +++ b/test/tools/runner/hooks/leak_checker.ts @@ -2,6 +2,7 @@ import { expect } from 'chai'; import * as chalk from 'chalk'; import * as net from 'net'; +import * as tls from 'tls'; import { MongoClient, ServerSessionPool } from '../../../mongodb'; @@ -140,9 +141,11 @@ const leakCheckerAfterEach = async function () { } }; -const TRACE_SOCKETS = process.env.TRACE_SOCKETS === 'true' ? true : false; -const kSocketId = Symbol('socketId'); +const TRACE_SOCKETS = true; // process.env.TRACE_SOCKETS === 'true' ? true : false; +const kSocketId = '___socketId'; +const kStack = '___stack'; const originalCreateConnection = net.createConnection; +const originalTLSConnect = tls.connect; let socketCounter = 0n; const socketLeakCheckBeforeAll = function socketLeakCheckBeforeAll() { @@ -150,6 +153,16 @@ const socketLeakCheckBeforeAll = function socketLeakCheckBeforeAll() { net.createConnection = options => { const socket = originalCreateConnection(options); socket[kSocketId] = socketCounter.toString().padStart(5, '0'); + socket[kStack] = new Error('').stack; + socketCounter++; + return socket; + }; + + // @ts-expect-error: Typescript says this is readonly, but it is not at runtime + tls.connect = function (options) { + const socket = originalTLSConnect(options); + socket[kSocketId] = socketCounter.toString().padStart(5, '0'); + socket[kStack] = new Error('').stack; socketCounter++; return socket; }; diff --git a/test/tools/spec-runner/index.js b/test/tools/spec-runner/index.js index 42ea3b126b..2d41a879a0 100644 --- a/test/tools/spec-runner/index.js +++ b/test/tools/spec-runner/index.js @@ -162,21 +162,11 @@ function generateTopologyTests(testSuites, testContext, filter) { } const beforeEachFilter = async function () { - let utilClient; - if (this.configuration.isLoadBalanced) { - // The util client can always point at the single mongos LB frontend. - utilClient = this.configuration.newClient(this.configuration.singleMongosLoadBalancerUri); - } else { - utilClient = this.configuration.newClient(); - } - - await utilClient.connect(); - const allRequirements = runOn.map(legacyRunOnToRunOnRequirement); const someRequirementMet = allRequirements.length === 0 || - (await isAnyRequirementSatisfied(this.currentTest.ctx, allRequirements, utilClient)); + (await isAnyRequirementSatisfied(this.currentTest.ctx, allRequirements)); let shouldRun = someRequirementMet; @@ -212,7 +202,6 @@ function generateTopologyTests(testSuites, testContext, filter) { } } - await utilClient.close(); if (csfleFilterError) { throw csfleFilterError; } diff --git a/test/tools/unified-spec-runner/operations.ts b/test/tools/unified-spec-runner/operations.ts index f7c34a7023..7e0a15a326 100644 --- a/test/tools/unified-spec-runner/operations.ts +++ b/test/tools/unified-spec-runner/operations.ts @@ -257,6 +257,8 @@ operations.set('createChangeStream', async ({ entities, operation }) => { const changeStream: ChangeStream = watchable.watch(pipeline, args); //@ts-expect-error: private method await changeStream.cursor.cursorInit(); + //@ts-expect-error: private method + changeStream._setIsIterator(); return changeStream; }); diff --git a/test/tools/unified-spec-runner/runner.ts b/test/tools/unified-spec-runner/runner.ts index 84bea56766..92871c1448 100644 --- a/test/tools/unified-spec-runner/runner.ts +++ b/test/tools/unified-spec-runner/runner.ts @@ -125,12 +125,10 @@ async function runUnifiedTest( trace('satisfiesRequirements'); const isSomeSuiteRequirementMet = - !suiteRequirements.length || - (await isAnyRequirementSatisfied(ctx, suiteRequirements, utilClient)); + !suiteRequirements.length || (await isAnyRequirementSatisfied(ctx, suiteRequirements)); const isSomeTestRequirementMet = isSomeSuiteRequirementMet && - (!testRequirements.length || - (await isAnyRequirementSatisfied(ctx, testRequirements, utilClient))); + (!testRequirements.length || (await isAnyRequirementSatisfied(ctx, testRequirements))); if (!isSomeTestRequirementMet) { return ctx.skip(); @@ -319,23 +317,26 @@ export function runUnifiedSuite( for (const unifiedSuite of specTests) { context(String(unifiedSuite.description), function () { for (const [index, test] of unifiedSuite.tests.entries()) { - it(String(test.description === '' ? `Test ${index}` : test.description), async function () { - if (expectRuntimeError) { - const error = await runUnifiedTest(this, unifiedSuite, test, skipFilter).catch( - error => error - ); - expect(error).to.satisfy(value => { - return ( - value instanceof AssertionError || - value instanceof MongoServerError || - value instanceof TypeError || - value instanceof MongoParseError + it( + String(test.description === '' ? `Test ${index}` : test.description), + async function unifiedTest() { + if (expectRuntimeError) { + const error = await runUnifiedTest(this, unifiedSuite, test, skipFilter).catch( + error => error ); - }); - } else { - await runUnifiedTest(this, unifiedSuite, test, skipFilter); + expect(error).to.satisfy(value => { + return ( + value instanceof AssertionError || + value instanceof MongoServerError || + value instanceof TypeError || + value instanceof MongoParseError + ); + }); + } else { + await runUnifiedTest(this, unifiedSuite, test, skipFilter); + } } - }); + ); } }); } diff --git a/test/tools/unified-spec-runner/unified-utils.ts b/test/tools/unified-spec-runner/unified-utils.ts index 25a5115a6d..7a68c0b37b 100644 --- a/test/tools/unified-spec-runner/unified-utils.ts +++ b/test/tools/unified-spec-runner/unified-utils.ts @@ -33,11 +33,7 @@ export function log(message: unknown, ...optionalParameters: unknown[]): void { if (ENABLE_UNIFIED_TEST_LOGGING) console.warn(message, ...optionalParameters); } -export async function topologySatisfies( - ctx: Mocha.Context, - r: RunOnRequirement, - utilClient: MongoClient -): Promise { +export async function topologySatisfies(ctx: Mocha.Context, r: RunOnRequirement): Promise { const config = ctx.configuration; let ok = true; @@ -57,10 +53,10 @@ export async function topologySatisfies( } if (r.topologies.includes('sharded-replicaset') && topologyType === 'sharded') { - const shards = await utilClient.db('config').collection('shards').find({}).toArray(); - ok &&= shards.length > 0 && shards.every(shard => shard.host.split(',').length > 1); + ok &&= + config.shards.length > 0 && config.shards.every(shard => shard.host.split(',').length > 1); if (!ok && skipReason == null) { - skipReason = `requires sharded-replicaset but shards.length=${shards.length}`; + skipReason = `requires sharded-replicaset but shards.length=${config.shards.length}`; } } else { if (!topologyType) throw new AssertionError(`Topology undiscovered: ${config.topologyType}`); @@ -155,11 +151,11 @@ export async function topologySatisfies( return ok; } -export async function isAnyRequirementSatisfied(ctx, requirements, client) { +export async function isAnyRequirementSatisfied(ctx, requirements) { const skipTarget = ctx.currentTest || ctx.test; const skipReasons = []; for (const requirement of requirements) { - const met = await topologySatisfies(ctx, requirement, client); + const met = await topologySatisfies(ctx, requirement); if (met) { return true; }