diff --git a/src/sessions.ts b/src/sessions.ts
index 3dc3202ea4c..8167bced7c0 100644
--- a/src/sessions.ts
+++ b/src/sessions.ts
@@ -383,7 +383,7 @@ export class ClientSession extends TypedEventEmitter {
    */
   startTransaction(options?: TransactionOptions): void {
     if (this[kSnapshotEnabled]) {
-      throw new MongoCompatibilityError('Transactions are not allowed with snapshot sessions');
+      throw new MongoCompatibilityError('Transactions are not supported in snapshot sessions');
     }
 
     if (this.inTransaction()) {
diff --git a/test/tools/unified-spec-runner/entities.ts b/test/tools/unified-spec-runner/entities.ts
index 82658319973..7afb4d30a21 100644
--- a/test/tools/unified-spec-runner/entities.ts
+++ b/test/tools/unified-spec-runner/entities.ts
@@ -1,3 +1,4 @@
+/* eslint-disable @typescript-eslint/no-non-null-assertion */
 import { expect } from 'chai';
 
 import { ChangeStream } from '../../../src/change_stream';
@@ -64,8 +65,6 @@ function getClient(address) {
   return new MongoClient(`mongodb://${address}`, getEnvironmentalOptions());
 }
 
-type PushFunction = (e: any) => void;
-
 export class UnifiedMongoClient extends MongoClient {
   commandEvents: CommandEvent[];
   cmapEvents: CmapEvent[];
@@ -139,6 +138,17 @@ export class UnifiedMongoClient extends MongoClient {
     return this.ignoredEvents.includes(e.commandName);
   }
 
+  getCapturedEvents(eventType: string): CommandEvent[] | CmapEvent[] {
+    switch (eventType) {
+      case 'command':
+        return this.commandEvents;
+      case 'cmap':
+        return this.cmapEvents;
+      default:
+        throw new Error(`Unknown eventType: ${eventType}`);
+    }
+  }
+
   // NOTE: pushCommandEvent must be an arrow function
   pushCommandEvent: (e: CommandEvent) => void = e => {
     if (!this.isIgnored(e)) {
@@ -151,22 +161,14 @@ export class UnifiedMongoClient extends MongoClient {
     this.cmapEvents.push(e);
   };
 
-  stopCapturingEvents(pushFn: PushFunction): void {
-    const observedEvents = [...this.observedCommandEvents, ...this.observedCmapEvents];
-    for (const eventName of observedEvents) {
-      this.off(eventName, pushFn);
-    }
-  }
-
-  /** Disables command monitoring for the client and returns a list of the captured events. */
-  stopCapturingCommandEvents(): CommandEvent[] {
-    this.stopCapturingEvents(this.pushCommandEvent);
-    return this.commandEvents;
-  }
-
-  stopCapturingCmapEvents(): CmapEvent[] {
-    this.stopCapturingEvents(this.pushCmapEvent);
-    return this.cmapEvents;
+  stopCapturingEvents(): void {
+    for (const eventName of this.observedCommandEvents) {
+      this.off(eventName, this.pushCommandEvent);
+    }
+    for (const eventName of this.observedCmapEvents) {
+      this.off(eventName, this.pushCmapEvent);
+    }
   }
 }
@@ -179,7 +181,7 @@ export class FailPointMap extends Map {
     let address: string;
     if (addressOrClient instanceof MongoClient) {
       client = addressOrClient;
-      address = client.topology.s.seedlist.join(',');
+      address = client.topology!.s.seedlist.join(',');
     } else {
       // create a new client
      address = addressOrClient.toString();
@@ -300,7 +302,7 @@ export class EntitiesMap extends Map {
   getEntity(type: 'cursor', key: string, assertExists?: boolean): AbstractCursor;
   getEntity(type: 'stream', key: string, assertExists?: boolean): UnifiedChangeStream;
   getEntity(type: 'clientEncryption', key: string, assertExists?: boolean): ClientEncryption;
-  getEntity(type: EntityTypeId, key: string, assertExists = true): Entity {
+  getEntity(type: EntityTypeId, key: string, assertExists = true): Entity | undefined {
     const entity = this.get(key);
     if (!entity) {
       if (assertExists) throw new Error(`Entity '${key}' does not exist`);
diff --git a/test/tools/unified-spec-runner/match.ts b/test/tools/unified-spec-runner/match.ts
index e615d4a421f..4aaad51a8e0 100644
--- a/test/tools/unified-spec-runner/match.ts
+++ b/test/tools/unified-spec-runner/match.ts
@@ -1,3 +1,4 @@
+/* eslint-disable @typescript-eslint/no-non-null-assertion */
 import { expect } from 'chai';
 import { inspect } from 'util';
 
@@ -7,6 +8,7 @@ import {
   Document,
   Long,
   MongoError,
+  MongoServerError,
   ObjectId,
   OneOrMore
 } from '../../../src';
@@ -264,11 +266,11 @@ export function specialCheck(
   entities: EntitiesMap,
   path: string[] = [],
   checkExtraKeys: boolean
-): boolean {
+): void {
   if (isUnsetOrMatchesOperator(expected)) {
     if (actual === null || actual === undefined) return;
 
-    resultCheck(actual, expected.$$unsetOrMatches, entities, path, checkExtraKeys);
+    resultCheck(actual, expected.$$unsetOrMatches as any, entities, path, checkExtraKeys);
   } else if (isMatchesEntityOperator(expected)) {
     // $$matchesEntity
     const entity = entities.get(expected.$$matchesEntity);
@@ -290,7 +292,7 @@ export function specialCheck(
     // $$sessionLsid
     const session = entities.getEntity('session', expected.$$sessionLsid, false);
     expect(session, `Session ${expected.$$sessionLsid} does not exist in entities`).to.exist;
-    const entitySessionHex = session.id.id.buffer.toString('hex').toUpperCase();
+    const entitySessionHex = session.id!.id.buffer.toString('hex').toUpperCase();
     const actualSessionHex = actual.id.buffer.toString('hex').toUpperCase();
     expect(
       entitySessionHex,
@@ -298,7 +300,7 @@ export function specialCheck(
     ).to.equal(actualSessionHex);
   } else if (isTypeOperator(expected)) {
     // $$type
-    let ok: boolean;
+    let ok = false;
     const types = Array.isArray(expected.$$type) ? expected.$$type : [expected.$$type];
     for (const type of types) {
       ok ||= TYPE_MAP.get(type)(actual);
     }
@@ -364,19 +366,23 @@ function compareCommandStartedEvents(
   entities: EntitiesMap,
   prefix: string
 ) {
-  if (expected.command) {
-    resultCheck(actual.command, expected.command, entities, [`${prefix}.command`]);
+  if (expected!.command) {
+    resultCheck(actual.command, expected!.command, entities, [`${prefix}.command`]);
   }
-  if (expected.commandName) {
+  if (expected!.commandName) {
     expect(
-      expected.commandName,
-      `expected ${prefix}.commandName to equal ${expected.commandName} but received ${actual.commandName}`
+      expected!.commandName,
+      `expected ${prefix}.commandName to equal ${expected!.commandName} but received ${
+        actual.commandName
+      }`
     ).to.equal(actual.commandName);
   }
-  if (expected.databaseName) {
+  if (expected!.databaseName) {
     expect(
-      expected.databaseName,
-      `expected ${prefix}.databaseName to equal ${expected.databaseName} but received ${actual.databaseName}`
+      expected!.databaseName,
+      `expected ${prefix}.databaseName to equal ${expected!.databaseName} but received ${
+        actual.databaseName
+      }`
     ).to.equal(actual.databaseName);
   }
 }
@@ -387,13 +393,15 @@ function compareCommandSucceededEvents(
   entities: EntitiesMap,
   prefix: string
 ) {
-  if (expected.reply) {
-    resultCheck(actual.reply, expected.reply, entities, [prefix]);
+  if (expected!.reply) {
+    resultCheck(actual.reply as Document, expected!.reply, entities, [prefix]);
   }
-  if (expected.commandName) {
+  if (expected!.commandName) {
     expect(
-      expected.commandName,
-      `expected ${prefix}.commandName to equal ${expected.commandName} but received ${actual.commandName}`
+      expected!.commandName,
+      `expected ${prefix}.commandName to equal ${expected!.commandName} but received ${
+        actual.commandName
+      }`
     ).to.equal(actual.commandName);
   }
 }
@@ -404,10 +412,12 @@ function compareCommandFailedEvents(
   entities: EntitiesMap,
   prefix: string
 ) {
-  if (expected.commandName) {
+  if (expected!.commandName) {
     expect(
-      expected.commandName,
-      `expected ${prefix}.commandName to equal ${expected.commandName} but received ${actual.commandName}`
+      expected!.commandName,
+      `expected ${prefix}.commandName to equal ${expected!.commandName} but received ${
+        actual.commandName
+      }`
     ).to.equal(actual.commandName);
   }
 }
@@ -489,28 +499,34 @@ export function matchesEvents(
   }
 }
 
+function isMongoCryptError(err): boolean {
+  if (err.constructor.name === 'MongoCryptError') {
+    return true;
+  }
+  return err.stack.includes('at ClientEncryption');
+}
+
 export function expectErrorCheck(
   error: Error | MongoError,
   expected: ExpectedError,
   entities: EntitiesMap
-): boolean {
-  if (Object.keys(expected)[0] === 'isClientError' || Object.keys(expected)[0] === 'isError') {
-    // FIXME: We cannot tell if Error arose from driver and not from server
-    return;
+): void {
+  const expectMessage = `\n\nOriginal Error Stack:\n${error.stack}\n\n`;
+
+  if (!isMongoCryptError(error)) {
+    expect(error, expectMessage).to.be.instanceOf(MongoError);
   }
 
-  const expectMessage = `\n\nOriginal Error Stack:\n${error.stack}\n\n`;
+  if (expected.isClientError === false) {
+    expect(error).to.be.instanceOf(MongoServerError);
+  } else if (expected.isClientError === true) {
+    expect(error).not.to.be.instanceOf(MongoServerError);
+  }
 
   if (expected.errorContains != null) {
     expect(error.message, expectMessage).to.include(expected.errorContains);
   }
 
-  if (!(error instanceof MongoError)) {
-    // if statement asserts type for TS, expect will always fail
-    expect(error, expectMessage).to.be.instanceOf(MongoError);
-    return;
-  }
-
   if (expected.errorCode != null) {
     expect(error, expectMessage).to.have.property('code', expected.errorCode);
   }
@@ -520,24 +536,26 @@ export function expectErrorCheck(
   }
 
   if (expected.errorLabelsContain != null) {
+    const mongoError = error as MongoError;
    for (const errorLabel of expected.errorLabelsContain) {
       expect(
-        error.hasErrorLabel(errorLabel),
-        `Error was supposed to have label ${errorLabel}, has [${error.errorLabels}] -- ${expectMessage}`
+        mongoError.hasErrorLabel(errorLabel),
+        `Error was supposed to have label ${errorLabel}, has [${mongoError.errorLabels}] -- ${expectMessage}`
       ).to.be.true;
     }
   }
 
   if (expected.errorLabelsOmit != null) {
+    const mongoError = error as MongoError;
     for (const errorLabel of expected.errorLabelsOmit) {
       expect(
-        error.hasErrorLabel(errorLabel),
-        `Error was supposed to have label ${errorLabel}, has [${error.errorLabels}] -- ${expectMessage}`
+        mongoError.hasErrorLabel(errorLabel),
+        `Error was not supposed to have label ${errorLabel}, has [${mongoError.errorLabels}] -- ${expectMessage}`
      ).to.be.false;
     }
   }
 
   if (expected.expectResult != null) {
-    resultCheck(error, expected.expectResult, entities);
+    resultCheck(error, expected.expectResult as any, entities);
   }
 }
diff --git a/test/tools/unified-spec-runner/operations.ts b/test/tools/unified-spec-runner/operations.ts
index e09cec5d2ea..514c27cbc07 100644
--- a/test/tools/unified-spec-runner/operations.ts
+++ b/test/tools/unified-spec-runner/operations.ts
@@ -1,3 +1,4 @@
+/* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable @typescript-eslint/no-unused-vars */
 import { expect } from 'chai';
 
@@ -26,7 +27,9 @@ interface OperationFunctionParams {
   entities: EntitiesMap;
 }
 
-type RunOperationFn = (p: OperationFunctionParams) => Promise;
+type RunOperationFn = (
+  p: OperationFunctionParams
+) => Promise;
 export const operations = new Map();
 
 operations.set('abortTransaction', async ({ entities, operation }) => {
@@ -39,7 +42,7 @@ operations.set('aggregate', async ({ entities, operation }) => {
   if (!(dbOrCollection instanceof Db || dbOrCollection instanceof Collection)) {
     throw new Error(`Operation object '${operation.object}' must be a db or collection`);
   }
-  const { pipeline, ...opts } = operation.arguments;
+  const { pipeline, ...opts } = operation.arguments!;
   const cursor = dbOrCollection.aggregate(pipeline, opts);
   return cursor.toArray();
 });
@@ -47,35 +50,35 @@ operations.set('aggregate', async ({ entities, operation }) => {
 operations.set('assertCollectionExists', async ({ operation, client }) => {
   const collections = (
     await client
-      .db(operation.arguments.databaseName)
+      .db(operation.arguments!.databaseName)
       .listCollections({}, { nameOnly: true })
       .toArray()
   ).map(({ name }) => name);
-  expect(collections).to.include(operation.arguments.collectionName);
+  expect(collections).to.include(operation.arguments!.collectionName);
 });
 
 operations.set('assertCollectionNotExists', async ({ operation, client }) => {
   const collections = (
     await client
-      .db(operation.arguments.databaseName)
+      .db(operation.arguments!.databaseName)
       .listCollections({}, { nameOnly: true })
       .toArray()
   ).map(({ name }) => name);
-  expect(collections).to.not.include(operation.arguments.collectionName);
+  expect(collections).to.not.include(operation.arguments!.collectionName);
 });
 
 operations.set('assertIndexExists', async ({ operation, client }) => {
   const collection = client
-    .db(operation.arguments.databaseName)
-    .collection(operation.arguments.collectionName);
+    .db(operation.arguments!.databaseName)
+    .collection(operation.arguments!.collectionName);
   const indexes = (await collection.listIndexes().toArray()).map(({ name }) => name);
-  expect(indexes).to.include(operation.arguments.indexName);
+  expect(indexes).to.include(operation.arguments!.indexName);
 });
 
 operations.set('assertIndexNotExists', async ({ operation, client }) => {
   const collection = client
-    .db(operation.arguments.databaseName)
-    .collection(operation.arguments.collectionName);
+    .db(operation.arguments!.databaseName)
+    .collection(operation.arguments!.collectionName);
 
   const listIndexCursor = collection.listIndexes();
   let indexes;
@@ -88,11 +91,11 @@ operations.set('assertIndexNotExists', async ({ operation, client }) => {
     // Error will always exist here, this makes the output show what caused an issue with assertIndexNotExists
     expect(error).to.not.exist;
   }
-  expect(indexes.map(({ name }) => name)).to.not.include(operation.arguments.indexName);
+  expect(indexes.map(({ name }) => name)).to.not.include(operation.arguments!.indexName);
 });
 
 operations.set('assertDifferentLsidOnLastTwoCommands', async ({ entities, operation }) => {
-  const client = entities.getEntity('client', operation.arguments.client);
+  const client = entities.getEntity('client', operation.arguments!.client);
   expect(client.observedCommandEvents.includes('commandStarted')).to.be.true;
 
   const startedEvents = client.commandEvents.filter(
@@ -111,7 +114,7 @@ operations.set('assertDifferentLsidOnLastTwoCommands', async ({ entities, operat
 });
 
 operations.set('assertSameLsidOnLastTwoCommands', async ({ entities, operation }) => {
-  const client = entities.getEntity('client', operation.arguments.client);
+  const client = entities.getEntity('client', operation.arguments!.client);
   expect(client.observedCommandEvents.includes('commandStarted')).to.be.true;
 
   const startedEvents = client.commandEvents.filter(
@@ -130,27 +133,27 @@ operations.set('assertSameLsidOnLastTwoCommands', async ({ entities, operation }
 });
 
 operations.set('assertSessionDirty', async ({ entities, operation }) => {
-  const session = operation.arguments.session;
+  const session = operation.arguments!.session;
   expect(session.serverSession.isDirty).to.be.true;
 });
 
 operations.set('assertSessionNotDirty', async ({ entities, operation }) => {
-  const session = operation.arguments.session;
+  const session = operation.arguments!.session;
   expect(session.serverSession.isDirty).to.be.false;
 });
 
 operations.set('assertSessionPinned', async ({ entities, operation }) => {
-  const session = operation.arguments.session;
+  const session = operation.arguments!.session;
   expect(session.isPinned, 'session should be pinned').to.be.true;
 });
 
 operations.set('assertSessionUnpinned', async ({ entities, operation }) => {
-  const session = operation.arguments.session;
+  const session = operation.arguments!.session;
   expect(session.isPinned, 'session should be unpinned').to.be.false;
 });
 
 operations.set('assertSessionTransactionState', async ({ entities, operation }) => {
-  const session = operation.arguments.session;
+  const session = operation.arguments!.session;
 
   const transactionStateTranslation = {
     none: 'NO_TRANSACTION',
@@ -160,25 +163,25 @@ operations.set('assertSessionTransactionState', async ({ entities, operation })
     aborted: 'TRANSACTION_ABORTED'
   };
 
-  const driverTransactionStateName = transactionStateTranslation[operation.arguments.state];
+  const driverTransactionStateName = transactionStateTranslation[operation.arguments!.state];
   expect(session.transaction.state).to.equal(driverTransactionStateName);
 });
 
 operations.set('assertNumberConnectionsCheckedOut', async ({ entities, operation }) => {
-  const client = entities.getEntity('client', operation.arguments.client);
-  const servers = Array.from(client.topology.s.servers.values());
+  const client = entities.getEntity('client', operation.arguments!.client);
+  const servers = Array.from(client.topology!.s.servers.values());
   const checkedOutConnections = servers.reduce((count, server) => {
     const pool = server.s.pool;
     return count + pool.currentCheckedOutCount;
   }, 0);
 
   await Promise.resolve(); // wait one tick
-  expect(checkedOutConnections).to.equal(operation.arguments.connections);
+  expect(checkedOutConnections).to.equal(operation.arguments!.connections);
 });
 
 operations.set('bulkWrite', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { requests, ...opts } = operation.arguments;
+  const { requests, ...opts } = operation.arguments!;
   return collection.bulkWrite(requests, opts);
 });
@@ -206,7 +209,7 @@ operations.set('createChangeStream', async ({ entities, operation }) => {
     throw new Error(`Entity ${operation.object} must be watchable`);
   }
 
-  const { pipeline, ...args } = operation.arguments;
+  const { pipeline, ...args } = operation.arguments!;
   const changeStream = watchable.watch(pipeline, args);
 
   return new Promise((resolve, reject) => {
@@ -220,13 +223,13 @@ operations.set('createChangeStream', async ({ entities, operation }) => {
 
 operations.set('createCollection', async ({ entities, operation }) => {
   const db = entities.getEntity('db', operation.object);
-  const { collection, ...opts } = operation.arguments;
+  const { collection, ...opts } = operation.arguments!;
   return await db.createCollection(collection, opts);
 });
 
 operations.set('createFindCursor', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, ...opts } = operation.arguments;
+  const { filter, ...opts } = operation.arguments!;
   const cursor = collection.find(filter, opts);
   // The spec dictates that we create the cursor and force the find command
   // to execute, but don't move the cursor forward. hasNext() accomplishes
@@ -237,25 +240,25 @@ operations.set('createFindCursor', async ({ entities, operation }) => {
 
 operations.set('createIndex', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { keys, ...opts } = operation.arguments;
+  const { keys, ...opts } = operation.arguments!;
   await collection.createIndex(keys, opts);
 });
 
 operations.set('dropIndex', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { name, ...opts } = operation.arguments;
+  const { name, ...opts } = operation.arguments!;
   await collection.dropIndex(name, opts);
 });
 
 operations.set('deleteOne', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, ...options } = operation.arguments;
+  const { filter, ...options } = operation.arguments!;
   return collection.deleteOne(filter, options);
 });
 
 operations.set('dropCollection', async ({ entities, operation }) => {
   const db = entities.getEntity('db', operation.object);
-  const { collection, ...opts } = operation.arguments;
+  const { collection, ...opts } = operation.arguments!;
 
   // TODO(NODE-4243): dropCollection should suppress namespace not found errors
   try {
@@ -274,42 +277,42 @@ operations.set('endSession', async ({ entities, operation }) => {
 
 operations.set('find', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, ...opts } = operation.arguments;
+  const { filter, ...opts } = operation.arguments!;
   return collection.find(filter, opts).toArray();
 });
 
 operations.set('findOneAndReplace', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, replacement, ...opts } = operation.arguments;
+  const { filter, replacement, ...opts } = operation.arguments!;
   return (await collection.findOneAndReplace(filter, replacement, translateOptions(opts))).value;
 });
 
 operations.set('findOneAndUpdate', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, update, ...opts } = operation.arguments;
+  const { filter, update, ...opts } = operation.arguments!;
   return (await collection.findOneAndUpdate(filter, update, translateOptions(opts))).value;
 });
 
 operations.set('findOneAndDelete', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, ...opts } = operation.arguments;
+  const { filter, ...opts } = operation.arguments!;
   return (await collection.findOneAndDelete(filter, opts)).value;
 });
 
 operations.set('failPoint', async ({ entities, operation }) => {
-  const client = entities.getEntity('client', operation.arguments.client);
-  return entities.failPoints.enableFailPoint(client, operation.arguments.failPoint);
+  const client = entities.getEntity('client', operation.arguments!.client);
+  return entities.failPoints.enableFailPoint(client, operation.arguments!.failPoint);
 });
 
 operations.set('insertOne', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { document, ...opts } = operation.arguments;
+  const { document, ...opts } = operation.arguments!;
   return collection.insertOne(document, opts);
 });
 
 operations.set('insertMany', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { documents, ...opts } = operation.arguments;
+  const { documents, ...opts } = operation.arguments!;
   return collection.insertMany(documents, opts);
 });
@@ -338,23 +341,23 @@ operations.set('iterateUntilDocumentOrError', async ({ entities, operation }) =>
 
 operations.set('listCollections', async ({ entities, operation }) => {
   const db = entities.getEntity('db', operation.object);
-  const { filter, ...opts } = operation.arguments;
+  const { filter, ...opts } = operation.arguments!;
   return db.listCollections(filter, opts).toArray();
 });
 
 operations.set('listDatabases', async ({ entities, operation }) => {
   const client = entities.getEntity('client', operation.object);
-  return client.db().admin().listDatabases(operation.arguments);
+  return client.db().admin().listDatabases(operation.arguments!);
 });
 
 operations.set('listIndexes', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  return collection.listIndexes(operation.arguments).toArray();
+  return collection.listIndexes(operation.arguments!).toArray();
 });
 
 operations.set('replaceOne', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, replacement, ...opts } = operation.arguments;
+  const { filter, replacement, ...opts } = operation.arguments!;
   return collection.replaceOne(filter, replacement, opts);
 });
@@ -364,26 +367,26 @@ operations.set('startTransaction', async ({ entities, operation }) => {
 });
 
 operations.set('targetedFailPoint', async ({ entities, operation }) => {
-  const session = operation.arguments.session;
+  const session = operation.arguments!.session;
   expect(session.isPinned, 'Session must be pinned for a targetedFailPoint').to.be.true;
   const address = session.transaction.isPinned
     ? session.transaction._pinnedServer.s.description.hostAddress
     : session.pinnedConnection.address;
 
-  await entities.failPoints.enableFailPoint(address, operation.arguments.failPoint);
+  await entities.failPoints.enableFailPoint(address, operation.arguments!.failPoint);
 });
 
 operations.set('delete', async ({ entities, operation }) => {
   const bucket = entities.getEntity('bucket', operation.object);
-  return bucket.delete(operation.arguments.id);
+  return bucket.delete(operation.arguments!.id);
 });
 
 operations.set('download', async ({ entities, operation }) => {
   const bucket = entities.getEntity('bucket', operation.object);
 
-  const stream = bucket.openDownloadStream(operation.arguments.id);
+  const stream = bucket.openDownloadStream(operation.arguments!.id);
   return new Promise((resolve, reject) => {
-    const chunks = [];
+    const chunks: any[] = [];
     stream.on('data', chunk => chunks.push(...chunk));
     stream.on('error', reject);
     stream.on('end', () => resolve(chunks));
@@ -393,12 +396,12 @@ operations.set('download', async ({ entities, operation }) => {
 
 operations.set('upload', async ({ entities, operation }) => {
   const bucket = entities.getEntity('bucket', operation.object);
 
-  const stream = bucket.openUploadStream(operation.arguments.filename, {
-    chunkSizeBytes: operation.arguments.chunkSizeBytes
+  const stream = bucket.openUploadStream(operation.arguments!.filename, {
+    chunkSizeBytes: operation.arguments!.chunkSizeBytes
   });
 
   return new Promise((resolve, reject) => {
-    stream.end(Buffer.from(operation.arguments.source.$$hexBytes, 'hex'), (error, file) => {
+    stream.end(Buffer.from(operation.arguments!.source.$$hexBytes, 'hex'), (error, file) => {
       if (error) reject(error);
       resolve((file as GridFSFile)._id as ObjectId);
     });
@@ -412,11 +415,11 @@ operations.set('withTransaction', async ({ entities, operation, client }) => {
     readConcern: ReadConcern.fromOptions(operation.arguments),
     writeConcern: WriteConcern.fromOptions(operation.arguments),
     readPreference: ReadPreference.fromOptions(operation.arguments),
-    maxCommitTimeMS: operation.arguments.maxCommitTimeMS
+    maxCommitTimeMS: operation.arguments!.maxCommitTimeMS
   };
 
   return session.withTransaction(async () => {
-    for (const callbackOperation of operation.arguments.callback) {
+    for (const callbackOperation of operation.arguments!.callback) {
       await executeOperationAndCheck(callbackOperation, entities, client);
     }
   }, options);
@@ -424,61 +427,61 @@ operations.set('withTransaction', async ({ entities, operation, client }) => {
 
 operations.set('countDocuments', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, ...opts } = operation.arguments;
+  const { filter, ...opts } = operation.arguments!;
   return collection.countDocuments(filter, opts);
 });
 
 operations.set('deleteMany', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, ...opts } = operation.arguments;
+  const { filter, ...opts } = operation.arguments!;
   return collection.deleteMany(filter, opts);
 });
 
 operations.set('distinct', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { fieldName, filter, ...opts } = operation.arguments;
+  const { fieldName, filter, ...opts } = operation.arguments!;
   return collection.distinct(fieldName, filter, opts);
 });
 
 operations.set('estimatedDocumentCount', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  return collection.estimatedDocumentCount(operation.arguments);
+  return collection.estimatedDocumentCount(operation.arguments!);
 });
 
 operations.set('runCommand', async ({ entities, operation }: OperationFunctionParams) => {
   const db = entities.getEntity('db', operation.object);
-  const { command, ...opts } = operation.arguments;
+  const { command, ...opts } = operation.arguments!;
   return db.command(command, opts);
 });
 
 operations.set('updateMany', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, update, ...options } = operation.arguments;
+  const { filter, update, ...options } = operation.arguments!;
   return collection.updateMany(filter, update, options);
 });
 
 operations.set('updateOne', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { filter, update, ...options } = operation.arguments;
+  const { filter, update, ...options } = operation.arguments!;
   return collection.updateOne(filter, update, options);
 });
 
 operations.set('rename', async ({ entities, operation }) => {
   const collection = entities.getEntity('collection', operation.object);
-  const { to, ...options } = operation.arguments;
+  const { to, ...options } = operation.arguments!;
   return collection.rename(to, options);
 });
 
 operations.set('createDataKey', async ({ entities, operation }) => {
   const clientEncryption = entities.getEntity('clientEncryption', operation.object);
-  const { kmsProvider, opts } = operation.arguments ?? {};
+  const { kmsProvider, opts } = operation.arguments!;
 
   return clientEncryption.createDataKey(kmsProvider, opts);
 });
 
 operations.set('rewrapManyDataKey', async ({ entities, operation }) => {
   const clientEncryption = entities.getEntity('clientEncryption', operation.object);
-  const { filter, opts } = operation.arguments ?? {};
+  const { filter, opts } = operation.arguments!;
 
   const rewrapManyDataKeyResult = await clientEncryption.rewrapManyDataKey(filter, opts);
@@ -500,14 +503,14 @@ operations.set('rewrapManyDataKey', async ({ entities, operation }) => {
 
 operations.set('deleteKey', async ({ entities, operation }) => {
   const clientEncryption = entities.getEntity('clientEncryption', operation.object);
-  const { id } = operation.arguments ?? {};
+  const { id } = operation.arguments!;
 
   return clientEncryption.deleteKey(id);
 });
 
 operations.set('getKey', async ({ entities, operation }) => {
   const clientEncryption = entities.getEntity('clientEncryption', operation.object);
-  const { id } = operation.arguments ?? {};
+  const { id } = operation.arguments!;
 
   return clientEncryption.getKey(id);
 });
@@ -520,21 +523,21 @@ operations.set('getKeys', async ({ entities, operation }) => {
 
 operations.set('addKeyAltName', async ({ entities, operation }) => {
   const clientEncryption = entities.getEntity('clientEncryption', operation.object);
-  const { id, keyAltName } = operation.arguments ?? {};
+  const { id, keyAltName } = operation.arguments!;
 
   return clientEncryption.addKeyAltName(id, keyAltName);
 });
 
 operations.set('removeKeyAltName', async ({ entities, operation }) => {
   const clientEncryption = entities.getEntity('clientEncryption', operation.object);
-  const { id, keyAltName } = operation.arguments ?? {};
+  const { id, keyAltName } = operation.arguments!;
 
   return clientEncryption.removeKeyAltName(id, keyAltName);
 });
 
 operations.set('getKeyByAltName', async ({ entities, operation }) => {
   const clientEncryption = entities.getEntity('clientEncryption', operation.object);
-  const { keyAltName } = operation.arguments ?? {};
+  const { keyAltName } = operation.arguments!;
 
   return clientEncryption.getKeyByAltName(keyAltName);
 });
@@ -555,7 +558,7 @@ export async function executeOperationAndCheck(
 
   let result;
   try {
-    result = await opFunc({ entities, operation, client });
+    result = await opFunc!({ entities, operation, client });
   } catch (error) {
     if (operation.expectError) {
       expectErrorCheck(error, operation.expectError, entities);
@@ -575,7 +578,7 @@ export async function executeOperationAndCheck(
   }
 
   if (operation.expectResult) {
-    resultCheck(result, operation.expectResult, entities);
+    resultCheck(result, operation.expectResult as any, entities);
   }
 
   if (operation.saveResultAsEntity) {
diff --git a/test/tools/unified-spec-runner/runner.ts b/test/tools/unified-spec-runner/runner.ts
index 5f37f3467f1..70313b81e5f 100644
--- a/test/tools/unified-spec-runner/runner.ts
+++ b/test/tools/unified-spec-runner/runner.ts
@@ -1,3 +1,4 @@
+/* eslint-disable @typescript-eslint/no-non-null-assertion */
 import { expect } from 'chai';
 import { gte as semverGte, satisfies as semverSatisfies } from 'semver';
 
@@ -7,7 +8,7 @@ import { ReadPreference } from '../../../src/read_preference';
 import { TopologyType } from '../../../src/sdam/common';
 import { ns } from '../../../src/utils';
 import { ejson } from '../utils';
-import { CmapEvent, CommandEvent, EntitiesMap } from './entities';
+import { EntitiesMap, UnifiedMongoClient } from './entities';
 import { matchesEvents } from './match';
 import { executeOperationAndCheck } from './operations';
 import * as uni from './schema';
@@ -65,7 +66,6 @@ async function runUnifiedTest(
       expect.fail(`Test was skipped with an empty skip reason: ${test.description}`);
     }
 
-    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
     ctx.test!.skipReason = skipReason;
 
     ctx.skip();
@@ -79,7 +79,7 @@ async function runUnifiedTest(
     utilClient = ctx.configuration.newClient();
   }
 
-  let entities: EntitiesMap;
+  let entities: EntitiesMap | undefined;
   try {
     trace('\n starting test:');
     try {
@@ -198,13 +198,12 @@ async function runUnifiedTest(
       }
     }
 
-    const clientCommandEvents = new Map();
-    const clientCmapEvents = new Map();
+    const clientList = new Map();
     // If any event listeners were enabled on any client entities,
     // the test runner MUST now disable those event listeners.
     for (const [id, client] of entities.mapOf('client')) {
-      clientCommandEvents.set(id, client.stopCapturingCommandEvents());
-      clientCmapEvents.set(id, client.stopCapturingCmapEvents());
+      client.stopCapturingEvents();
+      clientList.set(id, client);
     }
 
     if (test.expectEvents) {
@@ -213,11 +212,13 @@ async function runUnifiedTest(
         const eventType = expectedEventsForClient.eventType;
         // If no event type is provided it defaults to 'command', so just
        // check for 'cmap' here for now.
-        const actualEvents =
-          eventType === 'cmap' ? clientCmapEvents.get(clientId) : clientCommandEvents.get(clientId);
-
-        expect(actualEvents, `No client entity found with id ${clientId}`).to.exist;
-        matchesEvents(expectedEventsForClient, actualEvents, entities);
+        const testClient = clientList.get(clientId);
+        expect(testClient, `No client entity found with id ${clientId}`).to.exist;
+        matchesEvents(
+          expectedEventsForClient,
+          testClient!.getCapturedEvents(eventType ?? 'command'),
+          entities
+        );
       }
     }
diff --git a/test/tools/unified-spec-runner/unified-utils.ts b/test/tools/unified-spec-runner/unified-utils.ts
index 0676fcdc567..c22723ebf5c 100644
--- a/test/tools/unified-spec-runner/unified-utils.ts
+++ b/test/tools/unified-spec-runner/unified-utils.ts
@@ -171,12 +171,14 @@ export function patchVersion(version: string): string {
   return `${major}.${minor ?? 0}.${patch ?? 0}`;
 }
 
-export function patchDbOptions(options: CollectionOrDatabaseOptions): DbOptions {
+export function patchDbOptions(options: CollectionOrDatabaseOptions = {}): DbOptions {
   // TODO
   return { ...options } as DbOptions;
 }
 
-export function patchCollectionOptions(options: CollectionOrDatabaseOptions): CollectionOptions {
+export function patchCollectionOptions(
+  options: CollectionOrDatabaseOptions = {}
+): CollectionOptions {
   // TODO
   return { ...options } as CollectionOptions;
 }
diff --git a/test/unit/sessions.test.js b/test/unit/sessions.test.js
index f60dc9a1944..1b7996f5ed0 100644
--- a/test/unit/sessions.test.js
+++ b/test/unit/sessions.test.js
@@ -32,7 +32,7 @@ describe('Sessions - unit', function () {
       session = new ClientSession(client, serverSessionPool, { snapshot: true });
      expect(session.snapshotEnabled).to.equal(true);
       expect(() => session.startTransaction()).to.throw(
-        'Transactions are not allowed with snapshot sessions'
+        'Transactions are not supported in snapshot sessions'
       );
     });
   });
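
Reviewer note: below is a minimal sketch of how the reworked event-capture API above is intended to be consumed. It is illustrative only; the standalone `collectEvents` helper and its import are assumptions made for the sketch, and the authoritative usage is the runner.ts hunk, which keeps each client entity in a map and reads its events back by type.

    // Hypothetical helper, not part of this change set.
    import { UnifiedMongoClient } from './entities';

    function collectEvents(
      client: UnifiedMongoClient,
      eventType: 'command' | 'cmap' = 'command'
    ) {
      // A single call now detaches both the command and CMAP listeners that the
      // client registered when it was constructed to observe events.
      client.stopCapturingEvents();
      // Captured events are read back by type, replacing the removed
      // stopCapturingCommandEvents()/stopCapturingCmapEvents() pair.
      return client.getCapturedEvents(eventType);
    }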