diff --git a/src/cmap/wire_protocol/responses.ts b/src/cmap/wire_protocol/responses.ts
index 1d20566e2d5..e8e873a3719 100644
--- a/src/cmap/wire_protocol/responses.ts
+++ b/src/cmap/wire_protocol/responses.ts
@@ -8,6 +8,7 @@ import {
   parseToElementsToArray,
   parseUtf8ValidationOption,
   pluckBSONSerializeOptions,
+  serialize,
   type Timestamp
 } from '../../bson';
 import { MONGODB_ERROR_CODES, MongoUnexpectedServerResponseError } from '../../error';
@@ -230,12 +231,10 @@ export class CursorResponse extends MongoDBResponse {
    * This supports a feature of the FindCursor.
    * It is an optimization to avoid an extra getMore when the limit has been reached
    */
-  static emptyGetMore: CursorResponse = {
-    id: new Long(0),
-    length: 0,
-    shift: () => null
-  } as unknown as CursorResponse;
+  static get emptyGetMore(): CursorResponse {
+    return new CursorResponse(serialize({ ok: 1, cursor: { id: new Long(0), nextBatch: [] } }));
+  }
 
   static override is(value: unknown): value is CursorResponse {
-    return value instanceof CursorResponse || value === CursorResponse.emptyGetMore;
+    return value instanceof CursorResponse;
   }
diff --git a/test/integration/crud/find.test.js b/test/integration/crud/find.test.js
index ed098d31c0a..da351abb79f 100644
--- a/test/integration/crud/find.test.js
+++ b/test/integration/crud/find.test.js
@@ -2388,4 +2388,122 @@ describe('Find', function () {
       });
     });
   });
+
+  it('should correctly rewind cursor after emptyGetMore optimization', {
+    metadata: {
+      requires: { topology: ['sharded'] }
+    },
+
+    test: async function () {
+      const configuration = this.configuration;
+      const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
+      await client.connect();
+      this.defer(async () => await client.close());
+
+      const db = client.db(configuration.db);
+      const collectionName = 'test_rewind_emptygetmore';
+      await db.dropCollection(collectionName).catch(() => null);
+      const collection = db.collection(collectionName);
+
+      // Insert 10 documents
+      const docsToInsert = Array.from({ length: 10 }, (_, i) => ({ x: i }));
+      await collection.insertMany(docsToInsert, configuration.writeConcernMax());
+
+      // Create a cursor with batch size = 2 and limit = 4 (a multiple of batch size)
+      // This configuration is important to trigger the emptyGetMore optimization
+      const cursor = collection.find({}, { batchSize: 2, limit: 4 });
+
+      // Consume all documents (4 due to limit)
+      const documents = [];
+      for (let i = 0; i < 5; i++) {
+        // The 5th iteration should return null as the cursor is exhausted
+        const doc = await cursor.next();
+        if (doc !== null) {
+          documents.push(doc);
+        }
+      }
+
+      // Verify we got the correct number of documents (based on limit)
+      expect(documents).to.have.length(4);
+
+      // Prior to the fix, this rewind() call would throw
+      // "TypeError: this.documents?.clear is not a function"
+      // because the emptyGetMore optimization sets documents to an object without a clear method
+      try {
+        cursor.rewind();
+
+        // Verify we can iterate the cursor again after rewind
+        const documentsAfterRewind = [];
+        for (let i = 0; i < 4; i++) {
+          const doc = await cursor.next();
+          if (doc !== null) {
+            documentsAfterRewind.push(doc);
+          }
+        }
+
+        // Verify we got the same documents again
+        expect(documentsAfterRewind).to.have.length(4);
+        for (let i = 0; i < 4; i++) {
+          expect(documentsAfterRewind[i].x).to.equal(documents[i].x);
+        }
+      } catch (error) {
+        // If the rewind() operation fails, the test should fail
+        expect.fail(`Rewind operation failed: ${error.message}`);
+      }
+    }
+  });
+
+  it('should handle rewind after emptyGetMore optimization (using repro.js scenario)', {
+    metadata: {
+      requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] }
+    },
+
+    test: async function () {
+      const configuration = this.configuration;
+      const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
+      await client.connect();
+      this.defer(async () => await client.close());
+
+      const db = client.db(configuration.db);
+      const collectionName = 'test_rewind_repro';
+      await db.dropCollection(collectionName).catch(() => null);
+      const collection = db.collection(collectionName);
+
+      // Insert 100 documents (like in repro.js)
+      const documents = Array.from({ length: 100 }, (_, i) => ({
+        _id: i,
+        value: `Document ${i}`
+      }));
+      await collection.insertMany(documents, configuration.writeConcernMax());
+
+      // Create a cursor with a small batch size to force multiple getMore operations
+      const cursor = collection.find({}).batchSize(10);
+
+      // Consume the cursor until it's exhausted (like in repro.js)
+      while (await cursor.hasNext()) {
+        await cursor.next();
+      }
+
+      // At this point, cursor should be fully consumed and potentially
+      // optimized with emptyGetMore which lacks clear() method
+
+      // Now try to rewind the cursor and fetch documents again
+      try {
+        // This would throw if documents.clear is not a function and fix isn't applied
+        cursor.rewind();
+
+        // If we got here, rewind succeeded - now try to use the cursor again
+        const results = await cursor.toArray();
+
+        // Verify the results
+        expect(results).to.have.length(100);
+        for (let i = 0; i < 100; i++) {
+          expect(results[i]).to.have.property('_id', i);
+          expect(results[i]).to.have.property('value', `Document ${i}`);
+        }
+      } catch (error) {
+        expect.fail(`Error during rewind or subsequent fetch: ${error.message}`);
+      }
+    }
+  });
 });