fix(NODE-6878): ensure documents are cleared correctly to handle emptyGe… #4488

Open · wants to merge 1 commit into main
src/cmap/wire_protocol/responses.ts (9 changes: 4 additions, 5 deletions)
@@ -8,6 +8,7 @@ import {
   parseToElementsToArray,
   parseUtf8ValidationOption,
   pluckBSONSerializeOptions,
+  serialize,
   type Timestamp
 } from '../../bson';
 import { MONGODB_ERROR_CODES, MongoUnexpectedServerResponseError } from '../../error';
@@ -230,11 +231,9 @@ export class CursorResponse extends MongoDBResponse {
    * This supports a feature of the FindCursor.
    * It is an optimization to avoid an extra getMore when the limit has been reached
    */
-  static emptyGetMore: CursorResponse = {
-    id: new Long(0),
-    length: 0,
-    shift: () => null
-  } as unknown as CursorResponse;
+  static get emptyGetMore(): CursorResponse {
+    return new CursorResponse(serialize({ ok: 1, cursor: { id: 0, nextBatch: [] } }));
+  }
 
   static override is(value: unknown): value is CursorResponse {
     return value instanceof CursorResponse || value === CursorResponse.emptyGetMore;
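For context, the comment above refers to the FindCursor limit optimization: once the cursor has already returned `limit` documents, the driver can hand back a pre-built empty response instead of sending another getMore to the server. The sketch below illustrates that idea only; the helper name, import path, and parameters are hypothetical and are not the driver's actual find_cursor.ts code:

```ts
import { CursorResponse } from './cmap/wire_protocol/responses';

/**
 * Illustrative only: if previous batches already satisfied the limit,
 * skip the network round trip and return the shared empty response.
 * With this PR, emptyGetMore is a real CursorResponse built from
 * serialize({ ok: 1, cursor: { id: 0, nextBatch: [] } }), so it exposes the
 * same methods (such as clear()) that rewind() expects on a normal response.
 */
async function getMoreRespectingLimit(
  runGetMore: () => Promise<CursorResponse>,
  numReturned: number,
  limit: number
): Promise<CursorResponse> {
  if (limit !== 0 && numReturned >= limit) {
    return CursorResponse.emptyGetMore;
  }
  return await runGetMore();
}
```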
src/cursor/abstract_cursor.ts (9 changes: 8 additions, 1 deletion)
@@ -867,7 +867,14 @@ export abstract class AbstractCursor<
     }
 
     this.cursorId = null;
-    this.documents?.clear();
+
+    if (this.documents && typeof this.documents.clear === 'function') {
+      this.documents.clear();
+    } else {
+      // If documents is the emptyGetMore object or doesn't have the clear() method
+      // Simply set it to null to force reinitialization
+      this.documents = null;
+    }
     this.timeoutContext?.clear();
     this.timeoutContext = undefined;
     this.isClosed = false;
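The defensive typeof check above exists because optional chaining in the original `this.documents?.clear()` only short-circuits when `documents` is null or undefined; it does not protect against a method that is simply absent. A standalone sketch of the failure mode, using an object literal shaped like the old emptyGetMore sentinel:

```ts
import { Long } from 'bson';

// Mirrors the old emptyGetMore shape: no clear() method at all.
const documents: any = {
  id: new Long(0),
  length: 0,
  shift: () => null
};

try {
  // documents is not nullish, so ?. does not short-circuit and the call throws:
  // TypeError: documents?.clear is not a function
  documents?.clear();
} catch (error) {
  console.error((error as Error).message);
}
```

With the change in this hunk, rewind() copes with either case: a documents object that implements clear() is cleared in place, and anything else is reset to null so the next iteration reinitializes it.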
test/integration/crud/find.test.js (118 additions, 0 deletions)
@@ -2388,4 +2388,122 @@ describe('Find', function () {
});
});
});

it('should correctly rewind cursor after emptyGetMore optimization', {
metadata: {
requires: { topology: ['sharded'] }
},

test: async function () {
const configuration = this.configuration;
const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
await client.connect();
this.defer(async () => await client.close());

const db = client.db(configuration.db);
const collectionName = 'test_rewind_emptygetmore';
await db.dropCollection(collectionName).catch(() => null);
const collection = db.collection(collectionName);

// Insert 10 documents
const docsToInsert = Array.from({ length: 10 }, (_, i) => ({ x: i }));
await collection.insertMany(docsToInsert, configuration.writeConcernMax());

// Create a cursor with batchSize = 2 and limit = 4 (an exact multiple of the batch size)
// When the final batch exactly satisfies the limit, the driver can substitute
// CursorResponse.emptyGetMore instead of issuing another getMore; this is the
// configuration needed to trigger that optimization
const cursor = collection.find({}, { batchSize: 2, limit: 4 });

// Consume all documents (4 due to limit)
const documents = [];
for (let i = 0; i < 5; i++) {
// The 5th iteration should return null as the cursor is exhausted
const doc = await cursor.next();
if (doc !== null) {
documents.push(doc);
}
}

// Verify we got the correct number of documents (based on limit)
expect(documents).to.have.length(4);

// Prior to the fix, this rewind() call would throw
// "TypeError: this.documents?.clear is not a function"
// because the emptyGetMore optimization sets documents to an object without a clear method
try {
cursor.rewind();

// Verify we can iterate the cursor again after rewind
const documentsAfterRewind = [];
for (let i = 0; i < 4; i++) {
const doc = await cursor.next();
if (doc !== null) {
documentsAfterRewind.push(doc);
}
}

// Verify we got the same documents again
expect(documentsAfterRewind).to.have.length(4);
for (let i = 0; i < 4; i++) {
expect(documentsAfterRewind[i].x).to.equal(documents[i].x);
}
} catch (error) {
// If the rewind() operation fails, the test should fail
expect.fail(`Rewind operation failed: ${error.message}`);
}
}
});

it('should handle rewind after emptyGetMore optimization (using repro.js scenario)', {
metadata: {
requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] }
},

test: async function () {
const configuration = this.configuration;
const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
await client.connect();
this.defer(async () => await client.close());

const db = client.db(configuration.db);
const collectionName = 'test_rewind_repro';
await db.dropCollection(collectionName).catch(() => null);
const collection = db.collection(collectionName);

// Insert 100 documents (like in repro.js)
const documents = Array.from({ length: 100 }, (_, i) => ({
_id: i,
value: `Document ${i}`
}));
await collection.insertMany(documents, configuration.writeConcernMax());

// Create a cursor with a small batch size to force multiple getMore operations
const cursor = collection.find({}).batchSize(10);

// Consume the cursor until it's exhausted (like in repro.js)
while (await cursor.hasNext()) {
await cursor.next();
}

// At this point the cursor is fully consumed; before this fix, its internal
// documents batch could be the plain emptyGetMore object, which lacked a clear() method

// Now try to rewind the cursor and fetch documents again
try {
// This would throw if documents.clear is not a function and fix isn't applied
cursor.rewind();

// If we got here, rewind succeeded - now try to use the cursor again
const results = await cursor.toArray();

// Verify the results
expect(results).to.have.length(100);
for (let i = 0; i < 100; i++) {
expect(results[i]).to.have.property('_id', i);
expect(results[i]).to.have.property('value', `Document ${i}`);
}
} catch (error) {
expect.fail(`Error during rewind or subsequent fetch: ${error.message}`);
}
}
});
});