@@ -2388,4 +2388,123 @@ describe('Find', function () {
      });
    });
  });
+
+  it('should correctly rewind cursor after emptyGetMore optimization', {
+    metadata: {
+      requires: { topology: ['sharded'] }
+    },
+
+    test: async function () {
+      const configuration = this.configuration;
+      const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
+      await client.connect();
+      this.defer(async () => await client.close());
+
+      const db = client.db(configuration.db);
+      const collectionName = 'test_rewind_emptygetmore';
+      await db.dropCollection(collectionName).catch(() => null);
+      const collection = db.collection(collectionName);
+
+      // Insert 10 documents
+      const docsToInsert = Array.from({ length: 10 }, (_, i) => ({ x: i }));
+      await collection.insertMany(docsToInsert, configuration.writeConcernMax());
+
+      // Create a cursor with batchSize = 2 and limit = 4 (a multiple of the batch size).
+      // This configuration is needed to trigger the emptyGetMore optimization.
+      const cursor = collection.find({}, { batchSize: 2, limit: 4 });
+
+      // Consume all documents (4, due to the limit)
+      const documents = [];
+      for (let i = 0; i < 5; i++) {
+        // The 5th iteration should return null because the cursor is exhausted
+        const doc = await cursor.next();
+        if (doc !== null) {
+          documents.push(doc);
+        }
+      }
+
+      // Verify we got the correct number of documents (based on the limit)
+      expect(documents).to.have.length(4);
+
+      // Prior to the fix, this rewind() call would throw
+      // "TypeError: this.documents?.clear is not a function"
+      // because the emptyGetMore optimization sets documents to an object without a clear method
+      try {
+        cursor.rewind();
+
+        // Verify we can iterate the cursor again after rewind
+        const documentsAfterRewind = [];
+        for (let i = 0; i < 4; i++) {
+          const doc = await cursor.next();
+          if (doc !== null) {
+            documentsAfterRewind.push(doc);
+          }
+        }
+
+        // Verify we got the same documents again
+        expect(documentsAfterRewind).to.have.length(4);
+        for (let i = 0; i < 4; i++) {
+          expect(documentsAfterRewind[i].x).to.equal(documents[i].x);
+        }
+      } catch (error) {
+        // If the rewind() operation fails, the test should fail
+        expect.fail(`Rewind operation failed: ${error.message}`);
+      }
+    }
+  });
+
+  // Reproduces NODE-6878: test based specifically on the repro.js scenario
+  it('should handle rewind after emptyGetMore optimization (using repro.js scenario)', {
+    metadata: {
+      requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] }
+    },
+
+    test: async function () {
+      const configuration = this.configuration;
+      const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
+      await client.connect();
+      this.defer(async () => await client.close());
+
+      const db = client.db(configuration.db);
+      const collectionName = 'test_rewind_repro';
+      await db.dropCollection(collectionName).catch(() => null);
+      const collection = db.collection(collectionName);
+
+      // Insert 100 documents (as in repro.js)
+      const documents = Array.from({ length: 100 }, (_, i) => ({
+        _id: i,
+        value: `Document ${i}`
+      }));
+      await collection.insertMany(documents, configuration.writeConcernMax());
+
+      // Create a cursor with a small batch size to force multiple getMore operations
+      const cursor = collection.find({}).batchSize(10);
+
+      // Consume the cursor until it is exhausted (as in repro.js)
+      while (await cursor.hasNext()) {
+        await cursor.next();
+      }
+
+      // At this point the cursor is fully consumed, and its document buffer may have been
+      // replaced by the emptyGetMore optimization, which lacks a clear() method
+
+      // Now try to rewind the cursor and fetch the documents again
+      try {
+        // Without the fix, this throws because documents.clear is not a function
+        cursor.rewind();
+
+        // If we got here, rewind succeeded; now use the cursor again
+        const results = await cursor.toArray();
+
+        // Verify the results
+        expect(results).to.have.length(100);
+        for (let i = 0; i < 100; i++) {
+          expect(results[i]).to.have.property('_id', i);
+          expect(results[i]).to.have.property('value', `Document ${i}`);
+        }
+      } catch (error) {
+        expect.fail(`Error during rewind or subsequent fetch: ${error.message}`);
+      }
+    }
+  });
});