@@ -2388,4 +2388,122 @@ describe('Find', function () {
      });
    });
  });
+
+  it('should correctly rewind cursor after emptyGetMore optimization', {
+    metadata: {
+      requires: { topology: ['sharded'] }
+    },
+
+    test: async function () {
+      const configuration = this.configuration;
+      const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
+      await client.connect();
+      this.defer(async () => await client.close());
+
+      const db = client.db(configuration.db);
+      const collectionName = 'test_rewind_emptygetmore';
+      await db.dropCollection(collectionName).catch(() => null);
+      const collection = db.collection(collectionName);
+
+      // Insert 10 documents
+      const docsToInsert = Array.from({ length: 10 }, (_, i) => ({ x: i }));
+      await collection.insertMany(docsToInsert, configuration.writeConcernMax());
+
+      // Create a cursor with batch size = 2 and limit = 4 (a multiple of the batch size).
+      // This configuration is important to trigger the emptyGetMore optimization.
+      const cursor = collection.find({}, { batchSize: 2, limit: 4 });
+
+      // Consume all documents (4, due to the limit)
+      const documents = [];
+      for (let i = 0; i < 5; i++) {
+        // The 5th iteration should return null because the cursor is exhausted
+        const doc = await cursor.next();
+        if (doc !== null) {
+          documents.push(doc);
+        }
+      }
+
+      // Verify we got the correct number of documents (based on the limit)
+      expect(documents).to.have.length(4);
+
+      // Prior to the fix, this rewind() call would throw
+      // "TypeError: this.documents?.clear is not a function"
+      // because the emptyGetMore optimization sets documents to an object without a clear method.
+      try {
+        cursor.rewind();
+
+        // Verify we can iterate the cursor again after rewind
+        const documentsAfterRewind = [];
+        for (let i = 0; i < 4; i++) {
+          const doc = await cursor.next();
+          if (doc !== null) {
+            documentsAfterRewind.push(doc);
+          }
+        }
+
+        // Verify we got the same documents again
+        expect(documentsAfterRewind).to.have.length(4);
+        for (let i = 0; i < 4; i++) {
+          expect(documentsAfterRewind[i].x).to.equal(documents[i].x);
+        }
+      } catch (error) {
+        // If the rewind() operation fails, the test should fail
+        expect.fail(`Rewind operation failed: ${error.message}`);
+      }
+    }
+  });
+
+  it('should handle rewind after emptyGetMore optimization (using repro.js scenario)', {
+    metadata: {
+      requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] }
+    },
+
+    test: async function () {
+      const configuration = this.configuration;
+      const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
+      await client.connect();
+      this.defer(async () => await client.close());
+
+      const db = client.db(configuration.db);
+      const collectionName = 'test_rewind_repro';
+      await db.dropCollection(collectionName).catch(() => null);
+      const collection = db.collection(collectionName);
+
+      // Insert 100 documents (as in repro.js)
+      const documents = Array.from({ length: 100 }, (_, i) => ({
+        _id: i,
+        value: `Document ${i}`
+      }));
+      await collection.insertMany(documents, configuration.writeConcernMax());
+
+      // Create a cursor with a small batch size to force multiple getMore operations
+      const cursor = collection.find({}).batchSize(10);
+
+      // Consume the cursor until it is exhausted (as in repro.js)
+      while (await cursor.hasNext()) {
+        await cursor.next();
+      }
+
+      // At this point the cursor is fully consumed and may have been
+      // optimized with emptyGetMore, whose documents object lacks a clear() method.
+
+      // Now try to rewind the cursor and fetch the documents again
+      try {
+        // This would throw if documents.clear is not a function and the fix is not applied
+        cursor.rewind();
+
+        // If we got here, rewind succeeded - now try to use the cursor again
+        const results = await cursor.toArray();
+
+        // Verify the results
+        expect(results).to.have.length(100);
+        for (let i = 0; i < 100; i++) {
+          expect(results[i]).to.have.property('_id', i);
+          expect(results[i]).to.have.property('value', `Document ${i}`);
+        }
+      } catch (error) {
+        expect.fail(`Error during rewind or subsequent fetch: ${error.message}`);
+      }
+    }
+  });
});
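
For context, the failure these tests guard against can be illustrated outside the driver. The sketch below is not the driver's actual rewind() implementation: the class name SketchCursor and the placeholder shape `{ length: 0 }` are illustrative assumptions; only the quoted error message and the description of the emptyGetMore optimization come from the test comments above.

// Hypothetical, minimal sketch of a guarded rewind -- NOT the driver's real code.
class SketchCursor {
  constructor() {
    // Assumed placeholder buffer left behind by the emptyGetMore optimization:
    // a plain object with no clear() method.
    this.documents = { length: 0 };
  }

  rewind() {
    // Unguarded form, `this.documents?.clear();`, throws
    // "TypeError: this.documents?.clear is not a function" when the buffer is
    // the placeholder object -- the failure reproduced in the tests above.
    if (typeof this.documents?.clear === 'function') {
      this.documents.clear(); // normal path: a list-like buffer exposing clear()
    } else {
      this.documents = null; // placeholder path: drop it so the next read refills the buffer
    }
  }
}

// Usage: rewinding after the placeholder has been substituted no longer throws.
const sketch = new SketchCursor();
sketch.rewind();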