@@ -610,132 +610,139 @@ export function createPatchedFetcher(
            next: { ...init?.next, fetchType: 'origin', fetchIdx },
          }

-          return originFetch(input, clonedInit).then(async (res) => {
-            if (!isStale && fetchStart) {
-              trackFetchMetric(workStore, {
-                start: fetchStart,
-                url: fetchUrl,
-                cacheReason: cacheReasonOverride || cacheReason,
-                cacheStatus:
-                  finalRevalidate === 0 || cacheReasonOverride
-                    ? 'skip'
-                    : 'miss',
-                cacheWarning,
-                status: res.status,
-                method: clonedInit.method || 'GET',
-              })
-            }
-            if (
-              res.status === 200 &&
-              incrementalCache &&
-              cacheKey &&
-              (isCacheableRevalidate || requestStore?.serverComponentsHmrCache)
-            ) {
-              const normalizedRevalidate =
-                finalRevalidate >= INFINITE_CACHE
-                  ? CACHE_ONE_YEAR
-                  : finalRevalidate
-              const externalRevalidate =
-                finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate
-
-              if (workUnitStore && workUnitStore.type === 'prerender') {
-                // We are prerendering at build time or revalidate time with dynamicIO so we need to
-                // buffer the response so we can guarantee it can be read in a microtask
-                const bodyBuffer = await res.arrayBuffer()
-
-                const fetchedData = {
-                  headers: Object.fromEntries(res.headers.entries()),
-                  body: Buffer.from(bodyBuffer).toString('base64'),
+          return originFetch(input, clonedInit)
+            .then(async (res) => {
+              if (!isStale && fetchStart) {
+                trackFetchMetric(workStore, {
+                  start: fetchStart,
+                  url: fetchUrl,
+                  cacheReason: cacheReasonOverride || cacheReason,
+                  cacheStatus:
+                    finalRevalidate === 0 || cacheReasonOverride
+                      ? 'skip'
+                      : 'miss',
+                  cacheWarning,
                  status: res.status,
-                  url: res.url,
-                }
-
-                // We can skip checking the serverComponentsHmrCache because we aren't in
-                // dev mode.
-
-                await incrementalCache.set(
-                  cacheKey,
-                  {
-                    kind: CachedRouteKind.FETCH,
-                    data: fetchedData,
-                    revalidate: normalizedRevalidate,
-                  },
-                  {
-                    fetchCache: true,
-                    revalidate: externalRevalidate,
-                    fetchUrl,
-                    fetchIdx,
-                    tags,
+                  method: clonedInit.method || 'GET',
+                })
+              }
+              if (
+                res.status === 200 &&
+                incrementalCache &&
+                cacheKey &&
+                (isCacheableRevalidate ||
+                  requestStore?.serverComponentsHmrCache)
+              ) {
+                const normalizedRevalidate =
+                  finalRevalidate >= INFINITE_CACHE
+                    ? CACHE_ONE_YEAR
+                    : finalRevalidate
+                const externalRevalidate =
+                  finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate
+
+                if (workUnitStore && workUnitStore.type === 'prerender') {
+                  // We are prerendering at build time or revalidate time with dynamicIO so we need to
+                  // buffer the response so we can guarantee it can be read in a microtask
+                  const bodyBuffer = await res.arrayBuffer()
+
+                  const fetchedData = {
+                    headers: Object.fromEntries(res.headers.entries()),
+                    body: Buffer.from(bodyBuffer).toString('base64'),
+                    status: res.status,
+                    url: res.url,
                  }
-                )
-                await handleUnlock()

-                // We we return a new Response to the caller.
-                return new Response(bodyBuffer, {
-                  headers: res.headers,
-                  status: res.status,
-                  statusText: res.statusText,
-                })
-              } else {
-                // We're cloning the response using this utility because there
-                // exists a bug in the undici library around response cloning.
-                // See the following pull request for more details:
-                // https://github.com/vercel/next.js/pull/73274
-                const [cloned1, cloned2] = cloneResponse(res)
-
-                // We are dynamically rendering including dev mode. We want to return
-                // the response to the caller as soon as possible because it might stream
-                // over a very long time.
-                cloned1
-                  .arrayBuffer()
-                  .then(async (arrayBuffer) => {
-                    const bodyBuffer = Buffer.from(arrayBuffer)
-
-                    const fetchedData = {
-                      headers: Object.fromEntries(cloned1.headers.entries()),
-                      body: bodyBuffer.toString('base64'),
-                      status: cloned1.status,
-                      url: cloned1.url,
+                  // We can skip checking the serverComponentsHmrCache because we aren't in
+                  // dev mode.
+
+                  await incrementalCache.set(
+                    cacheKey,
+                    {
+                      kind: CachedRouteKind.FETCH,
+                      data: fetchedData,
+                      revalidate: normalizedRevalidate,
+                    },
+                    {
+                      fetchCache: true,
+                      revalidate: externalRevalidate,
+                      fetchUrl,
+                      fetchIdx,
+                      tags,
                    }
+                  )
+                  await handleUnlock()

-                    requestStore?.serverComponentsHmrCache?.set(
-                      cacheKey,
-                      fetchedData
-                    )
-
-                    if (isCacheableRevalidate) {
-                      await incrementalCache.set(
+                  // We return a new Response to the caller.
+                  return new Response(bodyBuffer, {
+                    headers: res.headers,
+                    status: res.status,
+                    statusText: res.statusText,
+                  })
+                } else {
+                  // We're cloning the response using this utility because there
+                  // exists a bug in the undici library around response cloning.
+                  // See the following pull request for more details:
+                  // https://github.com/vercel/next.js/pull/73274
+
+                  const [cloned1, cloned2] = cloneResponse(res)
+
+                  // We are dynamically rendering including dev mode. We want to return
+                  // the response to the caller as soon as possible because it might stream
+                  // over a very long time.
+                  cloned1
+                    .arrayBuffer()
+                    .then(async (arrayBuffer) => {
+                      const bodyBuffer = Buffer.from(arrayBuffer)
+
+                      const fetchedData = {
+                        headers: Object.fromEntries(cloned1.headers.entries()),
+                        body: bodyBuffer.toString('base64'),
+                        status: cloned1.status,
+                        url: cloned1.url,
+                      }
+
+                      requestStore?.serverComponentsHmrCache?.set(
                        cacheKey,
-                        {
-                          kind: CachedRouteKind.FETCH,
-                          data: fetchedData,
-                          revalidate: normalizedRevalidate,
-                        },
-                        {
-                          fetchCache: true,
-                          revalidate: externalRevalidate,
-                          fetchUrl,
-                          fetchIdx,
-                          tags,
-                        }
+                        fetchedData
                      )
-                    }
-                  })
-                  .catch((error) =>
-                    console.warn(`Failed to set fetch cache`, input, error)
-                  )
-                  .finally(handleUnlock)

-                return cloned2
+                      if (isCacheableRevalidate) {
+                        await incrementalCache.set(
+                          cacheKey,
+                          {
+                            kind: CachedRouteKind.FETCH,
+                            data: fetchedData,
+                            revalidate: normalizedRevalidate,
+                          },
+                          {
+                            fetchCache: true,
+                            revalidate: externalRevalidate,
+                            fetchUrl,
+                            fetchIdx,
+                            tags,
+                          }
+                        )
+                      }
+                    })
+                    .catch((error) =>
+                      console.warn(`Failed to set fetch cache`, input, error)
+                    )
+                    .finally(handleUnlock)
+
+                  return cloned2
+                }
              }
-            }

-            // we had response that we determined shouldn't be cached so we return it
-            // and don't cache it. This also needs to unlock the cache lock we acquired.
-            await handleUnlock()
+              // we had response that we determined shouldn't be cached so we return it
+              // and don't cache it. This also needs to unlock the cache lock we acquired.
+              await handleUnlock()

-            return res
-          })
+              return res
+            })
+            .catch((error) => {
+              handleUnlock()
+              throw error
+            })
        }

        let cacheReasonOverride
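
The substantive change in this hunk is that the promise chain returned from originFetch now ends in a .catch() that calls handleUnlock() before rethrowing, so a rejected origin fetch can no longer leave the incremental-cache lock held; most of the remaining churn is re-indentation from splitting .then() onto its own line. A minimal sketch of that unlock-on-error pattern, with hypothetical acquireLock and cacheResponse helpers standing in for the real cache plumbing (they are not Next.js APIs):

// Sketch only: acquireLock and cacheResponse are hypothetical stand-ins for
// Next.js's cache lock and incrementalCache.set(); what matters is the shape
// of the promise chain, mirroring the hunk above.
type Unlock = () => Promise<void>
declare function acquireLock(key: string): Promise<Unlock>
declare function cacheResponse(key: string, res: Response): Promise<void>

async function fetchWithLock(url: string): Promise<Response> {
  const unlock = await acquireLock(url)

  return fetch(url)
    .then(async (res) => {
      if (res.ok) {
        // Cache a copy of the body, then release the lock (success path).
        await cacheResponse(url, res.clone())
      }
      await unlock()
      return res
    })
    .catch(async (error) => {
      // Added behavior: a rejected fetch also releases the lock before the
      // error propagates to the caller.
      await unlock()
      throw error
    })
}

Without the trailing .catch(), a network error rejects the chain before the success-path unlock ever runs, so nothing in this code path would release the lock.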
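
The two branches guarded by the 200-status check, which the hunk re-indents but otherwise keeps, implement the same cache-in-the-background idea in different ways: the prerender path buffers the whole body with arrayBuffer() and returns a fresh Response, while the dynamic path clones the response via cloneResponse (a workaround for an undici cloning bug, see the PR linked in the comment) so one copy streams to the caller immediately and the other is buffered and written to the cache asynchronously. A rough sketch of the dynamic path, using the standard Response.clone() and a hypothetical storeInCache helper rather than the actual Next.js APIs:

// Sketch only: storeInCache is hypothetical; the real code calls
// incrementalCache.set() with a base64 body, headers, status, and URL.
declare function storeInCache(
  key: string,
  data: {
    headers: Record<string, string>
    body: string
    status: number
    url: string
  }
): Promise<void>

function streamAndCache(key: string, res: Response): Response {
  // Hand one copy back to the caller right away so it can start streaming.
  const forCaller = res.clone()

  // Buffer the other copy and write it to the cache in the background.
  res
    .arrayBuffer()
    .then((arrayBuffer) =>
      storeInCache(key, {
        headers: Object.fromEntries(res.headers.entries()),
        body: Buffer.from(arrayBuffer).toString('base64'),
        status: res.status,
        url: res.url,
      })
    )
    // Cache write failures are logged, not surfaced; the caller still gets the body.
    .catch((error) => console.warn('Failed to set fetch cache', key, error))

  return forCaller
}

Returning the clone before the background buffering completes is what keeps long-lived streaming responses usable in dev and dynamic rendering, at the cost of the cache write finishing some time after the caller has the Response.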