Skip to content

Commit 4af4031

Browse files
sebmarkbage authored and gnoff committed
[Flight Reply] Reject any new Chunks not yet discovered at the time of reportGlobalError (#31840)
We might have already resolved models that are not pending and so are not rejected by aborting the stream. When those later get parsed they might discover new chunks which end up as pending. These should be errored since they will never be able to resolve later. This avoids infinitely hanging the stream. This same fix needs to be ported to ReactFlightClient, which has the same issue.
1 parent 7283a21 commit 4af4031

File tree

2 files changed

+45
-0
lines changed

2 files changed

+45
-0
lines changed

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMReplyEdge-test.js

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -250,4 +250,31 @@ describe('ReactFlightDOMReplyEdge', () => {
250250
),
251251
);
252252
});
253+
254+
// Regression test: decoding a reply that was aborted mid-stream must
// reject any promise chunks it discovers during parsing rather than
// leave them pending forever.
it('should abort when parsing an incomplete payload', async () => {
  const neverResolves = new Promise(() => {});
  const abortController = new AbortController();
  const encoding = ReactServerDOMClient.encodeReply(
    {promise: neverResolves},
    {signal: abortController.signal},
  );
  abortController.abort();
  const payload = await encoding;

  const result = await ReactServerDOMServer.decodeReply(
    payload,
    webpackServerMap,
  );

  // The inner promise can never stream in, so awaiting it should
  // surface the closed-connection error instead of hanging.
  let caught = null;
  try {
    await result.promise;
  } catch (err) {
    caught = err;
  }
  expect(caught).not.toBe(null);
  expect(caught.message).toBe('Connection closed.');
});
253280
});

packages/react-server/src/ReactFlightReplyServer.js

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -173,6 +173,8 @@ export type Response = {
173173
_prefix: string,
174174
_formData: FormData,
175175
_chunks: Map<number, SomeChunk<any>>,
176+
_closed: boolean,
177+
_closedReason: mixed,
176178
_temporaryReferences: void | TemporaryReferenceSet,
177179
};
178180

@@ -261,6 +263,14 @@ function createResolvedModelChunk<T>(
261263
return new Chunk(RESOLVED_MODEL, value, id, response);
262264
}
263265

266+
function createErroredChunk<T>(
267+
response: Response,
268+
reason: mixed,
269+
): ErroredChunk<T> {
270+
// $FlowFixMe[invalid-constructor] Flow doesn't support functions as constructors
271+
return new Chunk(ERRORED, null, reason, response);
272+
}
273+
264274
function resolveModelChunk<T>(
265275
chunk: SomeChunk<T>,
266276
value: string,
@@ -501,6 +511,8 @@ function initializeModelChunk<T>(chunk: ResolvedModelChunk<T>): void {
501511
// Report that any missing chunks in the model is now going to throw this
502512
// error upon read. Also notify any pending promises.
503513
export function reportGlobalError(response: Response, error: Error): void {
514+
response._closed = true;
515+
response._closedReason = error;
504516
response._chunks.forEach(chunk => {
505517
// If this chunk was already resolved or errored, it won't
506518
// trigger an error but if it wasn't then we need to
@@ -522,6 +534,10 @@ function getChunk(response: Response, id: number): SomeChunk<any> {
522534
if (backingEntry != null) {
523535
// We assume that this is a string entry for now.
524536
chunk = createResolvedModelChunk(response, (backingEntry: any), id);
537+
} else if (response._closed) {
538+
// We have already errored the response and we're not going to get
539+
// anything more streaming in so this will immediately error.
540+
chunk = createErroredChunk(response, response._closedReason);
525541
} else {
526542
// We're still waiting on this entry to stream in.
527543
chunk = createPendingChunk(response);
@@ -1102,6 +1118,8 @@ export function createResponse(
11021118
_prefix: formFieldPrefix,
11031119
_formData: backingFormData,
11041120
_chunks: chunks,
1121+
_closed: false,
1122+
_closedReason: null,
11051123
_temporaryReferences: temporaryReferences,
11061124
};
11071125
return response;

0 commit comments

Comments
 (0)