diff --git a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
index aae9cf48c4285..41fc0bfd41088 100644
--- a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
+++ b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
@@ -2797,7 +2797,16 @@ describe('ReactFlightDOM', () => {
abortFizz('bam');
});
- expect(errors).toEqual(['bam']);
+ if (__DEV__) {
+ expect(errors).toEqual([new Error('Connection closed.')]);
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status,
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info, so the missing chunk is never
+ // instantiated because nothing triggers model evaluation before the
+ // stream completes.
+ expect(errors).toEqual(['bam']);
+ }
const container = document.createElement('div');
await readInto(container, fizzReadable);
@@ -2919,10 +2928,11 @@ describe('ReactFlightDOM', () => {
});
const {prelude} = await pendingResult;
+
expect(errors).toEqual(['boom']);
- const response = ReactServerDOMClient.createFromReadableStream(
- Readable.toWeb(prelude),
- );
+
+ const preludeWeb = Readable.toWeb(prelude);
+ const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);
const {writable: fizzWritable, readable: fizzReadable} = getTestStream();
@@ -2949,7 +2959,17 @@ describe('ReactFlightDOM', () => {
});
// one error per boundary
- expect(errors).toEqual(['boom', 'boom', 'boom']);
+ if (__DEV__) {
+ const err = new Error('Connection closed.');
+ expect(errors).toEqual([err, err, err]);
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status,
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info, so the missing chunk is never
+ // instantiated because nothing triggers model evaluation before the
+ // stream completes.
+ expect(errors).toEqual(['boom', 'boom', 'boom']);
+ }
const container = document.createElement('div');
await readInto(container, fizzReadable);
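// --- Illustrative aside (not part of the diff) ---------------------------
// A minimal sketch of why DEV observes `new Error('Connection closed.')`:
// when the Flight stream finishes, any chunk still pending is rejected.
// The names below are hypothetical, not the real client internals.
function closeFlightResponse(pendingChunks) {
  const reason = new Error('Connection closed.');
  for (const chunk of pendingChunks) {
    if (chunk.status === 'pending') {
      chunk.reject(reason); // in DEV, debug info created such a chunk
    }
  }
}
// In production no debug-info chunk is ever instantiated, so nothing is
// left pending and the original abort reason ('bam') surfaces instead.
// --------------------------------------------------------------------------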
diff --git a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js
index a4c5df377be57..fa1e65862564e 100644
--- a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js
+++ b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js
@@ -2454,12 +2454,28 @@ describe('ReactFlightDOMBrowser', () => {
passThrough(prelude),
);
const container = document.createElement('div');
- const root = ReactDOMClient.createRoot(container);
+ errors.length = 0;
+ const root = ReactDOMClient.createRoot(container, {
+ onUncaughtError(err) {
+ errors.push(err);
+ },
+ });
await act(() => {
root.render(<ClientRoot response={response} />);
});
- expect(container.innerHTML).toBe('<div>loading...</div>');
+ if (__DEV__) {
+ expect(errors).toEqual([new Error('Connection closed.')]);
+ expect(container.innerHTML).toBe('');
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status,
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info, so the missing chunk is never
+ // instantiated because nothing triggers model evaluation before the
+ // stream completes.
+ expect(errors).toEqual([]);
+ expect(container.innerHTML).toBe('<div>loading...</div>');
+ }
});
});
diff --git a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js
index 1c146014dcefa..0cb3897aea443 100644
--- a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js
+++ b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js
@@ -1172,7 +1172,16 @@ describe('ReactFlightDOMEdge', () => {
),
);
fizzController.abort('bam');
- expect(errors).toEqual(['bam']);
+ if (__DEV__) {
+ expect(errors).toEqual([new Error('Connection closed.')]);
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status,
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info, so the missing chunk is never
+ // instantiated because nothing triggers model evaluation before the
+ // stream completes.
+ expect(errors).toEqual(['bam']);
+ }
// Should still match the result when parsed
const result = await readResult(ssrStream);
const div = document.createElement('div');
diff --git a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js
index 620da74ff1db4..f2dca4a45c7fa 100644
--- a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js
+++ b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js
@@ -509,7 +509,16 @@ describe('ReactFlightDOMNode', () => {
),
);
ssrStream.abort('bam');
- expect(errors).toEqual(['bam']);
+ if (__DEV__) {
+ expect(errors).toEqual([new Error('Connection closed.')]);
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status,
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info, so the missing chunk is never
+ // instantiated because nothing triggers model evaluation before the
+ // stream completes.
+ expect(errors).toEqual(['bam']);
+ }
// Should still match the result when parsed
const result = await readResult(ssrStream);
const div = document.createElement('div');
diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js
index 9616d4b972911..efad2aa59ca81 100644
--- a/packages/react-server/src/ReactFlightServer.js
+++ b/packages/react-server/src/ReactFlightServer.js
@@ -649,9 +649,13 @@ function serializeThenable(
// We can no longer accept any resolved values
request.abortableTasks.delete(newTask);
newTask.status = ABORTED;
- const errorId: number = (request.fatalError: any);
- const model = stringify(serializeByValueID(errorId));
- emitModelChunk(request, newTask.id, model);
+ if (enableHalt && request.type === PRERENDER) {
+ request.pendingChunks--;
+ } else {
+ const errorId: number = (request.fatalError: any);
+ const model = stringify(serializeByValueID(errorId));
+ emitModelChunk(request, newTask.id, model);
+ }
return newTask.id;
}
if (typeof thenable.status === 'string') {
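// --- Illustrative aside (not part of the diff) ---------------------------
// The branch above is the same halt-vs-error split that recurs in
// renderModel and retryTask below. Condensed, using this file's helpers:
//
//   if (enableHalt && request.type === PRERENDER) {
//     // Halt: give the reserved row back; no chunk is ever written for
//     // the task id, so the receiver keeps that reference pending forever.
//     request.pendingChunks--;
//   } else {
//     // Render: point the slot at the shared error row claimed in abort().
//     emitModelChunk(request, task.id, stringify(serializeByValueID(request.fatalError)));
//   }
// --------------------------------------------------------------------------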
@@ -1633,6 +1637,24 @@ function outlineTask(request: Request, task: Task): ReactJSONValue {
return serializeLazyID(newTask.id);
}
+function outlineHaltedTask(
+ request: Request,
+ task: Task,
+ allowLazy: boolean,
+): ReactJSONValue {
+ // In the future, if we track task state for resuming, we may need to
+ // construct an actual task here, but since we're never going to retry it
+ // we just claim the id and serialize it according to the proper convention.
+ const taskId = request.nextChunkId++;
+ if (allowLazy) {
+ // We're halting in a position that can handle a lazy reference
+ return serializeLazyID(taskId);
+ } else {
+ // We're halting in a position that needs a value reference
+ return serializeByValueID(taskId);
+ }
+}
+
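// --- Illustrative aside (not part of the diff) ---------------------------
// The two conventions outlineHaltedTask picks between reduce to string
// tags with hex-encoded ids in the serialized payload. Sketches of the
// helpers' output shapes (names here are hypothetical):
function serializeLazyIDSketch(id) {
  return '$L' + id.toString(16); // lazy slot: receiver wraps it lazily
}
function serializeByValueIDSketch(id) {
  return '$' + id.toString(16); // value slot: receiver resolves it inline
}
// A halted slot is therefore just an id for which no row ever arrives.
// --------------------------------------------------------------------------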
function renderElement(
request: Request,
task: Task,
@@ -2278,6 +2300,20 @@ function renderModel(
((model: any).$$typeof === REACT_ELEMENT_TYPE ||
(model: any).$$typeof === REACT_LAZY_TYPE);
+ if (request.status === ABORTING) {
+ task.status = ABORTED;
+ if (enableHalt && request.type === PRERENDER) {
+ // This will claim a new task id and refer to it in this slot;
+ // the new task won't be retried because we are aborting.
+ return outlineHaltedTask(request, task, wasReactNode);
+ }
+ const errorId = (request.fatalError: any);
+ if (wasReactNode) {
+ return serializeLazyID(errorId);
+ }
+ return serializeByValueID(errorId);
+ }
+
const x =
thrownValue === SuspenseException
? // This is a special type of exception used for Suspense. For historical
@@ -2291,14 +2327,6 @@ function renderModel(
if (typeof x === 'object' && x !== null) {
// $FlowFixMe[method-unbinding]
if (typeof x.then === 'function') {
- if (request.status === ABORTING) {
- task.status = ABORTED;
- const errorId: number = (request.fatalError: any);
- if (wasReactNode) {
- return serializeLazyID(errorId);
- }
- return serializeByValueID(errorId);
- }
// Something suspended, we'll need to create a new task and resolve it later.
const newTask = createTask(
request,
@@ -2344,15 +2372,6 @@ function renderModel(
}
}
- if (request.status === ABORTING) {
- task.status = ABORTED;
- const errorId: number = (request.fatalError: any);
- if (wasReactNode) {
- return serializeLazyID(errorId);
- }
- return serializeByValueID(errorId);
- }
-
// Restore the context. We assume that this will be restored by the inner
// functions in case nothing throws so we don't use "finally" here.
task.keyPath = prevKeyPath;
@@ -3820,6 +3839,22 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = COMPLETED;
} catch (thrownValue) {
+ if (request.status === ABORTING) {
+ request.abortableTasks.delete(task);
+ task.status = ABORTED;
+ if (enableHalt && request.type === PRERENDER) {
+ // When aborting a prerender with halt semantics we don't emit
+ // anything into the slot for a task that aborts; it remains unresolved.
+ request.pendingChunks--;
+ } else {
+ // Otherwise we emit an error chunk into the task slot.
+ const errorId: number = (request.fatalError: any);
+ const model = stringify(serializeByValueID(errorId));
+ emitModelChunk(request, task.id, model);
+ }
+ return;
+ }
+
const x =
thrownValue === SuspenseException
? // This is a special type of exception used for Suspense. For historical
@@ -3832,14 +3867,6 @@ function retryTask(request: Request, task: Task): void {
if (typeof x === 'object' && x !== null) {
// $FlowFixMe[method-unbinding]
if (typeof x.then === 'function') {
- if (request.status === ABORTING) {
- request.abortableTasks.delete(task);
- task.status = ABORTED;
- const errorId: number = (request.fatalError: any);
- const model = stringify(serializeByValueID(errorId));
- emitModelChunk(request, task.id, model);
- return;
- }
// Something suspended again, let's pick it back up later.
task.status = PENDING;
task.thenableState = getThenableStateAfterSuspending();
@@ -3856,15 +3883,6 @@ function retryTask(request: Request, task: Task): void {
}
}
- if (request.status === ABORTING) {
- request.abortableTasks.delete(task);
- task.status = ABORTED;
- const errorId: number = (request.fatalError: any);
- const model = stringify(serializeByValueID(errorId));
- emitModelChunk(request, task.id, model);
- return;
- }
-
request.abortableTasks.delete(task);
task.status = ERRORED;
const digest = logRecoverableError(request, x, task);
@@ -3942,6 +3960,17 @@ function abortTask(task: Task, request: Request, errorId: number): void {
request.completedErrorChunks.push(processedChunk);
}
+function haltTask(task: Task, request: Request): void {
+ if (task.status === RENDERING) {
+ // this task will be halted by the render
+ return;
+ }
+ task.status = ABORTED;
+ // We don't actually emit anything for this task id because we are intentionally
+ // leaving the reference unfulfilled.
+ request.pendingChunks--;
+}
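// --- Illustrative aside (not part of the diff) ---------------------------
// haltTask leans on the pendingChunks counter: every claimed id holds the
// stream open until a row is written for it. A toy model of the invariant:
function canFinishStream(request) {
  // Halting decrements pendingChunks without emitting, so an aborted
  // prerender can still drain to zero and close its prelude cleanly.
  return request.pendingChunks === 0; // hypothetical helper
}
// --------------------------------------------------------------------------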
+
function flushCompletedChunks(
request: Request,
destination: Destination,
@@ -4087,12 +4116,6 @@ export function abort(request: Request, reason: mixed): void {
}
const abortableTasks = request.abortableTasks;
if (abortableTasks.size > 0) {
- // We have tasks to abort. We'll emit one error row and then emit a reference
- // to that row from every row that's still remaining if we are rendering. If we
- // are prerendering (and halt semantics are enabled) we will refer to an error row
- // but not actually emit it so the receiver can halt at that point rather than error.
- const errorId = request.nextChunkId++;
- request.fatalError = errorId;
if (
enablePostpone &&
typeof reason === 'object' &&
@@ -4101,10 +4124,20 @@ export function abort(request: Request, reason: mixed): void {
) {
const postponeInstance: Postpone = (reason: any);
logPostpone(request, postponeInstance.message, null);
- if (!enableHalt || request.type === PRERENDER) {
- // When prerendering with halt semantics we omit the referred to postpone.
+ if (enableHalt && request.type === PRERENDER) {
+ // When prerendering with halt semantics we simply halt each task
+ // and leave its reference unfulfilled.
+ abortableTasks.forEach(task => haltTask(task, request));
+ abortableTasks.clear();
+ } else {
+ // When rendering we produce a shared postpone chunk and then
+ // fulfill each task with a reference to that chunk.
+ const errorId = request.nextChunkId++;
+ request.fatalError = errorId;
request.pendingChunks++;
emitPostponeChunk(request, errorId, postponeInstance);
+ abortableTasks.forEach(task => abortTask(task, request, errorId));
+ abortableTasks.clear();
}
} else {
const error =
@@ -4120,14 +4153,22 @@ export function abort(request: Request, reason: mixed): void {
)
: reason;
const digest = logRecoverableError(request, error, null);
- if (!enableHalt || request.type === RENDER) {
- // When prerendering with halt semantics we omit the referred to error.
+ if (enableHalt && request.type === PRERENDER) {
+ // When prerendering with halt semantics we simply halt each task
+ // and leave its reference unfulfilled.
+ abortableTasks.forEach(task => haltTask(task, request));
+ abortableTasks.clear();
+ } else {
+ // When rendering we produce a shared error chunk and then
+ // fulfill each task with a reference to that chunk.
+ const errorId = request.nextChunkId++;
+ request.fatalError = errorId;
request.pendingChunks++;
emitErrorChunk(request, errorId, digest, error);
+ abortableTasks.forEach(task => abortTask(task, request, errorId));
+ abortableTasks.clear();
}
}
- abortableTasks.forEach(task => abortTask(task, request, errorId));
- abortableTasks.clear();
const onAllReady = request.onAllReady;
onAllReady();
}
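// --- Illustrative summary (not part of the diff) --------------------------
// Observable difference after abort(), assuming rows are written roughly
// as "id:json" lines (a sketch of the wire format, not its exact shape):
//   render:           one shared error/postpone row plus '$<id>' or
//                     '$L<id>' references from every unfinished slot, so
//                     consumers throw the abort reason when they read them;
//   prerender + halt: no row at all for unfinished slots, so consumers of
//                     the prelude keep those references pending forever.
// Hypothetical consumer-side probe for the halted case:
async function probeSlot(slotPromise, ms) {
  const pending = new Promise(resolve =>
    setTimeout(() => resolve('still pending'), ms),
  );
  return Promise.race([slotPromise, pending]); // halt => 'still pending'
}
// --------------------------------------------------------------------------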