From 0a8846d12215c0da24801691f353d12dd3d6c38f Mon Sep 17 00:00:00 2001
From: Josh Story
Date: Sat, 16 Mar 2024 12:39:37 -0700
Subject: [PATCH] [Fizz] Prevent uncloned large precomputed chunks without
 relying on render-time assertions (#28568)

A while back we implemented a heuristic that if a chunk was large it was
assumed to be produced by the render and thus safe to stream, which results in
transferring the underlying object memory. Later we ran into an issue where a
precomputed chunk grew large enough to trigger this heuristic and started
causing renders to fail, because once a second render had occurred the
precomputed chunk no longer had an underlying buffer of bytes to send and
those bytes were omitted from the stream. We implemented a technique to detect
large precomputed chunks and enforced that they always be cloned before
writing. Unfortunately our test coverage was not perfect, and for a very long
time now there has been a usage pattern where, if you complete a boundary in
one flush and then complete a boundary that has stylesheet dependencies in
another flush, a large precomputed chunk that was not cloned can be sent
twice, causing streaming errors.

I've thought about why we even went with this solution in the first place and
I think it was a mistake. It relies on a dev-only check, paired with a
potentially version-specific order of operations on the streaming side, to
catch the problem. This is too unreliable. Additionally, the low view size
limit for Edge is not used in Node.js, and there is no real justification for
the difference.

In this change I updated the view size for Edge streaming to match Node at
2048 bytes, which is still relatively small; we have no data one way or
another to prefer 512 over this. Then I updated the assertion logic to error
any time a precomputed chunk exceeds this size. This eliminates the need to
clone these chunks by ensuring our view size is always larger than the largest
precomputed chunk we can possibly write.

I'm generally in favor of this for a few reasons. First, we'll always know
during testing whether we've violated the limit, as long as we exercise each
stream config, because the precomputed chunks are created in module scope.
Second, we can always split up large chunks, so making sure a precomputed
chunk is smaller than whatever view size we actually desire is relatively
trivial.
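In rough terms, the dev-only guard this patch adds to each stream config looks
like the sketch below. It is a paraphrase of the Browser/Edge/Node changes in
the diff, not a new API; `textEncoder`, `__DEV__`, and the `PrecomputedChunk`
type are assumed to come from the surrounding config module:

```js
const VIEW_SIZE = 2048; // raised from 512 in the Browser and Edge configs

export function stringToPrecomputedChunk(content: string): PrecomputedChunk {
  const precomputedChunk = textEncoder.encode(content);

  if (__DEV__) {
    // Precomputed chunks are created in module scope, so any chunk that is
    // too large to fit in a single view is reported the first time a test
    // exercises this stream config, rather than failing at render time.
    if (precomputedChunk.byteLength > VIEW_SIZE) {
      console.error(
        'precomputed chunks must be smaller than the view size configured for this host. This is a bug in React.',
      );
    }
  }

  return precomputedChunk;
}
```

Because every precomputed chunk is now guaranteed to fit in a view, writeChunk
never hits the "enqueue directly" path with a precomputed chunk, and
clonePrecomputedChunk can be deleted from every host config.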
---
 .../ReactDOMLegacyServerStreamConfig.js       |  6 --
 .../src/server/ReactFizzConfigDOM.js          |  7 +--
 .../src/__tests__/ReactDOMFloat-test.js       | 59 +++++++++++++++++++
 .../src/ReactNoopFlightServer.js              |  3 -
 .../src/ReactServerStreamConfigBrowser.js     | 29 ++-------
 .../src/ReactServerStreamConfigBun.js         |  6 --
 .../src/ReactServerStreamConfigEdge.js        | 29 ++-------
 .../src/ReactServerStreamConfigFB.js          |  6 --
 .../src/ReactServerStreamConfigNode.js        | 25 ++------
 .../forks/ReactServerStreamConfig.custom.js   |  1 -
 10 files changed, 77 insertions(+), 94 deletions(-)

diff --git a/packages/react-dom-bindings/src/server/ReactDOMLegacyServerStreamConfig.js b/packages/react-dom-bindings/src/server/ReactDOMLegacyServerStreamConfig.js
index 0791791a148e0..5fa0c88d13181 100644
--- a/packages/react-dom-bindings/src/server/ReactDOMLegacyServerStreamConfig.js
+++ b/packages/react-dom-bindings/src/server/ReactDOMLegacyServerStreamConfig.js
@@ -58,12 +58,6 @@ export function typedArrayToBinaryChunk(
   throw new Error('Not implemented.');
 }
 
-export function clonePrecomputedChunk(
-  chunk: PrecomputedChunk,
-): PrecomputedChunk {
-  return chunk;
-}
-
 export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
   throw new Error('Not implemented.');
 }
diff --git a/packages/react-dom-bindings/src/server/ReactFizzConfigDOM.js b/packages/react-dom-bindings/src/server/ReactFizzConfigDOM.js
index 0788f216e5f30..056630536eece 100644
--- a/packages/react-dom-bindings/src/server/ReactFizzConfigDOM.js
+++ b/packages/react-dom-bindings/src/server/ReactFizzConfigDOM.js
@@ -48,7 +48,6 @@ import {
   writeChunkAndReturn,
   stringToChunk,
   stringToPrecomputedChunk,
-  clonePrecomputedChunk,
 } from 'react-server/src/ReactServerStreamConfig';
 import {
   resolveRequest,
@@ -4143,15 +4142,13 @@ export function writeCompletedBoundaryInstruction(
     ) {
       resumableState.instructions |=
         SentStyleInsertionFunction | SentCompleteBoundaryFunction;
-      writeChunk(
-        destination,
-        clonePrecomputedChunk(completeBoundaryWithStylesScript1FullBoth),
-      );
+      writeChunk(destination, completeBoundaryWithStylesScript1FullBoth);
     } else if (
       (resumableState.instructions & SentStyleInsertionFunction) ===
       NothingSent
     ) {
       resumableState.instructions |= SentStyleInsertionFunction;
+
       writeChunk(destination, completeBoundaryWithStylesScript1FullPartial);
     } else {
       writeChunk(destination, completeBoundaryWithStylesScript1Partial);
diff --git a/packages/react-dom/src/__tests__/ReactDOMFloat-test.js b/packages/react-dom/src/__tests__/ReactDOMFloat-test.js
index 2cc4df5d779c5..44e9f74718821 100644
--- a/packages/react-dom/src/__tests__/ReactDOMFloat-test.js
+++ b/packages/react-dom/src/__tests__/ReactDOMFloat-test.js
@@ -785,6 +785,65 @@ describe('ReactDOMFloat', () => {
     ).toEqual(['']);
   });
 
+  // @gate enableFloat
+  it('can send style insertion implementation independent of boundary commpletion instruction implementation', async () => {
+    await act(() => {
+      renderToPipeableStream(
+
+
+          foo
+
+
+
+
+          bar
+
+
+
+        ,
+      ).pipe(writable);
+    });
+
+    expect(getMeaningfulChildren(document)).toEqual(
+
+
+
+        {'loading foo...'}
+        {'loading bar...'}
+
+      ,
+    );
+
+    await act(() => {
+      resolveText('foo');
+    });
+    expect(getMeaningfulChildren(document)).toEqual(
+
+
+
+        foo
+        {'loading bar...'}
+
+      ,
+    );
+    await act(() => {
+      resolveText('bar');
+    });
+    expect(getMeaningfulChildren(document)).toEqual(
+
+
+
+
+
+        foo
+        {'loading bar...'}
+
+
+      ,
+    );
+  });
+
   // @gate enableFloat
   it('can avoid inserting a late stylesheet if it already rendered on the client', async () => {
     await act(() => {
diff --git a/packages/react-noop-renderer/src/ReactNoopFlightServer.js b/packages/react-noop-renderer/src/ReactNoopFlightServer.js
index 9faab3cdfa110..ac0181a38bed6 100644
--- a/packages/react-noop-renderer/src/ReactNoopFlightServer.js
+++ b/packages/react-noop-renderer/src/ReactNoopFlightServer.js
@@ -47,9 +47,6 @@ const ReactNoopFlightServer = ReactFlightServer({
   stringToPrecomputedChunk(content: string): Uint8Array {
     return textEncoder.encode(content);
   },
-  clonePrecomputedChunk(chunk: Uint8Array): Uint8Array {
-    return chunk;
-  },
   isClientReference(reference: Object): boolean {
     return reference.$$typeof === Symbol.for('react.client.reference');
   },
diff --git a/packages/react-server/src/ReactServerStreamConfigBrowser.js b/packages/react-server/src/ReactServerStreamConfigBrowser.js
index da1a56f813303..f9371303844fa 100644
--- a/packages/react-server/src/ReactServerStreamConfigBrowser.js
+++ b/packages/react-server/src/ReactServerStreamConfigBrowser.js
@@ -22,7 +22,7 @@ export function flushBuffered(destination: Destination) {
   // transform streams. https://github.com/whatwg/streams/issues/960
 }
 
-const VIEW_SIZE = 512;
+const VIEW_SIZE = 2048;
 
 let currentView = null;
 let writtenBytes = 0;
@@ -40,15 +40,6 @@ export function writeChunk(
   }
 
   if (chunk.byteLength > VIEW_SIZE) {
-    if (__DEV__) {
-      if (precomputedChunkSet.has(chunk)) {
-        console.error(
-          'A large precomputed chunk was passed to writeChunk without being copied.' +
-            ' Large chunks get enqueued directly and are not copied. This is incompatible with precomputed chunks because you cannot enqueue the same precomputed chunk twice.' +
-            ' Use "cloneChunk" to make a copy of this large precomputed chunk before writing it. This is a bug in React.',
-        );
-      }
-    }
     // this chunk may overflow a single view which implies it was not
     // one that is cached by the streaming renderer. We will enqueu
     // it directly and expect it is not re-used
@@ -120,15 +111,15 @@ export function stringToChunk(content: string): Chunk {
   return textEncoder.encode(content);
 }
 
-const precomputedChunkSet: Set<PrecomputedChunk> = __DEV__
-  ? new Set()
-  : (null: any);
-
 export function stringToPrecomputedChunk(content: string): PrecomputedChunk {
   const precomputedChunk = textEncoder.encode(content);
 
   if (__DEV__) {
-    precomputedChunkSet.add(precomputedChunk);
+    if (precomputedChunk.byteLength > VIEW_SIZE) {
+      console.error(
+        'precomputed chunks must be smaller than the view size configured for this host. This is a bug in React.',
+      );
+    }
   }
 
   return precomputedChunk;
@@ -151,14 +142,6 @@ export function typedArrayToBinaryChunk(
   return content.byteLength > VIEW_SIZE ? buffer.slice() : buffer;
 }
 
-export function clonePrecomputedChunk(
-  precomputedChunk: PrecomputedChunk,
-): PrecomputedChunk {
-  return precomputedChunk.byteLength > VIEW_SIZE
-    ? precomputedChunk.slice()
-    : precomputedChunk;
-}
-
 export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
   return chunk.byteLength;
 }
diff --git a/packages/react-server/src/ReactServerStreamConfigBun.js b/packages/react-server/src/ReactServerStreamConfigBun.js
index 276c7f59e490a..ac8ae3f1a52eb 100644
--- a/packages/react-server/src/ReactServerStreamConfigBun.js
+++ b/packages/react-server/src/ReactServerStreamConfigBun.js
@@ -70,12 +70,6 @@ export function typedArrayToBinaryChunk(
   return content;
 }
 
-export function clonePrecomputedChunk(
-  chunk: PrecomputedChunk,
-): PrecomputedChunk {
-  return chunk;
-}
-
 export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
   return Buffer.byteLength(chunk, 'utf8');
 }
diff --git a/packages/react-server/src/ReactServerStreamConfigEdge.js b/packages/react-server/src/ReactServerStreamConfigEdge.js
index 1bc9d7655a087..e77dc28284a18 100644
--- a/packages/react-server/src/ReactServerStreamConfigEdge.js
+++ b/packages/react-server/src/ReactServerStreamConfigEdge.js
@@ -22,7 +22,7 @@ export function flushBuffered(destination: Destination) {
   // transform streams. https://github.com/whatwg/streams/issues/960
 }
 
-const VIEW_SIZE = 512;
+const VIEW_SIZE = 2048;
 
 let currentView = null;
 let writtenBytes = 0;
@@ -40,15 +40,6 @@ export function writeChunk(
   }
 
   if (chunk.byteLength > VIEW_SIZE) {
-    if (__DEV__) {
-      if (precomputedChunkSet.has(chunk)) {
-        console.error(
-          'A large precomputed chunk was passed to writeChunk without being copied.' +
-            ' Large chunks get enqueued directly and are not copied. This is incompatible with precomputed chunks because you cannot enqueue the same precomputed chunk twice.' +
-            ' Use "cloneChunk" to make a copy of this large precomputed chunk before writing it. This is a bug in React.',
-        );
-      }
-    }
     // this chunk may overflow a single view which implies it was not
     // one that is cached by the streaming renderer. We will enqueu
     // it directly and expect it is not re-used
@@ -120,15 +111,15 @@ export function stringToChunk(content: string): Chunk {
   return textEncoder.encode(content);
 }
 
-const precomputedChunkSet: Set<PrecomputedChunk> = __DEV__
-  ? new Set()
-  : (null: any);
-
 export function stringToPrecomputedChunk(content: string): PrecomputedChunk {
   const precomputedChunk = textEncoder.encode(content);
 
   if (__DEV__) {
-    precomputedChunkSet.add(precomputedChunk);
+    if (precomputedChunk.byteLength > VIEW_SIZE) {
+      console.error(
+        'precomputed chunks must be smaller than the view size configured for this host. This is a bug in React.',
+      );
+    }
   }
 
   return precomputedChunk;
@@ -151,14 +142,6 @@ export function typedArrayToBinaryChunk(
   return content.byteLength > VIEW_SIZE ? buffer.slice() : buffer;
 }
 
-export function clonePrecomputedChunk(
-  precomputedChunk: PrecomputedChunk,
-): PrecomputedChunk {
-  return precomputedChunk.byteLength > VIEW_SIZE
-    ? precomputedChunk.slice()
-    : precomputedChunk;
-}
-
 export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
   return chunk.byteLength;
 }
diff --git a/packages/react-server/src/ReactServerStreamConfigFB.js b/packages/react-server/src/ReactServerStreamConfigFB.js
index 5dfdde467f751..e0c754955ede4 100644
--- a/packages/react-server/src/ReactServerStreamConfigFB.js
+++ b/packages/react-server/src/ReactServerStreamConfigFB.js
@@ -60,12 +60,6 @@ export function typedArrayToBinaryChunk(
   throw new Error('Not implemented.');
 }
 
-export function clonePrecomputedChunk(
-  chunk: PrecomputedChunk,
-): PrecomputedChunk {
-  return chunk;
-}
-
 export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
   throw new Error('Not implemented.');
 }
diff --git a/packages/react-server/src/ReactServerStreamConfigNode.js b/packages/react-server/src/ReactServerStreamConfigNode.js
index 5d5c6b4fab3b6..cbd366ab54ba3 100644
--- a/packages/react-server/src/ReactServerStreamConfigNode.js
+++ b/packages/react-server/src/ReactServerStreamConfigNode.js
@@ -99,15 +99,6 @@ function writeViewChunk(
     return;
   }
   if (chunk.byteLength > VIEW_SIZE) {
-    if (__DEV__) {
-      if (precomputedChunkSet && precomputedChunkSet.has(chunk)) {
-        console.error(
-          'A large precomputed chunk was passed to writeChunk without being copied.' +
-            ' Large chunks get enqueued directly and are not copied. This is incompatible with precomputed chunks because you cannot enqueue the same precomputed chunk twice.' +
-            ' Use "cloneChunk" to make a copy of this large precomputed chunk before writing it. This is a bug in React.',
-        );
-      }
-    }
     // this chunk may overflow a single view which implies it was not
     // one that is cached by the streaming renderer. We will enqueu
     // it directly and expect it is not re-used
@@ -201,14 +192,14 @@ export function stringToChunk(content: string): Chunk {
   return content;
 }
 
-const precomputedChunkSet = __DEV__ ? new Set() : null;
-
 export function stringToPrecomputedChunk(content: string): PrecomputedChunk {
   const precomputedChunk = textEncoder.encode(content);
 
   if (__DEV__) {
-    if (precomputedChunkSet) {
-      precomputedChunkSet.add(precomputedChunk);
+    if (precomputedChunk.byteLength > VIEW_SIZE) {
+      console.error(
+        'precomputed chunks must be smaller than the view size configured for this host. This is a bug in React.',
+      );
     }
   }
 
   return precomputedChunk;
 }
@@ -222,14 +213,6 @@ export function typedArrayToBinaryChunk(
   return new Uint8Array(content.buffer, content.byteOffset, content.byteLength);
 }
 
-export function clonePrecomputedChunk(
-  precomputedChunk: PrecomputedChunk,
-): PrecomputedChunk {
-  return precomputedChunk.length > VIEW_SIZE
-    ? precomputedChunk.slice()
-    : precomputedChunk;
-}
-
 export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
   return typeof chunk === 'string'
     ? Buffer.byteLength(chunk, 'utf8')
diff --git a/packages/react-server/src/forks/ReactServerStreamConfig.custom.js b/packages/react-server/src/forks/ReactServerStreamConfig.custom.js
index cc92a88b01e79..e372e8903f856 100644
--- a/packages/react-server/src/forks/ReactServerStreamConfig.custom.js
+++ b/packages/react-server/src/forks/ReactServerStreamConfig.custom.js
@@ -41,7 +41,6 @@ export const closeWithError = $$$config.closeWithError;
 export const stringToChunk = $$$config.stringToChunk;
 export const stringToPrecomputedChunk = $$$config.stringToPrecomputedChunk;
 export const typedArrayToBinaryChunk = $$$config.typedArrayToBinaryChunk;
-export const clonePrecomputedChunk = $$$config.clonePrecomputedChunk;
 export const byteLengthOfChunk = $$$config.byteLengthOfChunk;
 export const byteLengthOfBinaryChunk = $$$config.byteLengthOfBinaryChunk;
 export const createFastHash = $$$config.createFastHash;