One technique to capture static and runtime chunks while rendering as fast as possible #85163
base: canary
app-render.tsx
@@ -178,7 +178,7 @@ import {
   workUnitAsyncStorage,
   type PrerenderStore,
 } from './work-unit-async-storage.external'
-import { consoleAsyncStorage } from './console-async-storage.external'
+// import { consoleAsyncStorage } from './console-async-storage.external'
 import { CacheSignal } from './cache-signal'
 import { getTracedMetadata } from '../lib/trace/utils'
 import { InvariantError } from '../../shared/lib/invariant-error'
@@ -2454,7 +2454,7 @@ async function renderToStream(
     // We only have a Prerender environment for projects opted into cacheComponents
     cacheComponents
   ) {
-    const [resolveValidation, validationOutlet] = createValidationOutlet()
+    // const [resolveValidation, validationOutlet] = createValidationOutlet()
     let debugChannel: DebugChannelPair | undefined
     const getPayload = async (
       // eslint-disable-next-line @typescript-eslint/no-shadow
@@ -2472,7 +2472,7 @@ async function renderToStream(
       // even if we end up discarding a render and restarting,
       // because we're not going to wait for the stream to complete,
       // so leaving the validation unresolved is fine.
-      payload._validation = validationOutlet
+      // payload._validation = validationOutlet

       if (isBypassingCachesInDev(renderOpts, requestStore)) {
         // Mark the RSC payload to indicate that caches were bypassed in dev.
@@ -2545,17 +2545,17 @@ async function renderToStream(
           // TODO(restart-on-cache-miss):
           // This can probably be optimized to do less work,
           // because we've already made sure that we have warm caches.
-          consoleAsyncStorage.run(
-            { dim: true },
-            spawnDynamicValidationInDev,
-            resolveValidation,
-            tree,
-            ctx,
-            res.statusCode === 404,
-            clientReferenceManifest,
-            requestStore,
-            devValidatingFallbackParams
-          )
+          // consoleAsyncStorage.run(
+          //   { dim: true },
+          //   spawnDynamicValidationInDev,
+          //   resolveValidation,
+          //   tree,
+          //   ctx,
+          //   res.statusCode === 404,
+          //   clientReferenceManifest,
+          //   requestStore,
+          //   devValidatingFallbackParams
+          // )
         } else {
           // This is a dynamic render. We don't do dynamic tracking because we're not prerendering
           const RSCPayload: RSCPayload & RSCPayloadDevProperties =
@@ -3044,7 +3044,7 @@ async function renderWithRestartOnCacheMissInDev(
     // then we'll only use this render for filling caches.
     // We won't advance the stage, and thus leave dynamic APIs hanging,
     // because they won't be cached anyway, so it'd be wasted work.
-    if (maybeStream === null || cacheSignal.hasPendingReads()) {
+    if (true || maybeStream === null || cacheSignal.hasPendingReads()) {
Suggested change: remove the true || so the original condition is restored.

A debug condition forces the cache-miss path unconditionally in renderWithRestartOnCacheMissInDev().

What fails: The condition at line 3047 in app-render.tsx.

How to reproduce: Any render in development mode with cacheComponents enabled.

// Lines 3044-3050 in app-render.tsx - BEFORE FIX
if (true || maybeStream === null || cacheSignal.hasPendingReads()) {
  return null
}
// Should be:
if (maybeStream === null || cacheSignal.hasPendingReads()) {
  return null
}

Result: Every render is treated as a cache-miss scenario (returns null), preventing the stage controller from advancing to the Dynamic stage even when no cache misses occur.

Expected: According to the comments and logic flow, when there are no cache misses (both checks are false), the render should not bail out here, so the stage controller can advance.

Evidence: Git history shows this debug code was added in commit c6a2889 ("One technique to capture static and runtime chunks while rendering as fast as possible"), where the condition was explicitly changed from the original cache-based check.
       return null
     }
@@ -3108,6 +3108,9 @@ async function renderWithRestartOnCacheMissInDev(
   // We're not using it, so we need to create a new one.
   debugChannel = setReactDebugChannel && createDebugChannel()

+  const staticChunks: Array<Uint8Array> = []
+  const runtimeChunks: Array<Uint8Array> = []
+
   const finalRscPayload = await getPayload(requestStore)
   const finalServerStream = await workUnitAsyncStorage.run(requestStore, () =>
     pipelineInSequentialTasks(
@@ -3124,19 +3127,66 @@ async function renderWithRestartOnCacheMissInDev(
           }
         )
       },
-      (stream) => {
-        // Runtime stage
-        finalStageController.advanceStage(RenderStage.Runtime)
-        return stream
+      async (stream) => {
+        const [continuationStream, staticStream] = stream.tee()
did you benchmark this? this would likely only be faster if you can move it one layer up, to access the chunks before they get pushed. otherwise this will likely create a large number of promises that make it slower
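A minimal sketch of what avoiding the second reader could look like, assuming the concern is tee() plus a dedicated read loop: record chunks as they flow through a single pass via an identity TransformStream. The names captureChunks and collected are illustrative, not part of the Next.js codebase, and the reviewer's "one layer up" may instead mean hooking the point where React pushes chunks into the stream, which would need changes outside this callback.

// Sketch: record each chunk as it passes through an identity TransformStream,
// so no second branch or reader loop is needed. TransformStream is the web
// streams API (global in Node 18+). Hypothetical helper, not the PR's code.
function captureChunks(
  source: ReadableStream<Uint8Array>,
  collected: Array<Uint8Array>
): ReadableStream<Uint8Array> {
  return source.pipeThrough(
    new TransformStream<Uint8Array, Uint8Array>({
      transform(chunk, controller) {
        collected.push(chunk) // record the chunk...
        controller.enqueue(chunk) // ...and forward it unchanged downstream
      },
    })
  )
}

Whether this is actually cheaper than tee() would need the benchmark the reviewer asks for; it still does per-chunk work, it just avoids the second reader loop.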
+
+        const reader = staticStream.getReader()
+        Promise.resolve().then(() => {
+          process.nextTick(() => {
+            // Runtime stage
+            reader.releaseLock()
+            finalStageController.advanceStage(RenderStage.Runtime)
+          })
+        })
+
+        try {
+          while (true) {
+            const { done, value } = await reader.read()
+            if (done) {
+              break
+            }
+            staticChunks.push(value)
+          }
+        } catch (e) {
+          // When we release the lock we may reject the read
+        }
+        return continuationStream
       },
-      (stream) => {
-        // Dynamic stage
-        finalStageController.advanceStage(RenderStage.Dynamic)
-        return stream
+      async (stream) => {
+        // We make a very important but sublte assumption here
There's a typo in the comment: "sublte" should be "subtle".

Suggested change:
// We make a very important but subtle assumption here

Spotted by Graphite Agent
+        // The task above returns a promise but it will still return in a microtask
+        // This is true because we schedule a releaseLock in a nextTick from
+        // the microtask queue which will cause the first non-microtasky read
+        // to reject in a microtask allowing the whole job to wrap up
+        const [continuationStream, runtimeStream] = (await stream).tee()
+        const reader = runtimeStream.getReader()
+        Promise.resolve().then(() => {
+          process.nextTick(() => {
+            // Dynamic stage
+            reader.releaseLock()
+            finalStageController.advanceStage(RenderStage.Dynamic)
+          })
+        })
+
+        try {
+          while (true) {
+            const { done, value } = await reader.read()
+            if (done) {
+              break
+            }
+            runtimeChunks.push(value)
+          }
+        } catch (e) {
+          // When we release the lock we may reject the read
+        }
+        return continuationStream
+      }
     )
   )

+  let dec = new TextDecoder()
+  console.log({ staticChunks: staticChunks.map((c) => dec.decode(c)) })
+  console.log({ runtimeChunks: runtimeChunks.map((c) => dec.decode(c)) })
Comment on lines +3187 to +3188

Suggested change: wrap the debug console.log statements in a debug-flag check.

Debug console.log statements are present in production code, logging decoded stream chunks on every render.

Analysis: Unguarded debug console.log statements in renderWithRestartOnCacheMissInDev()

What fails: Lines 3187-3188 in app-render.tsx.

How to reproduce:

// In renderWithRestartOnCacheMissInDev() after final render completes (line ~3186):
let dec = new TextDecoder()
console.log({ staticChunks: staticChunks.map((c) => dec.decode(c)) })
console.log({ runtimeChunks: runtimeChunks.map((c) => dec.decode(c)) })

When this code path executes during development (process.env.NODE_ENV === 'development' with cacheComponents enabled), these console.log statements execute unconditionally.

Result: Debug logging is output to the console on every development render. While this code is development-only (protected by runtime guards at the call site), the pattern in the same file shows that debug logs should be wrapped in a debug-flag check.

Expected behavior: Debug console.log statements should follow the established pattern in the codebase and be wrapped in a debug-flag check.

Fix applied: Wrapped the console.log statements in a debug-flag check.
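A minimal sketch of the guarded version described above. The exact flag the analysis refers to was cut off, so NEXT_PRIVATE_DEBUG_CHUNKS is a placeholder name, not an existing Next.js flag:

// Hypothetical guard: the real flag name used in the codebase is not shown above.
if (process.env.NEXT_PRIVATE_DEBUG_CHUNKS) {
  const dec = new TextDecoder()
  console.log({ staticChunks: staticChunks.map((c) => dec.decode(c)) })
  console.log({ runtimeChunks: runtimeChunks.map((c) => dec.decode(c)) })
}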
+
   if (process.env.NODE_ENV === 'development' && setCacheStatus) {
     setCacheStatus('filled', htmlRequestId, requestId)
   }
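The timing trick this diff relies on can be hard to see inside the Next.js internals, so here is a self-contained sketch of the same pattern under simplified assumptions: tee the stream, drain one branch into an array, and hop from a microtask into process.nextTick to release the reader lock so the stage boundary is reached without waiting for the whole stream. All names are illustrative; this is not the PR's actual helper.

// Node's web-streams types; ReadableStream is also a global in Node 18+.
import { ReadableStream } from 'node:stream/web'

// Capture chunks from one stage of a stream while letting the pipeline continue.
async function captureStage(
  stream: ReadableStream<Uint8Array>,
  chunks: Array<Uint8Array>,
  advanceStage: () => void
): Promise<ReadableStream<Uint8Array>> {
  // One branch continues down the pipeline, the other is drained into `chunks`.
  const [continuation, capture] = stream.tee()
  const reader = capture.getReader()

  // Schedule the lock release from a microtask into a macrotask (nextTick):
  // any read still pending at that point rejects, so the loop below wraps up
  // instead of waiting on chunks that belong to the next stage.
  Promise.resolve().then(() => {
    process.nextTick(() => {
      reader.releaseLock()
      advanceStage()
    })
  })

  try {
    while (true) {
      const { done, value } = await reader.read()
      if (done) break
      chunks.push(value)
    }
  } catch {
    // Releasing the lock rejects the in-flight read; that is expected here.
  }
  return continuation
}

Note that in this sketch the capture branch is released but never cancelled, so tee() keeps buffering chunks for it while the continuation is consumed; that buildup is worth weighing alongside the per-chunk promise overhead raised in the review.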
page.tsx
@@ -1,3 +1,26 @@
+import { cookies } from 'next/headers'
+import { Suspense } from 'react'
+
 export default function Page() {
-  return <p>hello world</p>
+  return (
+    <main>
+      <div>this is static</div>
+      <Suspense fallback="loading...">
+        <Dynamic />
+      </Suspense>
+      <Suspense fallback="loading...">
+        <Runtime />
+      </Suspense>
+    </main>
+  )
 }
+
+async function Dynamic() {
+  await new Promise((resolve) => setTimeout(resolve, 1000))
+  return <p>hello dynamic</p>
+}
+
+async function Runtime() {
+  await cookies()
+  return <p>hello runtime</p>
+}
next.config.js
@@ -1,6 +1,11 @@
 /**
  * @type {import('next').NextConfig}
  */
-const nextConfig = {}
+const nextConfig = {
+  cacheComponents: true,
+  experimental: {
+    // reactDebugChannel: true,
+  },
+}

 module.exports = nextConfig
The hardcoded true in the condition forces this branch to always execute, effectively bypassing the cache validation logic. This appears to be intentional for testing the new chunk capture implementation, but should be removed before merging to preserve the original cache-based conditional behavior. The surrounding code changes suggest this is part of a proof-of-concept for the streaming capture technique.

Spotted by Graphite Agent