Use an update queue for refreshes
Refreshes are easier than initial mounts because we have a mounted fiber
that we can attach the cache to. We don't need to rely on clever pooling
tricks; they're just normal updates.

More importantly, we're not at risk of dropping requests/data if we run
out of lanes, which is especially important for refreshes because they
can contain data seeded from a server mutation response; we cannot
afford to accidentally evict it.
acdlite committed Dec 15, 2020
1 parent bba8139 commit b36c421
Showing 8 changed files with 137 additions and 103 deletions.
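
The core idea, in miniature: a refresh is now an ordinary update enqueued on the cache boundary's fiber, and any seeded data rides along in the update payload instead of being transferred to a spawned lane. A rough sketch using the reconciler helpers named in the diffs below (the wrapper function and the {cache} payload shape are illustrative, not the exact code):

// Sketch only: a refresh is just an update whose payload carries the next
// cache, so it is rebased like any other update instead of being evicted
// when we run out of lanes.
function scheduleRefresh(providerFiber, lane, eventTime, seededCache) {
  scheduleUpdateOnFiber(providerFiber, lane, eventTime);
  const update = createUpdate(eventTime, lane);
  update.payload = {cache: seededCache}; // non-root boundary payload shape
  enqueueUpdate(providerFiber, update);
}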
81 changes: 48 additions & 33 deletions packages/react-reconciler/src/ReactFiberBeginWork.new.js
@@ -23,7 +23,8 @@ import type {
OffscreenProps,
OffscreenState,
} from './ReactFiberOffscreenComponent';
import type {CacheInstance} from './ReactFiberCacheComponent';
import type {CacheInstance, Cache} from './ReactFiberCacheComponent';
import type {UpdateQueue} from './ReactUpdateQueue.new';

import checkPropTypes from 'shared/checkPropTypes';

@@ -671,9 +672,11 @@ function updateCacheComponent(
? CacheContext._currentValue
: CacheContext._currentValue2;

let ownCacheInstance: CacheInstance | null = null;
let cacheInstance: CacheInstance | null = null;
if (current === null) {
// This is a newly mounted component. Request a fresh cache.
// TODO: Fast path when parent cache component is also a new mount? We can
// check `parentCacheInstance.provider.alternate`.
const root = getWorkInProgressRoot();
invariant(
root !== null,
@@ -684,62 +687,74 @@
// This may be the same as the parent cache, like if the current render
// spawned from a previous render that already committed. Otherwise, this
// is the root of a cache consistency boundary.
let initialState;
if (freshCache !== parentCacheInstance.cache) {
ownCacheInstance = {
cacheInstance = {
cache: freshCache,
provider: workInProgress,
};
pushProvider(workInProgress, CacheContext, ownCacheInstance);
initialState = {
cache: freshCache,
};
pushProvider(workInProgress, CacheContext, cacheInstance);
// No need to propagate the refresh, because this is a new tree.
} else {
// Use the parent cache
ownCacheInstance = null;
cacheInstance = null;
initialState = {
cache: null,
};
}
// Initialize an update queue. We use this for refreshes.
workInProgress.memoizedState = initialState;
initializeUpdateQueue(workInProgress);
} else {
// This component already mounted.
if (includesSomeLane(renderLanes, updateLanes)) {
// A refresh was scheduled.
const root = getWorkInProgressRoot();
invariant(
root !== null,
'Expected a work-in-progress root. This is a bug in React. Please ' +
'file an issue.',
);
const freshCache = requestFreshCache(root, renderLanes);
if (
parentCacheInstance === null ||
freshCache !== parentCacheInstance.cache
) {
ownCacheInstance = {
cache: freshCache,
// A refresh was scheduled. If it was a refresh on this fiber, then we
// will have an update in the queue. Otherwise, it must have been an
// update on a parent, propagated via context.
cloneUpdateQueue(current, workInProgress);
processUpdateQueue(workInProgress, null, null, renderLanes);
const prevCache: Cache | null = current.memoizedState.cache;
const nextCache: Cache | null = workInProgress.memoizedState.cache;

if (nextCache !== prevCache && nextCache !== null) {
// Received a refresh.
cacheInstance = {
cache: nextCache,
provider: workInProgress,
};
pushProvider(workInProgress, CacheContext, ownCacheInstance);
pushProvider(workInProgress, CacheContext, cacheInstance);
// Refreshes propagate through the entire subtree. The refreshed cache
// will override nested caches.
propagateCacheRefresh(workInProgress, renderLanes);
} else {
// The fresh cache is the same as the parent cache.
ownCacheInstance = null;
// A parent cache boundary refreshed. So we can use the cache context.
cacheInstance = null;

// If the update queue is empty, disconnect the old cache from the tree
// so it can be garbage collected.
if (workInProgress.lanes === NoLanes) {
const updateQueue: UpdateQueue<any> = (workInProgress.updateQueue: any);
workInProgress.memoizedState = updateQueue.baseState = {cache: null};
}
}
} else {
// Reuse the memoized cache.
const prevCacheInstance: CacheInstance | null = current.memoizedState;
if (prevCacheInstance !== null) {
ownCacheInstance = prevCacheInstance;
cacheInstance = current.stateNode;
if (cacheInstance !== null) {
// There was no refresh, so no need to propagate to nested boundaries.
pushProvider(workInProgress, CacheContext, ownCacheInstance);
} else {
ownCacheInstance = null;
pushProvider(workInProgress, CacheContext, cacheInstance);
}
}
}

// If this CacheComponent is the root of its tree, then `memoizedState` will
// point to a cache object. Otherwise, a null state indicates that this
// If this CacheComponent is the root of its tree, then `stateNode` will
// point to a cache instance. Otherwise, a null instance indicates that this
// CacheComponent inherits from a parent boundary. We can use this to infer
// whether to push/pop the cache context.
workInProgress.memoizedState = ownCacheInstance;
workInProgress.stateNode = cacheInstance;

const nextChildren = workInProgress.pendingProps.children;
reconcileChildren(current, workInProgress, nextChildren, renderLanes);
@@ -3349,11 +3364,11 @@ function beginWork(
}
case CacheComponent: {
if (enableCache) {
const ownCacheInstance: CacheInstance | null =
workInProgress.memoizedState;
const ownCacheInstance: CacheInstance | null = current.stateNode;
if (ownCacheInstance !== null) {
pushProvider(workInProgress, CacheContext, ownCacheInstance);
}
workInProgress.stateNode = ownCacheInstance;
}
break;
}
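Condensing the branching above: on mount, updateCacheComponent decides whether the boundary owns a fresh cache or inherits the parent's, records that decision as {cache} state, and initializes an update queue; on update it processes the queue and compares the previous and next cache to detect a refresh. A hypothetical restatement of that control flow, omitting the root invariant and dev-only code:

// Condensed sketch (assumptions: boundary state is {cache: Cache | null};
// the pushed CacheInstance lives on fiber.stateNode).
let cacheInstance = null;
if (current === null) {
  // Mount: own a fresh cache only if it differs from the parent's cache.
  const ownsCache = freshCache !== parentCacheInstance.cache;
  workInProgress.memoizedState = {cache: ownsCache ? freshCache : null};
  initializeUpdateQueue(workInProgress);
  if (ownsCache) {
    cacheInstance = {cache: freshCache, provider: workInProgress};
  }
} else if (includesSomeLane(renderLanes, updateLanes)) {
  // A refresh was scheduled here or on a parent; process it like any update.
  cloneUpdateQueue(current, workInProgress);
  processUpdateQueue(workInProgress, null, null, renderLanes);
  const prevCache = current.memoizedState.cache;
  const nextCache = workInProgress.memoizedState.cache;
  if (nextCache !== prevCache && nextCache !== null) {
    // This boundary refreshed: provide the new cache and override nested caches.
    cacheInstance = {cache: nextCache, provider: workInProgress};
    propagateCacheRefresh(workInProgress, renderLanes);
  }
  // Otherwise a parent refreshed; once this queue is empty the old cache is
  // disconnected ({cache: null}) so it can be garbage collected.
} else {
  // Nothing scheduled: reuse whatever instance the current fiber provided.
  cacheInstance = current.stateNode;
}
if (cacheInstance !== null) {
  pushProvider(workInProgress, CacheContext, cacheInstance);
}
workInProgress.stateNode = cacheInstance;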
81 changes: 48 additions & 33 deletions packages/react-reconciler/src/ReactFiberBeginWork.old.js
@@ -23,7 +23,8 @@ import type {
OffscreenProps,
OffscreenState,
} from './ReactFiberOffscreenComponent';
import type {CacheInstance} from './ReactFiberCacheComponent';
import type {CacheInstance, Cache} from './ReactFiberCacheComponent';
import type {UpdateQueue} from './ReactUpdateQueue.new';

import checkPropTypes from 'shared/checkPropTypes';

@@ -671,9 +672,11 @@ function updateCacheComponent(
? CacheContext._currentValue
: CacheContext._currentValue2;

let ownCacheInstance: CacheInstance | null = null;
let cacheInstance: CacheInstance | null = null;
if (current === null) {
// This is a newly mounted component. Request a fresh cache.
// TODO: Fast path when parent cache component is also a new mount? We can
// check `parentCacheInstance.provider.alternate`.
const root = getWorkInProgressRoot();
invariant(
root !== null,
@@ -684,62 +687,74 @@
// This may be the same as the parent cache, like if the current render
// spawned from a previous render that already committed. Otherwise, this
// is the root of a cache consistency boundary.
let initialState;
if (freshCache !== parentCacheInstance.cache) {
ownCacheInstance = {
cacheInstance = {
cache: freshCache,
provider: workInProgress,
};
pushProvider(workInProgress, CacheContext, ownCacheInstance);
initialState = {
cache: freshCache,
};
pushProvider(workInProgress, CacheContext, cacheInstance);
// No need to propagate the refresh, because this is a new tree.
} else {
// Use the parent cache
ownCacheInstance = null;
cacheInstance = null;
initialState = {
cache: null,
};
}
// Initialize an update queue. We use this for refreshes.
workInProgress.memoizedState = initialState;
initializeUpdateQueue(workInProgress);
} else {
// This component already mounted.
if (includesSomeLane(renderLanes, updateLanes)) {
// A refresh was scheduled.
const root = getWorkInProgressRoot();
invariant(
root !== null,
'Expected a work-in-progress root. This is a bug in React. Please ' +
'file an issue.',
);
const freshCache = requestFreshCache(root, renderLanes);
if (
parentCacheInstance === null ||
freshCache !== parentCacheInstance.cache
) {
ownCacheInstance = {
cache: freshCache,
// A refresh was scheduled. If it was a refresh on this fiber, then we
// will have an update in the queue. Otherwise, it must have been an
// update on a parent, propagated via context.
cloneUpdateQueue(current, workInProgress);
processUpdateQueue(workInProgress, null, null, renderLanes);
const prevCache: Cache | null = current.memoizedState.cache;
const nextCache: Cache | null = workInProgress.memoizedState.cache;

if (nextCache !== prevCache && nextCache !== null) {
// Received a refresh.
cacheInstance = {
cache: nextCache,
provider: workInProgress,
};
pushProvider(workInProgress, CacheContext, ownCacheInstance);
pushProvider(workInProgress, CacheContext, cacheInstance);
// Refreshes propagate through the entire subtree. The refreshed cache
// will override nested caches.
propagateCacheRefresh(workInProgress, renderLanes);
} else {
// The fresh cache is the same as the parent cache.
ownCacheInstance = null;
// A parent cache boundary refreshed. So we can use the cache context.
cacheInstance = null;

// If the update queue is empty, disconnect the old cache from the tree
// so it can be garbage collected.
if (workInProgress.lanes === NoLanes) {
const updateQueue: UpdateQueue<any> = (workInProgress.updateQueue: any);
workInProgress.memoizedState = updateQueue.baseState = {cache: null};
}
}
} else {
// Reuse the memoized cache.
const prevCacheInstance: CacheInstance | null = current.memoizedState;
if (prevCacheInstance !== null) {
ownCacheInstance = prevCacheInstance;
cacheInstance = current.stateNode;
if (cacheInstance !== null) {
// There was no refresh, so no need to propagate to nested boundaries.
pushProvider(workInProgress, CacheContext, ownCacheInstance);
} else {
ownCacheInstance = null;
pushProvider(workInProgress, CacheContext, cacheInstance);
}
}
}

// If this CacheComponent is the root of its tree, then `memoizedState` will
// point to a cache object. Otherwise, a null state indicates that this
// If this CacheComponent is the root of its tree, then `stateNode` will
// point to a cache instance. Otherwise, a null instance indicates that this
// CacheComponent inherits from a parent boundary. We can use this to infer
// whether to push/pop the cache context.
workInProgress.memoizedState = ownCacheInstance;
workInProgress.stateNode = cacheInstance;

const nextChildren = workInProgress.pendingProps.children;
reconcileChildren(current, workInProgress, nextChildren, renderLanes);
@@ -3349,11 +3364,11 @@ function beginWork(
}
case CacheComponent: {
if (enableCache) {
const ownCacheInstance: CacheInstance | null =
workInProgress.memoizedState;
const ownCacheInstance: CacheInstance | null = current.stateNode;
if (ownCacheInstance !== null) {
pushProvider(workInProgress, CacheContext, ownCacheInstance);
}
workInProgress.stateNode = ownCacheInstance;
}
break;
}
3 changes: 1 addition & 2 deletions packages/react-reconciler/src/ReactFiberCompleteWork.new.js
@@ -1492,8 +1492,7 @@ function completeWork(
}
case CacheComponent: {
if (enableCache) {
const ownCacheInstance: CacheInstance | null =
workInProgress.memoizedState;
const ownCacheInstance: CacheInstance | null = workInProgress.stateNode;
if (ownCacheInstance !== null) {
// This is a cache provider.
popProvider(CacheContext, workInProgress);
3 changes: 1 addition & 2 deletions packages/react-reconciler/src/ReactFiberCompleteWork.old.js
@@ -1492,8 +1492,7 @@ function completeWork(
}
case CacheComponent: {
if (enableCache) {
const ownCacheInstance: CacheInstance | null =
workInProgress.memoizedState;
const ownCacheInstance: CacheInstance | null = workInProgress.stateNode;
if (ownCacheInstance !== null) {
// This is a cache provider.
popProvider(CacheContext, workInProgress);
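Both CompleteWork changes make the pop decision mirror the begin phase: whether this fiber pushed the cache context is now read from stateNode (the CacheInstance it provided) instead of memoizedState, which is reserved for the update-queue-managed {cache} state. A minimal illustration of the pairing (sketch, not the full switch cases):

// Begin phase (push) — only when this boundary provides its own cache:
if (cacheInstance !== null) {
  pushProvider(workInProgress, CacheContext, cacheInstance);
}
workInProgress.stateNode = cacheInstance;

// Complete phase (pop) — only if the matching push happened:
const ownCacheInstance = workInProgress.stateNode;
if (ownCacheInstance !== null) {
  popProvider(CacheContext, workInProgress);
}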
29 changes: 15 additions & 14 deletions packages/react-reconciler/src/ReactFiberHooks.new.js
@@ -46,7 +46,6 @@ import {
setCurrentUpdateLanePriority,
higherLanePriority,
DefaultLanePriority,
transferCacheToSpawnedLane,
} from './ReactFiberLane.new';
import {readContext} from './ReactFiberNewContext.new';
import {
@@ -1744,28 +1743,30 @@ function refreshCache<T>(
// TODO: Does Cache work in legacy mode? Should decide and write a test.
const root = scheduleUpdateOnFiber(provider, lane, eventTime);

let seededCache = null;
const seededCache = new Map();
if (seedKey !== null && seedKey !== undefined && root !== null) {
// TODO: Warn if wrong type
seededCache = new Map([[seedKey, seedValue]]);
transferCacheToSpawnedLane(root, seededCache, lane);
// Seed the cache with the value passed by the caller. This could be from
// a server mutation, or it could be a streaming response.
seededCache.set(seedKey, seedValue);
}

// Schedule an update on the cache boundary to trigger a refresh.
const refreshUpdate = createUpdate(eventTime, lane);
let payload;
if (provider.tag === HostRoot) {
const refreshUpdate = createUpdate(eventTime, lane);
refreshUpdate.payload = {
payload = {
cacheInstance: {
provider: provider,
cache:
// For the root cache, we won't bother to lazily initialize the
map. Seed an empty one. This saves us the trouble of having
// to use an updater function. Maybe we should use this approach
// for non-root refreshes, too.
seededCache !== null ? seededCache : new Map(),
cache: seededCache,
},
};
enqueueUpdate(provider, refreshUpdate);
} else {
payload = {
cache: seededCache,
};
}
refreshUpdate.payload = payload;
enqueueUpdate(provider, refreshUpdate);
} finally {
ReactCurrentBatchConfig.transition = prevTransition;
}
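For context, a hypothetical caller of refreshCache through the experimental refresh hook: the optional seed key/value (for example, a server mutation response) ends up in the seeded Map above, which now travels inside the update payload rather than in per-lane pooled storage. The hook and helper names below are assumptions for illustration, not part of this diff:

import * as React from 'react';
// Assumed experimental export that returns a bound refreshCache; the exact
// public name may differ at this commit.
import {unstable_useCacheRefresh} from 'react';

function SaveButton({draft, saveToServer}) {
  const refresh = unstable_useCacheRefresh();
  return (
    <button
      onClick={async () => {
        const saved = await saveToServer(draft);
        // Seed the refreshed cache with the mutation response so the
        // resulting re-render does not refetch data the server just returned.
        refresh(saved.key, saved);
      }}>
      Save
    </button>
  );
}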