Fix: Don't flush discrete at end of batchedUpdates (#21229)
The outermost `batchedUpdates` call flushes pending sync updates at the
end. This was intended for legacy sync mode, but it also happens to
flush discrete updates in concurrent mode.

Instead, we should only flush sync updates at the end of
`batchedUpdates` for legacy roots. Discrete sync updates can wait to
flush in the microtask.

`discreteUpdates` has the same issue, which is how I originally noticed
this, but I'll change that one in a separate commit since it requires
updating a few (no longer relevant) internal tests.
acdlite authored Apr 21, 2021
1 parent 89847bf commit a155860
Showing 8 changed files with 198 additions and 71 deletions.
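
Before the file-by-file diff, a toy model of the behavioral change described in the commit message. This is purely illustrative (nothing below is React internals): "legacy" work flushes at the end of the outermost batch, while "concurrent" discrete work stays queued and flushes in a microtask, mirroring flushSyncCallbacksOnlyInLegacyMode and scheduleMicrotask(flushSyncCallbacks) in the diff below.

// Toy model only; the real code tracks executionContext bit flags and
// schedules the microtask from ensureRootIsScheduled, not from the batch
// wrapper itself.
const pending = {legacy: [], concurrent: []};

function scheduleToyUpdate(kind, work) {
  pending[kind].push(work);
  if (kind === 'concurrent') {
    // cf. scheduleMicrotask(flushSyncCallbacks) for concurrent roots.
    queueMicrotask(() => pending.concurrent.splice(0).forEach(w => w()));
  }
}

function toyBatchedUpdates(fn) {
  try {
    fn();
  } finally {
    // cf. flushSyncCallbacksOnlyInLegacyMode(): only legacy work flushes here.
    pending.legacy.splice(0).forEach(w => w());
  }
}

toyBatchedUpdates(() => {
  scheduleToyUpdate('legacy', () => console.log('legacy update flushed'));
  scheduleToyUpdate('concurrent', () => console.log('concurrent update flushed'));
});
console.log('batch finished');
// Logs "legacy update flushed", "batch finished", then
// "concurrent update flushed" once the microtask runs.
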
@@ -11,6 +11,7 @@ describe('StoreStressConcurrent', () => {
let React;
let ReactDOM;
let act;
let actAsync;
let bridge;
let store;
let print;
@@ -23,6 +24,9 @@ describe('StoreStressConcurrent', () => {
React = require('react');
ReactDOM = require('react-dom');
act = require('./utils').act;
// TODO: Figure out recommendation for concurrent mode tests, then replace
// this helper with the real thing.
actAsync = require('./utils').actAsync;

print = require('./storeSerializer').print;
});
@@ -758,7 +762,7 @@ describe('StoreStressConcurrent', () => {

// Force fallback.
expect(print(store)).toEqual(snapshots[i]);
act(() => {
await actAsync(async () => {
bridge.send('overrideSuspense', {
id: suspenseID,
rendererID: store.getRendererIDForElement(suspenseID),
@@ -768,7 +772,7 @@ describe('StoreStressConcurrent', () => {
expect(print(store)).toEqual(snapshots[j]);

// Stop forcing fallback.
act(() => {
await actAsync(async () => {
bridge.send('overrideSuspense', {
id: suspenseID,
rendererID: store.getRendererIDForElement(suspenseID),
@@ -818,7 +822,7 @@ describe('StoreStressConcurrent', () => {
expect(print(store)).toEqual(snapshots[j]);

// Stop forcing fallback. This reverts to primary content.
act(() => {
await actAsync(async () => {
bridge.send('overrideSuspense', {
id: suspenseID,
rendererID: store.getRendererIDForElement(suspenseID),
@@ -829,13 +833,13 @@ describe('StoreStressConcurrent', () => {
expect(print(store)).toEqual(snapshots[i]);

// Clean up after every iteration.
act(() => root.unmount());
await actAsync(async () => root.unmount());
expect(print(store)).toBe('');
}
}
});

it('should handle a stress test for Suspense without type change (Concurrent Mode)', () => {
it('should handle a stress test for Suspense without type change (Concurrent Mode)', async () => {
const A = () => 'a';
const B = () => 'b';
const C = () => 'c';
@@ -1294,7 +1298,7 @@ describe('StoreStressConcurrent', () => {

// Force fallback.
expect(print(store)).toEqual(snapshots[i]);
act(() => {
await actAsync(async () => {
bridge.send('overrideSuspense', {
id: suspenseID,
rendererID: store.getRendererIDForElement(suspenseID),
@@ -1304,7 +1308,7 @@ describe('StoreStressConcurrent', () => {
expect(print(store)).toEqual(fallbackSnapshots[j]);

// Stop forcing fallback.
act(() => {
await actAsync(async () => {
bridge.send('overrideSuspense', {
id: suspenseID,
rendererID: store.getRendererIDForElement(suspenseID),
@@ -1354,7 +1358,7 @@ describe('StoreStressConcurrent', () => {
expect(print(store)).toEqual(fallbackSnapshots[j]);

// Stop forcing fallback. This reverts to primary content.
act(() => {
await actAsync(async () => {
bridge.send('overrideSuspense', {
id: suspenseID,
rendererID: store.getRendererIDForElement(suspenseID),
@@ -371,4 +371,54 @@ describe('ReactDOMNativeEventHeuristic-test', () => {
);
}
});

// @gate experimental
it('should not flush discrete events at the end of outermost batchedUpdates', async () => {
const root = ReactDOM.unstable_createRoot(container);

let target;
function Foo() {
const [count, setCount] = React.useState(0);
return (
<div
ref={el => {
target = el;
if (target !== null) {
el.onclick = () => {
ReactDOM.unstable_batchedUpdates(() => {
setCount(count + 1);
});
Scheduler.unstable_yieldValue(
container.textContent + ' [after batchedUpdates]',
);
};
}
}}>
Count: {count}
</div>
);
}

await act(async () => {
root.render(<Foo />);
});
expect(container.textContent).toEqual('Count: 0');

const pressEvent = document.createEvent('Event');
pressEvent.initEvent('click', true, true);
dispatchAndSetCurrentEvent(target, pressEvent);

expect(Scheduler).toHaveYielded(['Count: 0 [after batchedUpdates]']);
// TODO: There's a `flushDiscreteUpdates` call at the end of the event
// delegation listener that gets called even if no React event handlers are
// fired. Once that is removed, this will be 0, not 1.
// expect(container.textContent).toEqual('Count: 0');
expect(container.textContent).toEqual('Count: 1');

// Intentionally not using `act` so we can observe in between the click
// event and the microtask, without batching.
await null;

expect(container.textContent).toEqual('Count: 1');
});
});
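
The `await null` near the end of the new test relies only on standard microtask ordering: with no `act` wrapper flushing work, anything queued with `queueMicrotask` (roughly what a DOM host's `scheduleMicrotask` amounts to) runs before the line after the `await` resumes. A standalone sketch of that ordering, with no React involved:

// Plain JavaScript; shows why a single `await null` is enough to let a
// previously queued microtask (in the test, the sync-work flush) run first.
async function demo() {
  const log = [];
  queueMicrotask(() => log.push('microtask ran'));
  log.push('before await null');
  await null; // yields to the microtask queue
  log.push('after await null');
  console.log(log); // ['before await null', 'microtask ran', 'after await null']
}
demo();
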
22 changes: 20 additions & 2 deletions packages/react-reconciler/src/ReactFiberSyncTaskQueue.new.js
@@ -17,6 +17,7 @@ import {
import {ImmediatePriority, scheduleCallback} from './Scheduler';

let syncQueue: Array<SchedulerCallback> | null = null;
let includesLegacySyncCallbacks: boolean = false;
let isFlushingSyncQueue: boolean = false;

export function scheduleSyncCallback(callback: SchedulerCallback) {
@@ -31,7 +32,23 @@ export function scheduleSyncCallback(callback: SchedulerCallback) {
}
}

export function flushSyncCallbackQueue() {
export function scheduleLegacySyncCallback(callback: SchedulerCallback) {
includesLegacySyncCallbacks = true;
scheduleSyncCallback(callback);
}

export function flushSyncCallbacksOnlyInLegacyMode() {
// Only flushes the queue if there's a legacy sync callback scheduled.
// TODO: There's only a single type of callback: performSyncWorkOnRoot. So
// it might make more sense for the queue to be a list of roots instead of a
// list of generic callbacks. Then we can have two: one for legacy roots, one
// for concurrent roots. And this method would only flush the legacy ones.
if (includesLegacySyncCallbacks) {
flushSyncCallbacks();
}
}

export function flushSyncCallbacks() {
if (!isFlushingSyncQueue && syncQueue !== null) {
// Prevent re-entrancy.
isFlushingSyncQueue = true;
@@ -50,13 +67,14 @@ export function flushSyncCallbackQueue() {
} while (callback !== null);
}
syncQueue = null;
includesLegacySyncCallbacks = false;
} catch (error) {
// If something throws, leave the remaining callbacks on the queue.
if (syncQueue !== null) {
syncQueue = syncQueue.slice(i + 1);
}
// Resume flushing in the next tick
scheduleCallback(ImmediatePriority, flushSyncCallbackQueue);
scheduleCallback(ImmediatePriority, flushSyncCallbacks);
throw error;
} finally {
setCurrentUpdatePriority(previousUpdatePriority);
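
A rough usage sketch of the API added above, assuming its exports are imported from this module. `isLegacy` and the logging callbacks are stand-ins for `root.tag === LegacyRoot` and `performSyncWorkOnRoot.bind(null, root)` in the work-loop changes further down, and `queueMicrotask` stands in for the host config's `scheduleMicrotask`:

// Hypothetical caller, not actual React code.
import {
  scheduleSyncCallback,
  scheduleLegacySyncCallback,
  flushSyncCallbacks,
  flushSyncCallbacksOnlyInLegacyMode,
} from './ReactFiberSyncTaskQueue.new';

function scheduleSketch(isLegacy) {
  if (isLegacy) {
    // Marks the queue so the end of the outermost batch will flush it.
    scheduleLegacySyncCallback(() => console.log('legacy root rendered'));
  } else {
    // Queued only; a scheduled microtask calls flushSyncCallbacks later.
    scheduleSyncCallback(() => console.log('concurrent root rendered'));
    queueMicrotask(flushSyncCallbacks);
  }
  // At the end of a batch this is a no-op unless a legacy callback was queued.
  flushSyncCallbacksOnlyInLegacyMode();
}
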
22 changes: 20 additions & 2 deletions packages/react-reconciler/src/ReactFiberSyncTaskQueue.old.js
@@ -17,6 +17,7 @@ import {
import {ImmediatePriority, scheduleCallback} from './Scheduler';

let syncQueue: Array<SchedulerCallback> | null = null;
let includesLegacySyncCallbacks: boolean = false;
let isFlushingSyncQueue: boolean = false;

export function scheduleSyncCallback(callback: SchedulerCallback) {
@@ -31,7 +32,23 @@ export function scheduleSyncCallback(callback: SchedulerCallback) {
}
}

export function flushSyncCallbackQueue() {
export function scheduleLegacySyncCallback(callback: SchedulerCallback) {
includesLegacySyncCallbacks = true;
scheduleSyncCallback(callback);
}

export function flushSyncCallbacksOnlyInLegacyMode() {
// Only flushes the queue if there's a legacy sync callback scheduled.
// TODO: There's only a single type of callback: performSyncWorkOnRoot. So
// it might make more sense for the queue to be a list of roots instead of a
// list of generic callbacks. Then we can have two: one for legacy roots, one
// for concurrent roots. And this method would only flush the legacy ones.
if (includesLegacySyncCallbacks) {
flushSyncCallbacks();
}
}

export function flushSyncCallbacks() {
if (!isFlushingSyncQueue && syncQueue !== null) {
// Prevent re-entrancy.
isFlushingSyncQueue = true;
@@ -50,13 +67,14 @@ export function flushSyncCallbackQueue() {
} while (callback !== null);
}
syncQueue = null;
includesLegacySyncCallbacks = false;
} catch (error) {
// If something throws, leave the remaining callbacks on the queue.
if (syncQueue !== null) {
syncQueue = syncQueue.slice(i + 1);
}
// Resume flushing in the next tick
scheduleCallback(ImmediatePriority, flushSyncCallbackQueue);
scheduleCallback(ImmediatePriority, flushSyncCallbacks);
throw error;
} finally {
setCurrentUpdatePriority(previousUpdatePriority);
50 changes: 31 additions & 19 deletions packages/react-reconciler/src/ReactFiberWorkLoop.new.js
@@ -49,8 +49,10 @@ import {
IdlePriority as IdleSchedulerPriority,
} from './Scheduler';
import {
flushSyncCallbackQueue,
flushSyncCallbacks,
flushSyncCallbacksOnlyInLegacyMode,
scheduleSyncCallback,
scheduleLegacySyncCallback,
} from './ReactFiberSyncTaskQueue.new';
import {
NoFlags as NoHookEffect,
@@ -561,7 +563,7 @@ export function scheduleUpdateOnFiber(
// without immediately flushing it. We only do this for user-initiated
// updates, to preserve historical behavior of legacy mode.
resetRenderTimer();
flushSyncCallbackQueue();
flushSyncCallbacksOnlyInLegacyMode();
}
}
} else {
@@ -698,13 +700,17 @@ function ensureRootIsScheduled(root: FiberRoot, currentTime: number) {
if (newCallbackPriority === SyncLane) {
// Special case: Sync React callbacks are scheduled on a special
// internal queue
scheduleSyncCallback(performSyncWorkOnRoot.bind(null, root));
if (root.tag === LegacyRoot) {
scheduleLegacySyncCallback(performSyncWorkOnRoot.bind(null, root));
} else {
scheduleSyncCallback(performSyncWorkOnRoot.bind(null, root));
}
if (supportsMicrotasks) {
// Flush the queue in a microtask.
scheduleMicrotask(flushSyncCallbackQueue);
scheduleMicrotask(flushSyncCallbacks);
} else {
// Flush the queue in an Immediate task.
scheduleCallback(ImmediateSchedulerPriority, flushSyncCallbackQueue);
scheduleCallback(ImmediateSchedulerPriority, flushSyncCallbacks);
}
newCallbackNode = null;
} else {
@@ -1054,7 +1060,7 @@ export function flushRoot(root: FiberRoot, lanes: Lanes) {
ensureRootIsScheduled(root, now());
if ((executionContext & (RenderContext | CommitContext)) === NoContext) {
resetRenderTimer();
flushSyncCallbackQueue();
flushSyncCallbacks();
}
}
}
@@ -1085,7 +1091,7 @@ export function flushDiscreteUpdates() {
// like `el.focus()`. Exit.
return;
}
flushSyncCallbackQueue();
flushSyncCallbacks();
// If the discrete updates scheduled passive effects, flush them now so that
// they fire before the next serial event.
flushPassiveEffects();
@@ -1111,10 +1117,11 @@ export function batchedUpdates<A, R>(fn: A => R, a: A): R {
return fn(a);
} finally {
executionContext = prevExecutionContext;
// If there were legacy sync updates, flush them at the end of the outer
// most batchedUpdates-like method.
if (executionContext === NoContext) {
// Flush the immediate callbacks that were scheduled during this batch
resetRenderTimer();
flushSyncCallbackQueue();
flushSyncCallbacksOnlyInLegacyMode();
}
}
}
@@ -1126,10 +1133,11 @@ export function batchedEventUpdates<A, R>(fn: A => R, a: A): R {
return fn(a);
} finally {
executionContext = prevExecutionContext;
// If there were legacy sync updates, flush them at the end of the outer
// most batchedUpdates-like method.
if (executionContext === NoContext) {
// Flush the immediate callbacks that were scheduled during this batch
resetRenderTimer();
flushSyncCallbackQueue();
flushSyncCallbacksOnlyInLegacyMode();
}
}
}
@@ -1151,9 +1159,10 @@ export function discreteUpdates<A, B, C, D, R>(
setCurrentUpdatePriority(previousPriority);
ReactCurrentBatchConfig.transition = prevTransition;
if (executionContext === NoContext) {
// Flush the immediate callbacks that were scheduled during this batch
resetRenderTimer();
flushSyncCallbackQueue();
// TODO: This should only flush legacy sync updates. Not discrete updates
// in Concurrent Mode. Discrete updates will flush in a microtask.
flushSyncCallbacks();
}
}
}
@@ -1166,10 +1175,13 @@ export function unbatchedUpdates<A, R>(fn: (a: A) => R, a: A): R {
return fn(a);
} finally {
executionContext = prevExecutionContext;
// If there were legacy sync updates, flush them at the end of the outer
// most batchedUpdates-like method.
if (executionContext === NoContext) {
// Flush the immediate callbacks that were scheduled during this batch
resetRenderTimer();
flushSyncCallbackQueue();
// TODO: I think this call is redundant, because we flush inside
// scheduleUpdateOnFiber when LegacyUnbatchedContext is set.
flushSyncCallbacksOnlyInLegacyMode();
}
}
}
@@ -1196,7 +1208,7 @@ export function flushSync<A, R>(fn: A => R, a: A): R {
// Note that this will happen even if batchedUpdates is higher up
// the stack.
if ((executionContext & (RenderContext | CommitContext)) === NoContext) {
flushSyncCallbackQueue();
flushSyncCallbacks();
} else {
if (__DEV__) {
console.error(
@@ -1226,7 +1238,7 @@ export function flushControlled(fn: () => mixed): void {
if (executionContext === NoContext) {
// Flush the immediate callbacks that were scheduled during this batch
resetRenderTimer();
flushSyncCallbackQueue();
flushSyncCallbacks();
}
}
}
@@ -2098,7 +2110,7 @@ function commitRootImpl(root, renderPriorityLevel) {
}

// If layout work was scheduled, flush it now.
flushSyncCallbackQueue();
flushSyncCallbacks();

if (__DEV__) {
if (enableDebugTracing) {
@@ -2224,7 +2236,7 @@ function flushPassiveEffectsImpl() {

executionContext = prevExecutionContext;

flushSyncCallbackQueue();
flushSyncCallbacks();

// If additional passive effects were scheduled, increment a counter. If this
// exceeds the limit, we'll fire a warning.
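
One detail of ensureRootIsScheduled above: when the host config cannot schedule microtasks (`supportsMicrotasks` is false), the queue falls back to an Immediate-priority Scheduler task. For reference, a microtask scheduler for a DOM-like environment usually looks something like the sketch below; this is an illustration, not necessarily React's exact host config:

// Illustrative microtask scheduler; prefers the native API when available.
const scheduleMicrotaskSketch =
  typeof queueMicrotask === 'function'
    ? queueMicrotask
    : callback =>
        Promise.resolve(null)
          .then(callback)
          .catch(error => {
            // Surface errors as uncaught exceptions in a separate task rather
            // than as unhandled promise rejections.
            setTimeout(() => {
              throw error;
            });
          });

scheduleMicrotaskSketch(() => console.log('flushSyncCallbacks would run here'));
console.log('this logs first; the microtask runs after the current task');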