From 2d2a65be8e0f9b84b100438c01d6dd69750e68bc Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Sun, 13 Dec 2020 23:08:53 -0600 Subject: [PATCH 01/30] Initial scaffolding for - Add Cache component type to React exports - Add internal work tag - Test that it can render children No functionality yet. --- .../react-reconciler/src/ReactFiber.new.js | 26 ++++++++++++++++++ .../react-reconciler/src/ReactFiber.old.js | 26 ++++++++++++++++++ .../src/ReactFiberBeginWork.new.js | 27 +++++++++++++++++++ .../src/ReactFiberBeginWork.old.js | 27 +++++++++++++++++++ .../src/ReactFiberCompleteWork.new.js | 8 ++++++ .../src/ReactFiberCompleteWork.old.js | 8 ++++++ .../react-reconciler/src/ReactWorkTags.js | 4 ++- .../src/__tests__/ReactCache-test.js | 22 +++++++++++++++ packages/react/index.classic.fb.js | 1 + packages/react/index.experimental.js | 1 + packages/react/index.js | 1 + packages/react/index.modern.fb.js | 1 + packages/react/src/React.js | 2 ++ packages/shared/ReactSymbols.js | 2 ++ packages/shared/getComponentName.js | 3 +++ packages/shared/isValidElementType.js | 6 +++-- 16 files changed, 162 insertions(+), 3 deletions(-) create mode 100644 packages/react-reconciler/src/__tests__/ReactCache-test.js diff --git a/packages/react-reconciler/src/ReactFiber.new.js b/packages/react-reconciler/src/ReactFiber.new.js index 9d55634deab95..3b3d9d4bc2e7b 100644 --- a/packages/react-reconciler/src/ReactFiber.new.js +++ b/packages/react-reconciler/src/ReactFiber.new.js @@ -27,6 +27,7 @@ import { enableProfilerTimer, enableFundamentalAPI, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import {NoFlags, Placement, StaticMask} from './ReactFiberFlags'; import {ConcurrentRoot, BlockingRoot} from './ReactRootTags'; @@ -54,6 +55,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import getComponentName from 'shared/getComponentName'; @@ -88,6 +90,7 @@ import { REACT_SCOPE_TYPE, REACT_OFFSCREEN_TYPE, 
REACT_LEGACY_HIDDEN_TYPE, + REACT_CACHE_TYPE, } from 'shared/ReactSymbols'; export type {Fiber}; @@ -501,6 +504,11 @@ export function createFiberFromTypeAndProps( return createFiberFromScope(type, pendingProps, mode, lanes, key); } // eslint-disable-next-line no-fallthrough + case REACT_CACHE_TYPE: + if (enableCache) { + return createFiberFromCache(pendingProps, mode, lanes, key); + } + // eslint-disable-next-line no-fallthrough default: { if (typeof type === 'object' && type !== null) { switch (type.$$typeof) { @@ -745,6 +753,24 @@ export function createFiberFromLegacyHidden( return fiber; } +export function createFiberFromCache( + pendingProps: any, + mode: TypeOfMode, + lanes: Lanes, + key: null | string, +) { + const fiber = createFiber(CacheComponent, pendingProps, key, mode); + // TODO: The Cache fiber shouldn't have a type. It has a tag. + // This needs to be fixed in getComponentName so that it relies on the tag + // instead. + if (__DEV__) { + fiber.type = REACT_CACHE_TYPE; + } + fiber.elementType = REACT_CACHE_TYPE; + fiber.lanes = lanes; + return fiber; +} + export function createFiberFromText( content: string, mode: TypeOfMode, diff --git a/packages/react-reconciler/src/ReactFiber.old.js b/packages/react-reconciler/src/ReactFiber.old.js index 76001dd757e74..1e20db2851007 100644 --- a/packages/react-reconciler/src/ReactFiber.old.js +++ b/packages/react-reconciler/src/ReactFiber.old.js @@ -27,6 +27,7 @@ import { enableProfilerTimer, enableFundamentalAPI, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import {NoFlags, Placement, StaticMask} from './ReactFiberFlags'; import {ConcurrentRoot, BlockingRoot} from './ReactRootTags'; @@ -54,6 +55,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import getComponentName from 'shared/getComponentName'; @@ -88,6 +90,7 @@ import { REACT_SCOPE_TYPE, REACT_OFFSCREEN_TYPE, REACT_LEGACY_HIDDEN_TYPE, + REACT_CACHE_TYPE, } from 
'shared/ReactSymbols'; export type {Fiber}; @@ -501,6 +504,11 @@ export function createFiberFromTypeAndProps( return createFiberFromScope(type, pendingProps, mode, lanes, key); } // eslint-disable-next-line no-fallthrough + case REACT_CACHE_TYPE: + if (enableCache) { + return createFiberFromCache(pendingProps, mode, lanes, key); + } + // eslint-disable-next-line no-fallthrough default: { if (typeof type === 'object' && type !== null) { switch (type.$$typeof) { @@ -745,6 +753,24 @@ export function createFiberFromLegacyHidden( return fiber; } +export function createFiberFromCache( + pendingProps: any, + mode: TypeOfMode, + lanes: Lanes, + key: null | string, +) { + const fiber = createFiber(CacheComponent, pendingProps, key, mode); + // TODO: The Cache fiber shouldn't have a type. It has a tag. + // This needs to be fixed in getComponentName so that it relies on the tag + // instead. + if (__DEV__) { + fiber.type = REACT_CACHE_TYPE; + } + fiber.elementType = REACT_CACHE_TYPE; + fiber.lanes = lanes; + return fiber; +} + export function createFiberFromText( content: string, mode: TypeOfMode, diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 056d9c8b1ca7f..109dfc87118bd 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -50,6 +50,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { NoFlags, @@ -76,6 +77,7 @@ import { enableFundamentalAPI, warnAboutDefaultPropsOnFunctionComponents, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import invariant from 'shared/invariant'; import shallowEqual from 'shared/shallowEqual'; @@ -647,6 +649,20 @@ function updateOffscreenComponent( // fork the function. 
const updateLegacyHiddenComponent = updateOffscreenComponent; +function updateCacheComponent( + current: Fiber | null, + workInProgress: Fiber, + updateLanes: Lanes, + renderLanes: Lanes, +) { + if (!enableCache) { + return null; + } + const nextChildren = workInProgress.pendingProps.children; + reconcileChildren(current, workInProgress, nextChildren, renderLanes); + return workInProgress.child; +} + function updateFragment( current: Fiber | null, workInProgress: Fiber, @@ -3418,6 +3434,17 @@ function beginWork( case LegacyHiddenComponent: { return updateLegacyHiddenComponent(current, workInProgress, renderLanes); } + case CacheComponent: { + if (enableCache) { + return updateCacheComponent( + current, + workInProgress, + updateLanes, + renderLanes, + ); + } + break; + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 3f462e198aaf5..9b2e9fd2bfdf3 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -50,6 +50,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { NoFlags, @@ -76,6 +77,7 @@ import { enableFundamentalAPI, warnAboutDefaultPropsOnFunctionComponents, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import invariant from 'shared/invariant'; import shallowEqual from 'shared/shallowEqual'; @@ -647,6 +649,20 @@ function updateOffscreenComponent( // fork the function. 
const updateLegacyHiddenComponent = updateOffscreenComponent; +function updateCacheComponent( + current: Fiber | null, + workInProgress: Fiber, + updateLanes: Lanes, + renderLanes: Lanes, +) { + if (!enableCache) { + return null; + } + const nextChildren = workInProgress.pendingProps.children; + reconcileChildren(current, workInProgress, nextChildren, renderLanes); + return workInProgress.child; +} + function updateFragment( current: Fiber | null, workInProgress: Fiber, @@ -3418,6 +3434,17 @@ function beginWork( case LegacyHiddenComponent: { return updateLegacyHiddenComponent(current, workInProgress, renderLanes); } + case CacheComponent: { + if (enableCache) { + return updateCacheComponent( + current, + workInProgress, + updateLanes, + renderLanes, + ); + } + break; + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 53a328121051f..11bb07f989a52 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -56,6 +56,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { NoMode, @@ -132,6 +133,7 @@ import { enableFundamentalAPI, enableScopeAPI, enableProfilerTimer, + enableCache, } from 'shared/ReactFeatureFlags'; import { markSpawnedWork, @@ -1481,6 +1483,12 @@ function completeWork( return null; } + case CacheComponent: { + if (enableCache) { + bubbleProperties(workInProgress); + return null; + } + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index 8f0e68353d90c..6e4fab4d05f00 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -56,6 +56,7 @@ import { ScopeComponent, OffscreenComponent, 
LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { NoMode, @@ -132,6 +133,7 @@ import { enableFundamentalAPI, enableScopeAPI, enableProfilerTimer, + enableCache, } from 'shared/ReactFeatureFlags'; import { markSpawnedWork, @@ -1481,6 +1483,12 @@ function completeWork( return null; } + case CacheComponent: { + if (enableCache) { + bubbleProperties(workInProgress); + return null; + } + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactWorkTags.js b/packages/react-reconciler/src/ReactWorkTags.js index 84cd9ebd4ba55..65dba28b367a3 100644 --- a/packages/react-reconciler/src/ReactWorkTags.js +++ b/packages/react-reconciler/src/ReactWorkTags.js @@ -31,7 +31,8 @@ export type WorkTag = | 20 | 21 | 22 - | 23; + | 23 + | 24; export const FunctionComponent = 0; export const ClassComponent = 1; @@ -57,3 +58,4 @@ export const FundamentalComponent = 20; export const ScopeComponent = 21; export const OffscreenComponent = 22; export const LegacyHiddenComponent = 23; +export const CacheComponent = 24; diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js new file mode 100644 index 0000000000000..b193817e2d1de --- /dev/null +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -0,0 +1,22 @@ +let React; +let ReactNoop; +let Cache; + +describe('ReactCache', () => { + beforeEach(() => { + jest.resetModules(); + + React = require('react'); + ReactNoop = require('react-noop-renderer'); + Cache = React.unstable_Cache; + }); + + // @gate experimental + test('render Cache component', async () => { + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(Hi); + }); + expect(root).toMatchRenderedOutput('Hi'); + }); +}); diff --git a/packages/react/index.classic.fb.js b/packages/react/index.classic.fb.js index 04723075defa4..4beccf1dfbca0 100644 --- a/packages/react/index.classic.fb.js +++ b/packages/react/index.classic.fb.js 
@@ -51,6 +51,7 @@ export { SuspenseList, SuspenseList as unstable_SuspenseList, unstable_getCacheForType, + unstable_Cache, // enableScopeAPI unstable_Scope, unstable_useOpaqueIdentifier, diff --git a/packages/react/index.experimental.js b/packages/react/index.experimental.js index 5de53908ae883..8c4d97eb8e746 100644 --- a/packages/react/index.experimental.js +++ b/packages/react/index.experimental.js @@ -46,6 +46,7 @@ export { SuspenseList as unstable_SuspenseList, unstable_useOpaqueIdentifier, unstable_getCacheForType, + unstable_Cache, // enableDebugTracing unstable_DebugTracingMode, } from './src/React'; diff --git a/packages/react/index.js b/packages/react/index.js index 1553bdd9e9a89..7f0173bd69e1c 100644 --- a/packages/react/index.js +++ b/packages/react/index.js @@ -83,4 +83,5 @@ export { unstable_Scope, unstable_useOpaqueIdentifier, unstable_getCacheForType, + unstable_Cache, } from './src/React'; diff --git a/packages/react/index.modern.fb.js b/packages/react/index.modern.fb.js index 9a3bb4384ca2e..982388dd2d645 100644 --- a/packages/react/index.modern.fb.js +++ b/packages/react/index.modern.fb.js @@ -50,6 +50,7 @@ export { SuspenseList, SuspenseList as unstable_SuspenseList, unstable_getCacheForType, + unstable_Cache, // enableScopeAPI unstable_Scope, unstable_useOpaqueIdentifier, diff --git a/packages/react/src/React.js b/packages/react/src/React.js index aae52b2750db6..3382743b8ccd0 100644 --- a/packages/react/src/React.js +++ b/packages/react/src/React.js @@ -17,6 +17,7 @@ import { REACT_SUSPENSE_LIST_TYPE, REACT_LEGACY_HIDDEN_TYPE, REACT_SCOPE_TYPE, + REACT_CACHE_TYPE, } from 'shared/ReactSymbols'; import {Component, PureComponent} from './ReactBaseClasses'; @@ -112,6 +113,7 @@ export { REACT_SUSPENSE_LIST_TYPE as SuspenseList, REACT_LEGACY_HIDDEN_TYPE as unstable_LegacyHidden, getCacheForType as unstable_getCacheForType, + REACT_CACHE_TYPE as unstable_Cache, // enableFundamentalAPI createFundamental as unstable_createFundamental, // enableScopeAPI 
diff --git a/packages/shared/ReactSymbols.js b/packages/shared/ReactSymbols.js index 1870c8c009482..d490ab417c211 100644 --- a/packages/shared/ReactSymbols.js +++ b/packages/shared/ReactSymbols.js @@ -31,6 +31,7 @@ export let REACT_OPAQUE_ID_TYPE = 0xeae0; export let REACT_DEBUG_TRACING_MODE_TYPE = 0xeae1; export let REACT_OFFSCREEN_TYPE = 0xeae2; export let REACT_LEGACY_HIDDEN_TYPE = 0xeae3; +export let REACT_CACHE_TYPE = 0xeae4; if (typeof Symbol === 'function' && Symbol.for) { const symbolFor = Symbol.for; @@ -52,6 +53,7 @@ if (typeof Symbol === 'function' && Symbol.for) { REACT_DEBUG_TRACING_MODE_TYPE = symbolFor('react.debug_trace_mode'); REACT_OFFSCREEN_TYPE = symbolFor('react.offscreen'); REACT_LEGACY_HIDDEN_TYPE = symbolFor('react.legacy_hidden'); + REACT_CACHE_TYPE = symbolFor('react.cache'); } const MAYBE_ITERATOR_SYMBOL = typeof Symbol === 'function' && Symbol.iterator; diff --git a/packages/shared/getComponentName.js b/packages/shared/getComponentName.js index ae74676ed3616..a5594f6f64d70 100644 --- a/packages/shared/getComponentName.js +++ b/packages/shared/getComponentName.js @@ -21,6 +21,7 @@ import { REACT_SUSPENSE_TYPE, REACT_SUSPENSE_LIST_TYPE, REACT_LAZY_TYPE, + REACT_CACHE_TYPE, } from 'shared/ReactSymbols'; import type {ReactContext, ReactProviderType} from 'shared/ReactTypes'; @@ -72,6 +73,8 @@ function getComponentName(type: mixed): string | null { return 'Suspense'; case REACT_SUSPENSE_LIST_TYPE: return 'SuspenseList'; + case REACT_CACHE_TYPE: + return 'Cache'; } if (typeof type === 'object') { switch (type.$$typeof) { diff --git a/packages/shared/isValidElementType.js b/packages/shared/isValidElementType.js index 35eeae2a308ce..0a361c4a22dec 100644 --- a/packages/shared/isValidElementType.js +++ b/packages/shared/isValidElementType.js @@ -22,8 +22,9 @@ import { REACT_FUNDAMENTAL_TYPE, REACT_SCOPE_TYPE, REACT_LEGACY_HIDDEN_TYPE, + REACT_CACHE_TYPE, } from 'shared/ReactSymbols'; -import {enableScopeAPI} from './ReactFeatureFlags'; +import 
{enableScopeAPI, enableCache} from './ReactFeatureFlags'; let REACT_MODULE_REFERENCE: number | Symbol = 0; if (typeof Symbol === 'function') { @@ -44,7 +45,8 @@ export default function isValidElementType(type: mixed) { type === REACT_SUSPENSE_TYPE || type === REACT_SUSPENSE_LIST_TYPE || type === REACT_LEGACY_HIDDEN_TYPE || - (enableScopeAPI && type === REACT_SCOPE_TYPE) + (enableScopeAPI && type === REACT_SCOPE_TYPE) || + (enableCache && type === REACT_CACHE_TYPE) ) { return true; } From 187459129b629c24f0bbe0245a611117fcecf14d Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Sun, 13 Dec 2020 23:34:59 -0600 Subject: [PATCH 02/30] Implement getCacheForType Makes Cache act like a context provider. Along with `getCacheForType`, this unlocks basic data fetching. --- .../src/ReactFiberBeginWork.new.js | 33 ++++- .../src/ReactFiberBeginWork.old.js | 33 ++++- .../src/ReactFiberCacheComponent.js | 33 +++++ .../src/ReactFiberCompleteWork.new.js | 6 +- .../src/ReactFiberCompleteWork.old.js | 6 +- .../src/ReactFiberHooks.new.js | 29 ++++- .../src/ReactFiberHooks.old.js | 29 ++++- .../src/ReactFiberLane.new.js | 13 ++ .../src/ReactFiberLane.old.js | 13 ++ .../src/ReactFiberNewContext.new.js | 16 ++- .../src/ReactFiberNewContext.old.js | 16 ++- .../src/ReactFiberUnwindWork.new.js | 20 ++- .../src/ReactFiberUnwindWork.old.js | 20 ++- .../src/__tests__/ReactCache-test.js | 122 ++++++++++++++++++ scripts/error-codes/codes.json | 3 +- 15 files changed, 365 insertions(+), 27 deletions(-) create mode 100644 packages/react-reconciler/src/ReactFiberCacheComponent.js diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 109dfc87118bd..863e015074009 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -23,6 +23,7 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; +import type {Cache} 
from './ReactFiberCacheComponent'; import checkPropTypes from 'shared/checkPropTypes'; @@ -117,6 +118,7 @@ import { removeLanes, mergeLanes, getBumpedLaneForHydration, + requestFreshCache, } from './ReactFiberLane.new'; import { ConcurrentMode, @@ -203,6 +205,7 @@ import { } from './ReactFiberWorkLoop.new'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.new'; +import {CacheContext} from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -658,6 +661,24 @@ function updateCacheComponent( if (!enableCache) { return null; } + + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + + const cache: Cache = + current === null + ? requestFreshCache(root, renderLanes) + : current.memoizedState; + + // TODO: Propagate changes, once refreshing exists. + pushProvider(workInProgress, CacheContext, cache); + + workInProgress.memoizedState = cache; + const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; @@ -2817,7 +2838,7 @@ function updateContextProvider( } } - pushProvider(workInProgress, newValue); + pushProvider(workInProgress, context, newValue); if (oldProps !== null) { const oldValue = oldProps.value; @@ -3104,7 +3125,8 @@ function beginWork( break; case ContextProvider: { const newValue = workInProgress.memoizedProps.value; - pushProvider(workInProgress, newValue); + const context: ReactContext = workInProgress.type._context; + pushProvider(workInProgress, context, newValue); break; } case Profiler: @@ -3250,6 +3272,13 @@ function beginWork( workInProgress.lanes = NoLanes; return updateOffscreenComponent(current, workInProgress, renderLanes); } + case CacheComponent: { + if (enableCache) { + const cache: Cache = 
current.memoizedState; + pushProvider(workInProgress, CacheContext, cache); + } + break; + } } return bailoutOnAlreadyFinishedWork(current, workInProgress, renderLanes); } else { diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 9b2e9fd2bfdf3..8f474df3f8c92 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -23,6 +23,7 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; +import type {Cache} from './ReactFiberCacheComponent'; import checkPropTypes from 'shared/checkPropTypes'; @@ -117,6 +118,7 @@ import { removeLanes, mergeLanes, getBumpedLaneForHydration, + requestFreshCache, } from './ReactFiberLane.old'; import { ConcurrentMode, @@ -203,6 +205,7 @@ import { } from './ReactFiberWorkLoop.old'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.old'; +import {CacheContext} from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -658,6 +661,24 @@ function updateCacheComponent( if (!enableCache) { return null; } + + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + + const cache: Cache = + current === null + ? requestFreshCache(root, renderLanes) + : current.memoizedState; + + // TODO: Propagate changes, once refreshing exists. 
+ pushProvider(workInProgress, CacheContext, cache); + + workInProgress.memoizedState = cache; + const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; @@ -2817,7 +2838,7 @@ function updateContextProvider( } } - pushProvider(workInProgress, newValue); + pushProvider(workInProgress, context, newValue); if (oldProps !== null) { const oldValue = oldProps.value; @@ -3104,7 +3125,8 @@ function beginWork( break; case ContextProvider: { const newValue = workInProgress.memoizedProps.value; - pushProvider(workInProgress, newValue); + const context: ReactContext = workInProgress.type._context; + pushProvider(workInProgress, context, newValue); break; } case Profiler: @@ -3250,6 +3272,13 @@ function beginWork( workInProgress.lanes = NoLanes; return updateOffscreenComponent(current, workInProgress, renderLanes); } + case CacheComponent: { + if (enableCache) { + const cache: Cache = current.memoizedState; + pushProvider(workInProgress, CacheContext, cache); + } + break; + } } return bailoutOnAlreadyFinishedWork(current, workInProgress, renderLanes); } else { diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js new file mode 100644 index 0000000000000..36ae44e1a3f97 --- /dev/null +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -0,0 +1,33 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow + */ + +import type {ReactContext} from 'shared/ReactTypes'; + +import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; + +export type Cache = {| + providers: Set | null, + data: Map<() => mixed, mixed> | null, +|}; + +export const CacheContext: ReactContext = { + $$typeof: REACT_CONTEXT_TYPE, + // We don't use Consumer/Provider for Cache components. So we'll cheat. + Consumer: (null: any), + Provider: (null: any), + _calculateChangedBits: null, + _currentValue: null, + _currentValue2: null, + _threadCount: 0, +}; + +if (__DEV__) { + CacheContext._currentRenderer = null; + CacheContext._currentRenderer2 = null; +} diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 11bb07f989a52..458bb7df21307 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -12,6 +12,7 @@ import type {Lanes, Lane} from './ReactFiberLane.new'; import type { ReactFundamentalComponentInstance, ReactScopeInstance, + ReactContext, } from 'shared/ReactTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type { @@ -155,6 +156,7 @@ import { import {resetChildFibers} from './ReactChildFiber.new'; import {createScopeInstance} from './ReactFiberScope.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; +import {CacheContext} from './ReactFiberCacheComponent'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. 
This turns a Placement into @@ -1137,7 +1139,8 @@ function completeWork( return null; case ContextProvider: // Pop provider fiber - popProvider(workInProgress); + const context: ReactContext = workInProgress.type._context; + popProvider(context, workInProgress); bubbleProperties(workInProgress); return null; case IncompleteClassComponent: { @@ -1485,6 +1488,7 @@ function completeWork( } case CacheComponent: { if (enableCache) { + popProvider(CacheContext, workInProgress); bubbleProperties(workInProgress); return null; } diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index 6e4fab4d05f00..f3c3efafcb9e0 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -12,6 +12,7 @@ import type {Lanes, Lane} from './ReactFiberLane.old'; import type { ReactFundamentalComponentInstance, ReactScopeInstance, + ReactContext, } from 'shared/ReactTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type { @@ -155,6 +156,7 @@ import { import {resetChildFibers} from './ReactChildFiber.old'; import {createScopeInstance} from './ReactFiberScope.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; +import {CacheContext} from './ReactFiberCacheComponent'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. 
This turns a Placement into @@ -1137,7 +1139,8 @@ function completeWork( return null; case ContextProvider: // Pop provider fiber - popProvider(workInProgress); + const context: ReactContext = workInProgress.type._context; + popProvider(context, workInProgress); bubbleProperties(workInProgress); return null; case IncompleteClassComponent: { @@ -1485,6 +1488,7 @@ function completeWork( } case CacheComponent: { if (enableCache) { + popProvider(CacheContext, workInProgress); bubbleProperties(workInProgress); return null; } diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 66c85b274df0e..f6afdf226c797 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -92,6 +92,7 @@ import { import {getIsRendering} from './ReactCurrentFiber'; import {logStateUpdateScheduled} from './DebugTracing'; import {markStateUpdateScheduled} from './SchedulingProfiler'; +import {CacheContext} from './ReactFiberCacheComponent'; const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals; @@ -1818,7 +1819,30 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - invariant(false, 'Not implemented.'); + const cache = readContext(CacheContext); + invariant( + cache !== null, + 'Tried to fetch data, but no cache was found. To fix, wrap your ' + + "component in a boundary. It doesn't need to be a direct " + + 'parent; it can be anywhere in the ancestor path', + ); + let cachesByType = cache.data; + if (cachesByType === null) { + cachesByType = cache.data = new Map(); + // TODO: Warn if constructor returns undefined? Creates ambiguity with + // existence check above. (I don't want to use `has`. Two map lookups + // instead of one? Silly.) 
+ const cacheForType = resourceType(); + cachesByType.set(resourceType, cacheForType); + return cacheForType; + } else { + let cacheForType: T | void = (cachesByType.get(resourceType): any); + if (cacheForType === undefined) { + cacheForType = resourceType(); + cachesByType.set(resourceType, cacheForType); + } + return cacheForType; + } } export const ContextOnlyDispatcher: Dispatcher = { @@ -1865,6 +1889,9 @@ const HooksDispatcherOnMount: Dispatcher = { unstable_isNewReconciler: enableNewReconciler, }; +if (enableCache) { + (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType; +} const HooksDispatcherOnUpdate: Dispatcher = { readContext, diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 49e222fa6735f..250937d744be6 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -92,6 +92,7 @@ import { import {getIsRendering} from './ReactCurrentFiber'; import {logStateUpdateScheduled} from './DebugTracing'; import {markStateUpdateScheduled} from './SchedulingProfiler'; +import {CacheContext} from './ReactFiberCacheComponent'; const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals; @@ -1818,7 +1819,30 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - invariant(false, 'Not implemented.'); + const cache = readContext(CacheContext); + invariant( + cache !== null, + 'Tried to fetch data, but no cache was found. To fix, wrap your ' + + "component in a boundary. It doesn't need to be a direct " + + 'parent; it can be anywhere in the ancestor path', + ); + let cachesByType = cache.data; + if (cachesByType === null) { + cachesByType = cache.data = new Map(); + // TODO: Warn if constructor returns undefined? Creates ambiguity with + // existence check above. (I don't want to use `has`. Two map lookups + // instead of one? Silly.) 
+ const cacheForType = resourceType(); + cachesByType.set(resourceType, cacheForType); + return cacheForType; + } else { + let cacheForType: T | void = (cachesByType.get(resourceType): any); + if (cacheForType === undefined) { + cacheForType = resourceType(); + cachesByType.set(resourceType, cacheForType); + } + return cacheForType; + } } export const ContextOnlyDispatcher: Dispatcher = { @@ -1865,6 +1889,9 @@ const HooksDispatcherOnMount: Dispatcher = { unstable_isNewReconciler: enableNewReconciler, }; +if (enableCache) { + (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType; +} const HooksDispatcherOnUpdate: Dispatcher = { readContext, diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index b5d8846ec2002..06a5ecd707e04 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -8,6 +8,7 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; +import type {Cache} from './ReactFiberCacheComponent'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. @@ -36,6 +37,7 @@ export type Lane = number; export type LaneMap = Array; import invariant from 'shared/invariant'; +import {enableCache} from 'shared/ReactFeatureFlags'; import { ImmediatePriority as ImmediateSchedulerPriority, @@ -770,6 +772,17 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { } } +export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { + if (!enableCache) { + return (null: any); + } + // Create a fresh cache. 
+ const freshCache = { + providers: null, + data: null, + }; + return freshCache; +} export function getBumpedLaneForHydration( root: FiberRoot, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index f58ab20531740..c866f0ee81e26 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -8,6 +8,7 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; +import type {Cache} from './ReactFiberCacheComponent'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. @@ -36,6 +37,7 @@ export type Lane = number; export type LaneMap = Array; import invariant from 'shared/invariant'; +import {enableCache} from 'shared/ReactFeatureFlags'; import { ImmediatePriority as ImmediateSchedulerPriority, @@ -770,6 +772,17 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { } } +export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { + if (!enableCache) { + return (null: any); + } + // Create a fresh cache. 
+ const freshCache = { + providers: null, + data: null, + }; + return freshCache; +} export function getBumpedLaneForHydration( root: FiberRoot, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberNewContext.new.js b/packages/react-reconciler/src/ReactFiberNewContext.new.js index 8e387ee012ec3..584e2ff43b5cc 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.new.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.new.js @@ -72,9 +72,11 @@ export function exitDisallowedContextReadInDEV(): void { } } -export function pushProvider(providerFiber: Fiber, nextValue: T): void { - const context: ReactContext = providerFiber.type._context; - +export function pushProvider( + providerFiber: Fiber, + context: ReactContext, + nextValue: T, +): void { if (isPrimaryRenderer) { push(valueCursor, context._currentValue, providerFiber); @@ -112,12 +114,12 @@ export function pushProvider(providerFiber: Fiber, nextValue: T): void { } } -export function popProvider(providerFiber: Fiber): void { +export function popProvider( + context: ReactContext, + providerFiber: Fiber, +): void { const currentValue = valueCursor.current; - pop(valueCursor, providerFiber); - - const context: ReactContext = providerFiber.type._context; if (isPrimaryRenderer) { context._currentValue = currentValue; } else { diff --git a/packages/react-reconciler/src/ReactFiberNewContext.old.js b/packages/react-reconciler/src/ReactFiberNewContext.old.js index c7f03442d5797..da4859f0be800 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.old.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.old.js @@ -72,9 +72,11 @@ export function exitDisallowedContextReadInDEV(): void { } } -export function pushProvider(providerFiber: Fiber, nextValue: T): void { - const context: ReactContext = providerFiber.type._context; - +export function pushProvider( + providerFiber: Fiber, + context: ReactContext, + nextValue: T, +): void { if (isPrimaryRenderer) { push(valueCursor, 
context._currentValue, providerFiber); @@ -112,12 +114,12 @@ export function pushProvider(providerFiber: Fiber, nextValue: T): void { } } -export function popProvider(providerFiber: Fiber): void { +export function popProvider( + context: ReactContext, + providerFiber: Fiber, +): void { const currentValue = valueCursor.current; - pop(valueCursor, providerFiber); - - const context: ReactContext = providerFiber.type._context; if (isPrimaryRenderer) { context._currentValue = currentValue; } else { diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index 8d445cedd44f9..a394e385e1af9 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -7,6 +7,7 @@ * @flow */ +import type {ReactContext} from 'shared/ReactTypes'; import type {Fiber} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.new'; import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; @@ -22,12 +23,14 @@ import { SuspenseListComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import {DidCapture, NoFlags, ShouldCapture} from './ReactFiberFlags'; import {NoMode, ProfileMode} from './ReactTypeOfMode'; import { enableSuspenseServerRenderer, enableProfilerTimer, + enableCache, } from 'shared/ReactFeatureFlags'; import {popHostContainer, popHostContext} from './ReactFiberHostContext.new'; @@ -41,6 +44,7 @@ import { import {popProvider} from './ReactFiberNewContext.new'; import {popRenderLanes} from './ReactFiberWorkLoop.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; +import {CacheContext} from './ReactFiberCacheComponent'; import invariant from 'shared/invariant'; @@ -120,12 +124,18 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { popHostContainer(workInProgress); return null; case ContextProvider: - popProvider(workInProgress); + 
const context: ReactContext = workInProgress.type._context; + popProvider(context, workInProgress); return null; case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(workInProgress); return null; + case CacheComponent: + if (enableCache) { + popProvider(CacheContext, workInProgress); + } + return null; default: return null; } @@ -160,12 +170,18 @@ function unwindInterruptedWork(interruptedWork: Fiber) { popSuspenseContext(interruptedWork); break; case ContextProvider: - popProvider(interruptedWork); + const context: ReactContext = interruptedWork.type._context; + popProvider(context, interruptedWork); break; case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(interruptedWork); break; + case CacheComponent: + if (enableCache) { + popProvider(CacheContext, interruptedWork); + } + break; default: break; } diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 3590f3ea4f1b1..11ec5fb1dc720 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -7,6 +7,7 @@ * @flow */ +import type {ReactContext} from 'shared/ReactTypes'; import type {Fiber} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.old'; import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; @@ -22,12 +23,14 @@ import { SuspenseListComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import {DidCapture, NoFlags, ShouldCapture} from './ReactFiberFlags'; import {NoMode, ProfileMode} from './ReactTypeOfMode'; import { enableSuspenseServerRenderer, enableProfilerTimer, + enableCache, } from 'shared/ReactFeatureFlags'; import {popHostContainer, popHostContext} from './ReactFiberHostContext.old'; @@ -41,6 +44,7 @@ import { import {popProvider} from './ReactFiberNewContext.old'; import {popRenderLanes} from './ReactFiberWorkLoop.old'; import 
{transferActualDuration} from './ReactProfilerTimer.old'; +import {CacheContext} from './ReactFiberCacheComponent'; import invariant from 'shared/invariant'; @@ -120,12 +124,18 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { popHostContainer(workInProgress); return null; case ContextProvider: - popProvider(workInProgress); + const context: ReactContext = workInProgress.type._context; + popProvider(context, workInProgress); return null; case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(workInProgress); return null; + case CacheComponent: + if (enableCache) { + popProvider(CacheContext, workInProgress); + } + return null; default: return null; } @@ -160,12 +170,18 @@ function unwindInterruptedWork(interruptedWork: Fiber) { popSuspenseContext(interruptedWork); break; case ContextProvider: - popProvider(interruptedWork); + const context: ReactContext = interruptedWork.type._context; + popProvider(context, interruptedWork); break; case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(interruptedWork); break; + case CacheComponent: + if (enableCache) { + popProvider(CacheContext, interruptedWork); + } + break; default: break; } diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index b193817e2d1de..b86f4d2235026 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -1,6 +1,11 @@ let React; let ReactNoop; let Cache; +let getCacheForType; +let Scheduler; +let Suspense; +let textService; +let textServiceVersion; describe('ReactCache', () => { beforeEach(() => { @@ -9,8 +14,103 @@ describe('ReactCache', () => { React = require('react'); ReactNoop = require('react-noop-renderer'); Cache = React.unstable_Cache; + Scheduler = require('scheduler'); + Suspense = React.Suspense; + getCacheForType = React.unstable_getCacheForType; + + // Represents some data service 
that returns text. It likely has additional + // caching layers, like a CDN or the local browser cache. It can be mutated + // or emptied independently of the React cache. + textService = new Map(); + textServiceVersion = 1; }); + function createTextCache() { + return new Map(); + } + + function readText(text) { + const textCache = getCacheForType(createTextCache); + const record = textCache.get(text); + if (record !== undefined) { + switch (record.status) { + case 'pending': + throw record.value; + case 'rejected': + throw record.value; + case 'resolved': + return record.value; + } + } else { + Scheduler.unstable_yieldValue(`Cache miss! [${text}]`); + + let request = textService.get(text); + if (request === undefined) { + let resolve; + let reject; + request = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + request.resolve = resolve; + request.reject = reject; + + // Add the request to a backing cache. This may outlive the lifetime + // of the component that is currently reading the data. 
+ textService.set(text, request); + } + + const thenable = request.then( + value => { + if (newRecord.status === 'pending') { + newRecord.status = 'resolved'; + newRecord.value = value; + } + }, + error => { + if (newRecord.status === 'pending') { + newRecord.status = 'rejected'; + newRecord.value = error; + } + }, + ); + + const newRecord = { + ping: null, + status: 'pending', + value: thenable, + }; + textCache.set(text, newRecord); + + throw thenable; + } + } + + function resolveText(text) { + const request = textService.get(text); + if (request !== undefined) { + request.resolve(textServiceVersion); + return request; + } else { + const newRequest = Promise.resolve(textServiceVersion); + newRequest.resolve = newRequest.reject = () => {}; + textService.set(text, newRequest); + return newRequest; + } + } + + function Text({text}) { + Scheduler.unstable_yieldValue(text); + return text; + } + + function AsyncText({text, showVersion}) { + const version = readText(text); + const fullText = showVersion ? `${text} [v${version}]` : text; + Scheduler.unstable_yieldValue(fullText); + return fullText; + } + // @gate experimental test('render Cache component', async () => { const root = ReactNoop.createRoot(); @@ -19,4 +119,26 @@ describe('ReactCache', () => { }); expect(root).toMatchRenderedOutput('Hi'); }); + + // @gate experimental + test('mount new data', async () => { + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + + }> + + + , + ); + }); + expect(Scheduler).toHaveYielded(['Cache miss! 
[A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A']); + expect(root).toMatchRenderedOutput('A'); + }); }); diff --git a/scripts/error-codes/codes.json b/scripts/error-codes/codes.json index 641901fcaba05..6941f92df2770 100644 --- a/scripts/error-codes/codes.json +++ b/scripts/error-codes/codes.json @@ -371,5 +371,6 @@ "380": "Reading the cache is only supported while rendering.", "381": "This feature is not supported by ReactSuspenseTestUtils.", "382": "This query has received more parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", - "383": "This query has received fewer parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs." + "383": "This query has received fewer parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", + "384": "Tried to fetch data, but no cache was found. To fix, wrap your component in a boundary. It doesn't need to be a direct parent; it can be anywhere in the ancestor path" } From f9b829d391bf9c65061050b96afce163c357e643 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Mon, 14 Dec 2020 00:14:09 -0600 Subject: [PATCH 03/30] Use same cache for all new data in a single update If multiple Cache boundaries mount at the same time, they should use the same cache, even if they are in totally separate trees. The plan is to extend this further so that we keep reusing the same cache for all incoming updates until one of them finishes. (Not yet implemented.) 
--- .../src/ReactFiberLane.new.js | 20 ++++++++++ .../src/ReactFiberLane.old.js | 20 ++++++++++ .../src/ReactFiberRoot.new.js | 5 +++ .../src/ReactFiberRoot.old.js | 5 +++ .../src/ReactInternalTypes.js | 3 ++ .../src/__tests__/ReactCache-test.js | 39 +++++++++++++++++++ 6 files changed, 92 insertions(+) diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index 06a5ecd707e04..ae983d699ea02 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -755,6 +755,14 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { lanes &= ~lane; } + + if (enableCache) { + // Clear the pooled cache so subsequent updates get fresh data. + // TODO: This is very naive and only works if the shell of a cache boundary + // doesn't suspend. The next, key feature is to preserve caches across + // multiple attempts (suspend -> ping) to render a new tree. + root.pooledCache = null; + } } export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { @@ -776,11 +784,23 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { if (!enableCache) { return (null: any); } + + // Check if there's a pooled cache. This is really just a batching heuristic + // so that two transitions that happen in a similar timeframe can share the + // same cache. + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + return pooledCache; + } + // Create a fresh cache. const freshCache = { providers: null, data: null, }; + + // This is now the pooled cache. 
+ root.pooledCache = freshCache; return freshCache; } export function getBumpedLaneForHydration( diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index c866f0ee81e26..547a67b00c5a0 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -755,6 +755,14 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { lanes &= ~lane; } + + if (enableCache) { + // Clear the pooled cache so subsequent updates get fresh data. + // TODO: This is very naive and only works if the shell of a cache boundary + // doesn't suspend. The next, key feature is to preserve caches across + // multiple attempts (suspend -> ping) to render a new tree. + root.pooledCache = null; + } } export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { @@ -776,11 +784,23 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { if (!enableCache) { return (null: any); } + + // Check if there's a pooled cache. This is really just a batching heuristic + // so that two transitions that happen in a similar timeframe can share the + // same cache. + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + return pooledCache; + } + // Create a fresh cache. const freshCache = { providers: null, data: null, }; + + // This is now the pooled cache. 
+ root.pooledCache = freshCache; return freshCache; } export function getBumpedLaneForHydration( diff --git a/packages/react-reconciler/src/ReactFiberRoot.new.js b/packages/react-reconciler/src/ReactFiberRoot.new.js index 1b504a18fc326..f5b6a3857fd3a 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.new.js +++ b/packages/react-reconciler/src/ReactFiberRoot.new.js @@ -21,6 +21,7 @@ import { import { enableSchedulerTracing, enableSuspenseCallback, + enableCache, } from 'shared/ReactFeatureFlags'; import {unstable_getThreadID} from 'scheduler/tracing'; import {initializeUpdateQueue} from './ReactUpdateQueue.new'; @@ -52,6 +53,10 @@ function FiberRootNode(containerInfo, tag, hydrate) { this.entangledLanes = NoLanes; this.entanglements = createLaneMap(NoLanes); + if (enableCache) { + this.pooledCache = null; + } + if (supportsHydration) { this.mutableSourceEagerHydrationData = null; } diff --git a/packages/react-reconciler/src/ReactFiberRoot.old.js b/packages/react-reconciler/src/ReactFiberRoot.old.js index 84509babdb51a..da87187ae1bb5 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.old.js +++ b/packages/react-reconciler/src/ReactFiberRoot.old.js @@ -21,6 +21,7 @@ import { import { enableSchedulerTracing, enableSuspenseCallback, + enableCache, } from 'shared/ReactFeatureFlags'; import {unstable_getThreadID} from 'scheduler/tracing'; import {initializeUpdateQueue} from './ReactUpdateQueue.old'; @@ -52,6 +53,10 @@ function FiberRootNode(containerInfo, tag, hydrate) { this.entangledLanes = NoLanes; this.entanglements = createLaneMap(NoLanes); + if (enableCache) { + this.pooledCache = null; + } + if (supportsHydration) { this.mutableSourceEagerHydrationData = null; } diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index c48d53fbb919b..288893279e040 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -25,6 +25,7 @@ 
import type {RootTag} from './ReactRootTags'; import type {TimeoutHandle, NoTimeout} from './ReactFiberHostConfig'; import type {Wakeable} from 'shared/ReactTypes'; import type {Interaction} from 'scheduler/src/Tracing'; +import type {Cache} from './ReactFiberCacheComponent'; // Unwind Circular: moved from ReactFiberHooks.old export type HookType = @@ -235,6 +236,8 @@ type BaseFiberRootProperties = {| entangledLanes: Lanes, entanglements: LaneMap, + + pooledCache: Cache | null, |}; // The following attributes are only used by interaction tracing builds. diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index b86f4d2235026..8a2d0a1fcbcb3 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -141,4 +141,43 @@ describe('ReactCache', () => { expect(Scheduler).toHaveYielded(['A']); expect(root).toMatchRenderedOutput('A'); }); + + // @gate experimental + test('multiple new Cache boundaries in the same update share the same, fresh cache', async () => { + function App({text}) { + return ( + <> + + }> + + + + + }> + + + + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + // Even though there are two new trees, they should share the same + // data cache. So there should be only a single cache miss for A. + expect(Scheduler).toHaveYielded([ + 'Cache miss! 
[A]', + 'Loading...', + 'Loading...', + ]); + expect(root).toMatchRenderedOutput('Loading...Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A', 'A']); + expect(root).toMatchRenderedOutput('AA'); + }); }); From 57d6c1df357e782535f39447c723ff42d47f23f8 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Mon, 14 Dec 2020 01:51:17 -0600 Subject: [PATCH 04/30] Retain in-progress caches on the root, per lane All the data that loaded as a result of a single transition/update should share the same cache. This includes nested content that gets progressively "filled in" after the initial shell is displayed. If the shell itself were wrapped in a Cache boundary, such that the cache can commit with suspending, then this is easy: once the boundary mounts, the cache is attached the React tree. The tricky part is when the shell does not include a cache boundary. In the naive approach, since the cache is not part of the initial tree, it does not get retained; during the retry, a fresh cache is created, leading to duplicate requests and possibly an infinite loop as requests are endlessly created then discarded. This is the essential problem we faced several years ago when building Simple Cache Provider (later the react-cache package). Our solution is to retain in-flight caches on the root, associated by lane. The cache cleared from the root once all of the lanes that depend on it finish rendering. Because progressively rendering nested boundaries ("retry" updates) uses a different lane from the update that spawned it, we must take extra care to transfer the cache to the new lane when scheduling the retry. 
--- .../src/ReactFiberCommitWork.new.js | 30 ++- .../src/ReactFiberCommitWork.old.js | 30 ++- .../src/ReactFiberLane.new.js | 148 +++++++++++++-- .../src/ReactFiberLane.old.js | 148 +++++++++++++-- .../src/ReactFiberRoot.new.js | 1 + .../src/ReactFiberRoot.old.js | 1 + .../src/ReactFiberWorkLoop.new.js | 44 ++++- .../src/ReactFiberWorkLoop.old.js | 44 ++++- .../src/ReactInternalTypes.js | 1 + .../src/__tests__/ReactCache-test.js | 176 ++++++++++++++++++ 10 files changed, 563 insertions(+), 60 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.new.js b/packages/react-reconciler/src/ReactFiberCommitWork.new.js index d3082d5f785b8..5cb2c9ae62e50 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.new.js @@ -25,6 +25,7 @@ import type {Wakeable} from 'shared/ReactTypes'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; import type {HookFlags} from './ReactHookEffectTags'; +import type {Cache} from './ReactFiberCacheComponent'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import { @@ -56,6 +57,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { invokeGuardedCallback, @@ -1513,7 +1515,11 @@ function commitDeletion( } } -function commitWork(current: Fiber | null, finishedWork: Fiber): void { +function commitWork( + current: Fiber | null, + finishedWork: Fiber, + cache: Cache | null, +): void { if (!supportsMutation) { switch (finishedWork.tag) { case FunctionComponent: @@ -1549,11 +1555,11 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void { } case SuspenseComponent: { commitSuspenseComponent(finishedWork); - attachSuspenseRetryListeners(finishedWork); + attachSuspenseRetryListeners(finishedWork, cache); return; } case SuspenseListComponent: { - 
attachSuspenseRetryListeners(finishedWork); + attachSuspenseRetryListeners(finishedWork, cache); return; } case HostRoot: { @@ -1664,11 +1670,11 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void { } case SuspenseComponent: { commitSuspenseComponent(finishedWork); - attachSuspenseRetryListeners(finishedWork); + attachSuspenseRetryListeners(finishedWork, cache); return; } case SuspenseListComponent: { - attachSuspenseRetryListeners(finishedWork); + attachSuspenseRetryListeners(finishedWork, cache); return; } case IncompleteClassComponent: { @@ -1697,6 +1703,8 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void { hideOrUnhideAllChildren(finishedWork, isHidden); return; } + case CacheComponent: + return; } invariant( false, @@ -1772,7 +1780,10 @@ function commitSuspenseHydrationCallbacks( } } -function attachSuspenseRetryListeners(finishedWork: Fiber) { +function attachSuspenseRetryListeners( + finishedWork: Fiber, + cache: Cache | null, +) { // If this boundary just timed out, then it will have a set of wakeables. // For each wakeable, attach a listener so that when it resolves, React // attempts to re-render the boundary in the primary (pre-timeout) state. @@ -1785,7 +1796,12 @@ function attachSuspenseRetryListeners(finishedWork: Fiber) { } wakeables.forEach(wakeable => { // Memoize using the boundary fiber to prevent redundant listeners. 
- let retry = resolveRetryWakeable.bind(null, finishedWork, wakeable); + let retry = resolveRetryWakeable.bind( + null, + finishedWork, + wakeable, + cache, + ); if (!retryCache.has(wakeable)) { if (enableSchedulerTracing) { if (wakeable.__reactDoNotTraceInteractions !== true) { diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.old.js b/packages/react-reconciler/src/ReactFiberCommitWork.old.js index 95e48b1e4ae4f..254a7d5dcc077 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.old.js @@ -25,6 +25,7 @@ import type {Wakeable} from 'shared/ReactTypes'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; import type {HookFlags} from './ReactHookEffectTags'; +import type {Cache} from './ReactFiberCacheComponent'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import { @@ -56,6 +57,7 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, + CacheComponent, } from './ReactWorkTags'; import { invokeGuardedCallback, @@ -1513,7 +1515,11 @@ function commitDeletion( } } -function commitWork(current: Fiber | null, finishedWork: Fiber): void { +function commitWork( + current: Fiber | null, + finishedWork: Fiber, + cache: Cache | null, +): void { if (!supportsMutation) { switch (finishedWork.tag) { case FunctionComponent: @@ -1549,11 +1555,11 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void { } case SuspenseComponent: { commitSuspenseComponent(finishedWork); - attachSuspenseRetryListeners(finishedWork); + attachSuspenseRetryListeners(finishedWork, cache); return; } case SuspenseListComponent: { - attachSuspenseRetryListeners(finishedWork); + attachSuspenseRetryListeners(finishedWork, cache); return; } case HostRoot: { @@ -1664,11 +1670,11 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void { } case SuspenseComponent: { 
commitSuspenseComponent(finishedWork); - attachSuspenseRetryListeners(finishedWork); + attachSuspenseRetryListeners(finishedWork, cache); return; } case SuspenseListComponent: { - attachSuspenseRetryListeners(finishedWork); + attachSuspenseRetryListeners(finishedWork, cache); return; } case IncompleteClassComponent: { @@ -1697,6 +1703,8 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void { hideOrUnhideAllChildren(finishedWork, isHidden); return; } + case CacheComponent: + return; } invariant( false, @@ -1772,7 +1780,10 @@ function commitSuspenseHydrationCallbacks( } } -function attachSuspenseRetryListeners(finishedWork: Fiber) { +function attachSuspenseRetryListeners( + finishedWork: Fiber, + cache: Cache | null, +) { // If this boundary just timed out, then it will have a set of wakeables. // For each wakeable, attach a listener so that when it resolves, React // attempts to re-render the boundary in the primary (pre-timeout) state. @@ -1785,7 +1796,12 @@ function attachSuspenseRetryListeners(finishedWork: Fiber) { } wakeables.forEach(wakeable => { // Memoize using the boundary fiber to prevent redundant listeners. 
- let retry = resolveRetryWakeable.bind(null, finishedWork, wakeable); + let retry = resolveRetryWakeable.bind( + null, + finishedWork, + wakeable, + cache, + ); if (!retryCache.has(wakeable)) { if (enableSchedulerTracing) { if (wakeable.__reactDoNotTraceInteractions !== true) { diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index ae983d699ea02..8acfdade7df0a 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -742,6 +742,7 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { const entanglements = root.entanglements; const eventTimes = root.eventTimes; const expirationTimes = root.expirationTimes; + const pooledCache = root.pooledCache; // Clear the lanes that no longer have pending work let lanes = noLongerPendingLanes; @@ -753,15 +754,31 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { eventTimes[index] = NoTimestamp; expirationTimes[index] = NoTimestamp; - lanes &= ~lane; - } + if (enableCache) { + // Subsequent loads in this lane should use a fresh cache. + // TODO: If a cache is no longer associated with any lane, we should issue + // an abort signal. + const caches = root.caches; + if (caches !== null) { + if (remainingLanes === 0) { + // Fast path. Clear all caches at once. + root.caches = createLaneMap(null); + root.pooledCache = null; + } else { + const cache = caches[index]; + if (cache !== null) { + caches[index] = null; + if (cache === pooledCache) { + // The pooled cache is now part of the committed tree. We'll now + // clear it so that the next transition gets a fresh cache. + root.pooledCache = null; + } + } + } + } + } - if (enableCache) { - // Clear the pooled cache so subsequent updates get fresh data. - // TODO: This is very naive and only works if the shell of a cache boundary - // doesn't suspend. 
The next, key feature is to preserve caches across - // multiple attempts (suspend -> ping) to render a new tree. - root.pooledCache = null; + lanes &= ~lane; } } @@ -785,12 +802,62 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { return (null: any); } - // Check if there's a pooled cache. This is really just a batching heuristic - // so that two transitions that happen in a similar timeframe can share the - // same cache. - const pooledCache = root.pooledCache; - if (pooledCache !== null) { - return pooledCache; + // 1. Check if the currently rendering lanes already have a pending cache + // associated with them. If so, use this cache. If for some reason two or + // more lanes have different caches, pick the highest priority one. + // 2. Otherwise, check the root's `pooledCache`. This the oldest cache + // that has not yet been committed. This is really just a batching + // heuristic so that two transitions that happen in a similar timeframe can + // share the same cache. If it exists, use this cache. + // 3. If there's no pooled cache, create a fresh cache. This is now the + // pooled cache. + + let caches = root.caches; + + // TODO: There should be a primary render lane, and we should use whatever + // cache is associated with that one. + if (caches === null) { + caches = root.caches = createLaneMap(null); + } else { + let lanes = renderLanes; + while (lanes > 0) { + const lane = getHighestPriorityLane(lanes); + const index = laneToIndex(lane); + const inProgressCache: Cache | null = caches[index]; + if (inProgressCache !== null) { + // This render lane already has a cache associated with it. Reuse it. + + // If the other render lanes are not already associated with a cache, + // associate them with this one. 
+ let otherRenderLanes = renderLanes & ~lane; + while (otherRenderLanes > 0) { + const otherIndex = pickArbitraryLaneIndex(otherRenderLanes); + const otherLane = 1 << otherIndex; + // We shouldn't overwrite a cache that already exists, since that could + // lead to dropped requests or data, i.e. if the current render suspends. + if (caches[otherIndex] === null) { + caches[otherIndex] = inProgressCache; + } + otherRenderLanes &= ~otherLane; + } + return inProgressCache; + } + lanes &= ~lane; + } + // There are no in-progress caches associated with the current render. Check + // if there's a pooled cache. + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + // Associate the pooled cache with each of the render lanes. + lanes = renderLanes; + while (lanes > 0) { + const index = pickArbitraryLaneIndex(lanes); + const lane = 1 << index; + caches[index] = pooledCache; + lanes &= ~lane; + } + return pooledCache; + } } // Create a fresh cache. @@ -801,8 +868,61 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { // This is now the pooled cache. root.pooledCache = freshCache; + + // Associate the new cache with each of the render lanes. + let lanes = renderLanes; + while (lanes > 0) { + const index = pickArbitraryLaneIndex(lanes); + const lane = 1 << index; + caches[index] = freshCache; + lanes &= ~lane; + } + return freshCache; } + +export function getWorkInProgressCache( + root: FiberRoot, + renderLanes: Lanes, +): Cache | null { + if (enableCache) { + // TODO: There should be a primary render lane, and we should use whatever + // cache is associated with that one. 
+ const caches = root.caches; + if (caches !== null) { + let lanes = renderLanes; + while (lanes > 0) { + const lane = getHighestPriorityLane(lanes); + const index = laneToIndex(lane); + const inProgressCache: Cache | null = caches[index]; + if (inProgressCache !== null) { + return inProgressCache; + } + lanes &= ~lane; + } + } + } + return null; +} + +export function transferCacheToSpawnedLane( + root: FiberRoot, + cache: Cache, + lane: Lane, +) { + const index = laneToIndex(lane); + let caches = root.caches; + if (caches !== null) { + const existingCache: Cache | null = caches[index]; + if (existingCache === null) { + caches[index] = cache; + } + } else { + caches = root.caches = createLaneMap(null); + caches[index] = cache; + } +} + export function getBumpedLaneForHydration( root: FiberRoot, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index 547a67b00c5a0..4b07ae1f0e7f2 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -742,6 +742,7 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { const entanglements = root.entanglements; const eventTimes = root.eventTimes; const expirationTimes = root.expirationTimes; + const pooledCache = root.pooledCache; // Clear the lanes that no longer have pending work let lanes = noLongerPendingLanes; @@ -753,15 +754,31 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { eventTimes[index] = NoTimestamp; expirationTimes[index] = NoTimestamp; - lanes &= ~lane; - } + if (enableCache) { + // Subsequent loads in this lane should use a fresh cache. + // TODO: If a cache is no longer associated with any lane, we should issue + // an abort signal. + const caches = root.caches; + if (caches !== null) { + if (remainingLanes === 0) { + // Fast path. Clear all caches at once. 
+ root.caches = createLaneMap(null); + root.pooledCache = null; + } else { + const cache = caches[index]; + if (cache !== null) { + caches[index] = null; + if (cache === pooledCache) { + // The pooled cache is now part of the committed tree. We'll now + // clear it so that the next transition gets a fresh cache. + root.pooledCache = null; + } + } + } + } + } - if (enableCache) { - // Clear the pooled cache so subsequent updates get fresh data. - // TODO: This is very naive and only works if the shell of a cache boundary - // doesn't suspend. The next, key feature is to preserve caches across - // multiple attempts (suspend -> ping) to render a new tree. - root.pooledCache = null; + lanes &= ~lane; } } @@ -785,12 +802,62 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { return (null: any); } - // Check if there's a pooled cache. This is really just a batching heuristic - // so that two transitions that happen in a similar timeframe can share the - // same cache. - const pooledCache = root.pooledCache; - if (pooledCache !== null) { - return pooledCache; + // 1. Check if the currently rendering lanes already have a pending cache + // associated with them. If so, use this cache. If for some reason two or + // more lanes have different caches, pick the highest priority one. + // 2. Otherwise, check the root's `pooledCache`. This the oldest cache + // that has not yet been committed. This is really just a batching + // heuristic so that two transitions that happen in a similar timeframe can + // share the same cache. If it exists, use this cache. + // 3. If there's no pooled cache, create a fresh cache. This is now the + // pooled cache. + + let caches = root.caches; + + // TODO: There should be a primary render lane, and we should use whatever + // cache is associated with that one. 
+ if (caches === null) { + caches = root.caches = createLaneMap(null); + } else { + let lanes = renderLanes; + while (lanes > 0) { + const lane = getHighestPriorityLane(lanes); + const index = laneToIndex(lane); + const inProgressCache: Cache | null = caches[index]; + if (inProgressCache !== null) { + // This render lane already has a cache associated with it. Reuse it. + + // If the other render lanes are not already associated with a cache, + // associate them with this one. + let otherRenderLanes = renderLanes & ~lane; + while (otherRenderLanes > 0) { + const otherIndex = pickArbitraryLaneIndex(otherRenderLanes); + const otherLane = 1 << otherIndex; + // We shouldn't overwrite a cache that already exists, since that could + // lead to dropped requests or data, i.e. if the current render suspends. + if (caches[otherIndex] === null) { + caches[otherIndex] = inProgressCache; + } + otherRenderLanes &= ~otherLane; + } + return inProgressCache; + } + lanes &= ~lane; + } + // There are no in-progress caches associated with the current render. Check + // if there's a pooled cache. + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + // Associate the pooled cache with each of the render lanes. + lanes = renderLanes; + while (lanes > 0) { + const index = pickArbitraryLaneIndex(lanes); + const lane = 1 << index; + caches[index] = pooledCache; + lanes &= ~lane; + } + return pooledCache; + } } // Create a fresh cache. @@ -801,8 +868,61 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { // This is now the pooled cache. root.pooledCache = freshCache; + + // Associate the new cache with each of the render lanes. 
+ let lanes = renderLanes; + while (lanes > 0) { + const index = pickArbitraryLaneIndex(lanes); + const lane = 1 << index; + caches[index] = freshCache; + lanes &= ~lane; + } + return freshCache; } + +export function getWorkInProgressCache( + root: FiberRoot, + renderLanes: Lanes, +): Cache | null { + if (enableCache) { + // TODO: There should be a primary render lane, and we should use whatever + // cache is associated with that one. + const caches = root.caches; + if (caches !== null) { + let lanes = renderLanes; + while (lanes > 0) { + const lane = getHighestPriorityLane(lanes); + const index = laneToIndex(lane); + const inProgressCache: Cache | null = caches[index]; + if (inProgressCache !== null) { + return inProgressCache; + } + lanes &= ~lane; + } + } + } + return null; +} + +export function transferCacheToSpawnedLane( + root: FiberRoot, + cache: Cache, + lane: Lane, +) { + const index = laneToIndex(lane); + let caches = root.caches; + if (caches !== null) { + const existingCache: Cache | null = caches[index]; + if (existingCache === null) { + caches[index] = cache; + } + } else { + caches = root.caches = createLaneMap(null); + caches[index] = cache; + } +} + export function getBumpedLaneForHydration( root: FiberRoot, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberRoot.new.js b/packages/react-reconciler/src/ReactFiberRoot.new.js index f5b6a3857fd3a..cf46bc7bae382 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.new.js +++ b/packages/react-reconciler/src/ReactFiberRoot.new.js @@ -54,6 +54,7 @@ function FiberRootNode(containerInfo, tag, hydrate) { this.entanglements = createLaneMap(NoLanes); if (enableCache) { + this.caches = createLaneMap(null); this.pooledCache = null; } diff --git a/packages/react-reconciler/src/ReactFiberRoot.old.js b/packages/react-reconciler/src/ReactFiberRoot.old.js index da87187ae1bb5..4ebb3f7804b9c 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.old.js +++ 
b/packages/react-reconciler/src/ReactFiberRoot.old.js @@ -54,6 +54,7 @@ function FiberRootNode(containerInfo, tag, hydrate) { this.entanglements = createLaneMap(NoLanes); if (enableCache) { + this.caches = createLaneMap(null); this.pooledCache = null; } diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js index 390feac653140..538d85f4005b6 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js @@ -16,6 +16,7 @@ import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; import type {Effect as HookEffect} from './ReactFiberHooks.new'; import type {StackCursor} from './ReactFiberStack.new'; import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.new'; +import type {Cache} from './ReactFiberCacheComponent'; import { warnAboutDeprecatedLifecycles, @@ -178,6 +179,8 @@ import { markRootFinished, schedulerPriorityToLanePriority, lanePriorityToSchedulerPriority, + getWorkInProgressCache, + transferCacheToSpawnedLane, } from './ReactFiberLane.new'; import {requestCurrentTransition, NoTransition} from './ReactFiberTransition'; import {beginWork as originalBeginWork} from './ReactFiberBeginWork.new'; @@ -1923,6 +1926,12 @@ function commitRootImpl(root, renderPriorityLevel) { // So we can clear these now to allow a new callback to be scheduled. root.callbackNode = null; + // TODO: This is only used when a render spawns a retry. So we could pass this + // from the render phase instead, only for the relevant RootExitStatuses. + // However, we may end up using this same strategy for other types of spawned + // work, like Offscreen. + const cache = getWorkInProgressCache(root, lanes); + // Update the first and last pending times on this root. The new first // pending time is whatever is left on the root fiber.
let remainingLanes = mergeLanes(finishedWork.lanes, finishedWork.childLanes); @@ -2039,6 +2048,7 @@ function commitRootImpl(root, renderPriorityLevel) { null, root, renderPriorityLevel, + cache, ); if (hasCaughtError()) { invariant(nextEffect !== null, 'Should be working on an effect.'); @@ -2048,7 +2058,7 @@ function commitRootImpl(root, renderPriorityLevel) { } } else { try { - commitMutationEffects(root, renderPriorityLevel); + commitMutationEffects(root, renderPriorityLevel, cache); } catch (error) { invariant(nextEffect !== null, 'Should be working on an effect.'); captureCommitPhaseError(nextEffect, error); @@ -2303,7 +2313,11 @@ function commitBeforeMutationEffects() { } } -function commitMutationEffects(root: FiberRoot, renderPriorityLevel) { +function commitMutationEffects( + root: FiberRoot, + renderPriorityLevel: ReactPriorityLevel, + cache: Cache | null, +) { // TODO: Should probably move the bulk of this function to commitWork. while (nextEffect !== null) { setCurrentDebugFiberInDEV(nextEffect); @@ -2352,7 +2366,7 @@ function commitMutationEffects(root: FiberRoot, renderPriorityLevel) { // Update const current = nextEffect.alternate; - commitWork(current, nextEffect); + commitWork(current, nextEffect, cache); break; } case Hydrating: { @@ -2364,12 +2378,12 @@ function commitMutationEffects(root: FiberRoot, renderPriorityLevel) { // Update const current = nextEffect.alternate; - commitWork(current, nextEffect); + commitWork(current, nextEffect, cache); break; } case Update: { const current = nextEffect.alternate; - commitWork(current, nextEffect); + commitWork(current, nextEffect, cache); break; } case Deletion: { @@ -2749,7 +2763,11 @@ export function pingSuspendedRoot( schedulePendingInteractions(root, pingedLanes); } -function retryTimedOutBoundary(boundaryFiber: Fiber, retryLane: Lane) { +function retryTimedOutBoundary( + boundaryFiber: Fiber, + dataCache: Cache | null, + retryLane: Lane, +) { // The boundary fiber (a Suspense component or 
SuspenseList component) // previously was rendered in its fallback state. One of the promises that // suspended it has resolved, which means at least part of the tree was @@ -2764,6 +2782,10 @@ function retryTimedOutBoundary(boundaryFiber: Fiber, retryLane: Lane) { markRootUpdated(root, retryLane, eventTime); ensureRootIsScheduled(root, eventTime); schedulePendingInteractions(root, retryLane); + + if (dataCache !== null) { + transferCacheToSpawnedLane(root, dataCache, retryLane); + } } } @@ -2773,10 +2795,14 @@ export function retryDehydratedSuspenseBoundary(boundaryFiber: Fiber) { if (suspenseState !== null) { retryLane = suspenseState.retryLane; } - retryTimedOutBoundary(boundaryFiber, retryLane); + retryTimedOutBoundary(boundaryFiber, null, retryLane); } -export function resolveRetryWakeable(boundaryFiber: Fiber, wakeable: Wakeable) { +export function resolveRetryWakeable( + boundaryFiber: Fiber, + wakeable: Wakeable, + dataCache: Cache | null, +) { let retryLane = NoLane; // Default let retryCache: WeakSet | Set | null; if (enableSuspenseServerRenderer) { @@ -2808,7 +2834,7 @@ export function resolveRetryWakeable(boundaryFiber: Fiber, wakeable: Wakeable) { retryCache.delete(wakeable); } - retryTimedOutBoundary(boundaryFiber, retryLane); + retryTimedOutBoundary(boundaryFiber, dataCache, retryLane); } // Computes the next Just Noticeable Difference (JND) boundary. 
diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js index 0aa21dbfa85b4..4235b6c7e469d 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js @@ -16,6 +16,7 @@ import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; import type {Effect as HookEffect} from './ReactFiberHooks.old'; import type {StackCursor} from './ReactFiberStack.old'; import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.old'; +import type {Cache} from './ReactFiberCacheComponent'; import { warnAboutDeprecatedLifecycles, @@ -178,6 +179,8 @@ import { markRootFinished, schedulerPriorityToLanePriority, lanePriorityToSchedulerPriority, + getWorkInProgressCache, + transferCacheToSpawnedLane, } from './ReactFiberLane.old'; import {requestCurrentTransition, NoTransition} from './ReactFiberTransition'; import {beginWork as originalBeginWork} from './ReactFiberBeginWork.old'; @@ -1923,6 +1926,12 @@ function commitRootImpl(root, renderPriorityLevel) { // So we can clear these now to allow a new callback to be scheduled. root.callbackNode = null; + // TODO: This is only used when a render spawns a retry. So we could pass this + // from the render phase instead, only for the relevant RootExitStatuses. + // However, we may end up using this same strategy for other types of spawned + // work, like Offscreen. + const cache = getWorkInProgressCache(root, lanes); + // Update the first and last pending times on this root. The new first // pending time is whatever is left on the root fiber.
let remainingLanes = mergeLanes(finishedWork.lanes, finishedWork.childLanes); @@ -2039,6 +2048,7 @@ function commitRootImpl(root, renderPriorityLevel) { null, root, renderPriorityLevel, + cache, ); if (hasCaughtError()) { invariant(nextEffect !== null, 'Should be working on an effect.'); @@ -2048,7 +2058,7 @@ function commitRootImpl(root, renderPriorityLevel) { } } else { try { - commitMutationEffects(root, renderPriorityLevel); + commitMutationEffects(root, renderPriorityLevel, cache); } catch (error) { invariant(nextEffect !== null, 'Should be working on an effect.'); captureCommitPhaseError(nextEffect, error); @@ -2303,7 +2313,11 @@ function commitBeforeMutationEffects() { } } -function commitMutationEffects(root: FiberRoot, renderPriorityLevel) { +function commitMutationEffects( + root: FiberRoot, + renderPriorityLevel: ReactPriorityLevel, + cache: Cache | null, +) { // TODO: Should probably move the bulk of this function to commitWork. while (nextEffect !== null) { setCurrentDebugFiberInDEV(nextEffect); @@ -2352,7 +2366,7 @@ function commitMutationEffects(root: FiberRoot, renderPriorityLevel) { // Update const current = nextEffect.alternate; - commitWork(current, nextEffect); + commitWork(current, nextEffect, cache); break; } case Hydrating: { @@ -2364,12 +2378,12 @@ function commitMutationEffects(root: FiberRoot, renderPriorityLevel) { // Update const current = nextEffect.alternate; - commitWork(current, nextEffect); + commitWork(current, nextEffect, cache); break; } case Update: { const current = nextEffect.alternate; - commitWork(current, nextEffect); + commitWork(current, nextEffect, cache); break; } case Deletion: { @@ -2749,7 +2763,11 @@ export function pingSuspendedRoot( schedulePendingInteractions(root, pingedLanes); } -function retryTimedOutBoundary(boundaryFiber: Fiber, retryLane: Lane) { +function retryTimedOutBoundary( + boundaryFiber: Fiber, + dataCache: Cache | null, + retryLane: Lane, +) { // The boundary fiber (a Suspense component or 
SuspenseList component) // previously was rendered in its fallback state. One of the promises that // suspended it has resolved, which means at least part of the tree was @@ -2764,6 +2782,10 @@ function retryTimedOutBoundary(boundaryFiber: Fiber, retryLane: Lane) { markRootUpdated(root, retryLane, eventTime); ensureRootIsScheduled(root, eventTime); schedulePendingInteractions(root, retryLane); + + if (dataCache !== null) { + transferCacheToSpawnedLane(root, dataCache, retryLane); + } } } @@ -2773,10 +2795,14 @@ export function retryDehydratedSuspenseBoundary(boundaryFiber: Fiber) { if (suspenseState !== null) { retryLane = suspenseState.retryLane; } - retryTimedOutBoundary(boundaryFiber, retryLane); + retryTimedOutBoundary(boundaryFiber, null, retryLane); } -export function resolveRetryWakeable(boundaryFiber: Fiber, wakeable: Wakeable) { +export function resolveRetryWakeable( + boundaryFiber: Fiber, + wakeable: Wakeable, + dataCache: Cache | null, +) { let retryLane = NoLane; // Default let retryCache: WeakSet | Set | null; if (enableSuspenseServerRenderer) { @@ -2808,7 +2834,7 @@ export function resolveRetryWakeable(boundaryFiber: Fiber, wakeable: Wakeable) { retryCache.delete(wakeable); } - retryTimedOutBoundary(boundaryFiber, retryLane); + retryTimedOutBoundary(boundaryFiber, dataCache, retryLane); } // Computes the next Just Noticeable Difference (JND) boundary. 
diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index 288893279e040..b6651e92671ae 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -237,6 +237,7 @@ type BaseFiberRootProperties = {| entangledLanes: Lanes, entanglements: LaneMap, + caches: Array | null, pooledCache: Cache | null, |}; diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index 8a2d0a1fcbcb3..bc133da57f772 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -86,6 +86,11 @@ describe('ReactCache', () => { } } + function mutateRemoteTextService() { + textService = new Map(); + textServiceVersion++; + } + function resolveText(text) { const request = textService.get(text); if (request !== undefined) { @@ -180,4 +185,175 @@ describe('ReactCache', () => { expect(Scheduler).toHaveYielded(['A', 'A']); expect(root).toMatchRenderedOutput('AA'); }); + + // @gate experimental + test('new content inside an existing Cache boundary should re-use already cached data', async () => { + function App({showMore}) { + return ( + + }> + + + {showMore ? ( + }> + + + ) : null} + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'A [v1]', + ]); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Simulate a server mutation. 
+ mutateRemoteTextService(); + + // Add a new cache boundary + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'A [v1]', + // New tree should use already cached data + 'A [v1]', + ]); + expect(root).toMatchRenderedOutput('A [v1]A [v1]'); + }); + + // @gate experimental + test('a new Cache boundary uses fresh cache', async () => { + // The only difference from the previous test is that the "Show More" + // content is wrapped in a nested boundary + function App({showMore}) { + return ( + + }> + + + {showMore ? ( + + }> + + + + ) : null} + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'A [v1]', + ]); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Simulate a server mutation. + mutateRemoteTextService(); + + // Add a new cache boundary + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'A [v1]', + // New tree should load fresh data. + 'Cache miss! [A]', + 'Loading...', + 'A [v2]', + ]); + expect(root).toMatchRenderedOutput('A [v1]A [v2]'); + }); + + // @gate experimental + test('inner content uses same cache as shell if spawned by the same transition', async () => { + const root = ReactNoop.createRoot(); + + function App() { + return ( + + }> + {/* The shell reads A */} + + {/* The inner content reads both A and B */} + }> + + + + + + + + ); + } + + function Shell({children}) { + readText('A'); + return ( + <> +
+ +
+
{children}
+ + ); + } + + function Content() { + readText('A'); + readText('B'); + return ; + } + + await ReactNoop.act(async () => { + root.render(); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading shell...']); + expect(root).toMatchRenderedOutput('Loading shell...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded([ + 'Shell', + // There's a cache miss for B, because it hasn't been read yet. But not + // A, because it was cached when we rendered the shell. + 'Cache miss! [B]', + 'Loading content...', + ]); + expect(root).toMatchRenderedOutput( + <> +
Shell
+
Loading content...
+ , + ); + + await ReactNoop.act(async () => { + await resolveText('B'); + }); + expect(Scheduler).toHaveYielded(['Content']); + expect(root).toMatchRenderedOutput( + <> +
Shell
+
Content
+ , + ); + }); }); From 893a831434aed3d18afd47bd1d14c56125747ab1 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Mon, 14 Dec 2020 01:53:16 -0600 Subject: [PATCH 05/30] Cache refreshing Implements useRefresh, a method to invalidate the cache and request new data. It will find the nearest boundary, clear its cache, and schedule an update to re-render with fresh data. We had discussed calling this method `useCacheInvalidation`. The problem I have with that name is that it is bad. I went with `useRefresh` because it, by contrast, is good. One might object is that it clashes with the name for "Fast Refresh" but I disagree. It's experimental anyway so we can bikeshed the name before release. --- .../src/server/ReactPartialRendererHooks.js | 5 + .../src/ReactFiberBeginWork.new.js | 100 +++++++-- .../src/ReactFiberBeginWork.old.js | 100 +++++++-- .../src/ReactFiberCommitWork.new.js | 25 +++ .../src/ReactFiberCommitWork.old.js | 25 +++ .../src/ReactFiberCompleteWork.new.js | 23 ++- .../src/ReactFiberCompleteWork.old.js | 23 ++- .../src/ReactFiberHooks.new.js | 79 +++++++- .../src/ReactFiberHooks.old.js | 79 +++++++- .../src/ReactFiberNewContext.new.js | 115 ++++++++++- .../src/ReactFiberNewContext.old.js | 115 ++++++++++- .../src/ReactFiberUnwindWork.new.js | 10 +- .../src/ReactFiberUnwindWork.old.js | 10 +- .../src/ReactInternalTypes.js | 4 +- .../src/__tests__/ReactCache-test.js | 190 ++++++++++++++++++ .../react-server/src/ReactFlightServer.js | 10 + .../src/ReactSuspenseTestUtils.js | 1 + packages/react/index.classic.fb.js | 1 + packages/react/index.experimental.js | 1 + packages/react/index.js | 1 + packages/react/index.modern.fb.js | 1 + packages/react/src/React.js | 2 + packages/react/src/ReactHooks.js | 6 + .../unstable-shared-subset.experimental.js | 1 + scripts/error-codes/codes.json | 3 +- 25 files changed, 884 insertions(+), 46 deletions(-) diff --git a/packages/react-dom/src/server/ReactPartialRendererHooks.js 
b/packages/react-dom/src/server/ReactPartialRendererHooks.js index 3a543aa337b6c..7759cc62c83bd 100644 --- a/packages/react-dom/src/server/ReactPartialRendererHooks.js +++ b/packages/react-dom/src/server/ReactPartialRendererHooks.js @@ -489,6 +489,10 @@ function useOpaqueIdentifier(): OpaqueIDType { ); } +function useRefresh(): () => void { + invariant(false, 'Not implemented.'); +} + function noop(): void {} export let currentPartialRenderer: PartialRenderer = (null: any); @@ -520,4 +524,5 @@ export const Dispatcher: DispatcherType = { if (enableCache) { Dispatcher.getCacheForType = getCacheForType; + Dispatcher.useRefresh = useRefresh; } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 863e015074009..eeb513c4218b7 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -133,6 +133,7 @@ import { isSuspenseInstanceFallback, registerSuspenseInstanceRetry, supportsHydration, + isPrimaryRenderer, } from './ReactFiberHostConfig'; import type {SuspenseInstance} from './ReactFiberHostConfig'; import {shouldSuspend} from './ReactFiberReconciler'; @@ -151,6 +152,7 @@ import {findFirstSuspended} from './ReactFiberSuspenseComponent.new'; import { pushProvider, propagateContextChange, + propagateCacheRefresh, readContext, prepareToReadContext, calculateChangedBits, @@ -662,22 +664,84 @@ function updateCacheComponent( return null; } - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - - const cache: Cache = - current === null - ? requestFreshCache(root, renderLanes) - : current.memoizedState; - - // TODO: Propagate changes, once refreshing exists. - pushProvider(workInProgress, CacheContext, cache); + // Read directly from the context. 
We don't set up a context dependency + // because the propagation function automatically includes CacheComponents in + // its search. + const parentCache: Cache | null = isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2; + + let ownCache: Cache | null = null; + // TODO: Fast path if parent has a new cache. Merely an optimization. Might + // not be worth it. + if (false) { + // The parent boundary also has a new cache. We're either inside a new tree, + // or there was a refresh. In both cases, we should use the parent cache. + ownCache = null; + } else { + if (current === null) { + // This is a newly mounted component. Request a fresh cache. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + const freshCache = requestFreshCache(root, renderLanes); + // This may be the same as the parent cache, like if the current render + // spawned from a previous render that already committed. Otherwise, this + // is the root of a cache consistency boundary. + if (freshCache !== parentCache) { + ownCache = freshCache; + pushProvider(workInProgress, CacheContext, freshCache); + // No need to propagate the refresh, because this is a new tree. + } else { + // Use the parent cache + ownCache = null; + } + } else { + // This component already mounted. + if (includesSomeLane(renderLanes, updateLanes)) { + // A refresh was scheduled. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + const freshCache = requestFreshCache(root, renderLanes); + if (freshCache !== parentCache) { + ownCache = freshCache; + pushProvider(workInProgress, CacheContext, freshCache); + // Refreshes propagate through the entire subtree. The refreshed cache + // will override nested caches. 
+ propagateCacheRefresh(workInProgress, renderLanes); + } else { + // The fresh cache is the same as the parent cache. I think this + // unreachable in practice, because this means the parent cache was + // refreshed in the same render. So we would have already handled this + // in the earlier branch, where we check if the parent is new. + ownCache = null; + } + } else { + // Reuse the memoized cache. + const prevCache: Cache | null = current.memoizedState; + if (prevCache !== null) { + ownCache = prevCache; + // There was no refresh, so no need to propagate to nested boundaries. + pushProvider(workInProgress, CacheContext, prevCache); + } else { + ownCache = null; + } + } + } + } - workInProgress.memoizedState = cache; + // If this CacheComponent is the root of its tree, then `memoizedState` will + // point to a cache object. Otherwise, a null state indicates that this + // CacheComponent inherits from a parent boundary. We can use this to infer + // whether to push/pop the cache context. 
+ workInProgress.memoizedState = ownCache; const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -3274,8 +3338,10 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const cache: Cache = current.memoizedState; - pushProvider(workInProgress, CacheContext, cache); + const ownCache: Cache | null = workInProgress.memoizedState; + if (ownCache !== null) { + pushProvider(workInProgress, CacheContext, ownCache); + } } break; } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 8f474df3f8c92..76f75e06e319b 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -133,6 +133,7 @@ import { isSuspenseInstanceFallback, registerSuspenseInstanceRetry, supportsHydration, + isPrimaryRenderer, } from './ReactFiberHostConfig'; import type {SuspenseInstance} from './ReactFiberHostConfig'; import {shouldSuspend} from './ReactFiberReconciler'; @@ -151,6 +152,7 @@ import {findFirstSuspended} from './ReactFiberSuspenseComponent.old'; import { pushProvider, propagateContextChange, + propagateCacheRefresh, readContext, prepareToReadContext, calculateChangedBits, @@ -662,22 +664,84 @@ function updateCacheComponent( return null; } - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - - const cache: Cache = - current === null - ? requestFreshCache(root, renderLanes) - : current.memoizedState; - - // TODO: Propagate changes, once refreshing exists. - pushProvider(workInProgress, CacheContext, cache); + // Read directly from the context. We don't set up a context dependency + // because the propagation function automatically includes CacheComponents in + // its search. + const parentCache: Cache | null = isPrimaryRenderer + ? 
CacheContext._currentValue + : CacheContext._currentValue2; + + let ownCache: Cache | null = null; + // TODO: Fast path if parent has a new cache. Merely an optimization. Might + // not be worth it. + if (false) { + // The parent boundary also has a new cache. We're either inside a new tree, + // or there was a refresh. In both cases, we should use the parent cache. + ownCache = null; + } else { + if (current === null) { + // This is a newly mounted component. Request a fresh cache. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + const freshCache = requestFreshCache(root, renderLanes); + // This may be the same as the parent cache, like if the current render + // spawned from a previous render that already committed. Otherwise, this + // is the root of a cache consistency boundary. + if (freshCache !== parentCache) { + ownCache = freshCache; + pushProvider(workInProgress, CacheContext, freshCache); + // No need to propagate the refresh, because this is a new tree. + } else { + // Use the parent cache + ownCache = null; + } + } else { + // This component already mounted. + if (includesSomeLane(renderLanes, updateLanes)) { + // A refresh was scheduled. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + const freshCache = requestFreshCache(root, renderLanes); + if (freshCache !== parentCache) { + ownCache = freshCache; + pushProvider(workInProgress, CacheContext, freshCache); + // Refreshes propagate through the entire subtree. The refreshed cache + // will override nested caches. + propagateCacheRefresh(workInProgress, renderLanes); + } else { + // The fresh cache is the same as the parent cache. I think this + // unreachable in practice, because this means the parent cache was + // refreshed in the same render. 
So we would have already handled this + // in the earlier branch, where we check if the parent is new. + ownCache = null; + } + } else { + // Reuse the memoized cache. + const prevCache: Cache | null = current.memoizedState; + if (prevCache !== null) { + ownCache = prevCache; + // There was no refresh, so no need to propagate to nested boundaries. + pushProvider(workInProgress, CacheContext, prevCache); + } else { + ownCache = null; + } + } + } + } - workInProgress.memoizedState = cache; + // If this CacheComponent is the root of its tree, then `memoizedState` will + // point to a cache object. Otherwise, a null state indicates that this + // CacheComponent inherits from a parent boundary. We can use this to infer + // whether to push/pop the cache context. + workInProgress.memoizedState = ownCache; const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -3274,8 +3338,10 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const cache: Cache = current.memoizedState; - pushProvider(workInProgress, CacheContext, cache); + const ownCache: Cache | null = workInProgress.memoizedState; + if (ownCache !== null) { + pushProvider(workInProgress, CacheContext, ownCache); + } } break; } diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.new.js b/packages/react-reconciler/src/ReactFiberCommitWork.new.js index 5cb2c9ae62e50..5dfdfff1c5165 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.new.js @@ -37,6 +37,7 @@ import { enableFundamentalAPI, enableSuspenseCallback, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import { FunctionComponent, @@ -806,6 +807,30 @@ function commitLifeCycles( case OffscreenComponent: case LegacyHiddenComponent: return; + case CacheComponent: { + if (enableCache) { + if (current !== null) { + const oldCache: Cache | null = current.memoizedState; + if 
(oldCache !== null) { + const oldCacheProviders = oldCache.providers; + if (oldCacheProviders) { + oldCacheProviders.delete(current); + oldCacheProviders.delete(finishedWork); + } + } + } + const newCache: Cache | null = finishedWork.memoizedState; + if (newCache !== null) { + const newCacheProviders = newCache.providers; + if (newCacheProviders === null) { + newCache.providers = new Set([finishedWork]); + } else { + newCacheProviders.add(finishedWork); + } + } + } + return; + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.old.js b/packages/react-reconciler/src/ReactFiberCommitWork.old.js index 254a7d5dcc077..4f8c631b5235f 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.old.js @@ -37,6 +37,7 @@ import { enableFundamentalAPI, enableSuspenseCallback, enableScopeAPI, + enableCache, } from 'shared/ReactFeatureFlags'; import { FunctionComponent, @@ -806,6 +807,30 @@ function commitLifeCycles( case OffscreenComponent: case LegacyHiddenComponent: return; + case CacheComponent: { + if (enableCache) { + if (current !== null) { + const oldCache: Cache | null = current.memoizedState; + if (oldCache !== null) { + const oldCacheProviders = oldCache.providers; + if (oldCacheProviders) { + oldCacheProviders.delete(current); + oldCacheProviders.delete(finishedWork); + } + } + } + const newCache: Cache | null = finishedWork.memoizedState; + if (newCache !== null) { + const newCacheProviders = newCache.providers; + if (newCacheProviders === null) { + newCache.providers = new Set([finishedWork]); + } else { + newCacheProviders.add(finishedWork); + } + } + } + return; + } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 458bb7df21307..3aa000e4ca810 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ 
b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -1488,7 +1488,28 @@ function completeWork( } case CacheComponent: { if (enableCache) { - popProvider(CacheContext, workInProgress); + // If the cache provided by this boundary has changed, schedule an + // effect to add this component to the cache's providers, and to remove + // it from the old cache. + // TODO: Schedule for Passive phase + const ownCache: Cache | null = workInProgress.memoizedState; + if (current === null) { + if (ownCache !== null) { + // This is a cache provider. + popProvider(CacheContext, workInProgress); + // Set up a refresh subscription. + workInProgress.flags |= Update; + } + } else { + if (ownCache !== null) { + // This is a cache provider. + popProvider(CacheContext, workInProgress); + } + if (ownCache !== current.memoizedState) { + // Cache changed. Create or update a refresh subscription. + workInProgress.flags |= Update; + } + } bubbleProperties(workInProgress); return null; } diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index f3c3efafcb9e0..b20910ddc5ece 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -1488,7 +1488,28 @@ function completeWork( } case CacheComponent: { if (enableCache) { - popProvider(CacheContext, workInProgress); + // If the cache provided by this boundary has changed, schedule an + // effect to add this component to the cache's providers, and to remove + // it from the old cache. + // TODO: Schedule for Passive phase + const ownCache: Cache | null = workInProgress.memoizedState; + if (current === null) { + if (ownCache !== null) { + // This is a cache provider. + popProvider(CacheContext, workInProgress); + // Set up a refresh subscription. + workInProgress.flags |= Update; + } + } else { + if (ownCache !== null) { + // This is a cache provider. 
+ popProvider(CacheContext, workInProgress); + } + if (ownCache !== current.memoizedState) { + // Cache changed. Create or update a refresh subscription. + workInProgress.flags |= Update; + } + } bubbleProperties(workInProgress); return null; } diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index f6afdf226c797..04dfe81339ece 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -19,6 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; +import type {Cache} from './ReactFiberCacheComponent'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -1708,6 +1709,43 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { return id; } +function mountRefresh() { + const cache: Cache | null = readContext(CacheContext); + return mountCallback(refreshCache.bind(null, cache), [cache]); +} + +function updateRefresh() { + const cache: Cache | null = readContext(CacheContext); + return updateCallback(refreshCache.bind(null, cache), [cache]); +} + +function refreshCache(cache: Cache | null) { + if (cache !== null) { + const providers = cache.providers; + if (providers !== null) { + providers.forEach(scheduleCacheRefresh); + } + } else { + // TODO: Warn if cache is null? + } +} + +function scheduleCacheRefresh(cacheComponentFiber: Fiber) { + // Inlined startTransition + // TODO: Maybe we shouldn't automatically give this transition priority. Are + // there valid use cases for a high-pri refresh? Like if the content is + // super stale and you want to immediately hide it. 
+ const prevTransition = ReactCurrentBatchConfig.transition; + ReactCurrentBatchConfig.transition = 1; + try { + const eventTime = requestEventTime(); + const lane = requestUpdateLane(cacheComponentFiber); + scheduleUpdateOnFiber(cacheComponentFiber, lane, eventTime); + } finally { + ReactCurrentBatchConfig.transition = prevTransition; + } +} + function dispatchAction( fiber: Fiber, queue: UpdateQueue, @@ -1819,7 +1857,7 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - const cache = readContext(CacheContext); + const cache: Cache | null = readContext(CacheContext); invariant( cache !== null, 'Tried to fetch data, but no cache was found. To fix, wrap your ' + @@ -1867,6 +1905,7 @@ export const ContextOnlyDispatcher: Dispatcher = { }; if (enableCache) { (ContextOnlyDispatcher: Dispatcher).getCacheForType = getCacheForType; + (ContextOnlyDispatcher: Dispatcher).useRefresh = throwInvalidHookError; } const HooksDispatcherOnMount: Dispatcher = { @@ -1891,6 +1930,7 @@ const HooksDispatcherOnMount: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMount: Dispatcher).useRefresh = mountRefresh; } const HooksDispatcherOnUpdate: Dispatcher = { @@ -1915,6 +1955,7 @@ const HooksDispatcherOnUpdate: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnUpdate: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnUpdate: Dispatcher).useRefresh = updateRefresh; } const HooksDispatcherOnRerender: Dispatcher = { @@ -1939,6 +1980,7 @@ const HooksDispatcherOnRerender: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnRerender: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnRerender: Dispatcher).useRefresh = updateRefresh; } let HooksDispatcherOnMountInDEV: Dispatcher | null = null; @@ -2096,6 +2138,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; + 
(HooksDispatcherOnMountInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + mountHookTypesDev(); + return mountRefresh(); + }; } HooksDispatcherOnMountWithHookTypesInDEV = { @@ -2221,6 +2268,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return mountRefresh(); + }; } HooksDispatcherOnUpdateInDEV = { @@ -2346,6 +2398,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnUpdateInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } HooksDispatcherOnRerenderInDEV = { @@ -2472,6 +2529,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnRerenderInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } InvalidNestedHooksDispatcherOnMountInDEV = { @@ -2612,6 +2674,11 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return mountRefresh(); + }; } InvalidNestedHooksDispatcherOnUpdateInDEV = { @@ -2752,6 +2819,11 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); 
+ return updateRefresh(); + }; } InvalidNestedHooksDispatcherOnRerenderInDEV = { @@ -2893,5 +2965,10 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } } diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 250937d744be6..0643e0955c79f 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -19,6 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; +import type {Cache} from './ReactFiberCacheComponent'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -1708,6 +1709,43 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { return id; } +function mountRefresh() { + const cache: Cache | null = readContext(CacheContext); + return mountCallback(refreshCache.bind(null, cache), [cache]); +} + +function updateRefresh() { + const cache: Cache | null = readContext(CacheContext); + return updateCallback(refreshCache.bind(null, cache), [cache]); +} + +function refreshCache(cache: Cache | null) { + if (cache !== null) { + const providers = cache.providers; + if (providers !== null) { + providers.forEach(scheduleCacheRefresh); + } + } else { + // TODO: Warn if cache is null? + } +} + +function scheduleCacheRefresh(cacheComponentFiber: Fiber) { + // Inlined startTransition + // TODO: Maybe we shouldn't automatically give this transition priority. Are + // there valid use cases for a high-pri refresh? 
Like if the content is + // super stale and you want to immediately hide it. + const prevTransition = ReactCurrentBatchConfig.transition; + ReactCurrentBatchConfig.transition = 1; + try { + const eventTime = requestEventTime(); + const lane = requestUpdateLane(cacheComponentFiber); + scheduleUpdateOnFiber(cacheComponentFiber, lane, eventTime); + } finally { + ReactCurrentBatchConfig.transition = prevTransition; + } +} + function dispatchAction( fiber: Fiber, queue: UpdateQueue, @@ -1819,7 +1857,7 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - const cache = readContext(CacheContext); + const cache: Cache | null = readContext(CacheContext); invariant( cache !== null, 'Tried to fetch data, but no cache was found. To fix, wrap your ' + @@ -1867,6 +1905,7 @@ export const ContextOnlyDispatcher: Dispatcher = { }; if (enableCache) { (ContextOnlyDispatcher: Dispatcher).getCacheForType = getCacheForType; + (ContextOnlyDispatcher: Dispatcher).useRefresh = throwInvalidHookError; } const HooksDispatcherOnMount: Dispatcher = { @@ -1891,6 +1930,7 @@ const HooksDispatcherOnMount: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMount: Dispatcher).useRefresh = mountRefresh; } const HooksDispatcherOnUpdate: Dispatcher = { @@ -1915,6 +1955,7 @@ const HooksDispatcherOnUpdate: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnUpdate: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnUpdate: Dispatcher).useRefresh = updateRefresh; } const HooksDispatcherOnRerender: Dispatcher = { @@ -1939,6 +1980,7 @@ const HooksDispatcherOnRerender: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnRerender: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnRerender: Dispatcher).useRefresh = updateRefresh; } let HooksDispatcherOnMountInDEV: Dispatcher | null = null; @@ -2096,6 +2138,11 @@ if (__DEV__) { }; if (enableCache) { 
(HooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMountInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + mountHookTypesDev(); + return mountRefresh(); + }; } HooksDispatcherOnMountWithHookTypesInDEV = { @@ -2221,6 +2268,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return mountRefresh(); + }; } HooksDispatcherOnUpdateInDEV = { @@ -2346,6 +2398,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnUpdateInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } HooksDispatcherOnRerenderInDEV = { @@ -2472,6 +2529,11 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; + (HooksDispatcherOnRerenderInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } InvalidNestedHooksDispatcherOnMountInDEV = { @@ -2612,6 +2674,11 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return mountRefresh(); + }; } InvalidNestedHooksDispatcherOnUpdateInDEV = { @@ -2752,6 +2819,11 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useRefresh = 
function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } InvalidNestedHooksDispatcherOnRerenderInDEV = { @@ -2893,5 +2965,10 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; + (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useRefresh = function useRefresh() { + currentHookNameInDev = 'useRefresh'; + updateHookTypesDev(); + return updateRefresh(); + }; } } diff --git a/packages/react-reconciler/src/ReactFiberNewContext.new.js b/packages/react-reconciler/src/ReactFiberNewContext.new.js index 584e2ff43b5cc..7bb83518eb8fd 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.new.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.new.js @@ -19,6 +19,7 @@ import { ContextProvider, ClassComponent, DehydratedFragment, + CacheComponent, } from './ReactWorkTags'; import { NoLanes, @@ -33,7 +34,11 @@ import invariant from 'shared/invariant'; import is from 'shared/objectIs'; import {createUpdate, enqueueUpdate, ForceUpdate} from './ReactUpdateQueue.new'; import {markWorkInProgressReceivedUpdate} from './ReactFiberBeginWork.new'; -import {enableSuspenseServerRenderer} from 'shared/ReactFeatureFlags'; +import {CacheContext} from './ReactFiberCacheComponent'; +import { + enableSuspenseServerRenderer, + enableCache, +} from 'shared/ReactFeatureFlags'; const valueCursor: StackCursor = createCursor(null); @@ -296,6 +301,114 @@ export function propagateContextChange( } } +export function propagateCacheRefresh( + workInProgress: Fiber, + renderLanes: Lanes, +): void { + if (!enableCache) { + return; + } + + let fiber = workInProgress.child; + if (fiber !== null) { + // Set the return pointer of the child to the work-in-progress fiber. + fiber.return = workInProgress; + } + while (fiber !== null) { + let nextFiber; + + // Visit this fiber. 
+ const list = fiber.dependencies; + if (list !== null) { + nextFiber = fiber.child; + + let dependency = list.firstContext; + while (dependency !== null) { + // Check if the context matches. + if (dependency.context === CacheContext) { + // Match! Schedule an update on this fiber. + + if (fiber.tag === ClassComponent) { + // Schedule a force update on the work-in-progress. + const update = createUpdate( + NoTimestamp, + pickArbitraryLane(renderLanes), + ); + update.tag = ForceUpdate; + // TODO: Because we don't have a work-in-progress, this will add the + // update to the current fiber, too, which means it will persist even if + // this render is thrown away. Since it's a race condition, not sure it's + // worth fixing. + enqueueUpdate(fiber, update); + } + fiber.lanes = mergeLanes(fiber.lanes, renderLanes); + const alternate = fiber.alternate; + if (alternate !== null) { + alternate.lanes = mergeLanes(alternate.lanes, renderLanes); + } + scheduleWorkOnParentPath(fiber.return, renderLanes); + + // Mark the updated lanes on the list, too. + list.lanes = mergeLanes(list.lanes, renderLanes); + + // Since we already found a match, we can stop traversing the + // dependency list. + break; + } + dependency = dependency.next; + } + } else if (fiber.tag === CacheComponent) { + const nestedCache = fiber.memoizedState; + if (nestedCache !== null) { + // Found a nested cache boundary with its own cache. The parent refresh + // should override it. Mark it with an update. + fiber.lanes = mergeLanes(fiber.lanes, renderLanes); + const alternate = fiber.alternate; + if (alternate !== null) { + alternate.lanes = mergeLanes(alternate.lanes, renderLanes); + } + scheduleWorkOnParentPath(fiber.return, renderLanes); + } + + // Unlike propagateContextChange, we don't stop traversing when we reach a + // nested cache boundary; refreshes propagate through the entire subtree. + // The refreshed cache will override nested caches. 
+ // + // We also don't need to do anything special with DehydratedFragments, + // since the Fast Boot renderer is not allowed to fetch data. + nextFiber = fiber.child; + } else { + // Traverse down. + nextFiber = fiber.child; + } + + if (nextFiber !== null) { + // Set the return pointer of the child to the work-in-progress fiber. + nextFiber.return = fiber; + } else { + // No child. Traverse to next sibling. + nextFiber = fiber; + while (nextFiber !== null) { + if (nextFiber === workInProgress) { + // We're back to the root of this subtree. Exit. + nextFiber = null; + break; + } + const sibling = nextFiber.sibling; + if (sibling !== null) { + // Set the return pointer of the sibling to the work-in-progress fiber. + sibling.return = nextFiber.return; + nextFiber = sibling; + break; + } + // No more siblings. Traverse up. + nextFiber = nextFiber.return; + } + } + fiber = nextFiber; + } +} + export function prepareToReadContext( workInProgress: Fiber, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberNewContext.old.js b/packages/react-reconciler/src/ReactFiberNewContext.old.js index da4859f0be800..6901a1e28ae42 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.old.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.old.js @@ -19,6 +19,7 @@ import { ContextProvider, ClassComponent, DehydratedFragment, + CacheComponent, } from './ReactWorkTags'; import { NoLanes, @@ -33,7 +34,11 @@ import invariant from 'shared/invariant'; import is from 'shared/objectIs'; import {createUpdate, enqueueUpdate, ForceUpdate} from './ReactUpdateQueue.old'; import {markWorkInProgressReceivedUpdate} from './ReactFiberBeginWork.old'; -import {enableSuspenseServerRenderer} from 'shared/ReactFeatureFlags'; +import {CacheContext} from './ReactFiberCacheComponent'; +import { + enableSuspenseServerRenderer, + enableCache, +} from 'shared/ReactFeatureFlags'; const valueCursor: StackCursor = createCursor(null); @@ -296,6 +301,114 @@ export function 
propagateContextChange( } } +export function propagateCacheRefresh( + workInProgress: Fiber, + renderLanes: Lanes, +): void { + if (!enableCache) { + return; + } + + let fiber = workInProgress.child; + if (fiber !== null) { + // Set the return pointer of the child to the work-in-progress fiber. + fiber.return = workInProgress; + } + while (fiber !== null) { + let nextFiber; + + // Visit this fiber. + const list = fiber.dependencies; + if (list !== null) { + nextFiber = fiber.child; + + let dependency = list.firstContext; + while (dependency !== null) { + // Check if the context matches. + if (dependency.context === CacheContext) { + // Match! Schedule an update on this fiber. + + if (fiber.tag === ClassComponent) { + // Schedule a force update on the work-in-progress. + const update = createUpdate( + NoTimestamp, + pickArbitraryLane(renderLanes), + ); + update.tag = ForceUpdate; + // TODO: Because we don't have a work-in-progress, this will add the + // update to the current fiber, too, which means it will persist even if + // this render is thrown away. Since it's a race condition, not sure it's + // worth fixing. + enqueueUpdate(fiber, update); + } + fiber.lanes = mergeLanes(fiber.lanes, renderLanes); + const alternate = fiber.alternate; + if (alternate !== null) { + alternate.lanes = mergeLanes(alternate.lanes, renderLanes); + } + scheduleWorkOnParentPath(fiber.return, renderLanes); + + // Mark the updated lanes on the list, too. + list.lanes = mergeLanes(list.lanes, renderLanes); + + // Since we already found a match, we can stop traversing the + // dependency list. + break; + } + dependency = dependency.next; + } + } else if (fiber.tag === CacheComponent) { + const nestedCache = fiber.memoizedState; + if (nestedCache !== null) { + // Found a nested cache boundary with its own cache. The parent refresh + // should override it. Mark it with an update. 
+ fiber.lanes = mergeLanes(fiber.lanes, renderLanes); + const alternate = fiber.alternate; + if (alternate !== null) { + alternate.lanes = mergeLanes(alternate.lanes, renderLanes); + } + scheduleWorkOnParentPath(fiber.return, renderLanes); + } + + // Unlike propagateContextChange, we don't stop traversing when we reach a + // nested cache boundary; refreshes propagate through the entire subtree. + // The refreshed cache will override nested caches. + // + // We also don't need to do anything special with DehydratedFragments, + // since the Fast Boot renderer is not allowed to fetch data. + nextFiber = fiber.child; + } else { + // Traverse down. + nextFiber = fiber.child; + } + + if (nextFiber !== null) { + // Set the return pointer of the child to the work-in-progress fiber. + nextFiber.return = fiber; + } else { + // No child. Traverse to next sibling. + nextFiber = fiber; + while (nextFiber !== null) { + if (nextFiber === workInProgress) { + // We're back to the root of this subtree. Exit. + nextFiber = null; + break; + } + const sibling = nextFiber.sibling; + if (sibling !== null) { + // Set the return pointer of the sibling to the work-in-progress fiber. + sibling.return = nextFiber.return; + nextFiber = sibling; + break; + } + // No more siblings. Traverse up. 
+ nextFiber = nextFiber.return; + } + } + fiber = nextFiber; + } +} + export function prepareToReadContext( workInProgress: Fiber, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index a394e385e1af9..eb480177eb570 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -133,7 +133,10 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; case CacheComponent: if (enableCache) { - popProvider(CacheContext, workInProgress); + const ownCache: Cache | null = workInProgress.memoizedState; + if (ownCache !== null) { + popProvider(CacheContext, workInProgress); + } } return null; default: @@ -179,7 +182,10 @@ function unwindInterruptedWork(interruptedWork: Fiber) { break; case CacheComponent: if (enableCache) { - popProvider(CacheContext, interruptedWork); + const ownCache: Cache | null = interruptedWork.memoizedState; + if (ownCache !== null) { + popProvider(CacheContext, interruptedWork); + } } break; default: diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 11ec5fb1dc720..6827ac7ccf0ac 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -133,7 +133,10 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; case CacheComponent: if (enableCache) { - popProvider(CacheContext, workInProgress); + const ownCache: Cache | null = workInProgress.memoizedState; + if (ownCache !== null) { + popProvider(CacheContext, workInProgress); + } } return null; default: @@ -179,7 +182,10 @@ function unwindInterruptedWork(interruptedWork: Fiber) { break; case CacheComponent: if (enableCache) { - popProvider(CacheContext, interruptedWork); + const ownCache: Cache | null = 
interruptedWork.memoizedState; + if (ownCache !== null) { + popProvider(CacheContext, interruptedWork); + } } break; default: diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index b6651e92671ae..d73e9e5e74b1f 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -42,7 +42,8 @@ export type HookType = | 'useDeferredValue' | 'useTransition' | 'useMutableSource' - | 'useOpaqueIdentifier'; + | 'useOpaqueIdentifier' + | 'useRefresh'; export type ReactPriorityLevel = 99 | 98 | 97 | 96 | 95 | 90; @@ -318,6 +319,7 @@ export type Dispatcher = {| subscribe: MutableSourceSubscribeFn, ): Snapshot, useOpaqueIdentifier(): any, + useRefresh?: () => () => void, unstable_isNewReconciler?: boolean, |}; diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index bc133da57f772..6f00ba96c0e57 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -4,6 +4,8 @@ let Cache; let getCacheForType; let Scheduler; let Suspense; +let useRefresh; + let textService; let textServiceVersion; @@ -17,6 +19,7 @@ describe('ReactCache', () => { Scheduler = require('scheduler'); Suspense = React.Suspense; getCacheForType = React.unstable_getCacheForType; + useRefresh = React.unstable_useRefresh; // Represents some data service that returns text. It likely has additional // caching layers, like a CDN or the local browser cache. 
It can be mutated @@ -356,4 +359,191 @@ describe('ReactCache', () => { , ); }); + + // @gate experimental + test('refresh a cache', async () => { + let refresh; + function App() { + refresh = useRefresh(); + return ; + } + + // Mount initial data + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + + }> + + + , + ); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v1]']); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Mutate the text service, then refresh for new data. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + refresh(); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('A [v1]'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + // Note that the version has updated + expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2]'); + }); + + // @gate experimental + test('refreshing a parent cache also refreshes its children', async () => { + let refreshShell; + function RefreshShell() { + refreshShell = useRefresh(); + return null; + } + + function App({showMore}) { + return ( + + + }> + + + {showMore ? ( + + }> + + + + ) : null} + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'A [v1]', + ]); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Simulate a server mutation. + mutateRemoteTextService(); + + // Add a new cache boundary + await ReactNoop.act(async () => { + await resolveText('A'); + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'A [v1]', + // New tree should load fresh data. + 'Cache miss! 
[A]', + 'Loading...', + 'A [v2]', + ]); + expect(root).toMatchRenderedOutput('A [v1]A [v2]'); + + // Now refresh the shell. This should also cause the "Show More" contents to + // refresh, since its cache is nested inside the outer one. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + refreshShell(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'Loading...', + ]); + expect(root).toMatchRenderedOutput('A [v1]A [v2]'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v3]', 'A [v3]']); + expect(root).toMatchRenderedOutput('A [v3]A [v3]'); + }); + + // @gate experimental + test( + 'refreshing a cache boundary also refreshes the other boundaries ' + + 'that mounted at the same time (i.e. the ones that share the same cache)', + async () => { + let refreshFirstBoundary; + function RefreshFirstBoundary() { + refreshFirstBoundary = useRefresh(); + return null; + } + + function App({text}) { + return ( + <> + + }> + + + + + + }> + + + + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + // Even though there are two new trees, they should share the same + // data cache. So there should be only a single cache miss for A. + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Loading...', + 'Loading...', + ]); + expect(root).toMatchRenderedOutput('Loading...Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v1]', 'A [v1]']); + expect(root).toMatchRenderedOutput('A [v1]A [v1]'); + + // Refresh the first boundary. It should also refresh the second boundary, + // since they appeared at the same time. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + await refreshFirstBoundary(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! 
[A]', + 'Loading...', + 'Loading...', + ]); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v2]', 'A [v2]']); + expect(root).toMatchRenderedOutput('A [v2]A [v2]'); + }, + ); }); diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index d6717a0830760..0f8170852f035 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -758,6 +758,13 @@ function unsupportedHook(): void { invariant(false, 'This Hook is not supported in Server Components.'); } +function unsupportedRefresh(): void { + invariant( + currentCache, + 'Refreshing the cache is not supported in Server Components.', + ); +} + let currentCache: Map | null = null; const Dispatcher: DispatcherType = { @@ -797,4 +804,7 @@ const Dispatcher: DispatcherType = { useEffect: (unsupportedHook: any), useOpaqueIdentifier: (unsupportedHook: any), useMutableSource: (unsupportedHook: any), + useRefresh(): () => void { + return unsupportedRefresh; + }, }; diff --git a/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js b/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js index a58c6cae824c0..37ecb3a1c3c6a 100644 --- a/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js +++ b/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js @@ -44,6 +44,7 @@ export function waitForSuspense(fn: () => T): Promise { useTransition: unsupported, useOpaqueIdentifier: unsupported, useMutableSource: unsupported, + useRefresh: unsupported, }; // Not using async/await because we don't compile it. 
return new Promise((resolve, reject) => { diff --git a/packages/react/index.classic.fb.js b/packages/react/index.classic.fb.js index 4beccf1dfbca0..79bb0696ec1e0 100644 --- a/packages/react/index.classic.fb.js +++ b/packages/react/index.classic.fb.js @@ -52,6 +52,7 @@ export { SuspenseList as unstable_SuspenseList, unstable_getCacheForType, unstable_Cache, + unstable_useRefresh, // enableScopeAPI unstable_Scope, unstable_useOpaqueIdentifier, diff --git a/packages/react/index.experimental.js b/packages/react/index.experimental.js index 8c4d97eb8e746..43c3f08eb000d 100644 --- a/packages/react/index.experimental.js +++ b/packages/react/index.experimental.js @@ -47,6 +47,7 @@ export { unstable_useOpaqueIdentifier, unstable_getCacheForType, unstable_Cache, + unstable_useRefresh, // enableDebugTracing unstable_DebugTracingMode, } from './src/React'; diff --git a/packages/react/index.js b/packages/react/index.js index 7f0173bd69e1c..fb074650a0852 100644 --- a/packages/react/index.js +++ b/packages/react/index.js @@ -84,4 +84,5 @@ export { unstable_useOpaqueIdentifier, unstable_getCacheForType, unstable_Cache, + unstable_useRefresh, } from './src/React'; diff --git a/packages/react/index.modern.fb.js b/packages/react/index.modern.fb.js index 982388dd2d645..1ab2bdd13dbe0 100644 --- a/packages/react/index.modern.fb.js +++ b/packages/react/index.modern.fb.js @@ -51,6 +51,7 @@ export { SuspenseList as unstable_SuspenseList, unstable_getCacheForType, unstable_Cache, + unstable_useRefresh, // enableScopeAPI unstable_Scope, unstable_useOpaqueIdentifier, diff --git a/packages/react/src/React.js b/packages/react/src/React.js index 3382743b8ccd0..9567ff7dc3b22 100644 --- a/packages/react/src/React.js +++ b/packages/react/src/React.js @@ -49,6 +49,7 @@ import { useTransition, useDeferredValue, useOpaqueIdentifier, + useRefresh, } from './ReactHooks'; import { createElementWithValidation, @@ -113,6 +114,7 @@ export { REACT_SUSPENSE_LIST_TYPE as SuspenseList, REACT_LEGACY_HIDDEN_TYPE 
as unstable_LegacyHidden, getCacheForType as unstable_getCacheForType, + useRefresh as unstable_useRefresh, REACT_CACHE_TYPE as unstable_Cache, // enableFundamentalAPI createFundamental as unstable_createFundamental, diff --git a/packages/react/src/ReactHooks.js b/packages/react/src/ReactHooks.js index 1020efa74cb96..0f34b6405a29b 100644 --- a/packages/react/src/ReactHooks.js +++ b/packages/react/src/ReactHooks.js @@ -180,3 +180,9 @@ export function useMutableSource( const dispatcher = resolveDispatcher(); return dispatcher.useMutableSource(source, getSnapshot, subscribe); } + +export function useRefresh(): () => void { + const dispatcher = resolveDispatcher(); + // $FlowFixMe This is unstable, thus optional + return dispatcher.useRefresh(); +} diff --git a/packages/react/unstable-shared-subset.experimental.js b/packages/react/unstable-shared-subset.experimental.js index 890066957e383..bd2e1cd77c25d 100644 --- a/packages/react/unstable-shared-subset.experimental.js +++ b/packages/react/unstable-shared-subset.experimental.js @@ -33,6 +33,7 @@ export { SuspenseList as unstable_SuspenseList, unstable_useOpaqueIdentifier, unstable_getCacheForType, + unstable_useRefresh, // enableDebugTracing unstable_DebugTracingMode, } from './src/React'; diff --git a/scripts/error-codes/codes.json b/scripts/error-codes/codes.json index 6941f92df2770..028fca5bdc513 100644 --- a/scripts/error-codes/codes.json +++ b/scripts/error-codes/codes.json @@ -372,5 +372,6 @@ "381": "This feature is not supported by ReactSuspenseTestUtils.", "382": "This query has received more parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", "383": "This query has received fewer parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", - "384": "Tried to fetch data, but no cache was found. To fix, wrap your component in a boundary. 
It doesn't need to be a direct parent; it can be anywhere in the ancestor path" + "384": "Tried to fetch data, but no cache was found. To fix, wrap your component in a boundary. It doesn't need to be a direct parent; it can be anywhere in the ancestor path", + "385": "Refreshing the cache is not supported in Server Components." } From eabf2554be6e276f6915d084172531ca9e2b3233 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Mon, 14 Dec 2020 09:58:29 -0600 Subject: [PATCH 06/30] Refresh with seeded data Usually, when performing a server mutation, the response includes an updated version of the mutated data. This avoids an extra roundtrip, and because of eventual consistency, it also guarantees that we reload with the freshest possible data. If we didn't seed with the mutation response, and instead refetched with a separate GET request, we might receive stale data as the mutation propagates through the data layer. Not all refreshes are the result of a mutation, though, so the seed is not required. 
--- .../src/server/ReactPartialRendererHooks.js | 2 +- .../src/ReactFiberHooks.new.js | 27 +++++++++-- .../src/ReactFiberHooks.old.js | 27 +++++++++-- .../src/ReactFiberWorkLoop.new.js | 4 +- .../src/ReactFiberWorkLoop.old.js | 4 +- .../src/ReactInternalTypes.js | 2 +- .../src/__tests__/ReactCache-test.js | 48 +++++++++++++++++++ .../react-server/src/ReactFlightServer.js | 2 +- packages/react/src/ReactHooks.js | 2 +- 9 files changed, 104 insertions(+), 14 deletions(-) diff --git a/packages/react-dom/src/server/ReactPartialRendererHooks.js b/packages/react-dom/src/server/ReactPartialRendererHooks.js index 7759cc62c83bd..087e81b0e7683 100644 --- a/packages/react-dom/src/server/ReactPartialRendererHooks.js +++ b/packages/react-dom/src/server/ReactPartialRendererHooks.js @@ -489,7 +489,7 @@ function useOpaqueIdentifier(): OpaqueIDType { ); } -function useRefresh(): () => void { +function useRefresh(): (?() => T, ?T) => void { invariant(false, 'Not implemented.'); } diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 04dfe81339ece..445ae05403419 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -45,6 +45,7 @@ import { setCurrentUpdateLanePriority, higherLanePriority, DefaultLanePriority, + transferCacheToSpawnedLane, } from './ReactFiberLane.new'; import {readContext} from './ReactFiberNewContext.new'; import { @@ -1719,28 +1720,46 @@ function updateRefresh() { return updateCallback(refreshCache.bind(null, cache), [cache]); } -function refreshCache(cache: Cache | null) { +function refreshCache(cache: Cache | null, seedKey: ?() => T, seedValue: T) { if (cache !== null) { const providers = cache.providers; if (providers !== null) { - providers.forEach(scheduleCacheRefresh); + let seededCache = null; + if (seedKey !== null && seedKey !== undefined) { + // TODO: Warn if wrong type + seededCache = { + providers: null, + 
data: new Map([[seedKey, seedValue]]), + }; + } + providers.forEach(provider => + scheduleCacheRefresh(provider, seededCache), + ); } } else { // TODO: Warn if cache is null? } } -function scheduleCacheRefresh(cacheComponentFiber: Fiber) { +function scheduleCacheRefresh( + cacheComponentFiber: Fiber, + seededCache: Cache | null, +) { // Inlined startTransition // TODO: Maybe we shouldn't automatically give this transition priority. Are // there valid use cases for a high-pri refresh? Like if the content is // super stale and you want to immediately hide it. const prevTransition = ReactCurrentBatchConfig.transition; ReactCurrentBatchConfig.transition = 1; + // TODO: Do we really need the try/finally? I don't think any of these + // functions would ever throw unless there's an internal error. try { const eventTime = requestEventTime(); const lane = requestUpdateLane(cacheComponentFiber); - scheduleUpdateOnFiber(cacheComponentFiber, lane, eventTime); + const root = scheduleUpdateOnFiber(cacheComponentFiber, lane, eventTime); + if (seededCache !== null && root !== null) { + transferCacheToSpawnedLane(root, seededCache, lane); + } } finally { ReactCurrentBatchConfig.transition = prevTransition; } diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 0643e0955c79f..8680a44ae8034 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -45,6 +45,7 @@ import { setCurrentUpdateLanePriority, higherLanePriority, DefaultLanePriority, + transferCacheToSpawnedLane, } from './ReactFiberLane.old'; import {readContext} from './ReactFiberNewContext.old'; import { @@ -1719,28 +1720,46 @@ function updateRefresh() { return updateCallback(refreshCache.bind(null, cache), [cache]); } -function refreshCache(cache: Cache | null) { +function refreshCache(cache: Cache | null, seedKey: ?() => T, seedValue: T) { if (cache !== null) { const providers = 
cache.providers; if (providers !== null) { - providers.forEach(scheduleCacheRefresh); + let seededCache = null; + if (seedKey !== null && seedKey !== undefined) { + // TODO: Warn if wrong type + seededCache = { + providers: null, + data: new Map([[seedKey, seedValue]]), + }; + } + providers.forEach(provider => + scheduleCacheRefresh(provider, seededCache), + ); } } else { // TODO: Warn if cache is null? } } -function scheduleCacheRefresh(cacheComponentFiber: Fiber) { +function scheduleCacheRefresh( + cacheComponentFiber: Fiber, + seededCache: Cache | null, +) { // Inlined startTransition // TODO: Maybe we shouldn't automatically give this transition priority. Are // there valid use cases for a high-pri refresh? Like if the content is // super stale and you want to immediately hide it. const prevTransition = ReactCurrentBatchConfig.transition; ReactCurrentBatchConfig.transition = 1; + // TODO: Do we really need the try/finally? I don't think any of these + // functions would ever throw unless there's an internal error. 
try { const eventTime = requestEventTime(); const lane = requestUpdateLane(cacheComponentFiber); - scheduleUpdateOnFiber(cacheComponentFiber, lane, eventTime); + const root = scheduleUpdateOnFiber(cacheComponentFiber, lane, eventTime); + if (seededCache !== null && root !== null) { + transferCacheToSpawnedLane(root, seededCache, lane); + } } finally { ReactCurrentBatchConfig.transition = prevTransition; } diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js index 538d85f4005b6..91a127b2431b7 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js @@ -534,7 +534,7 @@ export function scheduleUpdateOnFiber( fiber: Fiber, lane: Lane, eventTime: number, -) { +): FiberRoot | null { checkForNestedUpdates(); warnAboutRenderPhaseUpdatesInDEV(fiber); @@ -656,6 +656,8 @@ export function scheduleUpdateOnFiber( // the same root, then it's not a huge deal, we just might batch more stuff // together more than necessary. mostRecentlyUpdatedRoot = root; + + return root; } // This is split into a separate function so we can mark a fiber with pending diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js index 4235b6c7e469d..ce6066fd15582 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js @@ -534,7 +534,7 @@ export function scheduleUpdateOnFiber( fiber: Fiber, lane: Lane, eventTime: number, -) { +): FiberRoot | null { checkForNestedUpdates(); warnAboutRenderPhaseUpdatesInDEV(fiber); @@ -656,6 +656,8 @@ export function scheduleUpdateOnFiber( // the same root, then it's not a huge deal, we just might batch more stuff // together more than necessary. 
mostRecentlyUpdatedRoot = root; + + return root; } // This is split into a separate function so we can mark a fiber with pending diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index d73e9e5e74b1f..ff04c72d6c762 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -319,7 +319,7 @@ export type Dispatcher = {| subscribe: MutableSourceSubscribeFn, ): Snapshot, useOpaqueIdentifier(): any, - useRefresh?: () => () => void, + useRefresh?: () => (?() => T, ?T) => void, unstable_isNewReconciler?: boolean, |}; diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index 6f00ba96c0e57..a933fda928d8c 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -404,6 +404,54 @@ describe('ReactCache', () => { expect(root).toMatchRenderedOutput('A [v2]'); }); + // @gate experimental + test('refresh a cache with seed data', async () => { + let refresh; + function App() { + refresh = useRefresh(); + return ; + } + + // Mount initial data + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + + }> + + + , + ); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v1]']); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Mutate the text service, then refresh for new data. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + // Refresh the cache with seeded data, like you would receive from a + // server mutation. + // TODO: Seeding multiple typed caches. 
Should work by calling `refresh` + // multiple times with different key/value pairs + const seededCache = new Map(); + seededCache.set('A', { + ping: null, + status: 'resolved', + value: textServiceVersion, + }); + refresh(createTextCache, seededCache); + }); + // The root should re-render without a cache miss. + expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2]'); + }); + // @gate experimental test('refreshing a parent cache also refreshes its children', async () => { let refreshShell; diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index 0f8170852f035..acc446dd14df3 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -804,7 +804,7 @@ const Dispatcher: DispatcherType = { useEffect: (unsupportedHook: any), useOpaqueIdentifier: (unsupportedHook: any), useMutableSource: (unsupportedHook: any), - useRefresh(): () => void { + useRefresh(): (?() => T, ?T) => void { return unsupportedRefresh; }, }; diff --git a/packages/react/src/ReactHooks.js b/packages/react/src/ReactHooks.js index 0f34b6405a29b..9a087af4a1f1d 100644 --- a/packages/react/src/ReactHooks.js +++ b/packages/react/src/ReactHooks.js @@ -181,7 +181,7 @@ export function useMutableSource( return dispatcher.useMutableSource(source, getSnapshot, subscribe); } -export function useRefresh(): () => void { +export function useRefresh(): (?() => T, ?T) => void { const dispatcher = resolveDispatcher(); // $FlowFixMe This is unstable, thus optional return dispatcher.useRefresh(); From c40683ab19460d73adbb56e9c8e1b8f29d8011d2 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Mon, 14 Dec 2020 17:11:02 -0600 Subject: [PATCH 07/30] Refreshes should not affect "sibling" boundaries I had thought we decided that refreshing a boundary would also refresh all the content that is currently consistent (i.e. shared the same underlying cache) with it, but I was wrong. 
Refreshing should only affect the nearest tree and its descendents. "Sibling" content will intentionally be inconsistent after the refresh. This allows me to drop the subscription stuff, which is nice. --- .../src/ReactFiberBeginWork.new.js | 114 +++++++++--------- .../src/ReactFiberBeginWork.old.js | 114 +++++++++--------- .../src/ReactFiberCacheComponent.js | 10 +- .../src/ReactFiberCommitWork.new.js | 26 +--- .../src/ReactFiberCommitWork.old.js | 25 +--- .../src/ReactFiberCompleteWork.new.js | 27 +---- .../src/ReactFiberCompleteWork.old.js | 27 +---- .../src/ReactFiberHooks.new.js | 93 +++++++------- .../src/ReactFiberHooks.old.js | 93 +++++++------- .../src/ReactFiberLane.new.js | 5 +- .../src/ReactFiberLane.old.js | 5 +- .../src/ReactFiberUnwindWork.new.js | 11 +- .../src/ReactFiberUnwindWork.old.js | 11 +- .../src/__tests__/ReactCache-test.js | 16 +-- 14 files changed, 242 insertions(+), 335 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index eeb513c4218b7..9295a54947cdd 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -23,7 +23,7 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import checkPropTypes from 'shared/checkPropTypes'; @@ -667,20 +667,41 @@ function updateCacheComponent( // Read directly from the context. We don't set up a context dependency // because the propagation function automatically includes CacheComponents in // its search. - const parentCache: Cache | null = isPrimaryRenderer + const parentCacheInstance: CacheInstance | null = isPrimaryRenderer ? CacheContext._currentValue : CacheContext._currentValue2; - let ownCache: Cache | null = null; - // TODO: Fast path if parent has a new cache. 
Merely an optimization. Might - // not be worth it. - if (false) { - // The parent boundary also has a new cache. We're either inside a new tree, - // or there was a refresh. In both cases, we should use the parent cache. - ownCache = null; + let ownCacheInstance: CacheInstance | null = null; + if (current === null) { + // This is a newly mounted component. Request a fresh cache. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + const freshCache = requestFreshCache(root, renderLanes); + // This may be the same as the parent cache, like if the current render + // spawned from a previous render that already committed. Otherwise, this + // is the root of a cache consistency boundary. + if ( + parentCacheInstance === null || + freshCache !== parentCacheInstance.cache + ) { + ownCacheInstance = { + cache: freshCache, + provider: workInProgress, + }; + pushProvider(workInProgress, CacheContext, ownCacheInstance); + // No need to propagate the refresh, because this is a new tree. + } else { + // Use the parent cache + ownCacheInstance = null; + } } else { - if (current === null) { - // This is a newly mounted component. Request a fresh cache. + // This component already mounted. + if (includesSomeLane(renderLanes, updateLanes)) { + // A refresh was scheduled. const root = getWorkInProgressRoot(); invariant( root !== null, @@ -688,51 +709,31 @@ function updateCacheComponent( 'file an issue.', ); const freshCache = requestFreshCache(root, renderLanes); - // This may be the same as the parent cache, like if the current render - // spawned from a previous render that already committed. Otherwise, this - // is the root of a cache consistency boundary. - if (freshCache !== parentCache) { - ownCache = freshCache; - pushProvider(workInProgress, CacheContext, freshCache); - // No need to propagate the refresh, because this is a new tree. 
+ if ( + parentCacheInstance === null || + freshCache !== parentCacheInstance.cache + ) { + ownCacheInstance = { + cache: freshCache, + provider: workInProgress, + }; + pushProvider(workInProgress, CacheContext, ownCacheInstance); + // Refreshes propagate through the entire subtree. The refreshed cache + // will override nested caches. + propagateCacheRefresh(workInProgress, renderLanes); } else { - // Use the parent cache - ownCache = null; + // The fresh cache is the same as the parent cache. + ownCacheInstance = null; } } else { - // This component already mounted. - if (includesSomeLane(renderLanes, updateLanes)) { - // A refresh was scheduled. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - const freshCache = requestFreshCache(root, renderLanes); - if (freshCache !== parentCache) { - ownCache = freshCache; - pushProvider(workInProgress, CacheContext, freshCache); - // Refreshes propagate through the entire subtree. The refreshed cache - // will override nested caches. - propagateCacheRefresh(workInProgress, renderLanes); - } else { - // The fresh cache is the same as the parent cache. I think this - // unreachable in practice, because this means the parent cache was - // refreshed in the same render. So we would have already handled this - // in the earlier branch, where we check if the parent is new. - ownCache = null; - } + // Reuse the memoized cache. + const prevCacheInstance: CacheInstance | null = current.memoizedState; + if (prevCacheInstance !== null) { + ownCacheInstance = prevCacheInstance; + // There was no refresh, so no need to propagate to nested boundaries. + pushProvider(workInProgress, CacheContext, ownCacheInstance); } else { - // Reuse the memoized cache. 
- const prevCache: Cache | null = current.memoizedState; - if (prevCache !== null) { - ownCache = prevCache; - // There was no refresh, so no need to propagate to nested boundaries. - pushProvider(workInProgress, CacheContext, prevCache); - } else { - ownCache = null; - } + ownCacheInstance = null; } } } @@ -741,7 +742,7 @@ function updateCacheComponent( // point to a cache object. Otherwise, a null state indicates that this // CacheComponent inherits from a parent boundary. We can use this to infer // whether to push/pop the cache context. - workInProgress.memoizedState = ownCache; + workInProgress.memoizedState = ownCacheInstance; const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -3338,9 +3339,10 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const ownCache: Cache | null = workInProgress.memoizedState; - if (ownCache !== null) { - pushProvider(workInProgress, CacheContext, ownCache); + const ownCacheInstance: CacheInstance | null = + workInProgress.memoizedState; + if (ownCacheInstance !== null) { + pushProvider(workInProgress, CacheContext, ownCacheInstance); } } break; diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 76f75e06e319b..e71c6be870526 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -23,7 +23,7 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import checkPropTypes from 'shared/checkPropTypes'; @@ -667,20 +667,41 @@ function updateCacheComponent( // Read directly from the context. We don't set up a context dependency // because the propagation function automatically includes CacheComponents in // its search. 
- const parentCache: Cache | null = isPrimaryRenderer + const parentCacheInstance: CacheInstance | null = isPrimaryRenderer ? CacheContext._currentValue : CacheContext._currentValue2; - let ownCache: Cache | null = null; - // TODO: Fast path if parent has a new cache. Merely an optimization. Might - // not be worth it. - if (false) { - // The parent boundary also has a new cache. We're either inside a new tree, - // or there was a refresh. In both cases, we should use the parent cache. - ownCache = null; + let ownCacheInstance: CacheInstance | null = null; + if (current === null) { + // This is a newly mounted component. Request a fresh cache. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + const freshCache = requestFreshCache(root, renderLanes); + // This may be the same as the parent cache, like if the current render + // spawned from a previous render that already committed. Otherwise, this + // is the root of a cache consistency boundary. + if ( + parentCacheInstance === null || + freshCache !== parentCacheInstance.cache + ) { + ownCacheInstance = { + cache: freshCache, + provider: workInProgress, + }; + pushProvider(workInProgress, CacheContext, ownCacheInstance); + // No need to propagate the refresh, because this is a new tree. + } else { + // Use the parent cache + ownCacheInstance = null; + } } else { - if (current === null) { - // This is a newly mounted component. Request a fresh cache. + // This component already mounted. + if (includesSomeLane(renderLanes, updateLanes)) { + // A refresh was scheduled. const root = getWorkInProgressRoot(); invariant( root !== null, @@ -688,51 +709,31 @@ function updateCacheComponent( 'file an issue.', ); const freshCache = requestFreshCache(root, renderLanes); - // This may be the same as the parent cache, like if the current render - // spawned from a previous render that already committed. 
Otherwise, this - // is the root of a cache consistency boundary. - if (freshCache !== parentCache) { - ownCache = freshCache; - pushProvider(workInProgress, CacheContext, freshCache); - // No need to propagate the refresh, because this is a new tree. + if ( + parentCacheInstance === null || + freshCache !== parentCacheInstance.cache + ) { + ownCacheInstance = { + cache: freshCache, + provider: workInProgress, + }; + pushProvider(workInProgress, CacheContext, ownCacheInstance); + // Refreshes propagate through the entire subtree. The refreshed cache + // will override nested caches. + propagateCacheRefresh(workInProgress, renderLanes); } else { - // Use the parent cache - ownCache = null; + // The fresh cache is the same as the parent cache. + ownCacheInstance = null; } } else { - // This component already mounted. - if (includesSomeLane(renderLanes, updateLanes)) { - // A refresh was scheduled. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - const freshCache = requestFreshCache(root, renderLanes); - if (freshCache !== parentCache) { - ownCache = freshCache; - pushProvider(workInProgress, CacheContext, freshCache); - // Refreshes propagate through the entire subtree. The refreshed cache - // will override nested caches. - propagateCacheRefresh(workInProgress, renderLanes); - } else { - // The fresh cache is the same as the parent cache. I think this - // unreachable in practice, because this means the parent cache was - // refreshed in the same render. So we would have already handled this - // in the earlier branch, where we check if the parent is new. - ownCache = null; - } + // Reuse the memoized cache. + const prevCacheInstance: CacheInstance | null = current.memoizedState; + if (prevCacheInstance !== null) { + ownCacheInstance = prevCacheInstance; + // There was no refresh, so no need to propagate to nested boundaries. 
+ pushProvider(workInProgress, CacheContext, ownCacheInstance); } else { - // Reuse the memoized cache. - const prevCache: Cache | null = current.memoizedState; - if (prevCache !== null) { - ownCache = prevCache; - // There was no refresh, so no need to propagate to nested boundaries. - pushProvider(workInProgress, CacheContext, prevCache); - } else { - ownCache = null; - } + ownCacheInstance = null; } } } @@ -741,7 +742,7 @@ function updateCacheComponent( // point to a cache object. Otherwise, a null state indicates that this // CacheComponent inherits from a parent boundary. We can use this to infer // whether to push/pop the cache context. - workInProgress.memoizedState = ownCache; + workInProgress.memoizedState = ownCacheInstance; const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -3338,9 +3339,10 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const ownCache: Cache | null = workInProgress.memoizedState; - if (ownCache !== null) { - pushProvider(workInProgress, CacheContext, ownCache); + const ownCacheInstance: CacheInstance | null = + workInProgress.memoizedState; + if (ownCacheInstance !== null) { + pushProvider(workInProgress, CacheContext, ownCacheInstance); } } break; diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js index 36ae44e1a3f97..5b64e5c42fa27 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -11,12 +11,14 @@ import type {ReactContext} from 'shared/ReactTypes'; import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; -export type Cache = {| - providers: Set | null, - data: Map<() => mixed, mixed> | null, +export type Cache = Map<() => mixed, mixed>; + +export type CacheInstance = {| + cache: Cache | null, + provider: Fiber, |}; -export const CacheContext: ReactContext = { +export 
const CacheContext: ReactContext = { $$typeof: REACT_CONTEXT_TYPE, // We don't use Consumer/Provider for Cache components. So we'll cheat. Consumer: (null: any), diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.new.js b/packages/react-reconciler/src/ReactFiberCommitWork.new.js index 5dfdfff1c5165..d216edc4314db 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.new.js @@ -37,7 +37,6 @@ import { enableFundamentalAPI, enableSuspenseCallback, enableScopeAPI, - enableCache, } from 'shared/ReactFeatureFlags'; import { FunctionComponent, @@ -806,31 +805,8 @@ function commitLifeCycles( case ScopeComponent: case OffscreenComponent: case LegacyHiddenComponent: + case CacheComponent: return; - case CacheComponent: { - if (enableCache) { - if (current !== null) { - const oldCache: Cache | null = current.memoizedState; - if (oldCache !== null) { - const oldCacheProviders = oldCache.providers; - if (oldCacheProviders) { - oldCacheProviders.delete(current); - oldCacheProviders.delete(finishedWork); - } - } - } - const newCache: Cache | null = finishedWork.memoizedState; - if (newCache !== null) { - const newCacheProviders = newCache.providers; - if (newCacheProviders === null) { - newCache.providers = new Set([finishedWork]); - } else { - newCacheProviders.add(finishedWork); - } - } - } - return; - } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.old.js b/packages/react-reconciler/src/ReactFiberCommitWork.old.js index 4f8c631b5235f..7f40c5e4ad49c 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.old.js @@ -806,31 +806,8 @@ function commitLifeCycles( case ScopeComponent: case OffscreenComponent: case LegacyHiddenComponent: + case CacheComponent: return; - case CacheComponent: { - if (enableCache) { - if (current !== null) { - const oldCache: Cache | null = 
current.memoizedState; - if (oldCache !== null) { - const oldCacheProviders = oldCache.providers; - if (oldCacheProviders) { - oldCacheProviders.delete(current); - oldCacheProviders.delete(finishedWork); - } - } - } - const newCache: Cache | null = finishedWork.memoizedState; - if (newCache !== null) { - const newCacheProviders = newCache.providers; - if (newCacheProviders === null) { - newCache.providers = new Set([finishedWork]); - } else { - newCacheProviders.add(finishedWork); - } - } - } - return; - } } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 3aa000e4ca810..14dc69d01e658 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -28,6 +28,7 @@ import type { } from './ReactFiberSuspenseComponent.new'; import type {SuspenseContext} from './ReactFiberSuspenseContext.new'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; @@ -1488,27 +1489,11 @@ function completeWork( } case CacheComponent: { if (enableCache) { - // If the cache provided by this boundary has changed, schedule an - // effect to add this component to the cache's providers, and to remove - // it from the old cache. - // TODO: Schedule for Passive phase - const ownCache: Cache | null = workInProgress.memoizedState; - if (current === null) { - if (ownCache !== null) { - // This is a cache provider. - popProvider(CacheContext, workInProgress); - // Set up a refresh subscription. - workInProgress.flags |= Update; - } - } else { - if (ownCache !== null) { - // This is a cache provider. - popProvider(CacheContext, workInProgress); - } - if (ownCache !== current.memoizedState) { - // Cache changed. 
Create or update a refresh subscription. - workInProgress.flags |= Update; - } + const ownCacheInstance: CacheInstance | null = + workInProgress.memoizedState; + if (ownCacheInstance !== null) { + // This is a cache provider. + popProvider(CacheContext, workInProgress); } bubbleProperties(workInProgress); return null; diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index b20910ddc5ece..ae6bc84df5372 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -28,6 +28,7 @@ import type { } from './ReactFiberSuspenseComponent.old'; import type {SuspenseContext} from './ReactFiberSuspenseContext.old'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; @@ -1488,27 +1489,11 @@ function completeWork( } case CacheComponent: { if (enableCache) { - // If the cache provided by this boundary has changed, schedule an - // effect to add this component to the cache's providers, and to remove - // it from the old cache. - // TODO: Schedule for Passive phase - const ownCache: Cache | null = workInProgress.memoizedState; - if (current === null) { - if (ownCache !== null) { - // This is a cache provider. - popProvider(CacheContext, workInProgress); - // Set up a refresh subscription. - workInProgress.flags |= Update; - } - } else { - if (ownCache !== null) { - // This is a cache provider. - popProvider(CacheContext, workInProgress); - } - if (ownCache !== current.memoizedState) { - // Cache changed. Create or update a refresh subscription. - workInProgress.flags |= Update; - } + const ownCacheInstance: CacheInstance | null = + workInProgress.memoizedState; + if (ownCacheInstance !== null) { + // This is a cache provider. 
+ popProvider(CacheContext, workInProgress); } bubbleProperties(workInProgress); return null; diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 445ae05403419..c3a53c1eea35d 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -19,7 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -1711,57 +1711,48 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { } function mountRefresh() { - const cache: Cache | null = readContext(CacheContext); - return mountCallback(refreshCache.bind(null, cache), [cache]); + // TODO: CacheInstance should never be null. Update type. 
+ const cacheInstance: CacheInstance | null = readContext(CacheContext); + return mountCallback(refreshCache.bind(null, cacheInstance), [cacheInstance]); } function updateRefresh() { - const cache: Cache | null = readContext(CacheContext); - return updateCallback(refreshCache.bind(null, cache), [cache]); + const cacheInstance: CacheInstance | null = readContext(CacheContext); + return updateCallback(refreshCache.bind(null, cacheInstance), [ + cacheInstance, + ]); } -function refreshCache(cache: Cache | null, seedKey: ?() => T, seedValue: T) { - if (cache !== null) { - const providers = cache.providers; - if (providers !== null) { - let seededCache = null; - if (seedKey !== null && seedKey !== undefined) { +function refreshCache( + cacheInstance: CacheInstance | null, + seedKey: ?() => T, + seedValue: T, +) { + if (cacheInstance !== null) { + const provider = cacheInstance.provider; + + // Inlined startTransition + // TODO: Maybe we shouldn't automatically give this transition priority. Are + // there valid use cases for a high-pri refresh? Like if the content is + // super stale and you want to immediately hide it. + const prevTransition = ReactCurrentBatchConfig.transition; + ReactCurrentBatchConfig.transition = 1; + // TODO: Do we really need the try/finally? I don't think any of these + // functions would ever throw unless there's an internal error. 
+ try { + const eventTime = requestEventTime(); + const lane = requestUpdateLane(provider); + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + if (seedKey !== null && seedKey !== undefined && root !== null) { // TODO: Warn if wrong type - seededCache = { - providers: null, - data: new Map([[seedKey, seedValue]]), - }; + const seededCache = new Map([[seedKey, seedValue]]); + transferCacheToSpawnedLane(root, seededCache, lane); } - providers.forEach(provider => - scheduleCacheRefresh(provider, seededCache), - ); + } finally { + ReactCurrentBatchConfig.transition = prevTransition; } } else { - // TODO: Warn if cache is null? - } -} - -function scheduleCacheRefresh( - cacheComponentFiber: Fiber, - seededCache: Cache | null, -) { - // Inlined startTransition - // TODO: Maybe we shouldn't automatically give this transition priority. Are - // there valid use cases for a high-pri refresh? Like if the content is - // super stale and you want to immediately hide it. - const prevTransition = ReactCurrentBatchConfig.transition; - ReactCurrentBatchConfig.transition = 1; - // TODO: Do we really need the try/finally? I don't think any of these - // functions would ever throw unless there's an internal error. - try { - const eventTime = requestEventTime(); - const lane = requestUpdateLane(cacheComponentFiber); - const root = scheduleUpdateOnFiber(cacheComponentFiber, lane, eventTime); - if (seededCache !== null && root !== null) { - transferCacheToSpawnedLane(root, seededCache, lane); - } - } finally { - ReactCurrentBatchConfig.transition = prevTransition; + // TODO: CacheInstance should never be null. Update type. } } @@ -1876,27 +1867,27 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - const cache: Cache | null = readContext(CacheContext); + const cacheInstance: CacheInstance | null = readContext(CacheContext); invariant( - cache !== null, + cacheInstance !== null, 'Tried to fetch data, but no cache was found. 
To fix, wrap your ' + "component in a boundary. It doesn't need to be a direct " + 'parent; it can be anywhere in the ancestor path', ); - let cachesByType = cache.data; - if (cachesByType === null) { - cachesByType = cache.data = new Map(); + let cache = cacheInstance.cache; + if (cache === null) { + cache = cacheInstance.cache = new Map(); // TODO: Warn if constructor returns undefined? Creates ambiguity with // existence check above. (I don't want to use `has`. Two map lookups // instead of one? Silly.) const cacheForType = resourceType(); - cachesByType.set(resourceType, cacheForType); + cache.set(resourceType, cacheForType); return cacheForType; } else { - let cacheForType: T | void = (cachesByType.get(resourceType): any); + let cacheForType: T | void = (cache.get(resourceType): any); if (cacheForType === undefined) { cacheForType = resourceType(); - cachesByType.set(resourceType, cacheForType); + cache.set(resourceType, cacheForType); } return cacheForType; } diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 8680a44ae8034..76efd9171587c 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -19,7 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -1711,57 +1711,48 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { } function mountRefresh() { - const cache: Cache | null = readContext(CacheContext); - return mountCallback(refreshCache.bind(null, cache), [cache]); + // TODO: CacheInstance should never be null. Update type. 
+ const cacheInstance: CacheInstance | null = readContext(CacheContext); + return mountCallback(refreshCache.bind(null, cacheInstance), [cacheInstance]); } function updateRefresh() { - const cache: Cache | null = readContext(CacheContext); - return updateCallback(refreshCache.bind(null, cache), [cache]); + const cacheInstance: CacheInstance | null = readContext(CacheContext); + return updateCallback(refreshCache.bind(null, cacheInstance), [ + cacheInstance, + ]); } -function refreshCache(cache: Cache | null, seedKey: ?() => T, seedValue: T) { - if (cache !== null) { - const providers = cache.providers; - if (providers !== null) { - let seededCache = null; - if (seedKey !== null && seedKey !== undefined) { +function refreshCache( + cacheInstance: CacheInstance | null, + seedKey: ?() => T, + seedValue: T, +) { + if (cacheInstance !== null) { + const provider = cacheInstance.provider; + + // Inlined startTransition + // TODO: Maybe we shouldn't automatically give this transition priority. Are + // there valid use cases for a high-pri refresh? Like if the content is + // super stale and you want to immediately hide it. + const prevTransition = ReactCurrentBatchConfig.transition; + ReactCurrentBatchConfig.transition = 1; + // TODO: Do we really need the try/finally? I don't think any of these + // functions would ever throw unless there's an internal error. 
+ try { + const eventTime = requestEventTime(); + const lane = requestUpdateLane(provider); + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + if (seedKey !== null && seedKey !== undefined && root !== null) { // TODO: Warn if wrong type - seededCache = { - providers: null, - data: new Map([[seedKey, seedValue]]), - }; + const seededCache = new Map([[seedKey, seedValue]]); + transferCacheToSpawnedLane(root, seededCache, lane); } - providers.forEach(provider => - scheduleCacheRefresh(provider, seededCache), - ); + } finally { + ReactCurrentBatchConfig.transition = prevTransition; } } else { - // TODO: Warn if cache is null? - } -} - -function scheduleCacheRefresh( - cacheComponentFiber: Fiber, - seededCache: Cache | null, -) { - // Inlined startTransition - // TODO: Maybe we shouldn't automatically give this transition priority. Are - // there valid use cases for a high-pri refresh? Like if the content is - // super stale and you want to immediately hide it. - const prevTransition = ReactCurrentBatchConfig.transition; - ReactCurrentBatchConfig.transition = 1; - // TODO: Do we really need the try/finally? I don't think any of these - // functions would ever throw unless there's an internal error. - try { - const eventTime = requestEventTime(); - const lane = requestUpdateLane(cacheComponentFiber); - const root = scheduleUpdateOnFiber(cacheComponentFiber, lane, eventTime); - if (seededCache !== null && root !== null) { - transferCacheToSpawnedLane(root, seededCache, lane); - } - } finally { - ReactCurrentBatchConfig.transition = prevTransition; + // TODO: CacheInstance should never be null. Update type. } } @@ -1876,27 +1867,27 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - const cache: Cache | null = readContext(CacheContext); + const cacheInstance: CacheInstance | null = readContext(CacheContext); invariant( - cache !== null, + cacheInstance !== null, 'Tried to fetch data, but no cache was found. 
To fix, wrap your ' + "component in a boundary. It doesn't need to be a direct " + 'parent; it can be anywhere in the ancestor path', ); - let cachesByType = cache.data; - if (cachesByType === null) { - cachesByType = cache.data = new Map(); + let cache = cacheInstance.cache; + if (cache === null) { + cache = cacheInstance.cache = new Map(); // TODO: Warn if constructor returns undefined? Creates ambiguity with // existence check above. (I don't want to use `has`. Two map lookups // instead of one? Silly.) const cacheForType = resourceType(); - cachesByType.set(resourceType, cacheForType); + cache.set(resourceType, cacheForType); return cacheForType; } else { - let cacheForType: T | void = (cachesByType.get(resourceType): any); + let cacheForType: T | void = (cache.get(resourceType): any); if (cacheForType === undefined) { cacheForType = resourceType(); - cachesByType.set(resourceType, cacheForType); + cache.set(resourceType, cacheForType); } return cacheForType; } diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index 8acfdade7df0a..b82f49290f014 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -861,10 +861,7 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { } // Create a fresh cache. - const freshCache = { - providers: null, - data: null, - }; + const freshCache = new Map(); // This is now the pooled cache. root.pooledCache = freshCache; diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index 4b07ae1f0e7f2..444b5ef425ea1 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -861,10 +861,7 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { } // Create a fresh cache. 
- const freshCache = { - providers: null, - data: null, - }; + const freshCache = new Map(); // This is now the pooled cache. root.pooledCache = freshCache; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index eb480177eb570..da9741be3b05c 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -11,6 +11,7 @@ import type {ReactContext} from 'shared/ReactTypes'; import type {Fiber} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.new'; import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; import { @@ -133,8 +134,9 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; case CacheComponent: if (enableCache) { - const ownCache: Cache | null = workInProgress.memoizedState; - if (ownCache !== null) { + const ownCacheInstance: CacheInstance | null = + workInProgress.memoizedState; + if (ownCacheInstance !== null) { popProvider(CacheContext, workInProgress); } } @@ -182,8 +184,9 @@ function unwindInterruptedWork(interruptedWork: Fiber) { break; case CacheComponent: if (enableCache) { - const ownCache: Cache | null = interruptedWork.memoizedState; - if (ownCache !== null) { + const ownCacheInstance: CacheInstance | null = + interruptedWork.memoizedState; + if (ownCacheInstance !== null) { popProvider(CacheContext, interruptedWork); } } diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 6827ac7ccf0ac..5237b0c45b5ff 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -11,6 +11,7 @@ import type 
{ReactContext} from 'shared/ReactTypes'; import type {Fiber} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.old'; import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; import { @@ -133,8 +134,9 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; case CacheComponent: if (enableCache) { - const ownCache: Cache | null = workInProgress.memoizedState; - if (ownCache !== null) { + const ownCacheInstance: CacheInstance | null = + workInProgress.memoizedState; + if (ownCacheInstance !== null) { popProvider(CacheContext, workInProgress); } } @@ -182,8 +184,9 @@ function unwindInterruptedWork(interruptedWork: Fiber) { break; case CacheComponent: if (enableCache) { - const ownCache: Cache | null = interruptedWork.memoizedState; - if (ownCache !== null) { + const ownCacheInstance: CacheInstance | null = + interruptedWork.memoizedState; + if (ownCacheInstance !== null) { popProvider(CacheContext, interruptedWork); } } diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index a933fda928d8c..18d4f59710d2d 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -529,7 +529,7 @@ describe('ReactCache', () => { // @gate experimental test( - 'refreshing a cache boundary also refreshes the other boundaries ' + + 'refreshing a cache boundary does not refresh the other boundaries ' + 'that mounted at the same time (i.e. 
the ones that share the same cache)', async () => { let refreshFirstBoundary; @@ -575,23 +575,19 @@ describe('ReactCache', () => { expect(Scheduler).toHaveYielded(['A [v1]', 'A [v1]']); expect(root).toMatchRenderedOutput('A [v1]A [v1]'); - // Refresh the first boundary. It should also refresh the second boundary, - // since they appeared at the same time. + // Refresh the first boundary. It should not refresh the second boundary, + // even though they previously shared the same underlying cache. mutateRemoteTextService(); await ReactNoop.act(async () => { await refreshFirstBoundary(); }); - expect(Scheduler).toHaveYielded([ - 'Cache miss! [A]', - 'Loading...', - 'Loading...', - ]); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); await ReactNoop.act(async () => { await resolveText('A'); }); - expect(Scheduler).toHaveYielded(['A [v2]', 'A [v2]']); - expect(root).toMatchRenderedOutput('A [v2]A [v2]'); + expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2]A [v1]'); }, ); }); From 220a9d72c818fc2ef7eab4d2798e170044680cc4 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Mon, 14 Dec 2020 18:10:46 -0600 Subject: [PATCH 08/30] Add implicit root-level cache If `getCacheForType` or `useRefresh` cannot find a parent <Cache>, they will access a top-level cache associated with the root. The behavior is effectively the same as if you wrapped the entire tree in a <Cache> boundary. 
--- .../src/ReactFiberBeginWork.new.js | 14 +++++ .../src/ReactFiberBeginWork.old.js | 14 +++++ .../src/ReactFiberCompleteWork.new.js | 3 + .../src/ReactFiberCompleteWork.old.js | 3 + .../src/ReactFiberHooks.new.js | 27 ++++++-- .../src/ReactFiberHooks.old.js | 27 ++++++-- .../src/ReactFiberRoot.new.js | 12 ++++ .../src/ReactFiberRoot.old.js | 12 ++++ .../src/ReactFiberUnwindWork.new.js | 6 ++ .../src/ReactFiberUnwindWork.old.js | 6 ++ .../src/ReactInternalTypes.js | 2 +- .../src/__tests__/ReactCache-test.js | 62 +++++++++++++++++++ scripts/error-codes/codes.json | 2 +- 13 files changed, 180 insertions(+), 10 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 9295a54947cdd..225b585d22fce 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -1099,6 +1099,15 @@ function updateHostRoot(current, workInProgress, renderLanes) { const nextState = workInProgress.memoizedState; // Caution: React DevTools currently depends on this property // being called "element". 
+ + if (enableCache) { + const nextCacheInstance: CacheInstance = nextState.cacheInstance; + pushProvider(workInProgress, CacheContext, nextCacheInstance); + if (nextCacheInstance !== prevState.cacheInstance) { + propagateCacheRefresh(workInProgress, renderLanes); + } + } + const nextChildren = nextState.element; if (nextChildren === prevChildren) { resetHydrationState(); @@ -3170,6 +3179,11 @@ function beginWork( switch (workInProgress.tag) { case HostRoot: pushHostRootContext(workInProgress); + if (enableCache) { + const nextCacheInstance: CacheInstance = + current.memoizedState.cacheInstance; + pushProvider(workInProgress, CacheContext, nextCacheInstance); + } resetHydrationState(); break; case HostComponent: diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index e71c6be870526..676bbdce428ae 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -1099,6 +1099,15 @@ function updateHostRoot(current, workInProgress, renderLanes) { const nextState = workInProgress.memoizedState; // Caution: React DevTools currently depends on this property // being called "element". 
+ + if (enableCache) { + const nextCacheInstance: CacheInstance = nextState.cacheInstance; + pushProvider(workInProgress, CacheContext, nextCacheInstance); + if (nextCacheInstance !== prevState.cacheInstance) { + propagateCacheRefresh(workInProgress, renderLanes); + } + } + const nextChildren = nextState.element; if (nextChildren === prevChildren) { resetHydrationState(); @@ -3170,6 +3179,11 @@ function beginWork( switch (workInProgress.tag) { case HostRoot: pushHostRootContext(workInProgress); + if (enableCache) { + const nextCacheInstance: CacheInstance = + current.memoizedState.cacheInstance; + pushProvider(workInProgress, CacheContext, nextCacheInstance); + } resetHydrationState(); break; case HostComponent: diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 14dc69d01e658..71994fd481269 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -813,6 +813,9 @@ function completeWork( return null; } case HostRoot: { + if (enableCache) { + popProvider(CacheContext, workInProgress); + } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index ae6bc84df5372..7f164b9d66fa6 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -813,6 +813,9 @@ function completeWork( return null; } case HostRoot: { + if (enableCache) { + popProvider(CacheContext, workInProgress); + } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js 
b/packages/react-reconciler/src/ReactFiberHooks.new.js index c3a53c1eea35d..3c4221a08a425 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -31,6 +31,7 @@ import { enableUseRefAccessWarning, } from 'shared/ReactFeatureFlags'; +import {HostRoot} from './ReactWorkTags'; import {NoMode, BlockingMode, DebugTracingMode} from './ReactTypeOfMode'; import { NoLane, @@ -95,6 +96,7 @@ import {getIsRendering} from './ReactCurrentFiber'; import {logStateUpdateScheduled} from './DebugTracing'; import {markStateUpdateScheduled} from './SchedulingProfiler'; import {CacheContext} from './ReactFiberCacheComponent'; +import {createUpdate, enqueueUpdate} from './ReactUpdateQueue.new'; const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals; @@ -1742,12 +1744,31 @@ function refreshCache( try { const eventTime = requestEventTime(); const lane = requestUpdateLane(provider); + // TODO: Does Cache work in legacy mode? Should decide and write a test. const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + let seededCache = null; if (seedKey !== null && seedKey !== undefined && root !== null) { // TODO: Warn if wrong type - const seededCache = new Map([[seedKey, seedValue]]); + seededCache = new Map([[seedKey, seedValue]]); transferCacheToSpawnedLane(root, seededCache, lane); } + + if (provider.tag === HostRoot) { + const refreshUpdate = createUpdate(eventTime, lane); + refreshUpdate.payload = { + cacheInstance: { + provider: provider, + cache: + // For the root cache, we won't bother to lazily initialize the + // map. Seed an empty one. This saves us the trouble of having + // to use an updater function. Maybe we should use this approach + // for non-root refreshes, too. + seededCache !== null ? 
seededCache : new Map(), + }, + }; + enqueueUpdate(provider, refreshUpdate); + } } finally { ReactCurrentBatchConfig.transition = prevTransition; } @@ -1870,9 +1891,7 @@ function getCacheForType(resourceType: () => T): T { const cacheInstance: CacheInstance | null = readContext(CacheContext); invariant( cacheInstance !== null, - 'Tried to fetch data, but no cache was found. To fix, wrap your ' + - "component in a boundary. It doesn't need to be a direct " + - 'parent; it can be anywhere in the ancestor path', + 'Internal React error: Should always have a cache.', ); let cache = cacheInstance.cache; if (cache === null) { diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 76efd9171587c..8e343dac6b5e8 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -31,6 +31,7 @@ import { enableUseRefAccessWarning, } from 'shared/ReactFeatureFlags'; +import {HostRoot} from './ReactWorkTags'; import {NoMode, BlockingMode, DebugTracingMode} from './ReactTypeOfMode'; import { NoLane, @@ -95,6 +96,7 @@ import {getIsRendering} from './ReactCurrentFiber'; import {logStateUpdateScheduled} from './DebugTracing'; import {markStateUpdateScheduled} from './SchedulingProfiler'; import {CacheContext} from './ReactFiberCacheComponent'; +import {createUpdate, enqueueUpdate} from './ReactUpdateQueue.old'; const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals; @@ -1742,12 +1744,31 @@ function refreshCache( try { const eventTime = requestEventTime(); const lane = requestUpdateLane(provider); + // TODO: Does Cache work in legacy mode? Should decide and write a test. 
const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + let seededCache = null; if (seedKey !== null && seedKey !== undefined && root !== null) { // TODO: Warn if wrong type - const seededCache = new Map([[seedKey, seedValue]]); + seededCache = new Map([[seedKey, seedValue]]); transferCacheToSpawnedLane(root, seededCache, lane); } + + if (provider.tag === HostRoot) { + const refreshUpdate = createUpdate(eventTime, lane); + refreshUpdate.payload = { + cacheInstance: { + provider: provider, + cache: + // For the root cache, we won't bother to lazily initialize the + // map. Seed an empty one. This saves use the trouble of having + // to use an updater function. Maybe we should use this approach + // for non-root refreshes, too. + seededCache !== null ? seededCache : new Map(), + }, + }; + enqueueUpdate(provider, refreshUpdate); + } } finally { ReactCurrentBatchConfig.transition = prevTransition; } @@ -1870,9 +1891,7 @@ function getCacheForType(resourceType: () => T): T { const cacheInstance: CacheInstance | null = readContext(CacheContext); invariant( cacheInstance !== null, - 'Tried to fetch data, but no cache was found. To fix, wrap your ' + - "component in a boundary. It doesn't need to be a direct " + - 'parent; it can be anywhere in the ancestor path', + 'Internal React error: Should always have a cache.', ); let cache = cacheInstance.cache; if (cache === null) { diff --git a/packages/react-reconciler/src/ReactFiberRoot.new.js b/packages/react-reconciler/src/ReactFiberRoot.new.js index cf46bc7bae382..92af768c30d92 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.new.js +++ b/packages/react-reconciler/src/ReactFiberRoot.new.js @@ -103,6 +103,18 @@ export function createFiberRoot( root.current = uninitializedFiber; uninitializedFiber.stateNode = root; + const initialState = { + element: null, + // For the root cache, we won't bother to lazily initialize the map. Seed an + // empty one. 
This saves use the trouble of having to initialize in an + // updater function. + cacheInstance: { + cache: new Map(), + provider: uninitializedFiber, + }, + }; + uninitializedFiber.memoizedState = initialState; + initializeUpdateQueue(uninitializedFiber); return root; diff --git a/packages/react-reconciler/src/ReactFiberRoot.old.js b/packages/react-reconciler/src/ReactFiberRoot.old.js index 4ebb3f7804b9c..502edb84dafed 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.old.js +++ b/packages/react-reconciler/src/ReactFiberRoot.old.js @@ -103,6 +103,18 @@ export function createFiberRoot( root.current = uninitializedFiber; uninitializedFiber.stateNode = root; + const initialState = { + element: null, + // For the root cache, we won't bother to lazily initialize the map. Seed an + // empty one. This saves use the trouble of having to initialize in an + // updater function. + cacheInstance: { + cache: new Map(), + provider: uninitializedFiber, + }, + }; + uninitializedFiber.memoizedState = initialState; + initializeUpdateQueue(uninitializedFiber); return root; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index da9741be3b05c..51b8d43b95324 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -70,6 +70,9 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; } case HostRoot: { + if (enableCache) { + popProvider(CacheContext, workInProgress); + } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); @@ -156,6 +159,9 @@ function unwindInterruptedWork(interruptedWork: Fiber) { break; } case HostRoot: { + if (enableCache) { + popProvider(CacheContext, interruptedWork); + } popHostContainer(interruptedWork); popTopLevelLegacyContextObject(interruptedWork); resetMutableSourceWorkInProgressVersions(); diff 
--git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 5237b0c45b5ff..b45482661ed08 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -70,6 +70,9 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; } case HostRoot: { + if (enableCache) { + popProvider(CacheContext, workInProgress); + } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); @@ -156,6 +159,9 @@ function unwindInterruptedWork(interruptedWork: Fiber) { break; } case HostRoot: { + if (enableCache) { + popProvider(CacheContext, interruptedWork); + } popHostContainer(interruptedWork); popTopLevelLegacyContextObject(interruptedWork); resetMutableSourceWorkInProgressVersions(); diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index ff04c72d6c762..894650dd8c0cd 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -238,7 +238,7 @@ type BaseFiberRootProperties = {| entangledLanes: Lanes, entanglements: LaneMap, - caches: Array | null, + caches: LaneMap | null, pooledCache: Cache | null, |}; diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index 18d4f59710d2d..9b38261378d0a 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -150,6 +150,26 @@ describe('ReactCache', () => { expect(root).toMatchRenderedOutput('A'); }); + // @gate experimental + test('root acts as implicit cache boundary', async () => { + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + }> + + , + ); + }); + 
expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A']); + expect(root).toMatchRenderedOutput('A'); + }); + // @gate experimental test('multiple new Cache boundaries in the same update share the same, fresh cache', async () => { function App({text}) { @@ -404,6 +424,48 @@ describe('ReactCache', () => { expect(root).toMatchRenderedOutput('A [v2]'); }); + // @gate experimental + test('refresh the root cache', async () => { + let refresh; + function App() { + refresh = useRefresh(); + return ; + } + + // Mount initial data + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + }> + + , + ); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v1]']); + expect(root).toMatchRenderedOutput('A [v1]'); + + // Mutate the text service, then refresh for new data. + mutateRemoteTextService(); + await ReactNoop.act(async () => { + refresh(); + }); + expect(Scheduler).toHaveYielded(['Cache miss! 
[A]', 'Loading...']); + expect(root).toMatchRenderedOutput('A [v1]'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + // Note that the version has updated + expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2]'); + }); + // @gate experimental test('refresh a cache with seed data', async () => { let refresh; diff --git a/scripts/error-codes/codes.json b/scripts/error-codes/codes.json index 028fca5bdc513..2c2e5175a5eb2 100644 --- a/scripts/error-codes/codes.json +++ b/scripts/error-codes/codes.json @@ -372,6 +372,6 @@ "381": "This feature is not supported by ReactSuspenseTestUtils.", "382": "This query has received more parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", "383": "This query has received fewer parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", - "384": "Tried to fetch data, but no cache was found. To fix, wrap your component in a boundary. It doesn't need to be a direct parent; it can be anywhere in the ancestor path", + "384": "Internal React error: Should always have a cache.", "385": "Refreshing the cache is not supported in Server Components." } From da9dc4c5d83f433c8d5c6dd03cfbf8fcc91290fd Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Mon, 14 Dec 2020 21:46:50 -0600 Subject: [PATCH 09/30] Make CacheContext type non-nullable Now that the root is a cache provider, there will always be a cache. Don't need the null check. 
--- .../src/ReactFiberBeginWork.new.js | 7 +- .../src/ReactFiberBeginWork.old.js | 7 +- .../src/ReactFiberCacheComponent.js | 7 +- .../src/ReactFiberHooks.new.js | 93 +++++++++---------- .../src/ReactFiberHooks.old.js | 93 +++++++++---------- 5 files changed, 92 insertions(+), 115 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 225b585d22fce..bfc2d71f6cfee 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -667,7 +667,7 @@ function updateCacheComponent( // Read directly from the context. We don't set up a context dependency // because the propagation function automatically includes CacheComponents in // its search. - const parentCacheInstance: CacheInstance | null = isPrimaryRenderer + const parentCacheInstance: CacheInstance = isPrimaryRenderer ? CacheContext._currentValue : CacheContext._currentValue2; @@ -684,10 +684,7 @@ function updateCacheComponent( // This may be the same as the parent cache, like if the current render // spawned from a previous render that already committed. Otherwise, this // is the root of a cache consistency boundary. - if ( - parentCacheInstance === null || - freshCache !== parentCacheInstance.cache - ) { + if (freshCache !== parentCacheInstance.cache) { ownCacheInstance = { cache: freshCache, provider: workInProgress, diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 676bbdce428ae..68783c24724c2 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -667,7 +667,7 @@ function updateCacheComponent( // Read directly from the context. We don't set up a context dependency // because the propagation function automatically includes CacheComponents in // its search. 
- const parentCacheInstance: CacheInstance | null = isPrimaryRenderer + const parentCacheInstance: CacheInstance = isPrimaryRenderer ? CacheContext._currentValue : CacheContext._currentValue2; @@ -684,10 +684,7 @@ function updateCacheComponent( // This may be the same as the parent cache, like if the current render // spawned from a previous render that already committed. Otherwise, this // is the root of a cache consistency boundary. - if ( - parentCacheInstance === null || - freshCache !== parentCacheInstance.cache - ) { + if (freshCache !== parentCacheInstance.cache) { ownCacheInstance = { cache: freshCache, provider: workInProgress, diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js index 5b64e5c42fa27..ddcd741d487d3 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -18,14 +18,15 @@ export type CacheInstance = {| provider: Fiber, |}; -export const CacheContext: ReactContext = { +export const CacheContext: ReactContext = { $$typeof: REACT_CONTEXT_TYPE, // We don't use Consumer/Provider for Cache components. So we'll cheat. Consumer: (null: any), Provider: (null: any), _calculateChangedBits: null, - _currentValue: null, - _currentValue2: null, + // We'll initialize these at the root. + _currentValue: (null: any), + _currentValue2: (null: any), _threadCount: 0, }; diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 3c4221a08a425..69fe4a11bc6d1 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -1713,67 +1713,62 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { } function mountRefresh() { - // TODO: CacheInstance should never be null. Update type. 
- const cacheInstance: CacheInstance | null = readContext(CacheContext); + const cacheInstance: CacheInstance = readContext(CacheContext); return mountCallback(refreshCache.bind(null, cacheInstance), [cacheInstance]); } function updateRefresh() { - const cacheInstance: CacheInstance | null = readContext(CacheContext); + const cacheInstance: CacheInstance = readContext(CacheContext); return updateCallback(refreshCache.bind(null, cacheInstance), [ cacheInstance, ]); } function refreshCache( - cacheInstance: CacheInstance | null, + cacheInstance: CacheInstance, seedKey: ?() => T, seedValue: T, ) { - if (cacheInstance !== null) { - const provider = cacheInstance.provider; - - // Inlined startTransition - // TODO: Maybe we shouldn't automatically give this transition priority. Are - // there valid use cases for a high-pri refresh? Like if the content is - // super stale and you want to immediately hide it. - const prevTransition = ReactCurrentBatchConfig.transition; - ReactCurrentBatchConfig.transition = 1; - // TODO: Do we really need the try/finally? I don't think any of these - // functions would ever throw unless there's an internal error. - try { - const eventTime = requestEventTime(); - const lane = requestUpdateLane(provider); - // TODO: Does Cache work in legacy mode? Should decide and write a test. - const root = scheduleUpdateOnFiber(provider, lane, eventTime); - - let seededCache = null; - if (seedKey !== null && seedKey !== undefined && root !== null) { - // TODO: Warn if wrong type - seededCache = new Map([[seedKey, seedValue]]); - transferCacheToSpawnedLane(root, seededCache, lane); - } + const provider = cacheInstance.provider; + + // Inlined startTransition + // TODO: Maybe we shouldn't automatically give this transition priority. Are + // there valid use cases for a high-pri refresh? Like if the content is + // super stale and you want to immediately hide it. 
+ const prevTransition = ReactCurrentBatchConfig.transition; + ReactCurrentBatchConfig.transition = 1; + // TODO: Do we really need the try/finally? I don't think any of these + // functions would ever throw unless there's an internal error. + try { + const eventTime = requestEventTime(); + const lane = requestUpdateLane(provider); + // TODO: Does Cache work in legacy mode? Should decide and write a test. + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + let seededCache = null; + if (seedKey !== null && seedKey !== undefined && root !== null) { + // TODO: Warn if wrong type + seededCache = new Map([[seedKey, seedValue]]); + transferCacheToSpawnedLane(root, seededCache, lane); + } - if (provider.tag === HostRoot) { - const refreshUpdate = createUpdate(eventTime, lane); - refreshUpdate.payload = { - cacheInstance: { - provider: provider, - cache: - // For the root cache, we won't bother to lazily initialize the - // map. Seed an empty one. This saves use the trouble of having - // to use an updater function. Maybe we should use this approach - // for non-root refreshes, too. - seededCache !== null ? seededCache : new Map(), - }, - }; - enqueueUpdate(provider, refreshUpdate); - } - } finally { - ReactCurrentBatchConfig.transition = prevTransition; + if (provider.tag === HostRoot) { + const refreshUpdate = createUpdate(eventTime, lane); + refreshUpdate.payload = { + cacheInstance: { + provider: provider, + cache: + // For the root cache, we won't bother to lazily initialize the + // map. Seed an empty one. This saves use the trouble of having + // to use an updater function. Maybe we should use this approach + // for non-root refreshes, too. + seededCache !== null ? seededCache : new Map(), + }, + }; + enqueueUpdate(provider, refreshUpdate); } - } else { - // TODO: CacheInstance should never be null. Update type. 
+ } finally { + ReactCurrentBatchConfig.transition = prevTransition; } } @@ -1888,11 +1883,7 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - const cacheInstance: CacheInstance | null = readContext(CacheContext); - invariant( - cacheInstance !== null, - 'Internal React error: Should always have a cache.', - ); + const cacheInstance: CacheInstance = readContext(CacheContext); let cache = cacheInstance.cache; if (cache === null) { cache = cacheInstance.cache = new Map(); diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 8e343dac6b5e8..8dc64d15ebca0 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -1713,67 +1713,62 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { } function mountRefresh() { - // TODO: CacheInstance should never be null. Update type. - const cacheInstance: CacheInstance | null = readContext(CacheContext); + const cacheInstance: CacheInstance = readContext(CacheContext); return mountCallback(refreshCache.bind(null, cacheInstance), [cacheInstance]); } function updateRefresh() { - const cacheInstance: CacheInstance | null = readContext(CacheContext); + const cacheInstance: CacheInstance = readContext(CacheContext); return updateCallback(refreshCache.bind(null, cacheInstance), [ cacheInstance, ]); } function refreshCache( - cacheInstance: CacheInstance | null, + cacheInstance: CacheInstance, seedKey: ?() => T, seedValue: T, ) { - if (cacheInstance !== null) { - const provider = cacheInstance.provider; - - // Inlined startTransition - // TODO: Maybe we shouldn't automatically give this transition priority. Are - // there valid use cases for a high-pri refresh? Like if the content is - // super stale and you want to immediately hide it. 
- const prevTransition = ReactCurrentBatchConfig.transition; - ReactCurrentBatchConfig.transition = 1; - // TODO: Do we really need the try/finally? I don't think any of these - // functions would ever throw unless there's an internal error. - try { - const eventTime = requestEventTime(); - const lane = requestUpdateLane(provider); - // TODO: Does Cache work in legacy mode? Should decide and write a test. - const root = scheduleUpdateOnFiber(provider, lane, eventTime); - - let seededCache = null; - if (seedKey !== null && seedKey !== undefined && root !== null) { - // TODO: Warn if wrong type - seededCache = new Map([[seedKey, seedValue]]); - transferCacheToSpawnedLane(root, seededCache, lane); - } + const provider = cacheInstance.provider; + + // Inlined startTransition + // TODO: Maybe we shouldn't automatically give this transition priority. Are + // there valid use cases for a high-pri refresh? Like if the content is + // super stale and you want to immediately hide it. + const prevTransition = ReactCurrentBatchConfig.transition; + ReactCurrentBatchConfig.transition = 1; + // TODO: Do we really need the try/finally? I don't think any of these + // functions would ever throw unless there's an internal error. + try { + const eventTime = requestEventTime(); + const lane = requestUpdateLane(provider); + // TODO: Does Cache work in legacy mode? Should decide and write a test. + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + let seededCache = null; + if (seedKey !== null && seedKey !== undefined && root !== null) { + // TODO: Warn if wrong type + seededCache = new Map([[seedKey, seedValue]]); + transferCacheToSpawnedLane(root, seededCache, lane); + } - if (provider.tag === HostRoot) { - const refreshUpdate = createUpdate(eventTime, lane); - refreshUpdate.payload = { - cacheInstance: { - provider: provider, - cache: - // For the root cache, we won't bother to lazily initialize the - // map. Seed an empty one. 
This saves use the trouble of having - // to use an updater function. Maybe we should use this approach - // for non-root refreshes, too. - seededCache !== null ? seededCache : new Map(), - }, - }; - enqueueUpdate(provider, refreshUpdate); - } - } finally { - ReactCurrentBatchConfig.transition = prevTransition; + if (provider.tag === HostRoot) { + const refreshUpdate = createUpdate(eventTime, lane); + refreshUpdate.payload = { + cacheInstance: { + provider: provider, + cache: + // For the root cache, we won't bother to lazily initialize the + // map. Seed an empty one. This saves use the trouble of having + // to use an updater function. Maybe we should use this approach + // for non-root refreshes, too. + seededCache !== null ? seededCache : new Map(), + }, + }; + enqueueUpdate(provider, refreshUpdate); } - } else { - // TODO: CacheInstance should never be null. Update type. + } finally { + ReactCurrentBatchConfig.transition = prevTransition; } } @@ -1888,11 +1883,7 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { - const cacheInstance: CacheInstance | null = readContext(CacheContext); - invariant( - cacheInstance !== null, - 'Internal React error: Should always have a cache.', - ); + const cacheInstance: CacheInstance = readContext(CacheContext); let cache = cacheInstance.cache; if (cache === null) { cache = cacheInstance.cache = new Map(); From bc43607dbfed12bf883b8a7ed49695cab7853fb1 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Mon, 14 Dec 2020 22:58:38 -0600 Subject: [PATCH 10/30] Use an update queue for refreshes Refreshes are easier than initial mounts because we have a mounted fiber that we can attach the cache to. We don't need to rely on clever pooling tricks; they're just normal updates. 
More importantly, we're not at risk of dropping requests/data if we run out of lanes, which is especially important for refreshes because they can contain data seeded from a server mutation response; we cannot afford to accidentally evict it. --- .../src/ReactFiberBeginWork.new.js | 81 +++++++++++-------- .../src/ReactFiberBeginWork.old.js | 81 +++++++++++-------- .../src/ReactFiberCompleteWork.new.js | 3 +- .../src/ReactFiberCompleteWork.old.js | 3 +- .../src/ReactFiberHooks.new.js | 29 +++---- .../src/ReactFiberHooks.old.js | 29 +++---- .../src/ReactFiberUnwindWork.new.js | 5 +- .../src/ReactFiberUnwindWork.old.js | 5 +- 8 files changed, 132 insertions(+), 104 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index bfc2d71f6cfee..8f09b4cb551cc 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -23,7 +23,8 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type {CacheInstance, Cache} from './ReactFiberCacheComponent'; +import type {UpdateQueue} from './ReactUpdateQueue.new'; import checkPropTypes from 'shared/checkPropTypes'; @@ -671,9 +672,11 @@ function updateCacheComponent( ? CacheContext._currentValue : CacheContext._currentValue2; - let ownCacheInstance: CacheInstance | null = null; + let cacheInstance: CacheInstance | null = null; if (current === null) { // This is a newly mounted component. Request a fresh cache. + // TODO: Fast path when parent cache component is also a new mount? We can + // check `parentCacheInstance.provider.alternate`. 
const root = getWorkInProgressRoot(); invariant( root !== null, @@ -684,62 +687,74 @@ function updateCacheComponent( // This may be the same as the parent cache, like if the current render // spawned from a previous render that already committed. Otherwise, this // is the root of a cache consistency boundary. + let initialState; if (freshCache !== parentCacheInstance.cache) { - ownCacheInstance = { + cacheInstance = { cache: freshCache, provider: workInProgress, }; - pushProvider(workInProgress, CacheContext, ownCacheInstance); + initialState = { + cache: freshCache, + }; + pushProvider(workInProgress, CacheContext, cacheInstance); // No need to propagate the refresh, because this is a new tree. } else { // Use the parent cache - ownCacheInstance = null; + cacheInstance = null; + initialState = { + cache: null, + }; } + // Initialize an update queue. We use this for refreshes. + workInProgress.memoizedState = initialState; + initializeUpdateQueue(workInProgress); } else { // This component already mounted. if (includesSomeLane(renderLanes, updateLanes)) { - // A refresh was scheduled. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - const freshCache = requestFreshCache(root, renderLanes); - if ( - parentCacheInstance === null || - freshCache !== parentCacheInstance.cache - ) { - ownCacheInstance = { - cache: freshCache, + // An refresh was scheduled. If it was an refresh on this fiber, then we + // will have an update in the queue. Otherwise, it must have been an + // update on a parent, propagated via context. + cloneUpdateQueue(current, workInProgress); + processUpdateQueue(workInProgress, null, null, renderLanes); + const prevCache: Cache | null = current.memoizedState.cache; + const nextCache: Cache | null = workInProgress.memoizedState.cache; + + if (nextCache !== prevCache && nextCache !== null) { + // Received a refresh. 
+ cacheInstance = { + cache: nextCache, provider: workInProgress, }; - pushProvider(workInProgress, CacheContext, ownCacheInstance); + pushProvider(workInProgress, CacheContext, cacheInstance); // Refreshes propagate through the entire subtree. The refreshed cache // will override nested caches. propagateCacheRefresh(workInProgress, renderLanes); } else { - // The fresh cache is the same as the parent cache. - ownCacheInstance = null; + // A parent cache boundary refreshed. So we can use the cache context. + cacheInstance = null; + + // If the update queue is empty, disconnect the old cache from the tree + // so it can be garbage collected. + if (workInProgress.lanes === NoLanes) { + const updateQueue: UpdateQueue = (workInProgress.updateQueue: any); + workInProgress.memoizedState = updateQueue.baseState = {cache: null}; + } } } else { // Reuse the memoized cache. - const prevCacheInstance: CacheInstance | null = current.memoizedState; - if (prevCacheInstance !== null) { - ownCacheInstance = prevCacheInstance; + cacheInstance = current.stateNode; + if (cacheInstance !== null) { // There was no refresh, so no need to propagate to nested boundaries. - pushProvider(workInProgress, CacheContext, ownCacheInstance); - } else { - ownCacheInstance = null; + pushProvider(workInProgress, CacheContext, cacheInstance); } } } - // If this CacheComponent is the root of its tree, then `memoizedState` will - // point to a cache object. Otherwise, a null state indicates that this + // If this CacheComponent is the root of its tree, then `stateNode` will + // point to a cache instance. Otherwise, a null instance indicates that this // CacheComponent inherits from a parent boundary. We can use this to infer // whether to push/pop the cache context. 
- workInProgress.memoizedState = ownCacheInstance; + workInProgress.stateNode = cacheInstance; const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -3350,11 +3365,11 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const ownCacheInstance: CacheInstance | null = - workInProgress.memoizedState; + const ownCacheInstance: CacheInstance | null = current.stateNode; if (ownCacheInstance !== null) { pushProvider(workInProgress, CacheContext, ownCacheInstance); } + workInProgress.stateNode = ownCacheInstance; } break; } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 68783c24724c2..c3b060ee4637b 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -23,7 +23,8 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type {CacheInstance, Cache} from './ReactFiberCacheComponent'; +import type {UpdateQueue} from './ReactUpdateQueue.old'; import checkPropTypes from 'shared/checkPropTypes'; @@ -671,9 +672,11 @@ function updateCacheComponent( ? CacheContext._currentValue : CacheContext._currentValue2; - let ownCacheInstance: CacheInstance | null = null; + let cacheInstance: CacheInstance | null = null; if (current === null) { // This is a newly mounted component. Request a fresh cache. + // TODO: Fast path when parent cache component is also a new mount? We can + // check `parentCacheInstance.provider.alternate`. const root = getWorkInProgressRoot(); invariant( root !== null, @@ -684,62 +687,74 @@ function updateCacheComponent( // This may be the same as the parent cache, like if the current render // spawned from a previous render that already committed. 
Otherwise, this // is the root of a cache consistency boundary. + let initialState; if (freshCache !== parentCacheInstance.cache) { - ownCacheInstance = { + cacheInstance = { cache: freshCache, provider: workInProgress, }; - pushProvider(workInProgress, CacheContext, ownCacheInstance); + initialState = { + cache: freshCache, + }; + pushProvider(workInProgress, CacheContext, cacheInstance); // No need to propagate the refresh, because this is a new tree. } else { // Use the parent cache - ownCacheInstance = null; + cacheInstance = null; + initialState = { + cache: null, + }; } + // Initialize an update queue. We use this for refreshes. + workInProgress.memoizedState = initialState; + initializeUpdateQueue(workInProgress); } else { // This component already mounted. if (includesSomeLane(renderLanes, updateLanes)) { - // A refresh was scheduled. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - const freshCache = requestFreshCache(root, renderLanes); - if ( - parentCacheInstance === null || - freshCache !== parentCacheInstance.cache - ) { - ownCacheInstance = { - cache: freshCache, + // An refresh was scheduled. If it was an refresh on this fiber, then we + // will have an update in the queue. Otherwise, it must have been an + // update on a parent, propagated via context. + cloneUpdateQueue(current, workInProgress); + processUpdateQueue(workInProgress, null, null, renderLanes); + const prevCache: Cache | null = current.memoizedState.cache; + const nextCache: Cache | null = workInProgress.memoizedState.cache; + + if (nextCache !== prevCache && nextCache !== null) { + // Received a refresh. + cacheInstance = { + cache: nextCache, provider: workInProgress, }; - pushProvider(workInProgress, CacheContext, ownCacheInstance); + pushProvider(workInProgress, CacheContext, cacheInstance); // Refreshes propagate through the entire subtree. 
The refreshed cache // will override nested caches. propagateCacheRefresh(workInProgress, renderLanes); } else { - // The fresh cache is the same as the parent cache. - ownCacheInstance = null; + // A parent cache boundary refreshed. So we can use the cache context. + cacheInstance = null; + + // If the update queue is empty, disconnect the old cache from the tree + // so it can be garbage collected. + if (workInProgress.lanes === NoLanes) { + const updateQueue: UpdateQueue = (workInProgress.updateQueue: any); + workInProgress.memoizedState = updateQueue.baseState = {cache: null}; + } } } else { // Reuse the memoized cache. - const prevCacheInstance: CacheInstance | null = current.memoizedState; - if (prevCacheInstance !== null) { - ownCacheInstance = prevCacheInstance; + cacheInstance = current.stateNode; + if (cacheInstance !== null) { // There was no refresh, so no need to propagate to nested boundaries. - pushProvider(workInProgress, CacheContext, ownCacheInstance); - } else { - ownCacheInstance = null; + pushProvider(workInProgress, CacheContext, cacheInstance); } } } - // If this CacheComponent is the root of its tree, then `memoizedState` will - // point to a cache object. Otherwise, a null state indicates that this + // If this CacheComponent is the root of its tree, then `stateNode` will + // point to a cache instance. Otherwise, a null instance indicates that this // CacheComponent inherits from a parent boundary. We can use this to infer // whether to push/pop the cache context. 
- workInProgress.memoizedState = ownCacheInstance; + workInProgress.stateNode = cacheInstance; const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -3350,11 +3365,11 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const ownCacheInstance: CacheInstance | null = - workInProgress.memoizedState; + const ownCacheInstance: CacheInstance | null = current.stateNode; if (ownCacheInstance !== null) { pushProvider(workInProgress, CacheContext, ownCacheInstance); } + workInProgress.stateNode = ownCacheInstance; } break; } diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 71994fd481269..28fb7b39e2e07 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -1492,8 +1492,7 @@ function completeWork( } case CacheComponent: { if (enableCache) { - const ownCacheInstance: CacheInstance | null = - workInProgress.memoizedState; + const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; if (ownCacheInstance !== null) { // This is a cache provider. popProvider(CacheContext, workInProgress); diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index 7f164b9d66fa6..a76d5562e65f5 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -1492,8 +1492,7 @@ function completeWork( } case CacheComponent: { if (enableCache) { - const ownCacheInstance: CacheInstance | null = - workInProgress.memoizedState; + const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; if (ownCacheInstance !== null) { // This is a cache provider. 
popProvider(CacheContext, workInProgress); diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 69fe4a11bc6d1..c59b23b733202 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -46,7 +46,6 @@ import { setCurrentUpdateLanePriority, higherLanePriority, DefaultLanePriority, - transferCacheToSpawnedLane, } from './ReactFiberLane.new'; import {readContext} from './ReactFiberNewContext.new'; import { @@ -1745,28 +1744,30 @@ function refreshCache( // TODO: Does Cache work in legacy mode? Should decide and write a test. const root = scheduleUpdateOnFiber(provider, lane, eventTime); - let seededCache = null; + const seededCache = new Map(); if (seedKey !== null && seedKey !== undefined && root !== null) { - // TODO: Warn if wrong type - seededCache = new Map([[seedKey, seedValue]]); - transferCacheToSpawnedLane(root, seededCache, lane); + // Seed the cache with the value passed by the caller. This could be from + // a server mutation, or it could be a streaming response. + seededCache.set(seedKey, seedValue); } + // Schedule an update on the cache boundary to trigger a refresh. + const refreshUpdate = createUpdate(eventTime, lane); + let payload; if (provider.tag === HostRoot) { - const refreshUpdate = createUpdate(eventTime, lane); - refreshUpdate.payload = { + payload = { cacheInstance: { provider: provider, - cache: - // For the root cache, we won't bother to lazily initialize the - // map. Seed an empty one. This saves use the trouble of having - // to use an updater function. Maybe we should use this approach - // for non-root refreshes, too. - seededCache !== null ? 
seededCache : new Map(), + cache: seededCache, }, }; - enqueueUpdate(provider, refreshUpdate); + } else { + payload = { + cache: seededCache, + }; } + refreshUpdate.payload = payload; + enqueueUpdate(provider, refreshUpdate); } finally { ReactCurrentBatchConfig.transition = prevTransition; } diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 8dc64d15ebca0..38b2dccf598ff 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -46,7 +46,6 @@ import { setCurrentUpdateLanePriority, higherLanePriority, DefaultLanePriority, - transferCacheToSpawnedLane, } from './ReactFiberLane.old'; import {readContext} from './ReactFiberNewContext.old'; import { @@ -1745,28 +1744,30 @@ function refreshCache( // TODO: Does Cache work in legacy mode? Should decide and write a test. const root = scheduleUpdateOnFiber(provider, lane, eventTime); - let seededCache = null; + const seededCache = new Map(); if (seedKey !== null && seedKey !== undefined && root !== null) { - // TODO: Warn if wrong type - seededCache = new Map([[seedKey, seedValue]]); - transferCacheToSpawnedLane(root, seededCache, lane); + // Seed the cache with the value passed by the caller. This could be from + // a server mutation, or it could be a streaming response. + seededCache.set(seedKey, seedValue); } + // Schedule an update on the cache boundary to trigger a refresh. + const refreshUpdate = createUpdate(eventTime, lane); + let payload; if (provider.tag === HostRoot) { - const refreshUpdate = createUpdate(eventTime, lane); - refreshUpdate.payload = { + payload = { cacheInstance: { provider: provider, - cache: - // For the root cache, we won't bother to lazily initialize the - // map. Seed an empty one. This saves use the trouble of having - // to use an updater function. Maybe we should use this approach - // for non-root refreshes, too. - seededCache !== null ? 
seededCache : new Map(), + cache: seededCache, }, }; - enqueueUpdate(provider, refreshUpdate); + } else { + payload = { + cache: seededCache, + }; } + refreshUpdate.payload = payload; + enqueueUpdate(provider, refreshUpdate); } finally { ReactCurrentBatchConfig.transition = prevTransition; } diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index 51b8d43b95324..8ed6329288e57 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -137,8 +137,7 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; case CacheComponent: if (enableCache) { - const ownCacheInstance: CacheInstance | null = - workInProgress.memoizedState; + const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; if (ownCacheInstance !== null) { popProvider(CacheContext, workInProgress); } @@ -191,7 +190,7 @@ function unwindInterruptedWork(interruptedWork: Fiber) { case CacheComponent: if (enableCache) { const ownCacheInstance: CacheInstance | null = - interruptedWork.memoizedState; + interruptedWork.stateNode; if (ownCacheInstance !== null) { popProvider(CacheContext, interruptedWork); } diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index b45482661ed08..83727fe4713c1 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -137,8 +137,7 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { return null; case CacheComponent: if (enableCache) { - const ownCacheInstance: CacheInstance | null = - workInProgress.memoizedState; + const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; if (ownCacheInstance !== null) { popProvider(CacheContext, workInProgress); } @@ -191,7 +190,7 @@ function 
unwindInterruptedWork(interruptedWork: Fiber) { case CacheComponent: if (enableCache) { const ownCacheInstance: CacheInstance | null = - interruptedWork.memoizedState; + interruptedWork.stateNode; if (ownCacheInstance !== null) { popProvider(CacheContext, interruptedWork); } From 900af348fdb0476eec4800239825c5d5d9407553 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Mon, 14 Dec 2020 23:21:08 -0600 Subject: [PATCH 11/30] Add fast path for nested mounting Caches Only the top most Cache boundary in a newly mounting tree needs to call `requestFreshCache`. Nested caches can inherit the parent cache by reading from context. This is strictly a performance optimization, since `requestFreshCache` would return the parent cache, anyway. After our planned lanes refactor, `requestFreshCache` might be fast enough that we don't need this special fast path. --- .../src/ReactFiberBeginWork.new.js | 57 +++++++++++-------- .../src/ReactFiberBeginWork.old.js | 57 +++++++++++-------- 2 files changed, 64 insertions(+), 50 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 8f09b4cb551cc..eea81fe666ce3 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -674,36 +674,43 @@ function updateCacheComponent( let cacheInstance: CacheInstance | null = null; if (current === null) { - // This is a newly mounted component. Request a fresh cache. - // TODO: Fast path when parent cache component is also a new mount? We can - // check `parentCacheInstance.provider.alternate`. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. 
Please ' + - 'file an issue.', - ); - const freshCache = requestFreshCache(root, renderLanes); - // This may be the same as the parent cache, like if the current render - // spawned from a previous render that already committed. Otherwise, this - // is the root of a cache consistency boundary. let initialState; - if (freshCache !== parentCacheInstance.cache) { - cacheInstance = { - cache: freshCache, - provider: workInProgress, - }; - initialState = { - cache: freshCache, - }; - pushProvider(workInProgress, CacheContext, cacheInstance); - // No need to propagate the refresh, because this is a new tree. - } else { - // Use the parent cache + if (parentCacheInstance.provider.alternate === null) { + // Fast path. The parent Cache boundary is also a new mount. We can + // inherit its cache. cacheInstance = null; initialState = { cache: null, }; + } else { + // This is a newly mounted component. Request a fresh cache. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + const freshCache = requestFreshCache(root, renderLanes); + // This may be the same as the parent cache, like if the current render + // spawned from a previous render that already committed. Otherwise, this + // is the root of a cache consistency boundary. + if (freshCache !== parentCacheInstance.cache) { + cacheInstance = { + cache: freshCache, + provider: workInProgress, + }; + initialState = { + cache: freshCache, + }; + pushProvider(workInProgress, CacheContext, cacheInstance); + // No need to propagate the refresh, because this is a new tree. + } else { + // Use the parent cache + cacheInstance = null; + initialState = { + cache: null, + }; + } } // Initialize an update queue. We use this for refreshes. 
workInProgress.memoizedState = initialState; diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index c3b060ee4637b..cdaf368dd1b97 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -674,36 +674,43 @@ function updateCacheComponent( let cacheInstance: CacheInstance | null = null; if (current === null) { - // This is a newly mounted component. Request a fresh cache. - // TODO: Fast path when parent cache component is also a new mount? We can - // check `parentCacheInstance.provider.alternate`. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - const freshCache = requestFreshCache(root, renderLanes); - // This may be the same as the parent cache, like if the current render - // spawned from a previous render that already committed. Otherwise, this - // is the root of a cache consistency boundary. let initialState; - if (freshCache !== parentCacheInstance.cache) { - cacheInstance = { - cache: freshCache, - provider: workInProgress, - }; - initialState = { - cache: freshCache, - }; - pushProvider(workInProgress, CacheContext, cacheInstance); - // No need to propagate the refresh, because this is a new tree. - } else { - // Use the parent cache + if (parentCacheInstance.provider.alternate === null) { + // Fast path. The parent Cache boundary is also a new mount. We can + // inherit its cache. cacheInstance = null; initialState = { cache: null, }; + } else { + // This is a newly mounted component. Request a fresh cache. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. 
Please ' + + 'file an issue.', + ); + const freshCache = requestFreshCache(root, renderLanes); + // This may be the same as the parent cache, like if the current render + // spawned from a previous render that already committed. Otherwise, this + // is the root of a cache consistency boundary. + if (freshCache !== parentCacheInstance.cache) { + cacheInstance = { + cache: freshCache, + provider: workInProgress, + }; + initialState = { + cache: freshCache, + }; + pushProvider(workInProgress, CacheContext, cacheInstance); + // No need to propagate the refresh, because this is a new tree. + } else { + // Use the parent cache + cacheInstance = null; + initialState = { + cache: null, + }; + } } // Initialize an update queue. We use this for refreshes. workInProgress.memoizedState = initialState; From 3cb49365bab472b66a467eb6e00ac3ffded6579f Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Tue, 15 Dec 2020 01:14:56 -0600 Subject: [PATCH 12/30] Every request in initial render shares same cache I noticed this bug when writing some new tests. The HostRoot fiber is special because it's always mounted. The moment you call `createRoot()`, you have a "current" (albeit empty) tree. So the "initial render" of an app is actually implemented internally as an update. This model has some nice advantages because we can use a regular Fiber and regular update queue to manage in-progress work even before the initial commit. However, for the purposes of the cache, we want the initial render to be treated like an initial render: all requests should share the same cache, including nested boundaries. My trick of checking if the provider fiber has an alternate won't work, because the root fiber always has an alternate. So I use another trick: if the provider fiber is a host root, check if `memoizedState.element` is null. We also check the alternate. The work-in-progress fiber's `element` will never be null because we're inside a work-in-progress tree. 
So if either fiber's element is null, that fiber must be the current one, which most likely means it's the initial mount. (I say "most likely" because, technically, you could pass `null` to `root.render()`. But, meh, good enough.) Fixing this revealed a related bug in one of my tests. When you render the initial app, all the caches on the entire page share the same provider: the root. So a refresh anywhere in the UI will refresh the entire screen... until you navigate or reveal more content. The more you interact with the UI, the more granular the consistency boundaries get. I also found another bug where caches were not transferred across retries if the original update was spawned by a cache refresh. That's because refresh caches are stored in the provider's update queue; we don't track these on the root, because they're already part of the tree. So for these types of retries, we can go back to the original trick I attempted at the beginning of this exploration: when mounting a new tree, consult the render lanes to see if it's a retry. If it is, and there's no cache associated with those lanes, then the retry must have been the result of a cache refresh. Which means the nearest cache provider must be the one that we want: the one that refreshed.
--- .../src/ReactFiberBeginWork.new.js | 28 +++++++++-- .../src/ReactFiberBeginWork.old.js | 28 +++++++++-- .../src/ReactFiberLane.new.js | 26 +++++++++- .../src/ReactFiberLane.old.js | 26 +++++++++- .../src/ReactFiberWorkLoop.new.js | 7 +++ .../src/ReactFiberWorkLoop.old.js | 7 +++ .../src/__tests__/ReactCache-test.js | 49 +++++++++++++++++-- 7 files changed, 158 insertions(+), 13 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index eea81fe666ce3..96984910f469f 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -201,6 +201,7 @@ import { renderDidSuspendDelayIfPossible, markSkippedUpdateLanes, getWorkInProgressRoot, + getRootRenderLanes, pushRenderLanes, getExecutionContext, RetryAfterError, @@ -675,7 +676,24 @@ function updateCacheComponent( let cacheInstance: CacheInstance | null = null; if (current === null) { let initialState; - if (parentCacheInstance.provider.alternate === null) { + const providerFiber = parentCacheInstance.provider; + if ( + // If the provider fiber does not have an alternate, it must be a mount. + providerFiber.alternate === null || + // Host roots are never not mounted. Even during the initial render. So we + // use a trick. Check if `memoizedState.element` is null. We also check + // the alternate. The work-in-progress fiber's `element` will never be + // null because we're inside a work-in-progress tree. So if either fiber's + // element is null, that fiber must be the current one, which most likely + // means it's the initial mount. + // + // (I say "most likely" because, technically, you could pass `null` to + // `root.render()`. But, meh, good enough.) + (providerFiber.tag === HostRoot && + (providerFiber.memoizedState.element === null || + (providerFiber.alternate !== null && + providerFiber.alternate.memoizedState.element === null))) + ) { // Fast path. 
The parent Cache boundary is also a new mount. We can // inherit its cache. cacheInstance = null; @@ -690,11 +708,15 @@ function updateCacheComponent( 'Expected a work-in-progress root. This is a bug in React. Please ' + 'file an issue.', ); - const freshCache = requestFreshCache(root, renderLanes); + const freshCache: Cache | null = requestFreshCache( + root, + getRootRenderLanes(), + renderLanes, + ); // This may be the same as the parent cache, like if the current render // spawned from a previous render that already committed. Otherwise, this // is the root of a cache consistency boundary. - if (freshCache !== parentCacheInstance.cache) { + if (freshCache !== null && freshCache !== parentCacheInstance.cache) { cacheInstance = { cache: freshCache, provider: workInProgress, diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index cdaf368dd1b97..4ba49edd92c56 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -201,6 +201,7 @@ import { renderDidSuspendDelayIfPossible, markSkippedUpdateLanes, getWorkInProgressRoot, + getRootRenderLanes, pushRenderLanes, getExecutionContext, RetryAfterError, @@ -675,7 +676,24 @@ function updateCacheComponent( let cacheInstance: CacheInstance | null = null; if (current === null) { let initialState; - if (parentCacheInstance.provider.alternate === null) { + const providerFiber = parentCacheInstance.provider; + if ( + // If the provider fiber does not have an alternate, it must be a mount. + providerFiber.alternate === null || + // Host roots are never not mounted. Even during the initial render. So we + // use a trick. Check if `memoizedState.element` is null. We also check + // the alternate. The work-in-progress fiber's `element` will never be + // null because we're inside a work-in-progress tree. 
So if either fiber's + // element is null, that fiber must be the current one, which most likely + // means it's the initial mount. + // + // (I say "most likely" because, technically, you could pass `null` to + // `root.render()`. But, meh, good enough.) + (providerFiber.tag === HostRoot && + (providerFiber.memoizedState.element === null || + (providerFiber.alternate !== null && + providerFiber.alternate.memoizedState.element === null))) + ) { // Fast path. The parent Cache boundary is also a new mount. We can // inherit its cache. cacheInstance = null; @@ -690,11 +708,15 @@ function updateCacheComponent( 'Expected a work-in-progress root. This is a bug in React. Please ' + 'file an issue.', ); - const freshCache = requestFreshCache(root, renderLanes); + const freshCache: Cache | null = requestFreshCache( + root, + getRootRenderLanes(), + renderLanes, + ); // This may be the same as the parent cache, like if the current render // spawned from a previous render that already committed. Otherwise, this // is the root of a cache consistency boundary. - if (freshCache !== parentCacheInstance.cache) { + if (freshCache !== null && freshCache !== parentCacheInstance.cache) { cacheInstance = { cache: freshCache, provider: workInProgress, diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index b82f49290f014..ac42fee5b8583 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -797,9 +797,13 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { } } -export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { +export function requestFreshCache( + root: FiberRoot, + rootRenderLanes: Lanes, + renderLanes: Lanes, +): Cache | null { if (!enableCache) { - return (null: any); + return null; } // 1. 
Check if the currently rendering lanes already have a pending cache @@ -844,6 +848,16 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { } lanes &= ~lane; } + + if (includesOnlyRetries(rootRenderLanes)) { + // If this is a retry, and there's no cache associated with this lane, + // that must be because the original update was triggered by a refresh. + // Refreshes are stored on the Cache update queue, not the root. So, + // return null to indicate that we should use the parent cache (the cache + // that refreshed). + return null; + } + // There are no in-progress caches associated with the current render. Check // if there's a pooled cache. const pooledCache = root.pooledCache; @@ -860,6 +874,14 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { } } + if (includesOnlyRetries(rootRenderLanes)) { + // If this is a retry, and there's no cache associated with this lane, that + // must be because the original update was triggered by a refresh. Refreshes + // are stored on the Cache update queue, not the root. So, return null to + // indicate that we should use the parent cache (the cache that refreshed). + return null; + } + // Create a fresh cache. const freshCache = new Map(); diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index 444b5ef425ea1..c7c43c38b406f 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -797,9 +797,13 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { } } -export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { +export function requestFreshCache( + root: FiberRoot, + rootRenderLanes: Lanes, + renderLanes: Lanes, +): Cache | null { if (!enableCache) { - return (null: any); + return null; } // 1. 
Check if the currently rendering lanes already have a pending cache @@ -844,6 +848,16 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { } lanes &= ~lane; } + + if (includesOnlyRetries(rootRenderLanes)) { + // If this is a retry, and there's no cache associated with this lane, + // that must be because the original update was triggered by a refresh. + // Refreshes are stored on the Cache update queue, not the root. So, + // return null to indicate that we should use the parent cache (the cache + // that refreshed). + return null; + } + // There are no in-progress caches associated with the current render. Check // if there's a pooled cache. const pooledCache = root.pooledCache; @@ -860,6 +874,14 @@ export function requestFreshCache(root: FiberRoot, renderLanes: Lanes): Cache { } } + if (includesOnlyRetries(rootRenderLanes)) { + // If this is a retry, and there's no cache associated with this lane, that + // must be because the original update was triggered by a refresh. Refreshes + // are stored on the Cache update queue, not the root. So, return null to + // indicate that we should use the parent cache (the cache that refreshed). + return null; + } + // Create a fresh cache. const freshCache = new Map(); diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js index 91a127b2431b7..4cd481a750f99 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js @@ -386,6 +386,13 @@ export function getWorkInProgressRoot(): FiberRoot | null { return workInProgressRoot; } +// DELETE ME: This is only neccessary because of `subtreeLanes`. We should get +// rid of `subtreeLanes` and use entanglement for Suspense retries instead. We +// would still be able to tell it's a retry because we'd check the primary lane. 
+export function getRootRenderLanes(): Lanes { + return workInProgressRootRenderLanes; +} + export function requestEventTime() { if ((executionContext & (RenderContext | CommitContext)) !== NoContext) { // We're inside React, so it's fine to read the actual time. diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js index ce6066fd15582..52131b78c9aee 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js @@ -386,6 +386,13 @@ export function getWorkInProgressRoot(): FiberRoot | null { return workInProgressRoot; } +// DELETE ME: This is only neccessary because of `subtreeLanes`. We should get +// rid of `subtreeLanes` and use entanglement for Suspense retries instead. We +// would still be able to tell it's a retry because we'd check the primary lane. +export function getRootRenderLanes(): Lanes { + return workInProgressRootRenderLanes; +} + export function requestEventTime() { if ((executionContext & (RenderContext | CommitContext)) !== NoContext) { // We're inside React, so it's fine to read the actual time. diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index 9b38261378d0a..9e527735afca8 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -209,6 +209,39 @@ describe('ReactCache', () => { expect(root).toMatchRenderedOutput('AA'); }); + // @gate experimental + test( + 'nested cache boundaries share the same cache as the root during ' + + 'the initial render', + async () => { + function App({text}) { + return ( + }> + + + + + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + // Even though there are two new trees, they should share the same + // data cache. 
So there should be only a single cache miss for A. + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('Loading...'); + + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A', 'A']); + expect(root).toMatchRenderedOutput('AA'); + }, + ); + // @gate experimental test('new content inside an existing Cache boundary should re-use already cached data', async () => { function App({showMore}) { @@ -600,8 +633,8 @@ describe('ReactCache', () => { return null; } - function App({text}) { - return ( + function App({showMore}) { + return showMore ? ( <> }> @@ -615,13 +648,23 @@ describe('ReactCache', () => { - ); + ) : null; } + // First mount the initial shell without the nested boundaries. This is + // necessary for this test because we want the two inner boundaries to be + // treated like sibling providers that happen to share an underlying + // cache, as opposed to consumers of the root-level cache. const root = ReactNoop.createRoot(); await ReactNoop.act(async () => { root.render(); }); + + // Now reveal the boundaries. In a real app this would be a navigation. + await ReactNoop.act(async () => { + root.render(); + }); + // Even though there are two new trees, they should share the same // data cache. So there should be only a single cache miss for A. expect(Scheduler).toHaveYielded([ From 74852689625ca399e319f9249864f325de0fa6de Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Tue, 15 Dec 2020 13:24:55 -0600 Subject: [PATCH 13/30] pushCacheProvider/popCacheProvider Extracted these into functions so I can put more stuff there. 
--- .../src/ReactFiberBeginWork.new.js | 14 +++++++------- .../src/ReactFiberBeginWork.old.js | 14 +++++++------- .../src/ReactFiberCacheComponent.js | 18 ++++++++++++++++++ .../src/ReactFiberCompleteWork.new.js | 8 +++++--- .../src/ReactFiberCompleteWork.old.js | 8 +++++--- .../src/ReactFiberUnwindWork.new.js | 14 +++++++++----- .../src/ReactFiberUnwindWork.old.js | 14 +++++++++----- 7 files changed, 60 insertions(+), 30 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 96984910f469f..9068fc1d30ea2 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -209,7 +209,7 @@ import { } from './ReactFiberWorkLoop.new'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.new'; -import {CacheContext} from './ReactFiberCacheComponent'; +import {CacheContext, pushCacheProvider} from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -724,7 +724,7 @@ function updateCacheComponent( initialState = { cache: freshCache, }; - pushProvider(workInProgress, CacheContext, cacheInstance); + pushCacheProvider(workInProgress, cacheInstance); // No need to propagate the refresh, because this is a new tree. } else { // Use the parent cache @@ -754,7 +754,7 @@ function updateCacheComponent( cache: nextCache, provider: workInProgress, }; - pushProvider(workInProgress, CacheContext, cacheInstance); + pushCacheProvider(workInProgress, cacheInstance); // Refreshes propagate through the entire subtree. The refreshed cache // will override nested caches. propagateCacheRefresh(workInProgress, renderLanes); @@ -774,7 +774,7 @@ function updateCacheComponent( cacheInstance = current.stateNode; if (cacheInstance !== null) { // There was no refresh, so no need to propagate to nested boundaries. 
- pushProvider(workInProgress, CacheContext, cacheInstance); + pushCacheProvider(workInProgress, cacheInstance); } } } @@ -1143,7 +1143,7 @@ function updateHostRoot(current, workInProgress, renderLanes) { if (enableCache) { const nextCacheInstance: CacheInstance = nextState.cacheInstance; - pushProvider(workInProgress, CacheContext, nextCacheInstance); + pushCacheProvider(workInProgress, nextCacheInstance); if (nextCacheInstance !== prevState.cacheInstance) { propagateCacheRefresh(workInProgress, renderLanes); } @@ -3223,7 +3223,7 @@ function beginWork( if (enableCache) { const nextCacheInstance: CacheInstance = current.memoizedState.cacheInstance; - pushProvider(workInProgress, CacheContext, nextCacheInstance); + pushCacheProvider(workInProgress, nextCacheInstance); } resetHydrationState(); break; @@ -3396,7 +3396,7 @@ function beginWork( if (enableCache) { const ownCacheInstance: CacheInstance | null = current.stateNode; if (ownCacheInstance !== null) { - pushProvider(workInProgress, CacheContext, ownCacheInstance); + pushCacheProvider(workInProgress, ownCacheInstance); } workInProgress.stateNode = ownCacheInstance; } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 4ba49edd92c56..f1527798ff1b4 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -209,7 +209,7 @@ import { } from './ReactFiberWorkLoop.old'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.old'; -import {CacheContext} from './ReactFiberCacheComponent'; +import {CacheContext, pushCacheProvider} from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -724,7 +724,7 @@ function updateCacheComponent( initialState = { cache: freshCache, }; - pushProvider(workInProgress, CacheContext, cacheInstance); + 
pushCacheProvider(workInProgress, cacheInstance); // No need to propagate the refresh, because this is a new tree. } else { // Use the parent cache @@ -754,7 +754,7 @@ function updateCacheComponent( cache: nextCache, provider: workInProgress, }; - pushProvider(workInProgress, CacheContext, cacheInstance); + pushCacheProvider(workInProgress, cacheInstance); // Refreshes propagate through the entire subtree. The refreshed cache // will override nested caches. propagateCacheRefresh(workInProgress, renderLanes); @@ -774,7 +774,7 @@ function updateCacheComponent( cacheInstance = current.stateNode; if (cacheInstance !== null) { // There was no refresh, so no need to propagate to nested boundaries. - pushProvider(workInProgress, CacheContext, cacheInstance); + pushCacheProvider(workInProgress, cacheInstance); } } } @@ -1143,7 +1143,7 @@ function updateHostRoot(current, workInProgress, renderLanes) { if (enableCache) { const nextCacheInstance: CacheInstance = nextState.cacheInstance; - pushProvider(workInProgress, CacheContext, nextCacheInstance); + pushCacheProvider(workInProgress, nextCacheInstance); if (nextCacheInstance !== prevState.cacheInstance) { propagateCacheRefresh(workInProgress, renderLanes); } @@ -3223,7 +3223,7 @@ function beginWork( if (enableCache) { const nextCacheInstance: CacheInstance = current.memoizedState.cacheInstance; - pushProvider(workInProgress, CacheContext, nextCacheInstance); + pushCacheProvider(workInProgress, nextCacheInstance); } resetHydrationState(); break; @@ -3396,7 +3396,7 @@ function beginWork( if (enableCache) { const ownCacheInstance: CacheInstance | null = current.stateNode; if (ownCacheInstance !== null) { - pushProvider(workInProgress, CacheContext, ownCacheInstance); + pushCacheProvider(workInProgress, ownCacheInstance); } workInProgress.stateNode = ownCacheInstance; } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js index 
ddcd741d487d3..9ddd194a32c76 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -11,6 +11,8 @@ import type {ReactContext} from 'shared/ReactTypes'; import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; +import {pushProvider, popProvider} from './ReactFiberNewContext.new'; + export type Cache = Map<() => mixed, mixed>; export type CacheInstance = {| @@ -34,3 +36,19 @@ if (__DEV__) { CacheContext._currentRenderer = null; CacheContext._currentRenderer2 = null; } + +export function pushCacheProvider( + workInProgress: Fiber, + cacheInstance: CacheInstance, +) { + pushProvider(workInProgress, CacheContext, cacheInstance); +} + +export function popCacheProvider( + workInProgress: Fiber, + // We don't actually use the cache instance object, but you're not supposed to + // call this function unless it exists. + cacheInstance: CacheInstance, +) { + popProvider(CacheContext, workInProgress); +} diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 28fb7b39e2e07..2818e1ecfb881 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -157,7 +157,7 @@ import { import {resetChildFibers} from './ReactChildFiber.new'; import {createScopeInstance} from './ReactFiberScope.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; -import {CacheContext} from './ReactFiberCacheComponent'; +import {popCacheProvider} from './ReactFiberCacheComponent'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. 
This turns a Placement into @@ -814,7 +814,9 @@ function completeWork( } case HostRoot: { if (enableCache) { - popProvider(CacheContext, workInProgress); + const cacheInstance: CacheInstance = + workInProgress.memoizedState.cacheInstance; + popCacheProvider(workInProgress, cacheInstance); } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); @@ -1495,7 +1497,7 @@ function completeWork( const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; if (ownCacheInstance !== null) { // This is a cache provider. - popProvider(CacheContext, workInProgress); + popCacheProvider(workInProgress, ownCacheInstance); } bubbleProperties(workInProgress); return null; diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index a76d5562e65f5..d49b263b6612f 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -157,7 +157,7 @@ import { import {resetChildFibers} from './ReactChildFiber.old'; import {createScopeInstance} from './ReactFiberScope.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; -import {CacheContext} from './ReactFiberCacheComponent'; +import {popCacheProvider} from './ReactFiberCacheComponent'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. This turns a Placement into @@ -814,7 +814,9 @@ function completeWork( } case HostRoot: { if (enableCache) { - popProvider(CacheContext, workInProgress); + const cacheInstance: CacheInstance = + workInProgress.memoizedState.cacheInstance; + popCacheProvider(workInProgress, cacheInstance); } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); @@ -1495,7 +1497,7 @@ function completeWork( const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; if (ownCacheInstance !== null) { // This is a cache provider. 
- popProvider(CacheContext, workInProgress); + popCacheProvider(workInProgress, ownCacheInstance); } bubbleProperties(workInProgress); return null; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index 8ed6329288e57..018c5dd10ba5e 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -44,8 +44,8 @@ import { } from './ReactFiberContext.new'; import {popProvider} from './ReactFiberNewContext.new'; import {popRenderLanes} from './ReactFiberWorkLoop.new'; +import {popCacheProvider} from './ReactFiberCacheComponent'; import {transferActualDuration} from './ReactProfilerTimer.new'; -import {CacheContext} from './ReactFiberCacheComponent'; import invariant from 'shared/invariant'; @@ -71,7 +71,9 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { } case HostRoot: { if (enableCache) { - popProvider(CacheContext, workInProgress); + const cacheInstance: CacheInstance = + workInProgress.memoizedState.cacheInstance; + popCacheProvider(workInProgress, cacheInstance); } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); @@ -139,7 +141,7 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { if (enableCache) { const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; if (ownCacheInstance !== null) { - popProvider(CacheContext, workInProgress); + popCacheProvider(workInProgress, ownCacheInstance); } } return null; @@ -159,7 +161,9 @@ function unwindInterruptedWork(interruptedWork: Fiber) { } case HostRoot: { if (enableCache) { - popProvider(CacheContext, interruptedWork); + const cacheInstance: CacheInstance = + interruptedWork.memoizedState.cacheInstance; + popCacheProvider(interruptedWork, cacheInstance); } popHostContainer(interruptedWork); popTopLevelLegacyContextObject(interruptedWork); @@ -192,7 +196,7 @@ function 
unwindInterruptedWork(interruptedWork: Fiber) { const ownCacheInstance: CacheInstance | null = interruptedWork.stateNode; if (ownCacheInstance !== null) { - popProvider(CacheContext, interruptedWork); + popCacheProvider(interruptedWork, ownCacheInstance); } } break; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 83727fe4713c1..3ff3727e4fa3b 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -44,8 +44,8 @@ import { } from './ReactFiberContext.old'; import {popProvider} from './ReactFiberNewContext.old'; import {popRenderLanes} from './ReactFiberWorkLoop.old'; +import {popCacheProvider} from './ReactFiberCacheComponent'; import {transferActualDuration} from './ReactProfilerTimer.old'; -import {CacheContext} from './ReactFiberCacheComponent'; import invariant from 'shared/invariant'; @@ -71,7 +71,9 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { } case HostRoot: { if (enableCache) { - popProvider(CacheContext, workInProgress); + const cacheInstance: CacheInstance = + workInProgress.memoizedState.cacheInstance; + popCacheProvider(workInProgress, cacheInstance); } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); @@ -139,7 +141,7 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { if (enableCache) { const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; if (ownCacheInstance !== null) { - popProvider(CacheContext, workInProgress); + popCacheProvider(workInProgress, ownCacheInstance); } } return null; @@ -159,7 +161,9 @@ function unwindInterruptedWork(interruptedWork: Fiber) { } case HostRoot: { if (enableCache) { - popProvider(CacheContext, interruptedWork); + const cacheInstance: CacheInstance = + interruptedWork.memoizedState.cacheInstance; + popCacheProvider(interruptedWork, cacheInstance); } 
popHostContainer(interruptedWork); popTopLevelLegacyContextObject(interruptedWork); @@ -192,7 +196,7 @@ function unwindInterruptedWork(interruptedWork: Fiber) { const ownCacheInstance: CacheInstance | null = interruptedWork.stateNode; if (ownCacheInstance !== null) { - popProvider(CacheContext, interruptedWork); + popCacheProvider(interruptedWork, ownCacheInstance); } } break; From 23d46bf9ac2e4f8ea2ca939f66fbbe61195ea31e Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Tue, 15 Dec 2020 17:23:38 -0600 Subject: [PATCH 14/30] Add warnings if cache context is in invalid state A parent cache refresh always overrides any nested cache. So there will only ever be a single fresh cache on the context stack. We can use this knowledge to detect stack mismatch bugs. --- .../src/ReactFiberBeginWork.new.js | 20 ++++++---- .../src/ReactFiberBeginWork.old.js | 20 ++++++---- .../src/ReactFiberCacheComponent.js | 39 +++++++++++++++++-- 3 files changed, 62 insertions(+), 17 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 9068fc1d30ea2..43dd53de8e0ce 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -209,7 +209,11 @@ import { } from './ReactFiberWorkLoop.new'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.new'; -import {CacheContext, pushCacheProvider} from './ReactFiberCacheComponent'; +import { + CacheContext, + pushFreshCacheProvider, + pushStaleCacheProvider, +} from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -724,7 +728,7 @@ function updateCacheComponent( initialState = { cache: freshCache, }; - pushCacheProvider(workInProgress, cacheInstance); + pushFreshCacheProvider(workInProgress, cacheInstance); // No need to propagate the refresh, because this 
is a new tree. } else { // Use the parent cache @@ -754,7 +758,7 @@ function updateCacheComponent( cache: nextCache, provider: workInProgress, }; - pushCacheProvider(workInProgress, cacheInstance); + pushFreshCacheProvider(workInProgress, cacheInstance); // Refreshes propagate through the entire subtree. The refreshed cache // will override nested caches. propagateCacheRefresh(workInProgress, renderLanes); @@ -774,7 +778,7 @@ function updateCacheComponent( cacheInstance = current.stateNode; if (cacheInstance !== null) { // There was no refresh, so no need to propagate to nested boundaries. - pushCacheProvider(workInProgress, cacheInstance); + pushStaleCacheProvider(workInProgress, cacheInstance); } } } @@ -1143,9 +1147,11 @@ function updateHostRoot(current, workInProgress, renderLanes) { if (enableCache) { const nextCacheInstance: CacheInstance = nextState.cacheInstance; - pushCacheProvider(workInProgress, nextCacheInstance); if (nextCacheInstance !== prevState.cacheInstance) { + pushFreshCacheProvider(workInProgress, nextCacheInstance); propagateCacheRefresh(workInProgress, renderLanes); + } else { + pushStaleCacheProvider(workInProgress, nextCacheInstance); } } @@ -3223,7 +3229,7 @@ function beginWork( if (enableCache) { const nextCacheInstance: CacheInstance = current.memoizedState.cacheInstance; - pushCacheProvider(workInProgress, nextCacheInstance); + pushStaleCacheProvider(workInProgress, nextCacheInstance); } resetHydrationState(); break; @@ -3396,7 +3402,7 @@ function beginWork( if (enableCache) { const ownCacheInstance: CacheInstance | null = current.stateNode; if (ownCacheInstance !== null) { - pushCacheProvider(workInProgress, ownCacheInstance); + pushStaleCacheProvider(workInProgress, ownCacheInstance); } workInProgress.stateNode = ownCacheInstance; } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index f1527798ff1b4..c69a7287a0680 100644 --- 
a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -209,7 +209,11 @@ import { } from './ReactFiberWorkLoop.old'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.old'; -import {CacheContext, pushCacheProvider} from './ReactFiberCacheComponent'; +import { + CacheContext, + pushFreshCacheProvider, + pushStaleCacheProvider, +} from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -724,7 +728,7 @@ function updateCacheComponent( initialState = { cache: freshCache, }; - pushCacheProvider(workInProgress, cacheInstance); + pushFreshCacheProvider(workInProgress, cacheInstance); // No need to propagate the refresh, because this is a new tree. } else { // Use the parent cache @@ -754,7 +758,7 @@ function updateCacheComponent( cache: nextCache, provider: workInProgress, }; - pushCacheProvider(workInProgress, cacheInstance); + pushFreshCacheProvider(workInProgress, cacheInstance); // Refreshes propagate through the entire subtree. The refreshed cache // will override nested caches. propagateCacheRefresh(workInProgress, renderLanes); @@ -774,7 +778,7 @@ function updateCacheComponent( cacheInstance = current.stateNode; if (cacheInstance !== null) { // There was no refresh, so no need to propagate to nested boundaries. 
- pushCacheProvider(workInProgress, cacheInstance); + pushStaleCacheProvider(workInProgress, cacheInstance); } } } @@ -1143,9 +1147,11 @@ function updateHostRoot(current, workInProgress, renderLanes) { if (enableCache) { const nextCacheInstance: CacheInstance = nextState.cacheInstance; - pushCacheProvider(workInProgress, nextCacheInstance); if (nextCacheInstance !== prevState.cacheInstance) { + pushFreshCacheProvider(workInProgress, nextCacheInstance); propagateCacheRefresh(workInProgress, renderLanes); + } else { + pushStaleCacheProvider(workInProgress, nextCacheInstance); } } @@ -3223,7 +3229,7 @@ function beginWork( if (enableCache) { const nextCacheInstance: CacheInstance = current.memoizedState.cacheInstance; - pushCacheProvider(workInProgress, nextCacheInstance); + pushStaleCacheProvider(workInProgress, nextCacheInstance); } resetHydrationState(); break; @@ -3396,7 +3402,7 @@ function beginWork( if (enableCache) { const ownCacheInstance: CacheInstance | null = current.stateNode; if (ownCacheInstance !== null) { - pushCacheProvider(workInProgress, ownCacheInstance); + pushStaleCacheProvider(workInProgress, ownCacheInstance); } workInProgress.stateNode = ownCacheInstance; } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js index 9ddd194a32c76..dbe0af827b83c 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -37,18 +37,51 @@ if (__DEV__) { CacheContext._currentRenderer2 = null; } -export function pushCacheProvider( +// A parent cache refresh always overrides any nested cache. So there will only +// ever be a single fresh cache on the context stack. +// TODO: Use this to detect parent refreshes. 
+let freshCacheInstance: CacheInstance | null = null; + +export function pushStaleCacheProvider( + workInProgress: Fiber, + cacheInstance: CacheInstance, +) { + if (__DEV__) { + if (freshCacheInstance !== null) { + console.error( + 'Already inside a fresh cache boundary. This is a bug in React.', + ); + } + } + pushProvider(workInProgress, CacheContext, cacheInstance); +} + +export function pushFreshCacheProvider( workInProgress: Fiber, cacheInstance: CacheInstance, ) { + if (__DEV__) { + if (freshCacheInstance !== null) { + console.error( + 'Already inside a fresh cache boundary. This is a bug in React.', + ); + } + } + freshCacheInstance = cacheInstance; pushProvider(workInProgress, CacheContext, cacheInstance); } export function popCacheProvider( workInProgress: Fiber, - // We don't actually use the cache instance object, but you're not supposed to - // call this function unless it exists. cacheInstance: CacheInstance, ) { + if (__DEV__) { + if (freshCacheInstance !== null && freshCacheInstance !== cacheInstance) { + console.error( + 'Unexpected cache instance on context. This is a bug in React.', + ); + } + } + freshCacheInstance = null; popProvider(CacheContext, workInProgress); } From 0d605b1b98f4bb498918a5e6c66a81e1e60778c5 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Tue, 15 Dec 2020 17:48:04 -0600 Subject: [PATCH 15/30] Explicitly check if the parent provider is fresh We can track this cheaply because there's only ever a single fresh provider. We don't need to store the fresh caches on the stack, just one if it exists. Then when we unwind the Fiber stack, we reset it. Bonus: this replaces the fast path I added to detect fresh roots, too. 
--- .../src/ReactFiberBeginWork.new.js | 37 ++++++++----------- .../src/ReactFiberBeginWork.old.js | 37 ++++++++----------- .../src/ReactFiberCacheComponent.js | 14 ++++++- 3 files changed, 42 insertions(+), 46 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 43dd53de8e0ce..583a15c316fd2 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -213,6 +213,7 @@ import { CacheContext, pushFreshCacheProvider, pushStaleCacheProvider, + hasFreshCacheProvider, } from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -680,25 +681,8 @@ function updateCacheComponent( let cacheInstance: CacheInstance | null = null; if (current === null) { let initialState; - const providerFiber = parentCacheInstance.provider; - if ( - // If the provider fiber does not have an alternate, it must be a mount. - providerFiber.alternate === null || - // Host roots are never not mounted. Even during the initial render. So we - // use a trick. Check if `memoizedState.element` is null. We also check - // the alternate. The work-in-progress fiber's `element` will never be - // null because we're inside a work-in-progress tree. So if either fiber's - // element is null, that fiber must be the current one, which most likely - // means it's the initial mount. - // - // (I say "most likely" because, technically, you could pass `null` to - // `root.render()`. But, meh, good enough.) - (providerFiber.tag === HostRoot && - (providerFiber.memoizedState.element === null || - (providerFiber.alternate !== null && - providerFiber.alternate.memoizedState.element === null))) - ) { - // Fast path. The parent Cache boundary is also a new mount. We can + if (hasFreshCacheProvider()) { + // Fast path. The parent Cache is either a new mount or a refresh. We can // inherit its cache. 
cacheInstance = null; initialState = { @@ -743,7 +727,11 @@ function updateCacheComponent( initializeUpdateQueue(workInProgress); } else { // This component already mounted. - if (includesSomeLane(renderLanes, updateLanes)) { + if (hasFreshCacheProvider()) { + // Fast path. The parent Cache is either a new mount or a refresh. We can + // inherit its cache. + cacheInstance = null; + } else if (includesSomeLane(renderLanes, updateLanes)) { // An refresh was scheduled. If it was an refresh on this fiber, then we // will have an update in the queue. Otherwise, it must have been an // update on a parent, propagated via context. @@ -1138,7 +1126,7 @@ function updateHostRoot(current, workInProgress, renderLanes) { ); const nextProps = workInProgress.pendingProps; const prevState = workInProgress.memoizedState; - const prevChildren = prevState !== null ? prevState.element : null; + const prevChildren = prevState.element; cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, nextProps, null, renderLanes); const nextState = workInProgress.memoizedState; @@ -1151,7 +1139,12 @@ function updateHostRoot(current, workInProgress, renderLanes) { pushFreshCacheProvider(workInProgress, nextCacheInstance); propagateCacheRefresh(workInProgress, renderLanes); } else { - pushStaleCacheProvider(workInProgress, nextCacheInstance); + if (prevChildren === null) { + // If there are no children, this must be the initial render. 
+ pushFreshCacheProvider(workInProgress, nextCacheInstance); + } else { + pushStaleCacheProvider(workInProgress, nextCacheInstance); + } } } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index c69a7287a0680..f5142de58c7c9 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -213,6 +213,7 @@ import { CacheContext, pushFreshCacheProvider, pushStaleCacheProvider, + hasFreshCacheProvider, } from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -680,25 +681,8 @@ function updateCacheComponent( let cacheInstance: CacheInstance | null = null; if (current === null) { let initialState; - const providerFiber = parentCacheInstance.provider; - if ( - // If the provider fiber does not have an alternate, it must be a mount. - providerFiber.alternate === null || - // Host roots are never not mounted. Even during the initial render. So we - // use a trick. Check if `memoizedState.element` is null. We also check - // the alternate. The work-in-progress fiber's `element` will never be - // null because we're inside a work-in-progress tree. So if either fiber's - // element is null, that fiber must be the current one, which most likely - // means it's the initial mount. - // - // (I say "most likely" because, technically, you could pass `null` to - // `root.render()`. But, meh, good enough.) - (providerFiber.tag === HostRoot && - (providerFiber.memoizedState.element === null || - (providerFiber.alternate !== null && - providerFiber.alternate.memoizedState.element === null))) - ) { - // Fast path. The parent Cache boundary is also a new mount. We can + if (hasFreshCacheProvider()) { + // Fast path. The parent Cache is either a new mount or a refresh. We can // inherit its cache. 
cacheInstance = null; initialState = { @@ -743,7 +727,11 @@ function updateCacheComponent( initializeUpdateQueue(workInProgress); } else { // This component already mounted. - if (includesSomeLane(renderLanes, updateLanes)) { + if (hasFreshCacheProvider()) { + // Fast path. The parent Cache is either a new mount or a refresh. We can + // inherit its cache. + cacheInstance = null; + } else if (includesSomeLane(renderLanes, updateLanes)) { // An refresh was scheduled. If it was an refresh on this fiber, then we // will have an update in the queue. Otherwise, it must have been an // update on a parent, propagated via context. @@ -1138,7 +1126,7 @@ function updateHostRoot(current, workInProgress, renderLanes) { ); const nextProps = workInProgress.pendingProps; const prevState = workInProgress.memoizedState; - const prevChildren = prevState !== null ? prevState.element : null; + const prevChildren = prevState.element; cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, nextProps, null, renderLanes); const nextState = workInProgress.memoizedState; @@ -1151,7 +1139,12 @@ function updateHostRoot(current, workInProgress, renderLanes) { pushFreshCacheProvider(workInProgress, nextCacheInstance); propagateCacheRefresh(workInProgress, renderLanes); } else { - pushStaleCacheProvider(workInProgress, nextCacheInstance); + if (prevChildren === null) { + // If there are no children, this must be the initial render. 
+ pushFreshCacheProvider(workInProgress, nextCacheInstance); + } else { + pushStaleCacheProvider(workInProgress, nextCacheInstance); + } } } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js index dbe0af827b83c..185fad7cd5d57 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -10,6 +10,7 @@ import type {ReactContext} from 'shared/ReactTypes'; import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; +import {HostRoot} from './ReactWorkTags'; import {pushProvider, popProvider} from './ReactFiberNewContext.new'; @@ -39,7 +40,6 @@ if (__DEV__) { // A parent cache refresh always overrides any nested cache. So there will only // ever be a single fresh cache on the context stack. -// TODO: Use this to detect parent refreshes. let freshCacheInstance: CacheInstance | null = null; export function pushStaleCacheProvider( @@ -61,7 +61,13 @@ export function pushFreshCacheProvider( cacheInstance: CacheInstance, ) { if (__DEV__) { - if (freshCacheInstance !== null) { + if ( + freshCacheInstance !== null && + // TODO: Remove this exception for roots. There are a few tests that throw + // in pushHostContainer, before the cache context is pushed. Not a huge + // issue, but should still fix. + workInProgress.tag !== HostRoot + ) { console.error( 'Already inside a fresh cache boundary. 
This is a bug in React.', ); @@ -85,3 +91,7 @@ export function popCacheProvider( freshCacheInstance = null; popProvider(CacheContext, workInProgress); } + +export function hasFreshCacheProvider() { + return freshCacheInstance !== null; +} From 65458e4bc09195c86e5b310a956b4cd020f28784 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Wed, 16 Dec 2020 03:00:53 -0600 Subject: [PATCH 16/30] Restore retry cache from Suspense/Offscreen fiber MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When a Suspense or Offscreen boundary resumes rendering, the inner tree should use the same cache that the outer one did during the original render. This is important not just for UI consistency reasons, but because dropping the original cache means dropping all the in- flight requests. This is arguably an edge case, because it only applies to the first Cache boundary in the new tree is not part of its "shell" — that is, if it's inside the first Suspense boundary, and isn't committed in the first render. But we should still try to get it right. Previously I was using an array on the root (the one we use for tracking caches that aren't yet rooted to the tree) but with that approach you quickly run out of lanes. The new approach is to store the cache on the Offscreen fiber. Suspense uses an Offscreen fiber internally to wrap its children, so the code is almost entirely shared. A neat property is that we only have to store a single cache per fallback/hidden tree. I had previously expected that we'd need to store a cache per retry lane per tree. But, when unhiding a hidden tree, the retry lanes must be entangled — that was the discovery we made when fixing the "flickering" bug earlier in the year. So we actually only need a single retry cache per hidden Suspense/Offscreen boundary. Even setting aside entanglement, the only reason you'd have multiple is if there were multiple parent refreshes, in which case the last one should win regardless. 
The important thing is that each separate tree can have their own, which this approach achieves. --- .../src/ReactFiberBeginWork.new.js | 170 +++++++++++++----- .../src/ReactFiberBeginWork.old.js | 170 +++++++++++++----- .../src/ReactFiberCacheComponent.js | 4 + .../src/ReactFiberCommitWork.new.js | 27 +-- .../src/ReactFiberCommitWork.old.js | 29 +-- .../src/ReactFiberCompleteWork.new.js | 5 + .../src/ReactFiberCompleteWork.old.js | 5 + .../src/ReactFiberHooks.new.js | 18 +- .../src/ReactFiberHooks.old.js | 18 +- .../src/ReactFiberLane.new.js | 131 ++++---------- .../src/ReactFiberLane.old.js | 131 ++++---------- .../src/ReactFiberOffscreenComponent.js | 2 + .../src/ReactFiberThrow.new.js | 15 +- .../src/ReactFiberThrow.old.js | 15 +- .../src/ReactFiberUnwindWork.new.js | 8 + .../src/ReactFiberUnwindWork.old.js | 10 +- .../src/ReactFiberWorkLoop.new.js | 47 +---- .../src/ReactFiberWorkLoop.old.js | 47 +---- .../src/ReactInternalTypes.js | 6 +- 19 files changed, 424 insertions(+), 434 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 583a15c316fd2..bc041133a9e0b 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -23,7 +23,7 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; -import type {CacheInstance, Cache} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import type {UpdateQueue} from './ReactUpdateQueue.new'; import checkPropTypes from 'shared/checkPropTypes'; @@ -119,7 +119,7 @@ import { removeLanes, mergeLanes, getBumpedLaneForHydration, - requestFreshCache, + requestCacheFromPool, } from './ReactFiberLane.new'; import { ConcurrentMode, @@ -134,7 +134,6 @@ import { isSuspenseInstanceFallback, registerSuspenseInstanceRetry, supportsHydration, - isPrimaryRenderer, } from 
'./ReactFiberHostConfig'; import type {SuspenseInstance} from './ReactFiberHostConfig'; import {shouldSuspend} from './ReactFiberReconciler'; @@ -201,7 +200,6 @@ import { renderDidSuspendDelayIfPossible, markSkippedUpdateLanes, getWorkInProgressRoot, - getRootRenderLanes, pushRenderLanes, getExecutionContext, RetryAfterError, @@ -210,10 +208,10 @@ import { import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.new'; import { - CacheContext, pushFreshCacheProvider, pushStaleCacheProvider, hasFreshCacheProvider, + getFreshCacheProviderIfExists, } from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -590,23 +588,43 @@ function updateOffscreenComponent( const prevState: OffscreenState | null = current !== null ? current.memoizedState : null; + // If this is not null, this is a cache instance that was carried over from + // the previous render. We will push this to the cache context so that we can + // resume in-flight requests. However, we don't do this if there's already a + // fresh cache provider on the stack. + let cacheInstance: CacheInstance | null = null; + if ( nextProps.mode === 'hidden' || nextProps.mode === 'unstable-defer-without-hiding' ) { + // Rendering a hidden tree. if ((workInProgress.mode & ConcurrentMode) === NoMode) { // In legacy sync mode, don't defer the subtree. Render it now. // TODO: Figure out what we should do in Blocking mode. const nextState: OffscreenState = { baseLanes: NoLanes, + cache: null, }; workInProgress.memoizedState = nextState; pushRenderLanes(workInProgress, renderLanes); } else if (!includesSomeLane(renderLanes, (OffscreenLane: Lane))) { + // We're hidden, and we're not rendering at Offscreen. We will bail out + // and resume this tree later. 
let nextBaseLanes; if (prevState !== null) { const prevBaseLanes = prevState.baseLanes; nextBaseLanes = mergeLanes(prevBaseLanes, renderLanes); + + // Keep a reference to the in-flight cache so we can resume later. If + // there's no fresh cache on the stack, there might be one from a + // previous render. If so, reuse it. + cacheInstance = hasFreshCacheProvider() + ? getFreshCacheProviderIfExists() + : prevState.cache; + // We don't need to push to the cache context because we're about to + // bail out. There won't be a context mismatch because we only pop + // the cache context if `updateQueue` is non-null. } else { nextBaseLanes = renderLanes; } @@ -620,16 +638,34 @@ function updateOffscreenComponent( ); const nextState: OffscreenState = { baseLanes: nextBaseLanes, + cache: cacheInstance, }; workInProgress.memoizedState = nextState; + workInProgress.updateQueue = null; // We're about to bail out, but we need to push this to the stack anyway // to avoid a push/pop misalignment. pushRenderLanes(workInProgress, nextBaseLanes); return null; } else { + // This is the second render. The surrounding visible content has already + // committed. Now we resume rendering the hidden tree. + + if (!hasFreshCacheProvider() && prevState !== null) { + // If there was a fresh cache during the render that spawned this one, + // resume using it. + const prevCacheInstance = prevState.cache; + if (prevCacheInstance !== null) { + cacheInstance = prevCacheInstance; + pushFreshCacheProvider(workInProgress, prevCacheInstance); + // This isn't a refresh, it's a continuation of a previous render. + // So we don't need to propagate a context change. + } + } + // Rendering at offscreen, so we can clear the base lanes. const nextState: OffscreenState = { baseLanes: NoLanes, + cache: null, }; workInProgress.memoizedState = nextState; // Push the lanes that were skipped when we bailed out. 
@@ -638,9 +674,25 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } } else { + // Rendering a visible tree. let subtreeRenderLanes; if (prevState !== null) { + // We're going from hidden -> visible. + subtreeRenderLanes = mergeLanes(prevState.baseLanes, renderLanes); + + if (!hasFreshCacheProvider()) { + // If there was a fresh cache during the render that spawned this one, + // resume using it. + const prevCacheInstance = prevState.cache; + if (prevCacheInstance !== null) { + cacheInstance = prevCacheInstance; + pushFreshCacheProvider(workInProgress, prevCacheInstance); + // This isn't a refresh, it's a continuation of a previous render. + // So we don't need to propagate a context change. + } + } + // Since we're not hidden anymore, reset the state workInProgress.memoizedState = null; } else { @@ -652,6 +704,11 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } + // If we have a cache instance from a previous render attempt, then this will + // be non-null. We can use this to infer whether to push/pop the + // cache context. + workInProgress.updateQueue = cacheInstance; + reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; } @@ -671,13 +728,6 @@ function updateCacheComponent( return null; } - // Read directly from the context. We don't set up a context dependency - // because the propagation function automatically includes CacheComponents in - // its search. - const parentCacheInstance: CacheInstance = isPrimaryRenderer - ? CacheContext._currentValue - : CacheContext._currentValue2; - let cacheInstance: CacheInstance | null = null; if (current === null) { let initialState; @@ -696,31 +746,14 @@ function updateCacheComponent( 'Expected a work-in-progress root. This is a bug in React. 
Please ' + 'file an issue.', ); - const freshCache: Cache | null = requestFreshCache( - root, - getRootRenderLanes(), - renderLanes, - ); - // This may be the same as the parent cache, like if the current render - // spawned from a previous render that already committed. Otherwise, this - // is the root of a cache consistency boundary. - if (freshCache !== null && freshCache !== parentCacheInstance.cache) { - cacheInstance = { - cache: freshCache, - provider: workInProgress, - }; - initialState = { - cache: freshCache, - }; - pushFreshCacheProvider(workInProgress, cacheInstance); - // No need to propagate the refresh, because this is a new tree. - } else { - // Use the parent cache - cacheInstance = null; - initialState = { - cache: null, - }; - } + // This will always be different from the parent cache; otherwise we would + // have detected a fresh cache provider in the earlier branch. + cacheInstance = requestCacheFromPool(root, workInProgress, renderLanes); + initialState = { + cacheInstance, + }; + pushFreshCacheProvider(workInProgress, cacheInstance); + // No need to propagate a refresh, because this is a new tree. } // Initialize an update queue. We use this for refreshes. workInProgress.memoizedState = initialState; @@ -732,20 +765,20 @@ function updateCacheComponent( // inherit its cache. cacheInstance = null; } else if (includesSomeLane(renderLanes, updateLanes)) { - // An refresh was scheduled. If it was an refresh on this fiber, then we + // A refresh was scheduled. If it was a refresh on this fiber, then we // will have an update in the queue. Otherwise, it must have been an // update on a parent, propagated via context. + + // First check the update queue. 
cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, null, null, renderLanes); - const prevCache: Cache | null = current.memoizedState.cache; - const nextCache: Cache | null = workInProgress.memoizedState.cache; - - if (nextCache !== prevCache && nextCache !== null) { + const prevCacheInstance: CacheInstance = + current.memoizedState.cacheInstance; + const nextCacheInstance: CacheInstance = + workInProgress.memoizedState.cacheInstance; + if (nextCacheInstance !== prevCacheInstance) { // Received a refresh. - cacheInstance = { - cache: nextCache, - provider: workInProgress, - }; + cacheInstance = nextCacheInstance; pushFreshCacheProvider(workInProgress, cacheInstance); // Refreshes propagate through the entire subtree. The refreshed cache // will override nested caches. @@ -1711,8 +1744,28 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { + // Keep a reference to the in-flight cache so we can resume later. + let cache = getFreshCacheProviderIfExists(); + if (cache === null) { + // If there's no cache on the stack, a nested Cache boundary may have + // spawned a new one. Check the cache pool. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + // If a nested cache accessed the pool during this render, it will be + // assigned to root.pooledCache. No need to check the lane-indexed pool. + // TODO: Actually I think I'm wrong and we do need to check the lane-indexed + // pool, to account for infinite transitions that are not triggered by a + // `refresh` call, since those won't put a fresh context on the stack. + // However, that's not idiomatic so this might be fine for now. 
+ cache = root.pooledCache; + } return { baseLanes: renderLanes, + cache, }; } @@ -1720,8 +1773,33 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { + // Keep a reference to the in-flight cache so we can resume later. + let cache = getFreshCacheProviderIfExists(); + if (cache === null) { + // If there's no cache on the stack, a nested Cache boundary may have + // spawned a new one. Check the cache pool. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + // If a nested cache accessed the pool during this render, it will be + // assigned to root.pooledCache. No need to check the lane-indexed pool. + // TODO: Actually I think I'm wrong and we do need to check the lane-indexed + // pool, to account for infinite transitions that are not triggered by a + // `refresh` call, since those won't put a fresh context on the stack. + // However, that's not idiomatic so this might be fine for now. + cache = root.pooledCache; + if (cache === null) { + // If there's no cache in the pool, there might be one from a previous + // render. If so, reuse it. 
+ cache = prevOffscreenState.cache; + } + } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), + cache, }; } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index f5142de58c7c9..dfea7f31c41a7 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -23,7 +23,7 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; -import type {CacheInstance, Cache} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; import type {UpdateQueue} from './ReactUpdateQueue.old'; import checkPropTypes from 'shared/checkPropTypes'; @@ -119,7 +119,7 @@ import { removeLanes, mergeLanes, getBumpedLaneForHydration, - requestFreshCache, + requestCacheFromPool, } from './ReactFiberLane.old'; import { ConcurrentMode, @@ -134,7 +134,6 @@ import { isSuspenseInstanceFallback, registerSuspenseInstanceRetry, supportsHydration, - isPrimaryRenderer, } from './ReactFiberHostConfig'; import type {SuspenseInstance} from './ReactFiberHostConfig'; import {shouldSuspend} from './ReactFiberReconciler'; @@ -201,7 +200,6 @@ import { renderDidSuspendDelayIfPossible, markSkippedUpdateLanes, getWorkInProgressRoot, - getRootRenderLanes, pushRenderLanes, getExecutionContext, RetryAfterError, @@ -210,10 +208,10 @@ import { import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.old'; import { - CacheContext, pushFreshCacheProvider, pushStaleCacheProvider, hasFreshCacheProvider, + getFreshCacheProviderIfExists, } from './ReactFiberCacheComponent'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -590,23 +588,43 @@ function updateOffscreenComponent( const prevState: OffscreenState | null = current !== null ? 
current.memoizedState : null; + // If this is not null, this is a cache instance that was carried over from + // the previous render. We will push this to the cache context so that we can + // resume in-flight requests. However, we don't do this if there's already a + // fresh cache provider on the stack. + let cacheInstance: CacheInstance | null = null; + if ( nextProps.mode === 'hidden' || nextProps.mode === 'unstable-defer-without-hiding' ) { + // Rendering a hidden tree. if ((workInProgress.mode & ConcurrentMode) === NoMode) { // In legacy sync mode, don't defer the subtree. Render it now. // TODO: Figure out what we should do in Blocking mode. const nextState: OffscreenState = { baseLanes: NoLanes, + cache: null, }; workInProgress.memoizedState = nextState; pushRenderLanes(workInProgress, renderLanes); } else if (!includesSomeLane(renderLanes, (OffscreenLane: Lane))) { + // We're hidden, and we're not rendering at Offscreen. We will bail out + // and resume this tree later. let nextBaseLanes; if (prevState !== null) { const prevBaseLanes = prevState.baseLanes; nextBaseLanes = mergeLanes(prevBaseLanes, renderLanes); + + // Keep a reference to the in-flight cache so we can resume later. If + // there's no fresh cache on the stack, there might be one from a + // previous render. If so, reuse it. + cacheInstance = hasFreshCacheProvider() + ? getFreshCacheProviderIfExists() + : prevState.cache; + // We don't need to push to the cache context because we're about to + // bail out. There won't be a context mismatch because we only pop + // the cache context if `updateQueue` is non-null. } else { nextBaseLanes = renderLanes; } @@ -620,16 +638,34 @@ function updateOffscreenComponent( ); const nextState: OffscreenState = { baseLanes: nextBaseLanes, + cache: cacheInstance, }; workInProgress.memoizedState = nextState; + workInProgress.updateQueue = null; // We're about to bail out, but we need to push this to the stack anyway // to avoid a push/pop misalignment. 
pushRenderLanes(workInProgress, nextBaseLanes); return null; } else { + // This is the second render. The surrounding visible content has already + // committed. Now we resume rendering the hidden tree. + + if (!hasFreshCacheProvider() && prevState !== null) { + // If there was a fresh cache during the render that spawned this one, + // resume using it. + const prevCacheInstance = prevState.cache; + if (prevCacheInstance !== null) { + cacheInstance = prevCacheInstance; + pushFreshCacheProvider(workInProgress, prevCacheInstance); + // This isn't a refresh, it's a continuation of a previous render. + // So we don't need to propagate a context change. + } + } + // Rendering at offscreen, so we can clear the base lanes. const nextState: OffscreenState = { baseLanes: NoLanes, + cache: null, }; workInProgress.memoizedState = nextState; // Push the lanes that were skipped when we bailed out. @@ -638,9 +674,25 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } } else { + // Rendering a visible tree. let subtreeRenderLanes; if (prevState !== null) { + // We're going from hidden -> visible. + subtreeRenderLanes = mergeLanes(prevState.baseLanes, renderLanes); + + if (!hasFreshCacheProvider()) { + // If there was a fresh cache during the render that spawned this one, + // resume using it. + const prevCacheInstance = prevState.cache; + if (prevCacheInstance !== null) { + cacheInstance = prevCacheInstance; + pushFreshCacheProvider(workInProgress, prevCacheInstance); + // This isn't a refresh, it's a continuation of a previous render. + // So we don't need to propagate a context change. + } + } + // Since we're not hidden anymore, reset the state workInProgress.memoizedState = null; } else { @@ -652,6 +704,11 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } + // If we have a cache instance from a previous render attempt, then this will + // be non-null. 
We can use this to infer whether to push/pop the + // cache context. + workInProgress.updateQueue = cacheInstance; + reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; } @@ -671,13 +728,6 @@ function updateCacheComponent( return null; } - // Read directly from the context. We don't set up a context dependency - // because the propagation function automatically includes CacheComponents in - // its search. - const parentCacheInstance: CacheInstance = isPrimaryRenderer - ? CacheContext._currentValue - : CacheContext._currentValue2; - let cacheInstance: CacheInstance | null = null; if (current === null) { let initialState; @@ -696,31 +746,14 @@ function updateCacheComponent( 'Expected a work-in-progress root. This is a bug in React. Please ' + 'file an issue.', ); - const freshCache: Cache | null = requestFreshCache( - root, - getRootRenderLanes(), - renderLanes, - ); - // This may be the same as the parent cache, like if the current render - // spawned from a previous render that already committed. Otherwise, this - // is the root of a cache consistency boundary. - if (freshCache !== null && freshCache !== parentCacheInstance.cache) { - cacheInstance = { - cache: freshCache, - provider: workInProgress, - }; - initialState = { - cache: freshCache, - }; - pushFreshCacheProvider(workInProgress, cacheInstance); - // No need to propagate the refresh, because this is a new tree. - } else { - // Use the parent cache - cacheInstance = null; - initialState = { - cache: null, - }; - } + // This will always be different from the parent cache; otherwise we would + // have detected a fresh cache provider in the earlier branch. + cacheInstance = requestCacheFromPool(root, workInProgress, renderLanes); + initialState = { + cacheInstance, + }; + pushFreshCacheProvider(workInProgress, cacheInstance); + // No need to propagate a refresh, because this is a new tree. } // Initialize an update queue. We use this for refreshes. 
workInProgress.memoizedState = initialState; @@ -732,20 +765,20 @@ function updateCacheComponent( // inherit its cache. cacheInstance = null; } else if (includesSomeLane(renderLanes, updateLanes)) { - // An refresh was scheduled. If it was an refresh on this fiber, then we + // A refresh was scheduled. If it was a refresh on this fiber, then we // will have an update in the queue. Otherwise, it must have been an // update on a parent, propagated via context. + + // First check the update queue. cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, null, null, renderLanes); - const prevCache: Cache | null = current.memoizedState.cache; - const nextCache: Cache | null = workInProgress.memoizedState.cache; - - if (nextCache !== prevCache && nextCache !== null) { + const prevCacheInstance: CacheInstance = + current.memoizedState.cacheInstance; + const nextCacheInstance: CacheInstance = + workInProgress.memoizedState.cacheInstance; + if (nextCacheInstance !== prevCacheInstance) { // Received a refresh. - cacheInstance = { - cache: nextCache, - provider: workInProgress, - }; + cacheInstance = nextCacheInstance; pushFreshCacheProvider(workInProgress, cacheInstance); // Refreshes propagate through the entire subtree. The refreshed cache // will override nested caches. @@ -1711,8 +1744,28 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { + // Keep a reference to the in-flight cache so we can resume later. + let cache = getFreshCacheProviderIfExists(); + if (cache === null) { + // If there's no cache on the stack, a nested Cache boundary may have + // spawned a new one. Check the cache pool. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + // If a nested cache accessed the pool during this render, it will be + // assigned to root.pooledCache. 
No need to check the lane-indexed pool. + // TODO: Actually I think I'm wrong and we do need to check the lane-indexed + // pool, to account for infinite transitions that are not triggered by a + // `refresh` call, since those won't put a fresh context on the stack. + // However, that's not idiomatic so this might be fine for now. + cache = root.pooledCache; + } return { baseLanes: renderLanes, + cache, }; } @@ -1720,8 +1773,33 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { + // Keep a reference to the in-flight cache so we can resume later. + let cache = getFreshCacheProviderIfExists(); + if (cache === null) { + // If there's no cache on the stack, a nested Cache boundary may have + // spawned a new one. Check the cache pool. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + // If a nested cache accessed the pool during this render, it will be + // assigned to root.pooledCache. No need to check the lane-indexed pool. + // TODO: Actually I think I'm wrong and we do need to check the lane-indexed + // pool, to account for infinite transitions that are not triggered by a + // `refresh` call, since those won't put a fresh context on the stack. + // However, that's not idiomatic so this might be fine for now. + cache = root.pooledCache; + if (cache === null) { + // If there's no cache in the pool, there might be one from a previous + // render. If so, reuse it. 
+ cache = prevOffscreenState.cache; + } + } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), + cache, }; } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js index 185fad7cd5d57..d726cce5b0cc9 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -95,3 +95,7 @@ export function popCacheProvider( export function hasFreshCacheProvider() { return freshCacheInstance !== null; } + +export function getFreshCacheProviderIfExists(): CacheInstance | null { + return freshCacheInstance; +} diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.new.js b/packages/react-reconciler/src/ReactFiberCommitWork.new.js index d216edc4314db..316bf9a7dd238 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.new.js @@ -25,7 +25,6 @@ import type {Wakeable} from 'shared/ReactTypes'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; import type {HookFlags} from './ReactHookEffectTags'; -import type {Cache} from './ReactFiberCacheComponent'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import { @@ -1516,11 +1515,7 @@ function commitDeletion( } } -function commitWork( - current: Fiber | null, - finishedWork: Fiber, - cache: Cache | null, -): void { +function commitWork(current: Fiber | null, finishedWork: Fiber): void { if (!supportsMutation) { switch (finishedWork.tag) { case FunctionComponent: @@ -1556,11 +1551,11 @@ function commitWork( } case SuspenseComponent: { commitSuspenseComponent(finishedWork); - attachSuspenseRetryListeners(finishedWork, cache); + attachSuspenseRetryListeners(finishedWork); return; } case SuspenseListComponent: { - attachSuspenseRetryListeners(finishedWork, cache); + 
attachSuspenseRetryListeners(finishedWork); return; } case HostRoot: { @@ -1671,11 +1666,11 @@ function commitWork( } case SuspenseComponent: { commitSuspenseComponent(finishedWork); - attachSuspenseRetryListeners(finishedWork, cache); + attachSuspenseRetryListeners(finishedWork); return; } case SuspenseListComponent: { - attachSuspenseRetryListeners(finishedWork, cache); + attachSuspenseRetryListeners(finishedWork); return; } case IncompleteClassComponent: { @@ -1781,10 +1776,7 @@ function commitSuspenseHydrationCallbacks( } } -function attachSuspenseRetryListeners( - finishedWork: Fiber, - cache: Cache | null, -) { +function attachSuspenseRetryListeners(finishedWork: Fiber) { // If this boundary just timed out, then it will have a set of wakeables. // For each wakeable, attach a listener so that when it resolves, React // attempts to re-render the boundary in the primary (pre-timeout) state. @@ -1797,12 +1789,7 @@ function attachSuspenseRetryListeners( } wakeables.forEach(wakeable => { // Memoize using the boundary fiber to prevent redundant listeners. 
- let retry = resolveRetryWakeable.bind( - null, - finishedWork, - wakeable, - cache, - ); + let retry = resolveRetryWakeable.bind(null, finishedWork, wakeable); if (!retryCache.has(wakeable)) { if (enableSchedulerTracing) { if (wakeable.__reactDoNotTraceInteractions !== true) { diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.old.js b/packages/react-reconciler/src/ReactFiberCommitWork.old.js index 7f40c5e4ad49c..fd0a2e9d0e8b7 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.old.js @@ -24,8 +24,6 @@ import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.old'; import type {Wakeable} from 'shared/ReactTypes'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; -import type {HookFlags} from './ReactHookEffectTags'; -import type {Cache} from './ReactFiberCacheComponent'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import { @@ -37,7 +35,6 @@ import { enableFundamentalAPI, enableSuspenseCallback, enableScopeAPI, - enableCache, } from 'shared/ReactFeatureFlags'; import { FunctionComponent, @@ -1517,11 +1514,7 @@ function commitDeletion( } } -function commitWork( - current: Fiber | null, - finishedWork: Fiber, - cache: Cache | null, -): void { +function commitWork(current: Fiber | null, finishedWork: Fiber): void { if (!supportsMutation) { switch (finishedWork.tag) { case FunctionComponent: @@ -1557,11 +1550,11 @@ function commitWork( } case SuspenseComponent: { commitSuspenseComponent(finishedWork); - attachSuspenseRetryListeners(finishedWork, cache); + attachSuspenseRetryListeners(finishedWork); return; } case SuspenseListComponent: { - attachSuspenseRetryListeners(finishedWork, cache); + attachSuspenseRetryListeners(finishedWork); return; } case HostRoot: { @@ -1672,11 +1665,11 @@ function commitWork( } case SuspenseComponent: { 
commitSuspenseComponent(finishedWork); - attachSuspenseRetryListeners(finishedWork, cache); + attachSuspenseRetryListeners(finishedWork); return; } case SuspenseListComponent: { - attachSuspenseRetryListeners(finishedWork, cache); + attachSuspenseRetryListeners(finishedWork); return; } case IncompleteClassComponent: { @@ -1782,10 +1775,7 @@ function commitSuspenseHydrationCallbacks( } } -function attachSuspenseRetryListeners( - finishedWork: Fiber, - cache: Cache | null, -) { +function attachSuspenseRetryListeners(finishedWork: Fiber) { // If this boundary just timed out, then it will have a set of wakeables. // For each wakeable, attach a listener so that when it resolves, React // attempts to re-render the boundary in the primary (pre-timeout) state. @@ -1798,12 +1788,7 @@ function attachSuspenseRetryListeners( } wakeables.forEach(wakeable => { // Memoize using the boundary fiber to prevent redundant listeners. - let retry = resolveRetryWakeable.bind( - null, - finishedWork, - wakeable, - cache, - ); + let retry = resolveRetryWakeable.bind(null, finishedWork, wakeable); if (!retryCache.has(wakeable)) { if (enableSchedulerTracing) { if (wakeable.__reactDoNotTraceInteractions !== true) { diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 2818e1ecfb881..6f7c91fda3c72 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -1490,6 +1490,11 @@ function completeWork( bubbleProperties(workInProgress); } + const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(workInProgress, cacheInstance); + } + return null; } case CacheComponent: { diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index d49b263b6612f..f52190ca75998 100644 --- 
a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -1490,6 +1490,11 @@ function completeWork( bubbleProperties(workInProgress); } + const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(workInProgress, cacheInstance); + } + return null; } case CacheComponent: { diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index c59b23b733202..157999c1e32a9 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -31,7 +31,6 @@ import { enableUseRefAccessWarning, } from 'shared/ReactFeatureFlags'; -import {HostRoot} from './ReactWorkTags'; import {NoMode, BlockingMode, DebugTracingMode} from './ReactTypeOfMode'; import { NoLane, @@ -1753,19 +1752,12 @@ function refreshCache( // Schedule an update on the cache boundary to trigger a refresh. 
const refreshUpdate = createUpdate(eventTime, lane); - let payload; - if (provider.tag === HostRoot) { - payload = { - cacheInstance: { - provider: provider, - cache: seededCache, - }, - }; - } else { - payload = { + const payload = { + cacheInstance: { + provider: provider, cache: seededCache, - }; - } + }, + }; refreshUpdate.payload = payload; enqueueUpdate(provider, refreshUpdate); } finally { diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 38b2dccf598ff..c65a27cba9d70 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -31,7 +31,6 @@ import { enableUseRefAccessWarning, } from 'shared/ReactFeatureFlags'; -import {HostRoot} from './ReactWorkTags'; import {NoMode, BlockingMode, DebugTracingMode} from './ReactTypeOfMode'; import { NoLane, @@ -1753,19 +1752,12 @@ function refreshCache( // Schedule an update on the cache boundary to trigger a refresh. 
const refreshUpdate = createUpdate(eventTime, lane); - let payload; - if (provider.tag === HostRoot) { - payload = { - cacheInstance: { - provider: provider, - cache: seededCache, - }, - }; - } else { - payload = { + const payload = { + cacheInstance: { + provider: provider, cache: seededCache, - }; - } + }, + }; refreshUpdate.payload = payload; enqueueUpdate(provider, refreshUpdate); } finally { diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index ac42fee5b8583..c02aa1f3302a9 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -8,7 +8,7 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. @@ -797,24 +797,39 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { } } -export function requestFreshCache( +export function requestCacheFromPool( root: FiberRoot, - rootRenderLanes: Lanes, + provider: Fiber, renderLanes: Lanes, -): Cache | null { +): CacheInstance { if (!enableCache) { - return null; + return (null: any); } - // 1. Check if the currently rendering lanes already have a pending cache - // associated with them. If so, use this cache. If for some reason two or - // more lanes have different caches, pick the highest priority one. - // 2. Otherwise, check the root's `pooledCache`. This the oldest cache - // that has not yet been committed. This is really just a batching - // heuristic so that two transitions that happen in a similar timeframe can - // share the same cache. If it exists, use this cache. - // 3. If there's no pooled cache, create a fresh cache. This is now the - // pooled cache. + // 1. 
Check `root.pooledCache`. This is a batching heuristic — we set it + // whenever a cache is requested from the pool and it's not already set. + // Subsequent requests to the pool will receive the same cache, until one + // of them finishes and we clear it. The reason we clear `pooledCache` is + // so that any subsequent transitions can get a fresh cache. + // + // However, even after we clear it, there may still be pending transitions. + // They should continue using the same cache. So we need to also track the + // caches per-lane for as long as it takes for the shell to commit. + // + // If `root.pooledCache` exists, return it and exit. + // + // 2. If `root.pooledCache` does not exist, check the pool to see if this + // render lane already has a cache associated with it. If it does, this + // is now the pooled cache. Assign `root.pooledCache`, return it, and exit. + // + // 3. If there is no matching cache in the pool, create a new one and + // associate it with the render lane. Assign `root.pooledCache`, return it, + // and exit. + + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + return pooledCache; + } let caches = root.caches; @@ -827,7 +842,7 @@ export function requestFreshCache( while (lanes > 0) { const lane = getHighestPriorityLane(lanes); const index = laneToIndex(lane); - const inProgressCache: Cache | null = caches[index]; + const inProgressCache: CacheInstance | null = caches[index]; if (inProgressCache !== null) { // This render lane already has a cache associated with it. Reuse it. @@ -844,102 +859,32 @@ export function requestFreshCache( } otherRenderLanes &= ~otherLane; } + root.pooledCache = inProgressCache; return inProgressCache; } lanes &= ~lane; } - - if (includesOnlyRetries(rootRenderLanes)) { - // If this is a retry, and there's no cache associated with this lane, - // that must be because the original update was triggered by a refresh. - // Refreshes are stored on the Cache update queue, not the root.
So, - // return null to indicate that we should use the parent cache (the cache - // that refreshed). - return null; - } - - // There are no in-progress caches associated with the current render. Check - // if there's a pooled cache. - const pooledCache = root.pooledCache; - if (pooledCache !== null) { - // Associate the pooled cache with each of the render lanes. - lanes = renderLanes; - while (lanes > 0) { - const index = pickArbitraryLaneIndex(lanes); - const lane = 1 << index; - caches[index] = pooledCache; - lanes &= ~lane; - } - return pooledCache; - } - } - - if (includesOnlyRetries(rootRenderLanes)) { - // If this is a retry, and there's no cache associated with this lane, that - // must be because the original update was triggered by a refresh. Refreshes - // are stored on the Cache update queue, not the root. So, return null to - // indicate that we should use the parent cache (the cache that refreshed). - return null; } // Create a fresh cache. - const freshCache = new Map(); + const cacheInstance = { + cache: new Map(), + provider, + }; // This is now the pooled cache. - root.pooledCache = freshCache; + root.pooledCache = cacheInstance; // Associate the new cache with each of the render lanes. let lanes = renderLanes; while (lanes > 0) { const index = pickArbitraryLaneIndex(lanes); const lane = 1 << index; - caches[index] = freshCache; + caches[index] = cacheInstance; lanes &= ~lane; } - return freshCache; -} - -export function getWorkInProgressCache( - root: FiberRoot, - renderLanes: Lanes, -): Cache | null { - if (enableCache) { - // TODO: There should be a primary render lane, and we should use whatever - // cache is associated with that one. 
- const caches = root.caches; - if (caches !== null) { - let lanes = renderLanes; - while (lanes > 0) { - const lane = getHighestPriorityLane(lanes); - const index = laneToIndex(lane); - const inProgressCache: Cache | null = caches[index]; - if (inProgressCache !== null) { - return inProgressCache; - } - lanes &= ~lane; - } - } - } - return null; -} - -export function transferCacheToSpawnedLane( - root: FiberRoot, - cache: Cache, - lane: Lane, -) { - const index = laneToIndex(lane); - let caches = root.caches; - if (caches !== null) { - const existingCache: Cache | null = caches[index]; - if (existingCache === null) { - caches[index] = cache; - } - } else { - caches = root.caches = createLaneMap(null); - caches[index] = cache; - } + return cacheInstance; } export function getBumpedLaneForHydration( diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index c7c43c38b406f..cf92d7ef1be46 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -8,7 +8,7 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. @@ -797,24 +797,39 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { } } -export function requestFreshCache( +export function requestCacheFromPool( root: FiberRoot, - rootRenderLanes: Lanes, + provider: Fiber, renderLanes: Lanes, -): Cache | null { +): CacheInstance { if (!enableCache) { - return null; + return (null: any); } - // 1. Check if the currently rendering lanes already have a pending cache - // associated with them. If so, use this cache. 
If for some reason two or - // more lanes have different caches, pick the highest priority one. - // 2. Otherwise, check the root's `pooledCache`. This the oldest cache - // that has not yet been committed. This is really just a batching - // heuristic so that two transitions that happen in a similar timeframe can - // share the same cache. If it exists, use this cache. - // 3. If there's no pooled cache, create a fresh cache. This is now the - // pooled cache. + // 1. Check `root.pooledCache`. This is a batching heuristic — we set it + // whenever a cache is requested from the pool and it's not already set. + // Subsequent requests to the pool will receive the same cache, until one + // of them finishes and we clear it. The reason we clear `pooledCache` is + // so that any subsequent transitions can get a fresh cache. + // + // However, even after we clear it, there may still be pending transitions. + // They should continue using the same cache. So we need to also track the + // caches per-lane for as long as it takes for the shell to commit. + // + // If `root.pooledCache` exists, return it and exit. + // + // 2. If `root.pooledCache` does not exist, check the pool to see if this + // render lane already has a cache associated with it. If it does, this + // is now the pooled cache. Assign `root.pooledCache`, return it, and exit. + // + // 3. If there is no matching cache in the pool, create a new one and + // associate it with the render lane. Assign `root.pooledCache`, return it, + // and exit.
+ + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + return pooledCache; + } let caches = root.caches; @@ -827,7 +842,7 @@ export function requestFreshCache( while (lanes > 0) { const lane = getHighestPriorityLane(lanes); const index = laneToIndex(lane); - const inProgressCache: Cache | null = caches[index]; + const inProgressCache: CacheInstance | null = caches[index]; if (inProgressCache !== null) { // This render lane already has a cache associated with it. Reuse it. @@ -844,102 +859,32 @@ export function requestFreshCache( } otherRenderLanes &= ~otherLane; } + root.pooledCache = inProgressCache; return inProgressCache; } lanes &= ~lane; } - - if (includesOnlyRetries(rootRenderLanes)) { - // If this is a retry, and there's no cache associated with this lane, - // that must be because the original update was triggered by a refresh. - // Refreshes are stored on the Cache update queue, not the root. So, - // return null to indicate that we should use the parent cache (the cache - // that refreshed). - return null; - } - - // There are no in-progress caches associated with the current render. Check - // if there's a pooled cache. - const pooledCache = root.pooledCache; - if (pooledCache !== null) { - // Associate the pooled cache with each of the render lanes. - lanes = renderLanes; - while (lanes > 0) { - const index = pickArbitraryLaneIndex(lanes); - const lane = 1 << index; - caches[index] = pooledCache; - lanes &= ~lane; - } - return pooledCache; - } - } - - if (includesOnlyRetries(rootRenderLanes)) { - // If this is a retry, and there's no cache associated with this lane, that - // must be because the original update was triggered by a refresh. Refreshes - // are stored on the Cache update queue, not the root. So, return null to - // indicate that we should use the parent cache (the cache that refreshed). - return null; } // Create a fresh cache. 
- const freshCache = new Map(); + const cacheInstance = { + cache: new Map(), + provider, + }; // This is now the pooled cache. - root.pooledCache = freshCache; + root.pooledCache = cacheInstance; // Associate the new cache with each of the render lanes. let lanes = renderLanes; while (lanes > 0) { const index = pickArbitraryLaneIndex(lanes); const lane = 1 << index; - caches[index] = freshCache; + caches[index] = cacheInstance; lanes &= ~lane; } - return freshCache; -} - -export function getWorkInProgressCache( - root: FiberRoot, - renderLanes: Lanes, -): Cache | null { - if (enableCache) { - // TODO: There should be a primary render lane, and we should use whatever - // cache is associated with that one. - const caches = root.caches; - if (caches !== null) { - let lanes = renderLanes; - while (lanes > 0) { - const lane = getHighestPriorityLane(lanes); - const index = laneToIndex(lane); - const inProgressCache: Cache | null = caches[index]; - if (inProgressCache !== null) { - return inProgressCache; - } - lanes &= ~lane; - } - } - } - return null; -} - -export function transferCacheToSpawnedLane( - root: FiberRoot, - cache: Cache, - lane: Lane, -) { - const index = laneToIndex(lane); - let caches = root.caches; - if (caches !== null) { - const existingCache: Cache | null = caches[index]; - if (existingCache === null) { - caches[index] = cache; - } - } else { - caches = root.caches = createLaneMap(null); - caches[index] = cache; - } + return cacheInstance; } export function getBumpedLaneForHydration( diff --git a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js index 4a013a9d7eaf9..a07632ac45b5e 100644 --- a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js +++ b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js @@ -9,6 +9,7 @@ import type {ReactNodeList} from 'shared/ReactTypes'; import type {Lanes} from './ReactFiberLane.old'; +import type {CacheInstance} from 
'./ReactFiberCacheComponent'; export type OffscreenProps = {| // TODO: Pick an API before exposing the Offscreen type. I've chosen an enum @@ -28,4 +29,5 @@ export type OffscreenState = {| // will represent the pending work that must be included in the render in // order to unhide the component. baseLanes: Lanes, + cache: CacheInstance | null, |}; diff --git a/packages/react-reconciler/src/ReactFiberThrow.new.js b/packages/react-reconciler/src/ReactFiberThrow.new.js index 058b46be0040e..ea670b69f070a 100644 --- a/packages/react-reconciler/src/ReactFiberThrow.new.js +++ b/packages/react-reconciler/src/ReactFiberThrow.new.js @@ -21,6 +21,9 @@ import { HostRoot, SuspenseComponent, IncompleteClassComponent, + FunctionComponent, + ForwardRef, + SimpleMemoComponent, } from './ReactWorkTags'; import { DidCapture, @@ -209,9 +212,15 @@ function throwException( markComponentSuspended(sourceFiber, wakeable); } - if ((sourceFiber.mode & BlockingMode) === NoMode) { - // Reset the memoizedState to what it was before we attempted - // to render it. + // Reset the memoizedState to what it was before we attempted to render it. + // A legacy mode Suspense quirk, only relevant to hook components. 
+ const tag = sourceFiber.tag; + if ( + (sourceFiber.mode & BlockingMode) === NoMode && + (tag === FunctionComponent || + tag === ForwardRef || + tag === SimpleMemoComponent) + ) { const currentSource = sourceFiber.alternate; if (currentSource) { sourceFiber.updateQueue = currentSource.updateQueue; diff --git a/packages/react-reconciler/src/ReactFiberThrow.old.js b/packages/react-reconciler/src/ReactFiberThrow.old.js index fbb9daa452625..781d523b48815 100644 --- a/packages/react-reconciler/src/ReactFiberThrow.old.js +++ b/packages/react-reconciler/src/ReactFiberThrow.old.js @@ -21,6 +21,9 @@ import { HostRoot, SuspenseComponent, IncompleteClassComponent, + FunctionComponent, + ForwardRef, + SimpleMemoComponent, } from './ReactWorkTags'; import { DidCapture, @@ -209,9 +212,15 @@ function throwException( markComponentSuspended(sourceFiber, wakeable); } - if ((sourceFiber.mode & BlockingMode) === NoMode) { - // Reset the memoizedState to what it was before we attempted - // to render it. + // Reset the memoizedState to what it was before we attempted to render it. + // A legacy mode Suspense quirk, only relevant to hook components. 
+ const tag = sourceFiber.tag; + if ( + (sourceFiber.mode & BlockingMode) === NoMode && + (tag === FunctionComponent || + tag === ForwardRef || + tag === SimpleMemoComponent) + ) { const currentSource = sourceFiber.alternate; if (currentSource) { sourceFiber.updateQueue = currentSource.updateQueue; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index 018c5dd10ba5e..55a516b60312e 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -136,6 +136,10 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(workInProgress); + const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(workInProgress, cacheInstance); + } return null; case CacheComponent: if (enableCache) { @@ -190,6 +194,10 @@ function unwindInterruptedWork(interruptedWork: Fiber) { case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(interruptedWork); + const cacheInstance: CacheInstance | null = (interruptedWork.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(interruptedWork, cacheInstance); + } break; case CacheComponent: if (enableCache) { diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 3ff3727e4fa3b..73ee5d07e834b 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -136,6 +136,10 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(workInProgress); + const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(workInProgress, 
cacheInstance); + } return null; case CacheComponent: if (enableCache) { @@ -160,12 +164,12 @@ function unwindInterruptedWork(interruptedWork: Fiber) { break; } case HostRoot: { + popHostContainer(interruptedWork); if (enableCache) { const cacheInstance: CacheInstance = interruptedWork.memoizedState.cacheInstance; popCacheProvider(interruptedWork, cacheInstance); } - popHostContainer(interruptedWork); popTopLevelLegacyContextObject(interruptedWork); resetMutableSourceWorkInProgressVersions(); break; @@ -190,6 +194,10 @@ function unwindInterruptedWork(interruptedWork: Fiber) { case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(interruptedWork); + const cacheInstance: CacheInstance | null = (interruptedWork.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(interruptedWork, cacheInstance); + } break; case CacheComponent: if (enableCache) { diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js index 4cd481a750f99..347e0f5b11e51 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js @@ -15,8 +15,6 @@ import type {Interaction} from 'scheduler/src/Tracing'; import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; import type {Effect as HookEffect} from './ReactFiberHooks.new'; import type {StackCursor} from './ReactFiberStack.new'; -import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.new'; -import type {Cache} from './ReactFiberCacheComponent'; import { warnAboutDeprecatedLifecycles, @@ -179,8 +177,6 @@ import { markRootFinished, schedulerPriorityToLanePriority, lanePriorityToSchedulerPriority, - getWorkInProgressCache, - transferCacheToSpawnedLane, } from './ReactFiberLane.new'; import {requestCurrentTransition, NoTransition} from './ReactFiberTransition'; import {beginWork as originalBeginWork} from './ReactFiberBeginWork.new'; @@ -386,13 +382,6 @@ export 
function getWorkInProgressRoot(): FiberRoot | null { return workInProgressRoot; } -// DELETE ME: This is only neccessary because of `subtreeLanes`. We should get -// rid of `subtreeLanes` and use entanglement for Suspense retries instead. We -// would still be able to tell it's a retry because we'd check the primary lane. -export function getRootRenderLanes(): Lanes { - return workInProgressRootRenderLanes; -} - export function requestEventTime() { if ((executionContext & (RenderContext | CommitContext)) !== NoContext) { // We're inside React, so it's fine to read the actual time. @@ -1935,12 +1924,6 @@ function commitRootImpl(root, renderPriorityLevel) { // So we can clear these now to allow a new callback to be scheduled. root.callbackNode = null; - // TODO: This is only used when a render spawns a retry. So we could pass this - // fron the render phase instead, only for the relevant RootExitStatuses. - // However, we may end up using this same strategy for other types of spawned - // work, like Offscreen. - const cache = getWorkInProgressCache(root, lanes); - // Update the first and last pending times on this root. The new first // pending time is whatever is left on the root fiber. 
let remainingLanes = mergeLanes(finishedWork.lanes, finishedWork.childLanes); @@ -2057,7 +2040,6 @@ function commitRootImpl(root, renderPriorityLevel) { null, root, renderPriorityLevel, - cache, ); if (hasCaughtError()) { invariant(nextEffect !== null, 'Should be working on an effect.'); @@ -2067,7 +2049,7 @@ function commitRootImpl(root, renderPriorityLevel) { } } else { try { - commitMutationEffects(root, renderPriorityLevel, cache); + commitMutationEffects(root, renderPriorityLevel); } catch (error) { invariant(nextEffect !== null, 'Should be working on an effect.'); captureCommitPhaseError(nextEffect, error); @@ -2325,7 +2307,6 @@ function commitBeforeMutationEffects() { function commitMutationEffects( root: FiberRoot, renderPriorityLevel: ReactPriorityLevel, - cache: Cache | null, ) { // TODO: Should probably move the bulk of this function to commitWork. while (nextEffect !== null) { @@ -2375,7 +2356,7 @@ function commitMutationEffects( // Update const current = nextEffect.alternate; - commitWork(current, nextEffect, cache); + commitWork(current, nextEffect); break; } case Hydrating: { @@ -2387,12 +2368,12 @@ function commitMutationEffects( // Update const current = nextEffect.alternate; - commitWork(current, nextEffect, cache); + commitWork(current, nextEffect); break; } case Update: { const current = nextEffect.alternate; - commitWork(current, nextEffect, cache); + commitWork(current, nextEffect); break; } case Deletion: { @@ -2772,11 +2753,7 @@ export function pingSuspendedRoot( schedulePendingInteractions(root, pingedLanes); } -function retryTimedOutBoundary( - boundaryFiber: Fiber, - dataCache: Cache | null, - retryLane: Lane, -) { +function retryTimedOutBoundary(boundaryFiber: Fiber, retryLane: Lane) { // The boundary fiber (a Suspense component or SuspenseList component) // previously was rendered in its fallback state. 
One of the promises that // suspended it has resolved, which means at least part of the tree was @@ -2791,10 +2768,6 @@ function retryTimedOutBoundary( markRootUpdated(root, retryLane, eventTime); ensureRootIsScheduled(root, eventTime); schedulePendingInteractions(root, retryLane); - - if (dataCache !== null) { - transferCacheToSpawnedLane(root, dataCache, retryLane); - } } } @@ -2804,14 +2777,10 @@ export function retryDehydratedSuspenseBoundary(boundaryFiber: Fiber) { if (suspenseState !== null) { retryLane = suspenseState.retryLane; } - retryTimedOutBoundary(boundaryFiber, null, retryLane); + retryTimedOutBoundary(boundaryFiber, retryLane); } -export function resolveRetryWakeable( - boundaryFiber: Fiber, - wakeable: Wakeable, - dataCache: Cache | null, -) { +export function resolveRetryWakeable(boundaryFiber: Fiber, wakeable: Wakeable) { let retryLane = NoLane; // Default let retryCache: WeakSet | Set | null; if (enableSuspenseServerRenderer) { @@ -2843,7 +2812,7 @@ export function resolveRetryWakeable( retryCache.delete(wakeable); } - retryTimedOutBoundary(boundaryFiber, dataCache, retryLane); + retryTimedOutBoundary(boundaryFiber, retryLane); } // Computes the next Just Noticeable Difference (JND) boundary. 
diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js index 52131b78c9aee..7da1423841f3b 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js @@ -15,8 +15,6 @@ import type {Interaction} from 'scheduler/src/Tracing'; import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; import type {Effect as HookEffect} from './ReactFiberHooks.old'; import type {StackCursor} from './ReactFiberStack.old'; -import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.old'; -import type {Cache} from './ReactFiberCacheComponent'; import { warnAboutDeprecatedLifecycles, @@ -179,8 +177,6 @@ import { markRootFinished, schedulerPriorityToLanePriority, lanePriorityToSchedulerPriority, - getWorkInProgressCache, - transferCacheToSpawnedLane, } from './ReactFiberLane.old'; import {requestCurrentTransition, NoTransition} from './ReactFiberTransition'; import {beginWork as originalBeginWork} from './ReactFiberBeginWork.old'; @@ -386,13 +382,6 @@ export function getWorkInProgressRoot(): FiberRoot | null { return workInProgressRoot; } -// DELETE ME: This is only neccessary because of `subtreeLanes`. We should get -// rid of `subtreeLanes` and use entanglement for Suspense retries instead. We -// would still be able to tell it's a retry because we'd check the primary lane. -export function getRootRenderLanes(): Lanes { - return workInProgressRootRenderLanes; -} - export function requestEventTime() { if ((executionContext & (RenderContext | CommitContext)) !== NoContext) { // We're inside React, so it's fine to read the actual time. @@ -1935,12 +1924,6 @@ function commitRootImpl(root, renderPriorityLevel) { // So we can clear these now to allow a new callback to be scheduled. root.callbackNode = null; - // TODO: This is only used when a render spawns a retry. 
So we could pass this - // fron the render phase instead, only for the relevant RootExitStatuses. - // However, we may end up using this same strategy for other types of spawned - // work, like Offscreen. - const cache = getWorkInProgressCache(root, lanes); - // Update the first and last pending times on this root. The new first // pending time is whatever is left on the root fiber. let remainingLanes = mergeLanes(finishedWork.lanes, finishedWork.childLanes); @@ -2057,7 +2040,6 @@ function commitRootImpl(root, renderPriorityLevel) { null, root, renderPriorityLevel, - cache, ); if (hasCaughtError()) { invariant(nextEffect !== null, 'Should be working on an effect.'); @@ -2067,7 +2049,7 @@ function commitRootImpl(root, renderPriorityLevel) { } } else { try { - commitMutationEffects(root, renderPriorityLevel, cache); + commitMutationEffects(root, renderPriorityLevel); } catch (error) { invariant(nextEffect !== null, 'Should be working on an effect.'); captureCommitPhaseError(nextEffect, error); @@ -2325,7 +2307,6 @@ function commitBeforeMutationEffects() { function commitMutationEffects( root: FiberRoot, renderPriorityLevel: ReactPriorityLevel, - cache: Cache | null, ) { // TODO: Should probably move the bulk of this function to commitWork. 
while (nextEffect !== null) { @@ -2375,7 +2356,7 @@ function commitMutationEffects( // Update const current = nextEffect.alternate; - commitWork(current, nextEffect, cache); + commitWork(current, nextEffect); break; } case Hydrating: { @@ -2387,12 +2368,12 @@ function commitMutationEffects( // Update const current = nextEffect.alternate; - commitWork(current, nextEffect, cache); + commitWork(current, nextEffect); break; } case Update: { const current = nextEffect.alternate; - commitWork(current, nextEffect, cache); + commitWork(current, nextEffect); break; } case Deletion: { @@ -2772,11 +2753,7 @@ export function pingSuspendedRoot( schedulePendingInteractions(root, pingedLanes); } -function retryTimedOutBoundary( - boundaryFiber: Fiber, - dataCache: Cache | null, - retryLane: Lane, -) { +function retryTimedOutBoundary(boundaryFiber: Fiber, retryLane: Lane) { // The boundary fiber (a Suspense component or SuspenseList component) // previously was rendered in its fallback state. One of the promises that // suspended it has resolved, which means at least part of the tree was @@ -2791,10 +2768,6 @@ function retryTimedOutBoundary( markRootUpdated(root, retryLane, eventTime); ensureRootIsScheduled(root, eventTime); schedulePendingInteractions(root, retryLane); - - if (dataCache !== null) { - transferCacheToSpawnedLane(root, dataCache, retryLane); - } } } @@ -2804,14 +2777,10 @@ export function retryDehydratedSuspenseBoundary(boundaryFiber: Fiber) { if (suspenseState !== null) { retryLane = suspenseState.retryLane; } - retryTimedOutBoundary(boundaryFiber, null, retryLane); + retryTimedOutBoundary(boundaryFiber, retryLane); } -export function resolveRetryWakeable( - boundaryFiber: Fiber, - wakeable: Wakeable, - dataCache: Cache | null, -) { +export function resolveRetryWakeable(boundaryFiber: Fiber, wakeable: Wakeable) { let retryLane = NoLane; // Default let retryCache: WeakSet | Set | null; if (enableSuspenseServerRenderer) { @@ -2843,7 +2812,7 @@ export function 
resolveRetryWakeable( retryCache.delete(wakeable); } - retryTimedOutBoundary(boundaryFiber, dataCache, retryLane); + retryTimedOutBoundary(boundaryFiber, retryLane); } // Computes the next Just Noticeable Difference (JND) boundary. diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index 894650dd8c0cd..697f8cfd81d76 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -25,7 +25,7 @@ import type {RootTag} from './ReactRootTags'; import type {TimeoutHandle, NoTimeout} from './ReactFiberHostConfig'; import type {Wakeable} from 'shared/ReactTypes'; import type {Interaction} from 'scheduler/src/Tracing'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent'; // Unwind Circular: moved from ReactFiberHooks.old export type HookType = @@ -238,8 +238,8 @@ type BaseFiberRootProperties = {| entangledLanes: Lanes, entanglements: LaneMap, - caches: LaneMap | null, - pooledCache: Cache | null, + caches: LaneMap | null, + pooledCache: CacheInstance | null, |}; // The following attributes are only used by interaction tracing builds. From c599f9841ae5bc0afd739c5478c0ecdb8684d4c6 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Wed, 16 Dec 2020 17:13:36 -0600 Subject: [PATCH 17/30] Code size optimizations - Wraped more things in the feature flag. - Removed CacheComponent cases from commit phase. 
--- .../src/ReactFiberBeginWork.new.js | 118 ++++++++++-------- .../src/ReactFiberBeginWork.old.js | 118 ++++++++++-------- .../src/ReactFiberCacheComponent.js | 16 +++ .../src/ReactFiberCommitWork.new.js | 4 - .../src/ReactFiberCommitWork.old.js | 4 - .../src/ReactFiberCompleteWork.new.js | 8 +- .../src/ReactFiberCompleteWork.old.js | 8 +- .../src/ReactFiberHooks.new.js | 3 + .../src/ReactFiberHooks.old.js | 3 + .../src/ReactFiberUnwindWork.new.js | 16 ++- .../src/ReactFiberUnwindWork.old.js | 16 ++- 11 files changed, 180 insertions(+), 134 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index bc041133a9e0b..76a242fafb31f 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -616,15 +616,17 @@ function updateOffscreenComponent( const prevBaseLanes = prevState.baseLanes; nextBaseLanes = mergeLanes(prevBaseLanes, renderLanes); - // Keep a reference to the in-flight cache so we can resume later. If - // there's no fresh cache on the stack, there might be one from a - // previous render. If so, reuse it. - cacheInstance = hasFreshCacheProvider() - ? getFreshCacheProviderIfExists() - : prevState.cache; - // We don't need to push to the cache context because we're about to - // bail out. There won't be a context mismatch because we only pop - // the cache context if `updateQueue` is non-null. + if (enableCache) { + // Keep a reference to the in-flight cache so we can resume later. If + // there's no fresh cache on the stack, there might be one from a + // previous render. If so, reuse it. + cacheInstance = hasFreshCacheProvider() + ? getFreshCacheProviderIfExists() + : prevState.cache; + // We don't need to push to the cache context because we're about to + // bail out. There won't be a context mismatch because we only pop + // the cache context if `updateQueue` is non-null. 
+ } } else { nextBaseLanes = renderLanes; } @@ -650,7 +652,7 @@ function updateOffscreenComponent( // This is the second render. The surrounding visible content has already // committed. Now we resume rendering the hidden tree. - if (!hasFreshCacheProvider() && prevState !== null) { + if (enableCache && !hasFreshCacheProvider() && prevState !== null) { // If there was a fresh cache during the render that spawned this one, // resume using it. const prevCacheInstance = prevState.cache; @@ -681,7 +683,7 @@ function updateOffscreenComponent( subtreeRenderLanes = mergeLanes(prevState.baseLanes, renderLanes); - if (!hasFreshCacheProvider()) { + if (enableCache && !hasFreshCacheProvider()) { // If there was a fresh cache during the render that spawned this one, // resume using it. const prevCacheInstance = prevState.cache; @@ -704,10 +706,12 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } - // If we have a cache instance from a previous render attempt, then this will - // be non-null. We can use this to infer whether to push/pop the - // cache context. - workInProgress.updateQueue = cacheInstance; + if (enableCache) { + // If we have a cache instance from a previous render attempt, then this will + // be non-null. We can use this to infer whether to push/pop the + // cache context. + workInProgress.updateQueue = cacheInstance; + } reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; @@ -1744,24 +1748,27 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { - // Keep a reference to the in-flight cache so we can resume later. - let cache = getFreshCacheProviderIfExists(); - if (cache === null) { - // If there's no cache on the stack, a nested Cache boundary may have - // spawned a new one. Check the cache pool. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. 
This is a bug in React. Please ' + - 'file an issue.', - ); - // If a nested cache accessed the pool during this render, it will be - // assigned to root.pooledCache. No need to check the lane-indexed pool. - // TODO: Actually I think I'm wrong and we do need to check the lane-indexed - // pool, to account for infinite transitions that are not triggered by a - // `refresh` call, since those won't put a fresh context on the stack. - // However, that's not idiomatic so this might be fine for now. - cache = root.pooledCache; + let cache = null; + if (enableCache) { + // Keep a reference to the in-flight cache so we can resume later. + cache = getFreshCacheProviderIfExists(); + if (cache === null) { + // If there's no cache on the stack, a nested Cache boundary may have + // spawned a new one. Check the cache pool. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + // If a nested cache accessed the pool during this render, it will be + // assigned to root.pooledCache. No need to check the lane-indexed pool. + // TODO: Actually I think I'm wrong and we do need to check the lane-indexed + // pool, to account for infinite transitions that are not triggered by a + // `refresh` call, since those won't put a fresh context on the stack. + // However, that's not idiomatic so this might be fine for now. + cache = root.pooledCache; + } } return { baseLanes: renderLanes, @@ -1773,28 +1780,31 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { - // Keep a reference to the in-flight cache so we can resume later. - let cache = getFreshCacheProviderIfExists(); - if (cache === null) { - // If there's no cache on the stack, a nested Cache boundary may have - // spawned a new one. Check the cache pool. 
- const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - // If a nested cache accessed the pool during this render, it will be - // assigned to root.pooledCache. No need to check the lane-indexed pool. - // TODO: Actually I think I'm wrong and we do need to check the lane-indexed - // pool, to account for infinite transitions that are not triggered by a - // `refresh` call, since those won't put a fresh context on the stack. - // However, that's not idiomatic so this might be fine for now. - cache = root.pooledCache; + let cache = null; + if (enableCache) { + // Keep a reference to the in-flight cache so we can resume later. + cache = getFreshCacheProviderIfExists(); if (cache === null) { - // If there's no cache in the pool, there might be one from a previous - // render. If so, reuse it. - cache = prevOffscreenState.cache; + // If there's no cache on the stack, a nested Cache boundary may have + // spawned a new one. Check the cache pool. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + // If a nested cache accessed the pool during this render, it will be + // assigned to root.pooledCache. No need to check the lane-indexed pool. + // TODO: Actually I think I'm wrong and we do need to check the lane-indexed + // pool, to account for infinite transitions that are not triggered by a + // `refresh` call, since those won't put a fresh context on the stack. + // However, that's not idiomatic so this might be fine for now. + cache = root.pooledCache; + if (cache === null) { + // If there's no cache in the pool, there might be one from a previous + // render. If so, reuse it. 
+ cache = prevOffscreenState.cache; + } } } return { diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index dfea7f31c41a7..1f51432df34f6 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -616,15 +616,17 @@ function updateOffscreenComponent( const prevBaseLanes = prevState.baseLanes; nextBaseLanes = mergeLanes(prevBaseLanes, renderLanes); - // Keep a reference to the in-flight cache so we can resume later. If - // there's no fresh cache on the stack, there might be one from a - // previous render. If so, reuse it. - cacheInstance = hasFreshCacheProvider() - ? getFreshCacheProviderIfExists() - : prevState.cache; - // We don't need to push to the cache context because we're about to - // bail out. There won't be a context mismatch because we only pop - // the cache context if `updateQueue` is non-null. + if (enableCache) { + // Keep a reference to the in-flight cache so we can resume later. If + // there's no fresh cache on the stack, there might be one from a + // previous render. If so, reuse it. + cacheInstance = hasFreshCacheProvider() + ? getFreshCacheProviderIfExists() + : prevState.cache; + // We don't need to push to the cache context because we're about to + // bail out. There won't be a context mismatch because we only pop + // the cache context if `updateQueue` is non-null. + } } else { nextBaseLanes = renderLanes; } @@ -650,7 +652,7 @@ function updateOffscreenComponent( // This is the second render. The surrounding visible content has already // committed. Now we resume rendering the hidden tree. - if (!hasFreshCacheProvider() && prevState !== null) { + if (enableCache && !hasFreshCacheProvider() && prevState !== null) { // If there was a fresh cache during the render that spawned this one, // resume using it. 
const prevCacheInstance = prevState.cache; @@ -681,7 +683,7 @@ function updateOffscreenComponent( subtreeRenderLanes = mergeLanes(prevState.baseLanes, renderLanes); - if (!hasFreshCacheProvider()) { + if (enableCache && !hasFreshCacheProvider()) { // If there was a fresh cache during the render that spawned this one, // resume using it. const prevCacheInstance = prevState.cache; @@ -704,10 +706,12 @@ function updateOffscreenComponent( pushRenderLanes(workInProgress, subtreeRenderLanes); } - // If we have a cache instance from a previous render attempt, then this will - // be non-null. We can use this to infer whether to push/pop the - // cache context. - workInProgress.updateQueue = cacheInstance; + if (enableCache) { + // If we have a cache instance from a previous render attempt, then this will + // be non-null. We can use this to infer whether to push/pop the + // cache context. + workInProgress.updateQueue = cacheInstance; + } reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; @@ -1744,24 +1748,27 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { - // Keep a reference to the in-flight cache so we can resume later. - let cache = getFreshCacheProviderIfExists(); - if (cache === null) { - // If there's no cache on the stack, a nested Cache boundary may have - // spawned a new one. Check the cache pool. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - // If a nested cache accessed the pool during this render, it will be - // assigned to root.pooledCache. No need to check the lane-indexed pool. - // TODO: Actually I think I'm wrong and we do need to check the lane-indexed - // pool, to account for infinite transitions that are not triggered by a - // `refresh` call, since those won't put a fresh context on the stack. 
- // However, that's not idiomatic so this might be fine for now. - cache = root.pooledCache; + let cache = null; + if (enableCache) { + // Keep a reference to the in-flight cache so we can resume later. + cache = getFreshCacheProviderIfExists(); + if (cache === null) { + // If there's no cache on the stack, a nested Cache boundary may have + // spawned a new one. Check the cache pool. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + // If a nested cache accessed the pool during this render, it will be + // assigned to root.pooledCache. No need to check the lane-indexed pool. + // TODO: Actually I think I'm wrong and we do need to check the lane-indexed + // pool, to account for infinite transitions that are not triggered by a + // `refresh` call, since those won't put a fresh context on the stack. + // However, that's not idiomatic so this might be fine for now. + cache = root.pooledCache; + } } return { baseLanes: renderLanes, @@ -1773,28 +1780,31 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { - // Keep a reference to the in-flight cache so we can resume later. - let cache = getFreshCacheProviderIfExists(); - if (cache === null) { - // If there's no cache on the stack, a nested Cache boundary may have - // spawned a new one. Check the cache pool. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - // If a nested cache accessed the pool during this render, it will be - // assigned to root.pooledCache. No need to check the lane-indexed pool. 
- // TODO: Actually I think I'm wrong and we do need to check the lane-indexed - // pool, to account for infinite transitions that are not triggered by a - // `refresh` call, since those won't put a fresh context on the stack. - // However, that's not idiomatic so this might be fine for now. - cache = root.pooledCache; + let cache = null; + if (enableCache) { + // Keep a reference to the in-flight cache so we can resume later. + cache = getFreshCacheProviderIfExists(); if (cache === null) { - // If there's no cache in the pool, there might be one from a previous - // render. If so, reuse it. - cache = prevOffscreenState.cache; + // If there's no cache on the stack, a nested Cache boundary may have + // spawned a new one. Check the cache pool. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + // If a nested cache accessed the pool during this render, it will be + // assigned to root.pooledCache. No need to check the lane-indexed pool. + // TODO: Actually I think I'm wrong and we do need to check the lane-indexed + // pool, to account for infinite transitions that are not triggered by a + // `refresh` call, since those won't put a fresh context on the stack. + // However, that's not idiomatic so this might be fine for now. + cache = root.pooledCache; + if (cache === null) { + // If there's no cache in the pool, there might be one from a previous + // render. If so, reuse it. 
+ cache = prevOffscreenState.cache; + } } } return { diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js index d726cce5b0cc9..7071cb3eb4994 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -9,6 +9,7 @@ import type {ReactContext} from 'shared/ReactTypes'; +import {enableCache} from 'shared/ReactFeatureFlags'; import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; import {HostRoot} from './ReactWorkTags'; @@ -46,6 +47,9 @@ export function pushStaleCacheProvider( workInProgress: Fiber, cacheInstance: CacheInstance, ) { + if (!enableCache) { + return; + } if (__DEV__) { if (freshCacheInstance !== null) { console.error( @@ -60,6 +64,9 @@ export function pushFreshCacheProvider( workInProgress: Fiber, cacheInstance: CacheInstance, ) { + if (!enableCache) { + return; + } if (__DEV__) { if ( freshCacheInstance !== null && @@ -81,6 +88,9 @@ export function popCacheProvider( workInProgress: Fiber, cacheInstance: CacheInstance, ) { + if (!enableCache) { + return; + } if (__DEV__) { if (freshCacheInstance !== null && freshCacheInstance !== cacheInstance) { console.error( @@ -93,9 +103,15 @@ export function popCacheProvider( } export function hasFreshCacheProvider() { + if (!enableCache) { + return false; + } return freshCacheInstance !== null; } export function getFreshCacheProviderIfExists(): CacheInstance | null { + if (!enableCache) { + return null; + } return freshCacheInstance; } diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.new.js b/packages/react-reconciler/src/ReactFiberCommitWork.new.js index 316bf9a7dd238..d3082d5f785b8 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.new.js @@ -56,7 +56,6 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, - CacheComponent, } from './ReactWorkTags'; 
import { invokeGuardedCallback, @@ -804,7 +803,6 @@ function commitLifeCycles( case ScopeComponent: case OffscreenComponent: case LegacyHiddenComponent: - case CacheComponent: return; } invariant( @@ -1699,8 +1697,6 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void { hideOrUnhideAllChildren(finishedWork, isHidden); return; } - case CacheComponent: - return; } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.old.js b/packages/react-reconciler/src/ReactFiberCommitWork.old.js index fd0a2e9d0e8b7..31c91974577b0 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.old.js @@ -55,7 +55,6 @@ import { ScopeComponent, OffscreenComponent, LegacyHiddenComponent, - CacheComponent, } from './ReactWorkTags'; import { invokeGuardedCallback, @@ -803,7 +802,6 @@ function commitLifeCycles( case ScopeComponent: case OffscreenComponent: case LegacyHiddenComponent: - case CacheComponent: return; } invariant( @@ -1698,8 +1696,6 @@ function commitWork(current: Fiber | null, finishedWork: Fiber): void { hideOrUnhideAllChildren(finishedWork, isHidden); return; } - case CacheComponent: - return; } invariant( false, diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 6f7c91fda3c72..70ff57662a08f 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -1490,9 +1490,11 @@ function completeWork( bubbleProperties(workInProgress); } - const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); - if (cacheInstance !== null) { - popCacheProvider(workInProgress, cacheInstance); + if (enableCache) { + const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(workInProgress, cacheInstance); + } } return null; 
diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index f52190ca75998..7a2c50e80c393 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -1490,9 +1490,11 @@ function completeWork( bubbleProperties(workInProgress); } - const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); - if (cacheInstance !== null) { - popCacheProvider(workInProgress, cacheInstance); + if (enableCache) { + const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(workInProgress, cacheInstance); + } } return null; diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 157999c1e32a9..4154e9d13b3ef 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -1876,6 +1876,9 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { + if (!enableCache) { + invariant(false, 'Not implemented.'); + } const cacheInstance: CacheInstance = readContext(CacheContext); let cache = cacheInstance.cache; if (cache === null) { diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index c65a27cba9d70..176f929b671ab 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -1876,6 +1876,9 @@ function dispatchAction( } function getCacheForType(resourceType: () => T): T { + if (!enableCache) { + invariant(false, 'Not implemented.'); + } const cacheInstance: CacheInstance = readContext(CacheContext); let cache = cacheInstance.cache; if (cache === null) { diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js 
b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index 55a516b60312e..addd8d4b8841e 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -136,9 +136,11 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(workInProgress); - const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); - if (cacheInstance !== null) { - popCacheProvider(workInProgress, cacheInstance); + if (enableCache) { + const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(workInProgress, cacheInstance); + } } return null; case CacheComponent: @@ -194,9 +196,11 @@ function unwindInterruptedWork(interruptedWork: Fiber) { case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(interruptedWork); - const cacheInstance: CacheInstance | null = (interruptedWork.updateQueue: any); - if (cacheInstance !== null) { - popCacheProvider(interruptedWork, cacheInstance); + if (enableCache) { + const cacheInstance: CacheInstance | null = (interruptedWork.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(interruptedWork, cacheInstance); + } } break; case CacheComponent: diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 73ee5d07e834b..77f0d885e3c89 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -136,9 +136,11 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(workInProgress); - const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); - if (cacheInstance !== null) { - popCacheProvider(workInProgress, cacheInstance); + if (enableCache) { + 
const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(workInProgress, cacheInstance); + } } return null; case CacheComponent: @@ -194,9 +196,11 @@ function unwindInterruptedWork(interruptedWork: Fiber) { case OffscreenComponent: case LegacyHiddenComponent: popRenderLanes(interruptedWork); - const cacheInstance: CacheInstance | null = (interruptedWork.updateQueue: any); - if (cacheInstance !== null) { - popCacheProvider(interruptedWork, cacheInstance); + if (enableCache) { + const cacheInstance: CacheInstance | null = (interruptedWork.updateQueue: any); + if (cacheInstance !== null) { + popCacheProvider(interruptedWork, cacheInstance); + } } break; case CacheComponent: From 27c30e124112861458f9e36a872cb6a64385768c Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Wed, 16 Dec 2020 17:33:50 -0600 Subject: [PATCH 18/30] Remove useRefresh from unstable-shared-subset From @sebmarkbage's comment > This should not be included in this file. Which means that the error > the dispatcher throws should never be reachable, but worth keep in > case something is misconfigured or tries use internals. > > This ensures that statically, we can say that a shared/server component > can't import this at all so there's no risk of accidentally using it and > it's a signal that a client component is needed. 
--- packages/react/unstable-shared-subset.experimental.js | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/react/unstable-shared-subset.experimental.js b/packages/react/unstable-shared-subset.experimental.js index bd2e1cd77c25d..890066957e383 100644 --- a/packages/react/unstable-shared-subset.experimental.js +++ b/packages/react/unstable-shared-subset.experimental.js @@ -33,7 +33,6 @@ export { SuspenseList as unstable_SuspenseList, unstable_useOpaqueIdentifier, unstable_getCacheForType, - unstable_useRefresh, // enableDebugTracing unstable_DebugTracingMode, } from './src/React'; From ddaa1f13030ef968d69193315210a835e3c407e2 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Wed, 16 Dec 2020 21:03:34 -0600 Subject: [PATCH 19/30] Previous retry cache takes precedence over pool When committing a fallback, if there's no cache on the stack, check if there's a cache from the previous render. This is what we would have used for new content during the first pass when we attempted to unhide. If there's no previous cache, then we can check the pool. If a nested cache accessed the pool, it would have been assigned to `root.pooledCache`. --- .../src/ReactFiberBeginWork.new.js | 32 ++++++++----------- .../src/ReactFiberBeginWork.old.js | 32 ++++++++----------- 2 files changed, 28 insertions(+), 36 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 76a242fafb31f..84503b8cc346a 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -1785,25 +1785,21 @@ function updateSuspenseOffscreenState( // Keep a reference to the in-flight cache so we can resume later. cache = getFreshCacheProviderIfExists(); if (cache === null) { - // If there's no cache on the stack, a nested Cache boundary may have - // spawned a new one. Check the cache pool. 
- const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - // If a nested cache accessed the pool during this render, it will be - // assigned to root.pooledCache. No need to check the lane-indexed pool. - // TODO: Actually I think I'm wrong and we do need to check the lane-indexed - // pool, to account for infinite transitions that are not triggered by a - // `refresh` call, since those won't put a fresh context on the stack. - // However, that's not idiomatic so this might be fine for now. - cache = root.pooledCache; + // If there's no cache on the stack, check if there's a cache from the + // previous render. This is what we would have used for new content + // during the first pass when we attempted to unhide. + cache = prevOffscreenState.cache; if (cache === null) { - // If there's no cache in the pool, there might be one from a previous - // render. If so, reuse it. - cache = prevOffscreenState.cache; + // If there's no previous cache, then we can check the pool. If a nested + // cache accessed the pool during this render, it will be assigned to + // root.pooledCache. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + cache = root.pooledCache; } } } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 1f51432df34f6..e8a4e48c6a9e8 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -1785,25 +1785,21 @@ function updateSuspenseOffscreenState( // Keep a reference to the in-flight cache so we can resume later. cache = getFreshCacheProviderIfExists(); if (cache === null) { - // If there's no cache on the stack, a nested Cache boundary may have - // spawned a new one. 
Check the cache pool. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - // If a nested cache accessed the pool during this render, it will be - // assigned to root.pooledCache. No need to check the lane-indexed pool. - // TODO: Actually I think I'm wrong and we do need to check the lane-indexed - // pool, to account for infinite transitions that are not triggered by a - // `refresh` call, since those won't put a fresh context on the stack. - // However, that's not idiomatic so this might be fine for now. - cache = root.pooledCache; + // If there's no cache on the stack, check if there's a cache from the + // previous render. This is what we would have used for new content + // during the first pass when we attempted to unhide. + cache = prevOffscreenState.cache; if (cache === null) { - // If there's no cache in the pool, there might be one from a previous - // render. If so, reuse it. - cache = prevOffscreenState.cache; + // If there's no previous cache, then we can check the pool. If a nested + // cache accessed the pool during this render, it will be assigned to + // root.pooledCache. + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + cache = root.pooledCache; } } } From 79c65dc513f668e367fbc51bdbb60a861e7edf91 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Thu, 17 Dec 2020 00:22:23 -0600 Subject: [PATCH 20/30] To resume pooled cache, override root.pooledCache MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When retrying with a pooled cache resumed from the first render, we can't put the cache on the regular Suspense context, because it will override nested refreshes. We have to track it on a different conceptual context stack. Currently that's `root.pooledCache`. 
So my solution is to overwrite that field when we enter the nested subtree. (This might be too clever and I should put it on a stack cursor instead. Regardless, it doesn't change much about the structure of code.) I originally noticed this issue because the type of `root.pooledCache` was `{cache: Cache, provider: Fiber}` — pooled caches do not have providers! This fix partially depends on a planned change to get rid of the lane-indexed cache pool and always read from `root.pooledCache`. I'll do that in the next commit. --- .../src/ReactFiberBeginWork.new.js | 90 ++++++++++++++----- .../src/ReactFiberBeginWork.old.js | 90 ++++++++++++++----- .../src/ReactFiberCacheComponent.js | 40 ++++++++- .../src/ReactFiberCompleteWork.new.js | 25 +++++- .../src/ReactFiberCompleteWork.old.js | 25 +++++- .../src/ReactFiberLane.new.js | 18 ++-- .../src/ReactFiberLane.old.js | 18 ++-- .../src/ReactFiberOffscreenComponent.js | 7 +- .../src/ReactFiberUnwindWork.new.js | 43 +++++++-- .../src/ReactFiberUnwindWork.old.js | 43 +++++++-- .../src/ReactInternalTypes.js | 6 +- 11 files changed, 313 insertions(+), 92 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 84503b8cc346a..77355dc523091 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -23,7 +23,11 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type { + Cache, + CacheInstance, + PooledCacheInstance, +} from './ReactFiberCacheComponent'; import type {UpdateQueue} from './ReactUpdateQueue.new'; import checkPropTypes from 'shared/checkPropTypes'; @@ -211,6 +215,7 @@ import { pushFreshCacheProvider, pushStaleCacheProvider, hasFreshCacheProvider, + pushCachePool, getFreshCacheProviderIfExists, } from './ReactFiberCacheComponent'; @@ -592,7 +597,7 
@@ function updateOffscreenComponent( // the previous render. We will push this to the cache context so that we can // resume in-flight requests. However, we don't do this if there's already a // fresh cache provider on the stack. - let cacheInstance: CacheInstance | null = null; + let cacheInstance: CacheInstance | PooledCacheInstance | null = null; if ( nextProps.mode === 'hidden' || @@ -658,9 +663,23 @@ function updateOffscreenComponent( const prevCacheInstance = prevState.cache; if (prevCacheInstance !== null) { cacheInstance = prevCacheInstance; - pushFreshCacheProvider(workInProgress, prevCacheInstance); - // This isn't a refresh, it's a continuation of a previous render. - // So we don't need to propagate a context change. + const provider = cacheInstance.provider; + // If the resumed cache has a provider, then it's a fresh cache. We + // should push it to the stack. Otherwise, it's from the cache pool + // and we should override the cache pool. + if (provider !== null) { + pushFreshCacheProvider(workInProgress, prevCacheInstance); + // This isn't a refresh, it's a continuation of a previous render. + // So we don't need to propagate a context change. + } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + pushCachePool(root, prevCacheInstance); + } } } @@ -689,9 +708,22 @@ function updateOffscreenComponent( const prevCacheInstance = prevState.cache; if (prevCacheInstance !== null) { cacheInstance = prevCacheInstance; - pushFreshCacheProvider(workInProgress, prevCacheInstance); - // This isn't a refresh, it's a continuation of a previous render. - // So we don't need to propagate a context change. + // If the resumed cache has a provider, then it's a fresh cache. We + // should push it to the stack. Otherwise, it's from the cache pool + // and we should override the cache pool. 
+ if (cacheInstance.provider !== null) { + pushFreshCacheProvider(workInProgress, prevCacheInstance); + // This isn't a refresh, it's a continuation of a previous render. + // So we don't need to propagate a context change. + } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + pushCachePool(root, prevCacheInstance); + } } } @@ -752,7 +784,11 @@ function updateCacheComponent( ); // This will always be different from the parent cache; otherwise we would // have detected a fresh cache provider in the earlier branch. - cacheInstance = requestCacheFromPool(root, workInProgress, renderLanes); + const cache = requestCacheFromPool(root, renderLanes); + cacheInstance = { + cache, + provider: workInProgress, + }; initialState = { cacheInstance, }; @@ -1748,11 +1784,11 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { - let cache = null; + let cacheInstance: CacheInstance | PooledCacheInstance | null = null; if (enableCache) { // Keep a reference to the in-flight cache so we can resume later. - cache = getFreshCacheProviderIfExists(); - if (cache === null) { + cacheInstance = getFreshCacheProviderIfExists(); + if (cacheInstance === null) { // If there's no cache on the stack, a nested Cache boundary may have // spawned a new one. Check the cache pool. const root = getWorkInProgressRoot(); @@ -1767,12 +1803,18 @@ function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { // pool, to account for infinite transitions that are not triggered by a // `refresh` call, since those won't put a fresh context on the stack. // However, that's not idiomatic so this might be fine for now. 
- cache = root.pooledCache; + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + cacheInstance = { + cache: (pooledCache: Cache), + provider: null, + }; + } } } return { baseLanes: renderLanes, - cache, + cache: cacheInstance, }; } @@ -1780,16 +1822,16 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { - let cache = null; + let cacheInstance = null; if (enableCache) { // Keep a reference to the in-flight cache so we can resume later. - cache = getFreshCacheProviderIfExists(); - if (cache === null) { + cacheInstance = getFreshCacheProviderIfExists(); + if (cacheInstance === null) { // If there's no cache on the stack, check if there's a cache from the // previous render. This is what we would have used for new content // during the first pass when we attempted to unhide. - cache = prevOffscreenState.cache; - if (cache === null) { + cacheInstance = prevOffscreenState.cache; + if (cacheInstance === null) { // If there's no previous cache, then we can check the pool. If a nested // cache accessed the pool during this render, it will be assigned to // root.pooledCache. @@ -1799,13 +1841,19 @@ function updateSuspenseOffscreenState( 'Expected a work-in-progress root. This is a bug in React. 
Please ' + 'file an issue.', ); - cache = root.pooledCache; + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + cacheInstance = { + cache: (pooledCache: Cache), + provider: null, + }; + } } } } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), - cache, + cache: cacheInstance, }; } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index e8a4e48c6a9e8..02d0e9447bf76 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -23,7 +23,11 @@ import type { OffscreenProps, OffscreenState, } from './ReactFiberOffscreenComponent'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type { + Cache, + CacheInstance, + PooledCacheInstance, +} from './ReactFiberCacheComponent'; import type {UpdateQueue} from './ReactUpdateQueue.old'; import checkPropTypes from 'shared/checkPropTypes'; @@ -211,6 +215,7 @@ import { pushFreshCacheProvider, pushStaleCacheProvider, hasFreshCacheProvider, + pushCachePool, getFreshCacheProviderIfExists, } from './ReactFiberCacheComponent'; @@ -592,7 +597,7 @@ function updateOffscreenComponent( // the previous render. We will push this to the cache context so that we can // resume in-flight requests. However, we don't do this if there's already a // fresh cache provider on the stack. - let cacheInstance: CacheInstance | null = null; + let cacheInstance: CacheInstance | PooledCacheInstance | null = null; if ( nextProps.mode === 'hidden' || @@ -658,9 +663,23 @@ function updateOffscreenComponent( const prevCacheInstance = prevState.cache; if (prevCacheInstance !== null) { cacheInstance = prevCacheInstance; - pushFreshCacheProvider(workInProgress, prevCacheInstance); - // This isn't a refresh, it's a continuation of a previous render. - // So we don't need to propagate a context change. 
+ const provider = cacheInstance.provider; + // If the resumed cache has a provider, then it's a fresh cache. We + // should push it to the stack. Otherwise, it's from the cache pool + // and we should override the cache pool. + if (provider !== null) { + pushFreshCacheProvider(workInProgress, prevCacheInstance); + // This isn't a refresh, it's a continuation of a previous render. + // So we don't need to propagate a context change. + } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + pushCachePool(root, prevCacheInstance); + } } } @@ -689,9 +708,22 @@ function updateOffscreenComponent( const prevCacheInstance = prevState.cache; if (prevCacheInstance !== null) { cacheInstance = prevCacheInstance; - pushFreshCacheProvider(workInProgress, prevCacheInstance); - // This isn't a refresh, it's a continuation of a previous render. - // So we don't need to propagate a context change. + // If the resumed cache has a provider, then it's a fresh cache. We + // should push it to the stack. Otherwise, it's from the cache pool + // and we should override the cache pool. + if (cacheInstance.provider !== null) { + pushFreshCacheProvider(workInProgress, prevCacheInstance); + // This isn't a refresh, it's a continuation of a previous render. + // So we don't need to propagate a context change. + } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + pushCachePool(root, prevCacheInstance); + } } } @@ -752,7 +784,11 @@ function updateCacheComponent( ); // This will always be different from the parent cache; otherwise we would // have detected a fresh cache provider in the earlier branch. 
- cacheInstance = requestCacheFromPool(root, workInProgress, renderLanes); + const cache = requestCacheFromPool(root, renderLanes); + cacheInstance = { + cache, + provider: workInProgress, + }; initialState = { cacheInstance, }; @@ -1748,11 +1784,11 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { - let cache = null; + let cacheInstance: CacheInstance | PooledCacheInstance | null = null; if (enableCache) { // Keep a reference to the in-flight cache so we can resume later. - cache = getFreshCacheProviderIfExists(); - if (cache === null) { + cacheInstance = getFreshCacheProviderIfExists(); + if (cacheInstance === null) { // If there's no cache on the stack, a nested Cache boundary may have // spawned a new one. Check the cache pool. const root = getWorkInProgressRoot(); @@ -1767,12 +1803,18 @@ function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { // pool, to account for infinite transitions that are not triggered by a // `refresh` call, since those won't put a fresh context on the stack. // However, that's not idiomatic so this might be fine for now. - cache = root.pooledCache; + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + cacheInstance = { + cache: (pooledCache: Cache), + provider: null, + }; + } } } return { baseLanes: renderLanes, - cache, + cache: cacheInstance, }; } @@ -1780,16 +1822,16 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { - let cache = null; + let cacheInstance = null; if (enableCache) { // Keep a reference to the in-flight cache so we can resume later. - cache = getFreshCacheProviderIfExists(); - if (cache === null) { + cacheInstance = getFreshCacheProviderIfExists(); + if (cacheInstance === null) { // If there's no cache on the stack, check if there's a cache from the // previous render. 
This is what we would have used for new content // during the first pass when we attempted to unhide. - cache = prevOffscreenState.cache; - if (cache === null) { + cacheInstance = prevOffscreenState.cache; + if (cacheInstance === null) { // If there's no previous cache, then we can check the pool. If a nested // cache accessed the pool during this render, it will be assigned to // root.pooledCache. @@ -1799,13 +1841,19 @@ function updateSuspenseOffscreenState( 'Expected a work-in-progress root. This is a bug in React. Please ' + 'file an issue.', ); - cache = root.pooledCache; + const pooledCache = root.pooledCache; + if (pooledCache !== null) { + cacheInstance = { + cache: (pooledCache: Cache), + provider: null, + }; + } } } } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), - cache, + cache: cacheInstance, }; } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js index 7071cb3eb4994..3aed34685a823 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -8,6 +8,7 @@ */ import type {ReactContext} from 'shared/ReactTypes'; +import type {FiberRoot} from './ReactInternalTypes'; import {enableCache} from 'shared/ReactFeatureFlags'; import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; @@ -18,10 +19,15 @@ import {pushProvider, popProvider} from './ReactFiberNewContext.new'; export type Cache = Map<() => mixed, mixed>; export type CacheInstance = {| - cache: Cache | null, + cache: Cache, provider: Fiber, |}; +export type PooledCacheInstance = {| + cache: Cache, + provider: null, +|}; + export const CacheContext: ReactContext = { $$typeof: REACT_CONTEXT_TYPE, // We don't use Consumer/Provider for Cache components. So we'll cheat. 
@@ -115,3 +121,35 @@ export function getFreshCacheProviderIfExists(): CacheInstance | null { } return freshCacheInstance; } + +export function pushCachePool( + root: FiberRoot, + cacheInstance: PooledCacheInstance, +) { + // This will temporarily override the root's pooled cache, so that any new + // Cache boundaries in the subtree use this one. The previous value on the + // "stack" is stored on the cache instance. We will restore it during the + // complete phase. + // + // The more straightforward way to do this would be to use the array-based + // stack (push/pop). Maybe this is too clever. + const prevPooledCacheOnStack = root.pooledCache; + root.pooledCache = cacheInstance.cache; + // This is never supposed to be null. I'm cheating. Sorry. It will be reset to + // the correct type when we pop. + cacheInstance.cache = ((prevPooledCacheOnStack: any): Cache); +} + +export function popCachePool( + root: FiberRoot, + cacheInstance: PooledCacheInstance, +) { + const retryCache: Cache = (root.pooledCache: any); + if (__DEV__) { + if (retryCache === null) { + console.error('Expected to have a pooled cache. 
This is a bug in React.'); + } + } + root.pooledCache = cacheInstance.cache; + cacheInstance.cache = retryCache; +} diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 70ff57662a08f..27b9d7044c1d7 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -28,7 +28,10 @@ import type { } from './ReactFiberSuspenseComponent.new'; import type {SuspenseContext} from './ReactFiberSuspenseContext.new'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type { + CacheInstance, + PooledCacheInstance, +} from './ReactFiberCacheComponent'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; @@ -145,6 +148,7 @@ import { popRenderLanes, getRenderTargetTime, subtreeRenderLanes, + getWorkInProgressRoot, } from './ReactFiberWorkLoop.new'; import {createFundamentalStateInstance} from './ReactFiberFundamental.new'; import { @@ -157,7 +161,7 @@ import { import {resetChildFibers} from './ReactChildFiber.new'; import {createScopeInstance} from './ReactFiberScope.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; -import {popCacheProvider} from './ReactFiberCacheComponent'; +import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. 
This turns a Placement into @@ -1491,9 +1495,22 @@ function completeWork( } if (enableCache) { - const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + const cacheInstance: + | CacheInstance + | PooledCacheInstance + | null = (workInProgress.updateQueue: any); if (cacheInstance !== null) { - popCacheProvider(workInProgress, cacheInstance); + if (cacheInstance.provider !== null) { + popCacheProvider(workInProgress, cacheInstance); + } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + popCachePool(root, cacheInstance); + } } } diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index 7a2c50e80c393..ebfefec3130c8 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -28,7 +28,10 @@ import type { } from './ReactFiberSuspenseComponent.old'; import type {SuspenseContext} from './ReactFiberSuspenseContext.old'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type { + CacheInstance, + PooledCacheInstance, +} from './ReactFiberCacheComponent'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; @@ -145,6 +148,7 @@ import { popRenderLanes, getRenderTargetTime, subtreeRenderLanes, + getWorkInProgressRoot, } from './ReactFiberWorkLoop.old'; import {createFundamentalStateInstance} from './ReactFiberFundamental.old'; import { @@ -157,7 +161,7 @@ import { import {resetChildFibers} from './ReactChildFiber.old'; import {createScopeInstance} from './ReactFiberScope.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; -import {popCacheProvider} from './ReactFiberCacheComponent'; +import 
{popCacheProvider, popCachePool} from './ReactFiberCacheComponent'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. This turns a Placement into @@ -1491,9 +1495,22 @@ function completeWork( } if (enableCache) { - const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + const cacheInstance: + | CacheInstance + | PooledCacheInstance + | null = (workInProgress.updateQueue: any); if (cacheInstance !== null) { - popCacheProvider(workInProgress, cacheInstance); + if (cacheInstance.provider !== null) { + popCacheProvider(workInProgress, cacheInstance); + } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + popCachePool(root, cacheInstance); + } } } diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index c02aa1f3302a9..20b0f386d303b 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -8,7 +8,7 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type {Cache} from './ReactFiberCacheComponent'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. 
@@ -799,9 +799,8 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { export function requestCacheFromPool( root: FiberRoot, - provider: Fiber, renderLanes: Lanes, -): CacheInstance { +): Cache { if (!enableCache) { return (null: any); } @@ -842,7 +841,7 @@ export function requestCacheFromPool( while (lanes > 0) { const lane = getHighestPriorityLane(lanes); const index = laneToIndex(lane); - const inProgressCache: CacheInstance | null = caches[index]; + const inProgressCache: Cache | null = caches[index]; if (inProgressCache !== null) { // This render lane already has a cache associated with it. Reuse it. @@ -867,24 +866,21 @@ export function requestCacheFromPool( } // Create a fresh cache. - const cacheInstance = { - cache: new Map(), - provider, - }; + const cache = new Map(); // This is now the pooled cache. - root.pooledCache = cacheInstance; + root.pooledCache = cache; // Associate the new cache with each of the render lanes. let lanes = renderLanes; while (lanes > 0) { const index = pickArbitraryLaneIndex(lanes); const lane = 1 << index; - caches[index] = cacheInstance; + caches[index] = cache; lanes &= ~lane; } - return cacheInstance; + return cache; } export function getBumpedLaneForHydration( diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index cf92d7ef1be46..1f75416c90602 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -8,7 +8,7 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type {Cache} from './ReactFiberCacheComponent'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. 
@@ -799,9 +799,8 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { export function requestCacheFromPool( root: FiberRoot, - provider: Fiber, renderLanes: Lanes, -): CacheInstance { +): Cache { if (!enableCache) { return (null: any); } @@ -842,7 +841,7 @@ export function requestCacheFromPool( while (lanes > 0) { const lane = getHighestPriorityLane(lanes); const index = laneToIndex(lane); - const inProgressCache: CacheInstance | null = caches[index]; + const inProgressCache: Cache | null = caches[index]; if (inProgressCache !== null) { // This render lane already has a cache associated with it. Reuse it. @@ -867,24 +866,21 @@ export function requestCacheFromPool( } // Create a fresh cache. - const cacheInstance = { - cache: new Map(), - provider, - }; + const cache = new Map(); // This is now the pooled cache. - root.pooledCache = cacheInstance; + root.pooledCache = cache; // Associate the new cache with each of the render lanes. let lanes = renderLanes; while (lanes > 0) { const index = pickArbitraryLaneIndex(lanes); const lane = 1 << index; - caches[index] = cacheInstance; + caches[index] = cache; lanes &= ~lane; } - return cacheInstance; + return cache; } export function getBumpedLaneForHydration( diff --git a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js index a07632ac45b5e..a72908a44ecb7 100644 --- a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js +++ b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js @@ -9,7 +9,10 @@ import type {ReactNodeList} from 'shared/ReactTypes'; import type {Lanes} from './ReactFiberLane.old'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type { + CacheInstance, + PooledCacheInstance, +} from './ReactFiberCacheComponent'; export type OffscreenProps = {| // TODO: Pick an API before exposing the Offscreen type. 
I've chosen an enum @@ -29,5 +32,5 @@ export type OffscreenState = {| // will represent the pending work that must be included in the render in // order to unhide the component. baseLanes: Lanes, - cache: CacheInstance | null, + cache: CacheInstance | PooledCacheInstance | null, |}; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index addd8d4b8841e..d4f6fe94097b2 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -11,7 +11,10 @@ import type {ReactContext} from 'shared/ReactTypes'; import type {Fiber} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.new'; import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type { + CacheInstance, + PooledCacheInstance, +} from './ReactFiberCacheComponent'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; import { @@ -43,8 +46,8 @@ import { popTopLevelContextObject as popTopLevelLegacyContextObject, } from './ReactFiberContext.new'; import {popProvider} from './ReactFiberNewContext.new'; -import {popRenderLanes} from './ReactFiberWorkLoop.new'; -import {popCacheProvider} from './ReactFiberCacheComponent'; +import {popRenderLanes, getWorkInProgressRoot} from './ReactFiberWorkLoop.new'; +import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent'; import {transferActualDuration} from './ReactProfilerTimer.new'; import invariant from 'shared/invariant'; @@ -137,9 +140,22 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(workInProgress); if (enableCache) { - const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + const cacheInstance: + | CacheInstance + | PooledCacheInstance + | null = 
(workInProgress.updateQueue: any); if (cacheInstance !== null) { - popCacheProvider(workInProgress, cacheInstance); + if (cacheInstance.provider !== null) { + popCacheProvider(workInProgress, cacheInstance); + } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + popCachePool(root, cacheInstance); + } } } return null; @@ -197,9 +213,22 @@ function unwindInterruptedWork(interruptedWork: Fiber) { case LegacyHiddenComponent: popRenderLanes(interruptedWork); if (enableCache) { - const cacheInstance: CacheInstance | null = (interruptedWork.updateQueue: any); + const cacheInstance: + | CacheInstance + | PooledCacheInstance + | null = (interruptedWork.updateQueue: any); if (cacheInstance !== null) { - popCacheProvider(interruptedWork, cacheInstance); + if (cacheInstance.provider !== null) { + popCacheProvider(interruptedWork, cacheInstance); + } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. 
Please ' + + 'file an issue.', + ); + popCachePool(root, cacheInstance); + } } } break; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 77f0d885e3c89..296bf0436d68e 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -11,7 +11,10 @@ import type {ReactContext} from 'shared/ReactTypes'; import type {Fiber} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.old'; import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type { + CacheInstance, + PooledCacheInstance, +} from './ReactFiberCacheComponent'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; import { @@ -43,8 +46,8 @@ import { popTopLevelContextObject as popTopLevelLegacyContextObject, } from './ReactFiberContext.old'; import {popProvider} from './ReactFiberNewContext.old'; -import {popRenderLanes} from './ReactFiberWorkLoop.old'; -import {popCacheProvider} from './ReactFiberCacheComponent'; +import {popRenderLanes, getWorkInProgressRoot} from './ReactFiberWorkLoop.old'; +import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent'; import {transferActualDuration} from './ReactProfilerTimer.old'; import invariant from 'shared/invariant'; @@ -137,9 +140,22 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(workInProgress); if (enableCache) { - const cacheInstance: CacheInstance | null = (workInProgress.updateQueue: any); + const cacheInstance: + | CacheInstance + | PooledCacheInstance + | null = (workInProgress.updateQueue: any); if (cacheInstance !== null) { - popCacheProvider(workInProgress, cacheInstance); + if (cacheInstance.provider !== null) { + popCacheProvider(workInProgress, cacheInstance); 
+ } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + popCachePool(root, cacheInstance); + } } } return null; @@ -197,9 +213,22 @@ function unwindInterruptedWork(interruptedWork: Fiber) { case LegacyHiddenComponent: popRenderLanes(interruptedWork); if (enableCache) { - const cacheInstance: CacheInstance | null = (interruptedWork.updateQueue: any); + const cacheInstance: + | CacheInstance + | PooledCacheInstance + | null = (interruptedWork.updateQueue: any); if (cacheInstance !== null) { - popCacheProvider(interruptedWork, cacheInstance); + if (cacheInstance.provider !== null) { + popCacheProvider(interruptedWork, cacheInstance); + } else { + const root = getWorkInProgressRoot(); + invariant( + root !== null, + 'Expected a work-in-progress root. This is a bug in React. Please ' + + 'file an issue.', + ); + popCachePool(root, cacheInstance); + } } } break; diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index 697f8cfd81d76..894650dd8c0cd 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -25,7 +25,7 @@ import type {RootTag} from './ReactRootTags'; import type {TimeoutHandle, NoTimeout} from './ReactFiberHostConfig'; import type {Wakeable} from 'shared/ReactTypes'; import type {Interaction} from 'scheduler/src/Tracing'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type {Cache} from './ReactFiberCacheComponent'; // Unwind Circular: moved from ReactFiberHooks.old export type HookType = @@ -238,8 +238,8 @@ type BaseFiberRootProperties = {| entangledLanes: Lanes, entanglements: LaneMap, - caches: LaneMap | null, - pooledCache: CacheInstance | null, + caches: LaneMap | null, + pooledCache: Cache | null, |}; // The following attributes are only used by interaction tracing builds. 
From 52fd1eb0245136b3dbc623c2480d769c559d2c51 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Thu, 17 Dec 2020 00:42:20 -0600 Subject: [PATCH 21/30] Use only a single pooled cache at a time The cache-per-lane approach makes conceptual sense but it's probably not worth it until we complete the Lanes entanglement refactor. In the current implementation we have to do lots of looping to maintain the pool. And most transitions get batched together, anyway. We'll re-evaluate later. --- .../src/ReactFiberLane.new.js | 98 ++----------------- .../src/ReactFiberLane.old.js | 98 ++----------------- .../src/ReactFiberRoot.new.js | 2 +- .../src/ReactInternalTypes.js | 2 +- 4 files changed, 22 insertions(+), 178 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index 20b0f386d303b..9032d9b77cff2 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -739,10 +739,18 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { root.entangledLanes &= remainingLanes; + if (enableCache) { + const pooledCacheLanes = (root.pooledCacheLanes &= remainingLanes); + if (pooledCacheLanes === NoLanes) { + // None of the remaining work relies on the cache pool. Clear it so + // subsequent requests get a new cache. + root.pooledCache = null; + } + } + const entanglements = root.entanglements; const eventTimes = root.eventTimes; const expirationTimes = root.expirationTimes; - const pooledCache = root.pooledCache; // Clear the lanes that no longer have pending work let lanes = noLongerPendingLanes; @@ -754,30 +762,6 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { eventTimes[index] = NoTimestamp; expirationTimes[index] = NoTimestamp; - if (enableCache) { - // Subsequent loads in this lane should use a fresh cache. 
- // TODO: If a cache is no longer associated with any lane, we should issue - // an abort signal. - const caches = root.caches; - if (caches !== null) { - if (remainingLanes === 0) { - // Fast path. Clear all caches at once. - root.caches = createLaneMap(null); - root.pooledCache = null; - } else { - const cache = caches[index]; - if (cache !== null) { - caches[index] = null; - if (cache === pooledCache) { - // The pooled cache is now part of the committed tree. We'll now - // clear it so that the next transition gets a fresh cache. - root.pooledCache = null; - } - } - } - } - } - lanes &= ~lane; } } @@ -805,81 +789,19 @@ export function requestCacheFromPool( return (null: any); } - // 1. Check `root.pooledCache`. This is a batching heuristic — we set it - // whenever a cache is requeted from the pool and it's not already set. - // Subsequent requests to the pool will receive the same cache, until one - // of them finishes and we clear it. The reason we clear `pooledCache` is - // so that any subsequent transitions can get a fresh cache. - // - // However, even after we clear it, there may still be pending transitions. - // They should continue using the same cache. So we need to also track the - // caches per-lane for as long as it takes for the shell to commit. - // - // If `root.pooledCache` exists, return it and exit. - // - // 2. If `root.pooledCache` does not exist, check the pool to see if this - // render lane already has a cache associated with it. If it does, this - // is now the pooled cache. Assign `root.pooledCache`, return it, and exit. - // - // 3. If there is no matching cache in the pool, create a new one and - // associate it with the render lane. Assign `root.pooledCache`, return it, - // and exit. 
+ root.pooledCacheLanes |= renderLanes; const pooledCache = root.pooledCache; if (pooledCache !== null) { return pooledCache; } - let caches = root.caches; - - // TODO: There should be a primary render lane, and we should use whatever - // cache is associated with that one. - if (caches === null) { - caches = root.caches = createLaneMap(null); - } else { - let lanes = renderLanes; - while (lanes > 0) { - const lane = getHighestPriorityLane(lanes); - const index = laneToIndex(lane); - const inProgressCache: Cache | null = caches[index]; - if (inProgressCache !== null) { - // This render lane already has a cache associated with it. Reuse it. - - // If the other render lanes are not already associated with a cache, - // associate them with this one. - let otherRenderLanes = renderLanes & ~lane; - while (otherRenderLanes > 0) { - const otherIndex = pickArbitraryLaneIndex(otherRenderLanes); - const otherLane = 1 << otherIndex; - // We shouldn't overwrite a cache that already exists, since that could - // lead to dropped requests or data, i.e. if the current render suspends. - if (caches[otherIndex] === null) { - caches[otherIndex] = inProgressCache; - } - otherRenderLanes &= ~otherLane; - } - root.pooledCache = inProgressCache; - return inProgressCache; - } - lanes &= ~lane; - } - } - // Create a fresh cache. const cache = new Map(); // This is now the pooled cache. root.pooledCache = cache; - // Associate the new cache with each of the render lanes. 
- let lanes = renderLanes; - while (lanes > 0) { - const index = pickArbitraryLaneIndex(lanes); - const lane = 1 << index; - caches[index] = cache; - lanes &= ~lane; - } - return cache; } diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index 1f75416c90602..95a3a3ed4f5ac 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -739,10 +739,18 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { root.entangledLanes &= remainingLanes; + if (enableCache) { + const pooledCacheLanes = (root.pooledCacheLanes &= remainingLanes); + if (pooledCacheLanes === NoLanes) { + // None of the remaining work relies on the cache pool. Clear it so + // subsequent requests get a new cache. + root.pooledCache = null; + } + } + const entanglements = root.entanglements; const eventTimes = root.eventTimes; const expirationTimes = root.expirationTimes; - const pooledCache = root.pooledCache; // Clear the lanes that no longer have pending work let lanes = noLongerPendingLanes; @@ -754,30 +762,6 @@ export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) { eventTimes[index] = NoTimestamp; expirationTimes[index] = NoTimestamp; - if (enableCache) { - // Subsequent loads in this lane should use a fresh cache. - // TODO: If a cache is no longer associated with any lane, we should issue - // an abort signal. - const caches = root.caches; - if (caches !== null) { - if (remainingLanes === 0) { - // Fast path. Clear all caches at once. - root.caches = createLaneMap(null); - root.pooledCache = null; - } else { - const cache = caches[index]; - if (cache !== null) { - caches[index] = null; - if (cache === pooledCache) { - // The pooled cache is now part of the committed tree. We'll now - // clear it so that the next transition gets a fresh cache. 
- root.pooledCache = null; - } - } - } - } - } - lanes &= ~lane; } } @@ -805,81 +789,19 @@ export function requestCacheFromPool( return (null: any); } - // 1. Check `root.pooledCache`. This is a batching heuristic — we set it - // whenever a cache is requeted from the pool and it's not already set. - // Subsequent requests to the pool will receive the same cache, until one - // of them finishes and we clear it. The reason we clear `pooledCache` is - // so that any subsequent transitions can get a fresh cache. - // - // However, even after we clear it, there may still be pending transitions. - // They should continue using the same cache. So we need to also track the - // caches per-lane for as long as it takes for the shell to commit. - // - // If `root.pooledCache` exists, return it and exit. - // - // 2. If `root.pooledCache` does not exist, check the pool to see if this - // render lane already has a cache associated with it. If it does, this - // is now the pooled cache. Assign `root.pooledCache`, return it, and exit. - // - // 3. If there is no matching cache in the pool, create a new one and - // associate it with the render lane. Assign `root.pooledCache`, return it, - // and exit. + root.pooledCacheLanes |= renderLanes; const pooledCache = root.pooledCache; if (pooledCache !== null) { return pooledCache; } - let caches = root.caches; - - // TODO: There should be a primary render lane, and we should use whatever - // cache is associated with that one. - if (caches === null) { - caches = root.caches = createLaneMap(null); - } else { - let lanes = renderLanes; - while (lanes > 0) { - const lane = getHighestPriorityLane(lanes); - const index = laneToIndex(lane); - const inProgressCache: Cache | null = caches[index]; - if (inProgressCache !== null) { - // This render lane already has a cache associated with it. Reuse it. - - // If the other render lanes are not already associated with a cache, - // associate them with this one. 
- let otherRenderLanes = renderLanes & ~lane; - while (otherRenderLanes > 0) { - const otherIndex = pickArbitraryLaneIndex(otherRenderLanes); - const otherLane = 1 << otherIndex; - // We shouldn't overwrite a cache that already exists, since that could - // lead to dropped requests or data, i.e. if the current render suspends. - if (caches[otherIndex] === null) { - caches[otherIndex] = inProgressCache; - } - otherRenderLanes &= ~otherLane; - } - root.pooledCache = inProgressCache; - return inProgressCache; - } - lanes &= ~lane; - } - } - // Create a fresh cache. const cache = new Map(); // This is now the pooled cache. root.pooledCache = cache; - // Associate the new cache with each of the render lanes. - let lanes = renderLanes; - while (lanes > 0) { - const index = pickArbitraryLaneIndex(lanes); - const lane = 1 << index; - caches[index] = cache; - lanes &= ~lane; - } - return cache; } diff --git a/packages/react-reconciler/src/ReactFiberRoot.new.js b/packages/react-reconciler/src/ReactFiberRoot.new.js index 92af768c30d92..69ddb7ff4717a 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.new.js +++ b/packages/react-reconciler/src/ReactFiberRoot.new.js @@ -54,8 +54,8 @@ function FiberRootNode(containerInfo, tag, hydrate) { this.entanglements = createLaneMap(NoLanes); if (enableCache) { - this.caches = createLaneMap(null); this.pooledCache = null; + this.pooledCacheLanes = NoLanes; } if (supportsHydration) { diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index 894650dd8c0cd..c4a587060632d 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -238,8 +238,8 @@ type BaseFiberRootProperties = {| entangledLanes: Lanes, entanglements: LaneMap, - caches: LaneMap | null, pooledCache: Cache | null, + pooledCacheLanes: Lanes, |}; // The following attributes are only used by interaction tracing builds. 
From f4e212457aa98fa4458c6721ff3f85ebbcea6c7c Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Thu, 17 Dec 2020 00:50:48 -0600 Subject: [PATCH 22/30] useRefresh -> useCacheRefresh useRefresh is probably too general. We may also add additional APIs related to the cache, and including the word "cache" in all of them signals they are related. --- .../src/server/ReactPartialRendererHooks.js | 4 +-- .../src/ReactFiberHooks.new.js | 36 +++++++++---------- .../src/ReactFiberHooks.old.js | 36 +++++++++---------- .../src/ReactInternalTypes.js | 4 +-- .../src/__tests__/ReactCache-test.js | 14 ++++---- .../react-server/src/ReactFlightServer.js | 2 +- .../src/ReactSuspenseTestUtils.js | 2 +- packages/react/index.classic.fb.js | 2 +- packages/react/index.experimental.js | 2 +- packages/react/index.js | 2 +- packages/react/index.modern.fb.js | 2 +- packages/react/src/React.js | 4 +-- packages/react/src/ReactHooks.js | 4 +-- 13 files changed, 57 insertions(+), 57 deletions(-) diff --git a/packages/react-dom/src/server/ReactPartialRendererHooks.js b/packages/react-dom/src/server/ReactPartialRendererHooks.js index 087e81b0e7683..50edb72c2844a 100644 --- a/packages/react-dom/src/server/ReactPartialRendererHooks.js +++ b/packages/react-dom/src/server/ReactPartialRendererHooks.js @@ -489,7 +489,7 @@ function useOpaqueIdentifier(): OpaqueIDType { ); } -function useRefresh(): (?() => T, ?T) => void { +function useCacheRefresh(): (?() => T, ?T) => void { invariant(false, 'Not implemented.'); } @@ -524,5 +524,5 @@ export const Dispatcher: DispatcherType = { if (enableCache) { Dispatcher.getCacheForType = getCacheForType; - Dispatcher.useRefresh = useRefresh; + Dispatcher.useCacheRefresh = useCacheRefresh; } diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 4154e9d13b3ef..f30eb98ac922b 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ 
-1921,7 +1921,7 @@ export const ContextOnlyDispatcher: Dispatcher = { }; if (enableCache) { (ContextOnlyDispatcher: Dispatcher).getCacheForType = getCacheForType; - (ContextOnlyDispatcher: Dispatcher).useRefresh = throwInvalidHookError; + (ContextOnlyDispatcher: Dispatcher).useCacheRefresh = throwInvalidHookError; } const HooksDispatcherOnMount: Dispatcher = { @@ -1946,7 +1946,7 @@ const HooksDispatcherOnMount: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnMount: Dispatcher).useRefresh = mountRefresh; + (HooksDispatcherOnMount: Dispatcher).useCacheRefresh = mountRefresh; } const HooksDispatcherOnUpdate: Dispatcher = { @@ -1971,7 +1971,7 @@ const HooksDispatcherOnUpdate: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnUpdate: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnUpdate: Dispatcher).useRefresh = updateRefresh; + (HooksDispatcherOnUpdate: Dispatcher).useCacheRefresh = updateRefresh; } const HooksDispatcherOnRerender: Dispatcher = { @@ -1996,7 +1996,7 @@ const HooksDispatcherOnRerender: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnRerender: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnRerender: Dispatcher).useRefresh = updateRefresh; + (HooksDispatcherOnRerender: Dispatcher).useCacheRefresh = updateRefresh; } let HooksDispatcherOnMountInDEV: Dispatcher | null = null; @@ -2154,8 +2154,8 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnMountInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (HooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; mountHookTypesDev(); return mountRefresh(); }; @@ -2284,8 +2284,8 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountWithHookTypesInDEV: 
Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return mountRefresh(); }; @@ -2414,8 +2414,8 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnUpdateInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (HooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return updateRefresh(); }; @@ -2545,8 +2545,8 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnRerenderInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (HooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return updateRefresh(); }; @@ -2690,8 +2690,8 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; - (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return mountRefresh(); }; @@ -2835,8 +2835,8 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; - (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useRefresh = function useRefresh() { - 
currentHookNameInDev = 'useRefresh'; + (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return updateRefresh(); }; @@ -2981,8 +2981,8 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; - (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return updateRefresh(); }; diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 176f929b671ab..bbab1122c2817 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -1921,7 +1921,7 @@ export const ContextOnlyDispatcher: Dispatcher = { }; if (enableCache) { (ContextOnlyDispatcher: Dispatcher).getCacheForType = getCacheForType; - (ContextOnlyDispatcher: Dispatcher).useRefresh = throwInvalidHookError; + (ContextOnlyDispatcher: Dispatcher).useCacheRefresh = throwInvalidHookError; } const HooksDispatcherOnMount: Dispatcher = { @@ -1946,7 +1946,7 @@ const HooksDispatcherOnMount: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnMount: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnMount: Dispatcher).useRefresh = mountRefresh; + (HooksDispatcherOnMount: Dispatcher).useCacheRefresh = mountRefresh; } const HooksDispatcherOnUpdate: Dispatcher = { @@ -1971,7 +1971,7 @@ const HooksDispatcherOnUpdate: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnUpdate: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnUpdate: Dispatcher).useRefresh = updateRefresh; + (HooksDispatcherOnUpdate: 
Dispatcher).useCacheRefresh = updateRefresh; } const HooksDispatcherOnRerender: Dispatcher = { @@ -1996,7 +1996,7 @@ const HooksDispatcherOnRerender: Dispatcher = { }; if (enableCache) { (HooksDispatcherOnRerender: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnRerender: Dispatcher).useRefresh = updateRefresh; + (HooksDispatcherOnRerender: Dispatcher).useCacheRefresh = updateRefresh; } let HooksDispatcherOnMountInDEV: Dispatcher | null = null; @@ -2154,8 +2154,8 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnMountInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (HooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; mountHookTypesDev(); return mountRefresh(); }; @@ -2284,8 +2284,8 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (HooksDispatcherOnMountWithHookTypesInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return mountRefresh(); }; @@ -2414,8 +2414,8 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; - (HooksDispatcherOnUpdateInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (HooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return updateRefresh(); }; @@ -2545,8 +2545,8 @@ if (__DEV__) { }; if (enableCache) { (HooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; - 
(HooksDispatcherOnRerenderInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (HooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return updateRefresh(); }; @@ -2690,8 +2690,8 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).getCacheForType = getCacheForType; - (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (InvalidNestedHooksDispatcherOnMountInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return mountRefresh(); }; @@ -2835,8 +2835,8 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).getCacheForType = getCacheForType; - (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (InvalidNestedHooksDispatcherOnUpdateInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return updateRefresh(); }; @@ -2981,8 +2981,8 @@ if (__DEV__) { }; if (enableCache) { (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).getCacheForType = getCacheForType; - (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useRefresh = function useRefresh() { - currentHookNameInDev = 'useRefresh'; + (InvalidNestedHooksDispatcherOnRerenderInDEV: Dispatcher).useCacheRefresh = function useCacheRefresh() { + currentHookNameInDev = 'useCacheRefresh'; updateHookTypesDev(); return updateRefresh(); }; diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index c4a587060632d..6add5d08df6e6 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ 
b/packages/react-reconciler/src/ReactInternalTypes.js @@ -43,7 +43,7 @@ export type HookType = | 'useTransition' | 'useMutableSource' | 'useOpaqueIdentifier' - | 'useRefresh'; + | 'useCacheRefresh'; export type ReactPriorityLevel = 99 | 98 | 97 | 96 | 95 | 90; @@ -319,7 +319,7 @@ export type Dispatcher = {| subscribe: MutableSourceSubscribeFn, ): Snapshot, useOpaqueIdentifier(): any, - useRefresh?: () => (?() => T, ?T) => void, + useCacheRefresh?: () => (?() => T, ?T) => void, unstable_isNewReconciler?: boolean, |}; diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index 9e527735afca8..3970151b7e6b2 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -4,7 +4,7 @@ let Cache; let getCacheForType; let Scheduler; let Suspense; -let useRefresh; +let useCacheRefresh; let textService; let textServiceVersion; @@ -19,7 +19,7 @@ describe('ReactCache', () => { Scheduler = require('scheduler'); Suspense = React.Suspense; getCacheForType = React.unstable_getCacheForType; - useRefresh = React.unstable_useRefresh; + useCacheRefresh = React.unstable_useCacheRefresh; // Represents some data service that returns text. It likely has additional // caching layers, like a CDN or the local browser cache. 
It can be mutated @@ -417,7 +417,7 @@ describe('ReactCache', () => { test('refresh a cache', async () => { let refresh; function App() { - refresh = useRefresh(); + refresh = useCacheRefresh(); return ; } @@ -461,7 +461,7 @@ describe('ReactCache', () => { test('refresh the root cache', async () => { let refresh; function App() { - refresh = useRefresh(); + refresh = useCacheRefresh(); return ; } @@ -503,7 +503,7 @@ describe('ReactCache', () => { test('refresh a cache with seed data', async () => { let refresh; function App() { - refresh = useRefresh(); + refresh = useCacheRefresh(); return ; } @@ -551,7 +551,7 @@ describe('ReactCache', () => { test('refreshing a parent cache also refreshes its children', async () => { let refreshShell; function RefreshShell() { - refreshShell = useRefresh(); + refreshShell = useCacheRefresh(); return null; } @@ -629,7 +629,7 @@ describe('ReactCache', () => { async () => { let refreshFirstBoundary; function RefreshFirstBoundary() { - refreshFirstBoundary = useRefresh(); + refreshFirstBoundary = useCacheRefresh(); return null; } diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index acc446dd14df3..ffbc13edaf60f 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -804,7 +804,7 @@ const Dispatcher: DispatcherType = { useEffect: (unsupportedHook: any), useOpaqueIdentifier: (unsupportedHook: any), useMutableSource: (unsupportedHook: any), - useRefresh(): (?() => T, ?T) => void { + useCacheRefresh(): (?() => T, ?T) => void { return unsupportedRefresh; }, }; diff --git a/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js b/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js index 37ecb3a1c3c6a..43c6c5184d2b9 100644 --- a/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js +++ b/packages/react-suspense-test-utils/src/ReactSuspenseTestUtils.js @@ -44,7 +44,7 @@ export function 
waitForSuspense(fn: () => T): Promise { useTransition: unsupported, useOpaqueIdentifier: unsupported, useMutableSource: unsupported, - useRefresh: unsupported, + useCacheRefresh: unsupported, }; // Not using async/await because we don't compile it. return new Promise((resolve, reject) => { diff --git a/packages/react/index.classic.fb.js b/packages/react/index.classic.fb.js index 79bb0696ec1e0..366e86626fd15 100644 --- a/packages/react/index.classic.fb.js +++ b/packages/react/index.classic.fb.js @@ -52,7 +52,7 @@ export { SuspenseList as unstable_SuspenseList, unstable_getCacheForType, unstable_Cache, - unstable_useRefresh, + unstable_useCacheRefresh, // enableScopeAPI unstable_Scope, unstable_useOpaqueIdentifier, diff --git a/packages/react/index.experimental.js b/packages/react/index.experimental.js index 43c3f08eb000d..ba0d205f81297 100644 --- a/packages/react/index.experimental.js +++ b/packages/react/index.experimental.js @@ -47,7 +47,7 @@ export { unstable_useOpaqueIdentifier, unstable_getCacheForType, unstable_Cache, - unstable_useRefresh, + unstable_useCacheRefresh, // enableDebugTracing unstable_DebugTracingMode, } from './src/React'; diff --git a/packages/react/index.js b/packages/react/index.js index fb074650a0852..80e6591171b5c 100644 --- a/packages/react/index.js +++ b/packages/react/index.js @@ -84,5 +84,5 @@ export { unstable_useOpaqueIdentifier, unstable_getCacheForType, unstable_Cache, - unstable_useRefresh, + unstable_useCacheRefresh, } from './src/React'; diff --git a/packages/react/index.modern.fb.js b/packages/react/index.modern.fb.js index 1ab2bdd13dbe0..cf459c0bfb442 100644 --- a/packages/react/index.modern.fb.js +++ b/packages/react/index.modern.fb.js @@ -51,7 +51,7 @@ export { SuspenseList as unstable_SuspenseList, unstable_getCacheForType, unstable_Cache, - unstable_useRefresh, + unstable_useCacheRefresh, // enableScopeAPI unstable_Scope, unstable_useOpaqueIdentifier, diff --git a/packages/react/src/React.js b/packages/react/src/React.js 
index 9567ff7dc3b22..84490ef902c97 100644 --- a/packages/react/src/React.js +++ b/packages/react/src/React.js @@ -49,7 +49,7 @@ import { useTransition, useDeferredValue, useOpaqueIdentifier, - useRefresh, + useCacheRefresh, } from './ReactHooks'; import { createElementWithValidation, @@ -114,7 +114,7 @@ export { REACT_SUSPENSE_LIST_TYPE as SuspenseList, REACT_LEGACY_HIDDEN_TYPE as unstable_LegacyHidden, getCacheForType as unstable_getCacheForType, - useRefresh as unstable_useRefresh, + useCacheRefresh as unstable_useCacheRefresh, REACT_CACHE_TYPE as unstable_Cache, // enableFundamentalAPI createFundamental as unstable_createFundamental, diff --git a/packages/react/src/ReactHooks.js b/packages/react/src/ReactHooks.js index 9a087af4a1f1d..c1602a9bb53ed 100644 --- a/packages/react/src/ReactHooks.js +++ b/packages/react/src/ReactHooks.js @@ -181,8 +181,8 @@ export function useMutableSource( return dispatcher.useMutableSource(source, getSnapshot, subscribe); } -export function useRefresh(): (?() => T, ?T) => void { +export function useCacheRefresh(): (?() => T, ?T) => void { const dispatcher = resolveDispatcher(); // $FlowFixMe This is unstable, thus optional - return dispatcher.useRefresh(); + return dispatcher.useCacheRefresh(); } From e451117cc674f6d137c18251d7c35c0a132fdda4 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Thu, 17 Dec 2020 02:51:33 -0600 Subject: [PATCH 23/30] More tests --- .../src/__tests__/ReactCache-test.js | 218 ++++++++++++++++++ 1 file changed, 218 insertions(+) diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index 3970151b7e6b2..69f5fed69fbc6 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -5,6 +5,8 @@ let getCacheForType; let Scheduler; let Suspense; let useCacheRefresh; +let startTransition; +let useState; let textService; let textServiceVersion; @@ -20,6 +22,8 @@ 
describe('ReactCache', () => { Suspense = React.Suspense; getCacheForType = React.unstable_getCacheForType; useCacheRefresh = React.unstable_useCacheRefresh; + startTransition = React.unstable_startTransition; + useState = React.useState; // Represents some data service that returns text. It likely has additional // caching layers, like a CDN or the local browser cache. It can be mutated @@ -695,4 +699,218 @@ describe('ReactCache', () => { expect(root).toMatchRenderedOutput('A [v2]A [v1]'); }, ); + + // @gate experimental + test( + 'mount a new Cache boundary in a sibling while simultaneously ' + + 'resolving a Suspense boundary', + async () => { + function App({showMore}) { + return ( + <> + {showMore ? ( + }> + + + + + ) : null} + }> + + {' '} + {' '} + + + + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + expect(Scheduler).toHaveYielded([ + 'Cache miss! [A]', + 'Cache miss! [B]', + 'Loading...', + ]); + + await ReactNoop.act(async () => { + // This will resolve the content in the first cache + resolveText('A'); + resolveText('B'); + // Now let's simulate a mutation + mutateRemoteTextService(); + // And mount the second tree, which includes new content + root.render(); + }); + expect(Scheduler).toHaveYielded([ + // The new tree should use a fresh cache + 'Cache miss! [A]', + 'Loading...', + // The other tree uses the cached responses. This demonstrates that the + // requests are not dropped. 
+ 'A [v1]', + 'B [v1]', + ]); + + // Now resolve the second tree + await ReactNoop.act(async () => { + resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A [v2]']); + expect(root).toMatchRenderedOutput('A [v2] A [v1] B [v1]'); + }, + ); + + // @gate experimental + test('cache pool is cleared once transitions that depend on it commit their shell', async () => { + function Child({text}) { + return ( + + + + ); + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render( + }>(empty), + ); + }); + expect(Scheduler).toHaveYielded([]); + expect(root).toMatchRenderedOutput('(empty)'); + + await ReactNoop.act(async () => { + startTransition(() => { + root.render( + }> + + , + ); + }); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('(empty)'); + + await ReactNoop.act(async () => { + startTransition(() => { + root.render( + }> + + + , + ); + }); + }); + expect(Scheduler).toHaveYielded([ + // No cache miss, because it uses the pooled cache + 'Loading...', + ]); + expect(root).toMatchRenderedOutput('(empty)'); + + // Resolve the request + await ReactNoop.act(async () => { + await resolveText('A'); + }); + expect(Scheduler).toHaveYielded(['A', 'A']); + expect(root).toMatchRenderedOutput('AA'); + + // Now do another transition + await ReactNoop.act(async () => { + startTransition(() => { + root.render( + }> + + + + , + ); + }); + }); + expect(Scheduler).toHaveYielded([ + // First two children use the old cache because they already finished + 'A', + 'A', + // The new child uses a fresh cache + 'Cache miss! 
[A]', + 'Loading...', + 'A', + 'A', + 'A', + ]); + expect(root).toMatchRenderedOutput('AAA'); + }); + + // @gate experimental + test('cache pool is not cleared by arbitrary commits', async () => { + function App() { + return ( + <> + + + + ); + } + + let showMore; + function ShowMore() { + const [shouldShow, _showMore] = useState(false); + showMore = () => _showMore(true); + return ( + <> + }> + {shouldShow ? ( + + + + ) : null} + + + ); + } + + let updateUnrelated; + function Unrelated() { + const [count, _updateUnrelated] = useState(0); + updateUnrelated = _updateUnrelated; + return ; + } + + const root = ReactNoop.createRoot(); + await ReactNoop.act(async () => { + root.render(); + }); + expect(Scheduler).toHaveYielded(['0']); + expect(root).toMatchRenderedOutput('0'); + + await ReactNoop.act(async () => { + startTransition(() => { + showMore(); + }); + }); + expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); + expect(root).toMatchRenderedOutput('0'); + + await ReactNoop.act(async () => { + updateUnrelated(1); + }); + expect(Scheduler).toHaveYielded([ + '1', + + // Happens to re-render the fallback. Doesn't need to, but not relevant + // to this test. 
+ 'Loading...', + ]); + expect(root).toMatchRenderedOutput('1'); + + await ReactNoop.act(async () => { + resolveText('A'); + mutateRemoteTextService(); + }); + expect(Scheduler).toHaveYielded(['A']); + expect(root).toMatchRenderedOutput('A1'); + }); }); From 8e0b7561410116828a7d545b473f4835a9622542 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Thu, 17 Dec 2020 13:33:00 -0600 Subject: [PATCH 24/30] Wrap more things in flag --- .../src/ReactFiberBeginWork.new.js | 4 +-- .../src/ReactFiberBeginWork.old.js | 4 +-- .../src/ReactFiberCacheComponent.js | 28 +++++++++--------- .../src/ReactFiberRoot.new.js | 29 ++++++++++++------- .../src/ReactFiberRoot.old.js | 29 ++++++++++++------- 5 files changed, 55 insertions(+), 39 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 77355dc523091..1694ae287b617 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -1203,8 +1203,6 @@ function updateHostRoot(current, workInProgress, renderLanes) { cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, nextProps, null, renderLanes); const nextState = workInProgress.memoizedState; - // Caution: React DevTools currently depends on this property - // being called "element". if (enableCache) { const nextCacheInstance: CacheInstance = nextState.cacheInstance; @@ -1221,6 +1219,8 @@ function updateHostRoot(current, workInProgress, renderLanes) { } } + // Caution: React DevTools currently depends on this property + // being called "element". 
const nextChildren = nextState.element; if (nextChildren === prevChildren) { resetHydrationState(); diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 02d0e9447bf76..b12d9796cd247 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -1203,8 +1203,6 @@ function updateHostRoot(current, workInProgress, renderLanes) { cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, nextProps, null, renderLanes); const nextState = workInProgress.memoizedState; - // Caution: React DevTools currently depends on this property - // being called "element". if (enableCache) { const nextCacheInstance: CacheInstance = nextState.cacheInstance; @@ -1221,6 +1219,8 @@ function updateHostRoot(current, workInProgress, renderLanes) { } } + // Caution: React DevTools currently depends on this property + // being called "element". const nextChildren = nextState.element; if (nextChildren === prevChildren) { resetHydrationState(); diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.js index 3aed34685a823..d57b4f43441eb 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.js @@ -28,19 +28,21 @@ export type PooledCacheInstance = {| provider: null, |}; -export const CacheContext: ReactContext = { - $$typeof: REACT_CONTEXT_TYPE, - // We don't use Consumer/Provider for Cache components. So we'll cheat. - Consumer: (null: any), - Provider: (null: any), - _calculateChangedBits: null, - // We'll initialize these at the root. - _currentValue: (null: any), - _currentValue2: (null: any), - _threadCount: 0, -}; - -if (__DEV__) { +export const CacheContext: ReactContext = enableCache + ? 
{ + $$typeof: REACT_CONTEXT_TYPE, + // We don't use Consumer/Provider for Cache components. So we'll cheat. + Consumer: (null: any), + Provider: (null: any), + _calculateChangedBits: null, + // We'll initialize these at the root. + _currentValue: (null: any), + _currentValue2: (null: any), + _threadCount: 0, + } + : (null: any); + +if (__DEV__ && enableCache) { CacheContext._currentRenderer = null; CacheContext._currentRenderer2 = null; } diff --git a/packages/react-reconciler/src/ReactFiberRoot.new.js b/packages/react-reconciler/src/ReactFiberRoot.new.js index 69ddb7ff4717a..adc5d3b94df8f 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.new.js +++ b/packages/react-reconciler/src/ReactFiberRoot.new.js @@ -103,17 +103,24 @@ export function createFiberRoot( root.current = uninitializedFiber; uninitializedFiber.stateNode = root; - const initialState = { - element: null, - // For the root cache, we won't bother to lazily initialize the map. Seed an - // empty one. This saves use the trouble of having to initialize in an - // updater function. - cacheInstance: { - cache: new Map(), - provider: uninitializedFiber, - }, - }; - uninitializedFiber.memoizedState = initialState; + if (enableCache) { + const initialState = { + element: null, + // For the root cache, we won't bother to lazily initialize the map. Seed an + // empty one. This saves use the trouble of having to initialize in an + // updater function. 
+ cacheInstance: { + cache: new Map(), + provider: uninitializedFiber, + }, + }; + uninitializedFiber.memoizedState = initialState; + } else { + const initialState = { + element: null, + }; + uninitializedFiber.memoizedState = initialState; + } initializeUpdateQueue(uninitializedFiber); diff --git a/packages/react-reconciler/src/ReactFiberRoot.old.js b/packages/react-reconciler/src/ReactFiberRoot.old.js index 502edb84dafed..be9166c828734 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.old.js +++ b/packages/react-reconciler/src/ReactFiberRoot.old.js @@ -103,17 +103,24 @@ export function createFiberRoot( root.current = uninitializedFiber; uninitializedFiber.stateNode = root; - const initialState = { - element: null, - // For the root cache, we won't bother to lazily initialize the map. Seed an - // empty one. This saves use the trouble of having to initialize in an - // updater function. - cacheInstance: { - cache: new Map(), - provider: uninitializedFiber, - }, - }; - uninitializedFiber.memoizedState = initialState; + if (enableCache) { + const initialState = { + element: null, + // For the root cache, we won't bother to lazily initialize the map. Seed an + // empty one. This saves use the trouble of having to initialize in an + // updater function. + cacheInstance: { + cache: new Map(), + provider: uninitializedFiber, + }, + }; + uninitializedFiber.memoizedState = initialState; + } else { + const initialState = { + element: null, + }; + uninitializedFiber.memoizedState = initialState; + } initializeUpdateQueue(uninitializedFiber); From eed92854d937ff2f96a3ef7c384c7260d3da33a2 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Thu, 17 Dec 2020 15:16:24 -0600 Subject: [PATCH 25/30] Remove default transition priority for refreshes The flaw here is that if another update in the same event is not wrapped in `startTransition`, then it won't be batched with the refresh. The solution is to wrap both in the same `startTransition`. 
It's worse for them not to be batched than for the batch to have too high a priority. We'll consider adding a warning. --- .../src/ReactFiberHooks.new.js | 55 ++++++++----------- .../src/ReactFiberHooks.old.js | 55 ++++++++----------- .../src/__tests__/ReactCache-test.js | 8 +-- 3 files changed, 50 insertions(+), 68 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index f30eb98ac922b..5c04e6c050aad 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -1729,40 +1729,31 @@ function refreshCache( ) { const provider = cacheInstance.provider; - // Inlined startTransition - // TODO: Maybe we shouldn't automatically give this transition priority. Are - // there valid use cases for a high-pri refresh? Like if the content is - // super stale and you want to immediately hide it. - const prevTransition = ReactCurrentBatchConfig.transition; - ReactCurrentBatchConfig.transition = 1; - // TODO: Do we really need the try/finally? I don't think any of these - // functions would ever throw unless there's an internal error. 
- const refreshUpdate = createUpdate(eventTime, lane); - const payload = { - cacheInstance: { - provider: provider, - cache: seededCache, - }, - }; - refreshUpdate.payload = payload; - enqueueUpdate(provider, refreshUpdate); - } finally { - ReactCurrentBatchConfig.transition = prevTransition; + const eventTime = requestEventTime(); + const lane = requestUpdateLane(provider); + // TODO: Does Cache work in legacy mode? Should decide and write a test. + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + const seededCache = new Map(); + if (seedKey !== null && seedKey !== undefined && root !== null) { + // Seed the cache with the value passed by the caller. This could be from + // a server mutation, or it could be a streaming response. + seededCache.set(seedKey, seedValue); } + + // Schedule an update on the cache boundary to trigger a refresh. + const refreshUpdate = createUpdate(eventTime, lane); + const payload = { + cacheInstance: { + provider: provider, + cache: seededCache, + }, + }; + refreshUpdate.payload = payload; + enqueueUpdate(provider, refreshUpdate); } function dispatchAction( diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index bbab1122c2817..26edcda15f94e 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -1729,40 +1729,31 @@ function refreshCache( ) { const provider = cacheInstance.provider; - // Inlined startTransition - // TODO: Maybe we shouldn't automatically give this transition priority. Are - // there valid use cases for a high-pri refresh? Like if the content is - // super stale and you want to immediately hide it. - const prevTransition = ReactCurrentBatchConfig.transition; - ReactCurrentBatchConfig.transition = 1; - // TODO: Do we really need the try/finally? I don't think any of these - // functions would ever throw unless there's an internal error. 
- try { - const eventTime = requestEventTime(); - const lane = requestUpdateLane(provider); - // TODO: Does Cache work in legacy mode? Should decide and write a test. - const root = scheduleUpdateOnFiber(provider, lane, eventTime); - - const seededCache = new Map(); - if (seedKey !== null && seedKey !== undefined && root !== null) { - // Seed the cache with the value passed by the caller. This could be from - // a server mutation, or it could be a streaming response. - seededCache.set(seedKey, seedValue); - } + // TODO: Consider warning if the refresh is at discrete priority, or if we + // otherwise suspect that it wasn't batched properly. - // Schedule an update on the cache boundary to trigger a refresh. - const refreshUpdate = createUpdate(eventTime, lane); - const payload = { - cacheInstance: { - provider: provider, - cache: seededCache, - }, - }; - refreshUpdate.payload = payload; - enqueueUpdate(provider, refreshUpdate); - } finally { - ReactCurrentBatchConfig.transition = prevTransition; + const eventTime = requestEventTime(); + const lane = requestUpdateLane(provider); + // TODO: Does Cache work in legacy mode? Should decide and write a test. + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + const seededCache = new Map(); + if (seedKey !== null && seedKey !== undefined && root !== null) { + // Seed the cache with the value passed by the caller. This could be from + // a server mutation, or it could be a streaming response. + seededCache.set(seedKey, seedValue); } + + // Schedule an update on the cache boundary to trigger a refresh. 
+ const refreshUpdate = createUpdate(eventTime, lane); + const payload = { + cacheInstance: { + provider: provider, + cache: seededCache, + }, + }; + refreshUpdate.payload = payload; + enqueueUpdate(provider, refreshUpdate); } function dispatchAction( diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index 69f5fed69fbc6..9d2991859952d 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -448,7 +448,7 @@ describe('ReactCache', () => { // Mutate the text service, then refresh for new data. mutateRemoteTextService(); await ReactNoop.act(async () => { - refresh(); + startTransition(() => refresh()); }); expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); expect(root).toMatchRenderedOutput('A [v1]'); @@ -490,7 +490,7 @@ describe('ReactCache', () => { // Mutate the text service, then refresh for new data. mutateRemoteTextService(); await ReactNoop.act(async () => { - refresh(); + startTransition(() => refresh()); }); expect(Scheduler).toHaveYielded(['Cache miss! [A]', 'Loading...']); expect(root).toMatchRenderedOutput('A [v1]'); @@ -544,7 +544,7 @@ describe('ReactCache', () => { status: 'resolved', value: textServiceVersion, }); - refresh(createTextCache, seededCache); + startTransition(() => refresh(createTextCache, seededCache)); }); // The root should re-render without a cache miss. expect(Scheduler).toHaveYielded(['A [v2]']); @@ -610,7 +610,7 @@ describe('ReactCache', () => { // refresh, since its cache is nested inside the outer one. mutateRemoteTextService(); await ReactNoop.act(async () => { - refreshShell(); + startTransition(() => refreshShell()); }); expect(Scheduler).toHaveYielded([ 'Cache miss! 
[A]', From 9894eb2aa4d5da82af7619a62ee13768c14e68f4 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Thu, 17 Dec 2020 17:01:40 -0600 Subject: [PATCH 26/30] Fork ReactFiberCacheComponent Split into `new` and `old` reconciler files --- .../src/ReactFiberBeginWork.new.js | 4 +- .../src/ReactFiberBeginWork.old.js | 4 +- ...ent.js => ReactFiberCacheComponent.new.js} | 0 .../src/ReactFiberCacheComponent.old.js | 157 ++++++++++++++++++ .../src/ReactFiberCompleteWork.new.js | 4 +- .../src/ReactFiberCompleteWork.old.js | 4 +- .../src/ReactFiberHooks.new.js | 4 +- .../src/ReactFiberHooks.old.js | 4 +- .../src/ReactFiberLane.new.js | 2 +- .../src/ReactFiberLane.old.js | 2 +- .../src/ReactFiberNewContext.new.js | 2 +- .../src/ReactFiberNewContext.old.js | 2 +- .../src/ReactFiberOffscreenComponent.js | 2 +- .../src/ReactFiberUnwindWork.new.js | 4 +- .../src/ReactFiberUnwindWork.old.js | 4 +- .../src/ReactInternalTypes.js | 2 +- 16 files changed, 179 insertions(+), 22 deletions(-) rename packages/react-reconciler/src/{ReactFiberCacheComponent.js => ReactFiberCacheComponent.new.js} (100%) create mode 100644 packages/react-reconciler/src/ReactFiberCacheComponent.old.js diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 1694ae287b617..592b57f577463 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -27,7 +27,7 @@ import type { Cache, CacheInstance, PooledCacheInstance, -} from './ReactFiberCacheComponent'; +} from './ReactFiberCacheComponent.new'; import type {UpdateQueue} from './ReactUpdateQueue.new'; import checkPropTypes from 'shared/checkPropTypes'; @@ -217,7 +217,7 @@ import { hasFreshCacheProvider, pushCachePool, getFreshCacheProviderIfExists, -} from './ReactFiberCacheComponent'; +} from './ReactFiberCacheComponent.new'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; diff --git 
a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index b12d9796cd247..d9d95da611b84 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -27,7 +27,7 @@ import type { Cache, CacheInstance, PooledCacheInstance, -} from './ReactFiberCacheComponent'; +} from './ReactFiberCacheComponent.old'; import type {UpdateQueue} from './ReactUpdateQueue.old'; import checkPropTypes from 'shared/checkPropTypes'; @@ -217,7 +217,7 @@ import { hasFreshCacheProvider, pushCachePool, getFreshCacheProviderIfExists, -} from './ReactFiberCacheComponent'; +} from './ReactFiberCacheComponent.old'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.js b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js similarity index 100% rename from packages/react-reconciler/src/ReactFiberCacheComponent.js rename to packages/react-reconciler/src/ReactFiberCacheComponent.new.js diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js new file mode 100644 index 0000000000000..8b80a7526806e --- /dev/null +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js @@ -0,0 +1,157 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow + */ + +import type {ReactContext} from 'shared/ReactTypes'; +import type {FiberRoot} from './ReactInternalTypes'; + +import {enableCache} from 'shared/ReactFeatureFlags'; +import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; +import {HostRoot} from './ReactWorkTags'; + +import {pushProvider, popProvider} from './ReactFiberNewContext.old'; + +export type Cache = Map<() => mixed, mixed>; + +export type CacheInstance = {| + cache: Cache, + provider: Fiber, +|}; + +export type PooledCacheInstance = {| + cache: Cache, + provider: null, +|}; + +export const CacheContext: ReactContext = enableCache + ? { + $$typeof: REACT_CONTEXT_TYPE, + // We don't use Consumer/Provider for Cache components. So we'll cheat. + Consumer: (null: any), + Provider: (null: any), + _calculateChangedBits: null, + // We'll initialize these at the root. + _currentValue: (null: any), + _currentValue2: (null: any), + _threadCount: 0, + } + : (null: any); + +if (__DEV__ && enableCache) { + CacheContext._currentRenderer = null; + CacheContext._currentRenderer2 = null; +} + +// A parent cache refresh always overrides any nested cache. So there will only +// ever be a single fresh cache on the context stack. +let freshCacheInstance: CacheInstance | null = null; + +export function pushStaleCacheProvider( + workInProgress: Fiber, + cacheInstance: CacheInstance, +) { + if (!enableCache) { + return; + } + if (__DEV__) { + if (freshCacheInstance !== null) { + console.error( + 'Already inside a fresh cache boundary. This is a bug in React.', + ); + } + } + pushProvider(workInProgress, CacheContext, cacheInstance); +} + +export function pushFreshCacheProvider( + workInProgress: Fiber, + cacheInstance: CacheInstance, +) { + if (!enableCache) { + return; + } + if (__DEV__) { + if ( + freshCacheInstance !== null && + // TODO: Remove this exception for roots. There are a few tests that throw + // in pushHostContainer, before the cache context is pushed. 
Not a huge + // issue, but should still fix. + workInProgress.tag !== HostRoot + ) { + console.error( + 'Already inside a fresh cache boundary. This is a bug in React.', + ); + } + } + freshCacheInstance = cacheInstance; + pushProvider(workInProgress, CacheContext, cacheInstance); +} + +export function popCacheProvider( + workInProgress: Fiber, + cacheInstance: CacheInstance, +) { + if (!enableCache) { + return; + } + if (__DEV__) { + if (freshCacheInstance !== null && freshCacheInstance !== cacheInstance) { + console.error( + 'Unexpected cache instance on context. This is a bug in React.', + ); + } + } + freshCacheInstance = null; + popProvider(CacheContext, workInProgress); +} + +export function hasFreshCacheProvider() { + if (!enableCache) { + return false; + } + return freshCacheInstance !== null; +} + +export function getFreshCacheProviderIfExists(): CacheInstance | null { + if (!enableCache) { + return null; + } + return freshCacheInstance; +} + +export function pushCachePool( + root: FiberRoot, + cacheInstance: PooledCacheInstance, +) { + // This will temporarily override the root's pooled cache, so that any new + // Cache boundaries in the subtree use this one. The previous value on the + // "stack" is stored on the cache instance. We will restore it during the + // complete phase. + // + // The more straightforward way to do this would be to use the array-based + // stack (push/pop). Maybe this is too clever. + const prevPooledCacheOnStack = root.pooledCache; + root.pooledCache = cacheInstance.cache; + // This is never supposed to be null. I'm cheating. Sorry. It will be reset to + // the correct type when we pop. + cacheInstance.cache = ((prevPooledCacheOnStack: any): Cache); +} + +export function popCachePool( + root: FiberRoot, + cacheInstance: PooledCacheInstance, +) { + const retryCache: Cache = (root.pooledCache: any); + if (__DEV__) { + if (retryCache === null) { + console.error('Expected to have a pooled cache. 
This is a bug in React.'); + } + } + root.pooledCache = cacheInstance.cache; + cacheInstance.cache = retryCache; +} diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 27b9d7044c1d7..7007cb2fb190d 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -31,7 +31,7 @@ import type {OffscreenState} from './ReactFiberOffscreenComponent'; import type { CacheInstance, PooledCacheInstance, -} from './ReactFiberCacheComponent'; +} from './ReactFiberCacheComponent.new'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; @@ -161,7 +161,7 @@ import { import {resetChildFibers} from './ReactChildFiber.new'; import {createScopeInstance} from './ReactFiberScope.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; -import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent'; +import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent.new'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. 
This turns a Placement into diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index ebfefec3130c8..67301d481b4a2 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -31,7 +31,7 @@ import type {OffscreenState} from './ReactFiberOffscreenComponent'; import type { CacheInstance, PooledCacheInstance, -} from './ReactFiberCacheComponent'; +} from './ReactFiberCacheComponent.old'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; @@ -161,7 +161,7 @@ import { import {resetChildFibers} from './ReactChildFiber.old'; import {createScopeInstance} from './ReactFiberScope.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; -import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent'; +import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent.old'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. 
This turns a Placement into diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 5c04e6c050aad..3679f84220662 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -19,7 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent.new'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -93,7 +93,7 @@ import { import {getIsRendering} from './ReactCurrentFiber'; import {logStateUpdateScheduled} from './DebugTracing'; import {markStateUpdateScheduled} from './SchedulingProfiler'; -import {CacheContext} from './ReactFiberCacheComponent'; +import {CacheContext} from './ReactFiberCacheComponent.new'; import {createUpdate, enqueueUpdate} from './ReactUpdateQueue.new'; const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals; diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 26edcda15f94e..15ccfdf92c67d 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -19,7 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; -import type {CacheInstance} from './ReactFiberCacheComponent'; +import type {CacheInstance} from './ReactFiberCacheComponent.old'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -93,7 +93,7 @@ import { import 
{getIsRendering} from './ReactCurrentFiber'; import {logStateUpdateScheduled} from './DebugTracing'; import {markStateUpdateScheduled} from './SchedulingProfiler'; -import {CacheContext} from './ReactFiberCacheComponent'; +import {CacheContext} from './ReactFiberCacheComponent.old'; import {createUpdate, enqueueUpdate} from './ReactUpdateQueue.old'; const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals; diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index 9032d9b77cff2..3b078d4a3a9db 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -8,7 +8,7 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {Cache} from './ReactFiberCacheComponent.new'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index 95a3a3ed4f5ac..131daaad94c71 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -8,7 +8,7 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {Cache} from './ReactFiberCacheComponent.old'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. 
diff --git a/packages/react-reconciler/src/ReactFiberNewContext.new.js b/packages/react-reconciler/src/ReactFiberNewContext.new.js index 7bb83518eb8fd..f20bed4b23dfd 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.new.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.new.js @@ -34,7 +34,7 @@ import invariant from 'shared/invariant'; import is from 'shared/objectIs'; import {createUpdate, enqueueUpdate, ForceUpdate} from './ReactUpdateQueue.new'; import {markWorkInProgressReceivedUpdate} from './ReactFiberBeginWork.new'; -import {CacheContext} from './ReactFiberCacheComponent'; +import {CacheContext} from './ReactFiberCacheComponent.new'; import { enableSuspenseServerRenderer, enableCache, diff --git a/packages/react-reconciler/src/ReactFiberNewContext.old.js b/packages/react-reconciler/src/ReactFiberNewContext.old.js index 6901a1e28ae42..b7e677bc3b6e0 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.old.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.old.js @@ -34,7 +34,7 @@ import invariant from 'shared/invariant'; import is from 'shared/objectIs'; import {createUpdate, enqueueUpdate, ForceUpdate} from './ReactUpdateQueue.old'; import {markWorkInProgressReceivedUpdate} from './ReactFiberBeginWork.old'; -import {CacheContext} from './ReactFiberCacheComponent'; +import {CacheContext} from './ReactFiberCacheComponent.old'; import { enableSuspenseServerRenderer, enableCache, diff --git a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js index a72908a44ecb7..c0ad97116c141 100644 --- a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js +++ b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js @@ -12,7 +12,7 @@ import type {Lanes} from './ReactFiberLane.old'; import type { CacheInstance, PooledCacheInstance, -} from './ReactFiberCacheComponent'; +} from './ReactFiberCacheComponent.new'; export type OffscreenProps = {| // 
TODO: Pick an API before exposing the Offscreen type. I've chosen an enum diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index d4f6fe94097b2..5b1946a544c52 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -14,7 +14,7 @@ import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; import type { CacheInstance, PooledCacheInstance, -} from './ReactFiberCacheComponent'; +} from './ReactFiberCacheComponent.new'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; import { @@ -47,7 +47,7 @@ import { } from './ReactFiberContext.new'; import {popProvider} from './ReactFiberNewContext.new'; import {popRenderLanes, getWorkInProgressRoot} from './ReactFiberWorkLoop.new'; -import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent'; +import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; import invariant from 'shared/invariant'; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 296bf0436d68e..56359ad5d9846 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -14,7 +14,7 @@ import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; import type { CacheInstance, PooledCacheInstance, -} from './ReactFiberCacheComponent'; +} from './ReactFiberCacheComponent.old'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; import { @@ -47,7 +47,7 @@ import { } from './ReactFiberContext.old'; import {popProvider} from './ReactFiberNewContext.old'; import {popRenderLanes, getWorkInProgressRoot} from 
'./ReactFiberWorkLoop.old'; -import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent'; +import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; import invariant from 'shared/invariant'; diff --git a/packages/react-reconciler/src/ReactInternalTypes.js b/packages/react-reconciler/src/ReactInternalTypes.js index 6add5d08df6e6..cb34ac74948e0 100644 --- a/packages/react-reconciler/src/ReactInternalTypes.js +++ b/packages/react-reconciler/src/ReactInternalTypes.js @@ -25,7 +25,7 @@ import type {RootTag} from './ReactRootTags'; import type {TimeoutHandle, NoTimeout} from './ReactFiberHostConfig'; import type {Wakeable} from 'shared/ReactTypes'; import type {Interaction} from 'scheduler/src/Tracing'; -import type {Cache} from './ReactFiberCacheComponent'; +import type {Cache} from './ReactFiberCacheComponent.old'; // Unwind Circular: moved from ReactFiberHooks.old export type HookType = From ea6700c4c53bd6b26bfbed0b4099ba6a0e173103 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Thu, 17 Dec 2020 19:43:10 -0600 Subject: [PATCH 27/30] Only mutate `root.pooledCache` in complete/unwind Less indirection when accessing during the render phase and less hard to make a mutation mistake. 
--- .../src/ReactFiberBeginWork.new.js | 57 ++++++-------- .../src/ReactFiberBeginWork.old.js | 57 ++++++-------- .../src/ReactFiberCacheComponent.new.js | 74 +++++++++++++++---- .../src/ReactFiberCacheComponent.old.js | 74 +++++++++++++++---- .../src/ReactFiberCompleteWork.new.js | 19 +++-- .../src/ReactFiberCompleteWork.old.js | 19 +++-- .../src/ReactFiberLane.new.js | 25 ------- .../src/ReactFiberLane.old.js | 25 ------- .../src/ReactFiberUnwindWork.new.js | 34 ++++----- .../src/ReactFiberUnwindWork.old.js | 34 ++++----- .../src/ReactFiberWorkLoop.new.js | 4 +- .../src/ReactFiberWorkLoop.old.js | 4 +- 12 files changed, 218 insertions(+), 208 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 592b57f577463..38f77f3f19b6f 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -123,7 +123,6 @@ import { removeLanes, mergeLanes, getBumpedLaneForHydration, - requestCacheFromPool, } from './ReactFiberLane.new'; import { ConcurrentMode, @@ -215,8 +214,11 @@ import { pushFreshCacheProvider, pushStaleCacheProvider, hasFreshCacheProvider, + requestCacheFromPool, + pushRootCachePool, pushCachePool, getFreshCacheProviderIfExists, + getPooledCacheIfExists, } from './ReactFiberCacheComponent.new'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -672,13 +674,7 @@ function updateOffscreenComponent( // This isn't a refresh, it's a continuation of a previous render. // So we don't need to propagate a context change. } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. 
Please ' + - 'file an issue.', - ); - pushCachePool(root, prevCacheInstance); + pushCachePool(prevCacheInstance); } } } @@ -716,13 +712,7 @@ function updateOffscreenComponent( // This isn't a refresh, it's a continuation of a previous render. // So we don't need to propagate a context change. } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - pushCachePool(root, prevCacheInstance); + pushCachePool(prevCacheInstance); } } } @@ -784,7 +774,7 @@ function updateCacheComponent( ); // This will always be different from the parent cache; otherwise we would // have detected a fresh cache provider in the earlier branch. - const cache = requestCacheFromPool(root, renderLanes); + const cache = requestCacheFromPool(renderLanes); cacheInstance = { cache, provider: workInProgress, @@ -1204,7 +1194,11 @@ function updateHostRoot(current, workInProgress, renderLanes) { processUpdateQueue(workInProgress, nextProps, null, renderLanes); const nextState = workInProgress.memoizedState; + const root: FiberRoot = workInProgress.stateNode; + if (enableCache) { + pushRootCachePool(root); + const nextCacheInstance: CacheInstance = nextState.cacheInstance; if (nextCacheInstance !== prevState.cacheInstance) { pushFreshCacheProvider(workInProgress, nextCacheInstance); @@ -1226,7 +1220,6 @@ function updateHostRoot(current, workInProgress, renderLanes) { resetHydrationState(); return bailoutOnAlreadyFinishedWork(current, workInProgress, renderLanes); } - const root: FiberRoot = workInProgress.stateNode; if (root.hydrate && enterHydrationState(workInProgress)) { // If we don't have any current children this might be the first pass. // We always try to hydrate. If this isn't a hydration pass there won't @@ -1797,13 +1790,11 @@ function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { 'Expected a work-in-progress root. This is a bug in React. 
Please ' + 'file an issue.', ); - // If a nested cache accessed the pool during this render, it will be - // assigned to root.pooledCache. No need to check the lane-indexed pool. - // TODO: Actually I think I'm wrong and we do need to check the lane-indexed - // pool, to account for infinite transitions that are not triggered by a - // `refresh` call, since those won't put a fresh context on the stack. - // However, that's not idiomatic so this might be fine for now. - const pooledCache = root.pooledCache; + // If a nested cache accessed the pool during this render, it will + // returned by this function. It will also return a cache that was + // accessed by a sibling tree, but that's also fine, since that's the + // cache that would have been claimed by any nested caches. + const pooledCache = getPooledCacheIfExists(); if (pooledCache !== null) { cacheInstance = { cache: (pooledCache: Cache), @@ -1832,16 +1823,11 @@ function updateSuspenseOffscreenState( // during the first pass when we attempted to unhide. cacheInstance = prevOffscreenState.cache; if (cacheInstance === null) { - // If there's no previous cache, then we can check the pool. If a nested - // cache accessed the pool during this render, it will be assigned to - // root.pooledCache. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - const pooledCache = root.pooledCache; + // If a nested cache accessed the pool during this render, it will + // returned by this function. It will also return a cache that was + // accessed by a sibling tree, but that's also fine, since that's the + // cache that would have been claimed by any nested caches. 
+ const pooledCache = getPooledCacheIfExists(); if (pooledCache !== null) { cacheInstance = { cache: (pooledCache: Cache), @@ -3352,6 +3338,9 @@ function beginWork( case HostRoot: pushHostRootContext(workInProgress); if (enableCache) { + const root: FiberRoot = workInProgress.stateNode; + pushRootCachePool(root); + const nextCacheInstance: CacheInstance = current.memoizedState.cacheInstance; pushStaleCacheProvider(workInProgress, nextCacheInstance); diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index d9d95da611b84..bee52816e7a42 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -123,7 +123,6 @@ import { removeLanes, mergeLanes, getBumpedLaneForHydration, - requestCacheFromPool, } from './ReactFiberLane.old'; import { ConcurrentMode, @@ -215,8 +214,11 @@ import { pushFreshCacheProvider, pushStaleCacheProvider, hasFreshCacheProvider, + requestCacheFromPool, + pushRootCachePool, pushCachePool, getFreshCacheProviderIfExists, + getPooledCacheIfExists, } from './ReactFiberCacheComponent.old'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -672,13 +674,7 @@ function updateOffscreenComponent( // This isn't a refresh, it's a continuation of a previous render. // So we don't need to propagate a context change. } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - pushCachePool(root, prevCacheInstance); + pushCachePool(prevCacheInstance); } } } @@ -716,13 +712,7 @@ function updateOffscreenComponent( // This isn't a refresh, it's a continuation of a previous render. // So we don't need to propagate a context change. } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. 
Please ' + - 'file an issue.', - ); - pushCachePool(root, prevCacheInstance); + pushCachePool(prevCacheInstance); } } } @@ -784,7 +774,7 @@ function updateCacheComponent( ); // This will always be different from the parent cache; otherwise we would // have detected a fresh cache provider in the earlier branch. - const cache = requestCacheFromPool(root, renderLanes); + const cache = requestCacheFromPool(renderLanes); cacheInstance = { cache, provider: workInProgress, @@ -1204,7 +1194,11 @@ function updateHostRoot(current, workInProgress, renderLanes) { processUpdateQueue(workInProgress, nextProps, null, renderLanes); const nextState = workInProgress.memoizedState; + const root: FiberRoot = workInProgress.stateNode; + if (enableCache) { + pushRootCachePool(root); + const nextCacheInstance: CacheInstance = nextState.cacheInstance; if (nextCacheInstance !== prevState.cacheInstance) { pushFreshCacheProvider(workInProgress, nextCacheInstance); @@ -1226,7 +1220,6 @@ function updateHostRoot(current, workInProgress, renderLanes) { resetHydrationState(); return bailoutOnAlreadyFinishedWork(current, workInProgress, renderLanes); } - const root: FiberRoot = workInProgress.stateNode; if (root.hydrate && enterHydrationState(workInProgress)) { // If we don't have any current children this might be the first pass. // We always try to hydrate. If this isn't a hydration pass there won't @@ -1797,13 +1790,11 @@ function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { 'Expected a work-in-progress root. This is a bug in React. Please ' + 'file an issue.', ); - // If a nested cache accessed the pool during this render, it will be - // assigned to root.pooledCache. No need to check the lane-indexed pool. - // TODO: Actually I think I'm wrong and we do need to check the lane-indexed - // pool, to account for infinite transitions that are not triggered by a - // `refresh` call, since those won't put a fresh context on the stack. 
- // However, that's not idiomatic so this might be fine for now. - const pooledCache = root.pooledCache; + // If a nested cache accessed the pool during this render, it will + // returned by this function. It will also return a cache that was + // accessed by a sibling tree, but that's also fine, since that's the + // cache that would have been claimed by any nested caches. + const pooledCache = getPooledCacheIfExists(); if (pooledCache !== null) { cacheInstance = { cache: (pooledCache: Cache), @@ -1832,16 +1823,11 @@ function updateSuspenseOffscreenState( // during the first pass when we attempted to unhide. cacheInstance = prevOffscreenState.cache; if (cacheInstance === null) { - // If there's no previous cache, then we can check the pool. If a nested - // cache accessed the pool during this render, it will be assigned to - // root.pooledCache. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - const pooledCache = root.pooledCache; + // If a nested cache accessed the pool during this render, it will + // returned by this function. It will also return a cache that was + // accessed by a sibling tree, but that's also fine, since that's the + // cache that would have been claimed by any nested caches. 
+ const pooledCache = getPooledCacheIfExists(); if (pooledCache !== null) { cacheInstance = { cache: (pooledCache: Cache), @@ -3352,6 +3338,9 @@ function beginWork( case HostRoot: pushHostRootContext(workInProgress); if (enableCache) { + const root: FiberRoot = workInProgress.stateNode; + pushRootCachePool(root); + const nextCacheInstance: CacheInstance = current.memoizedState.cacheInstance; pushStaleCacheProvider(workInProgress, nextCacheInstance); diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.new.js b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js index d57b4f43441eb..f522ce29e4480 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.new.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js @@ -9,6 +9,7 @@ import type {ReactContext} from 'shared/ReactTypes'; import type {FiberRoot} from './ReactInternalTypes'; +import type {Lanes} from './ReactFiberLane.new'; import {enableCache} from 'shared/ReactFeatureFlags'; import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; @@ -51,6 +52,9 @@ if (__DEV__ && enableCache) { // ever be a single fresh cache on the context stack. let freshCacheInstance: CacheInstance | null = null; +// The cache that we retrived from the pool during this render, if any +let pooledCache: Cache | null = null; + export function pushStaleCacheProvider( workInProgress: Fiber, cacheInstance: CacheInstance, @@ -124,34 +128,74 @@ export function getFreshCacheProviderIfExists(): CacheInstance | null { return freshCacheInstance; } -export function pushCachePool( - root: FiberRoot, - cacheInstance: PooledCacheInstance, -) { - // This will temporarily override the root's pooled cache, so that any new - // Cache boundaries in the subtree use this one. The previous value on the - // "stack" is stored on the cache instance. 
We will restore it during the +export function requestCacheFromPool(renderLanes: Lanes): Cache { + if (!enableCache) { + return (null: any); + } + if (pooledCache !== null) { + return pooledCache; + } + // Create a fresh cache. + pooledCache = new Map(); + return pooledCache; +} + +export function getPooledCacheIfExists(): Cache | null { + return pooledCache; +} + +export function pushRootCachePool(root: FiberRoot) { + if (!enableCache) { + return; + } + // When we start rendering a tree, read the pooled cache for this render + // from `root.pooledCache`. If it's currently `null`, we will lazily + // initialize it the first type it's requested. However, we only mutate + // the root itself during the complete/unwind phase of the HostRoot. + pooledCache = root.pooledCache; +} + +export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) { + if (!enableCache) { + return; + } + // The `pooledCache` variable points to the cache that was used for new + // cache boundaries during this render, if any. Stash it on the root so that + // parallel transitions may share the same cache. We will clear this field + // once all the transitions that depend on it (which we track with + // `pooledCacheLanes`) have committed. + root.pooledCache = pooledCache; + root.pooledCacheLanes |= renderLanes; +} + +export function pushCachePool(cacheInstance: PooledCacheInstance) { + if (!enableCache) { + return; + } + // This will temporarily override the pooled cache for this render, so that + // any new Cache boundaries in the subtree use this one. The previous value on + // the "stack" is stored on the cache instance. We will restore it during the // complete phase. // // The more straightforward way to do this would be to use the array-based // stack (push/pop). Maybe this is too clever. 
- const prevPooledCacheOnStack = root.pooledCache; - root.pooledCache = cacheInstance.cache; + const prevPooledCacheOnStack = pooledCache; + pooledCache = cacheInstance.cache; // This is never supposed to be null. I'm cheating. Sorry. It will be reset to // the correct type when we pop. cacheInstance.cache = ((prevPooledCacheOnStack: any): Cache); } -export function popCachePool( - root: FiberRoot, - cacheInstance: PooledCacheInstance, -) { - const retryCache: Cache = (root.pooledCache: any); +export function popCachePool(cacheInstance: PooledCacheInstance) { + if (!enableCache) { + return; + } + const retryCache: Cache = (pooledCache: any); if (__DEV__) { if (retryCache === null) { console.error('Expected to have a pooled cache. This is a bug in React.'); } } - root.pooledCache = cacheInstance.cache; + pooledCache = cacheInstance.cache; cacheInstance.cache = retryCache; } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js index 8b80a7526806e..f2b69b658db5d 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js @@ -9,6 +9,7 @@ import type {ReactContext} from 'shared/ReactTypes'; import type {FiberRoot} from './ReactInternalTypes'; +import type {Lanes} from './ReactFiberLane.old'; import {enableCache} from 'shared/ReactFeatureFlags'; import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; @@ -51,6 +52,9 @@ if (__DEV__ && enableCache) { // ever be a single fresh cache on the context stack. 
let freshCacheInstance: CacheInstance | null = null; +// The cache that we retrived from the pool during this render, if any +let pooledCache: Cache | null = null; + export function pushStaleCacheProvider( workInProgress: Fiber, cacheInstance: CacheInstance, @@ -124,34 +128,74 @@ export function getFreshCacheProviderIfExists(): CacheInstance | null { return freshCacheInstance; } -export function pushCachePool( - root: FiberRoot, - cacheInstance: PooledCacheInstance, -) { - // This will temporarily override the root's pooled cache, so that any new - // Cache boundaries in the subtree use this one. The previous value on the - // "stack" is stored on the cache instance. We will restore it during the +export function requestCacheFromPool(renderLanes: Lanes): Cache { + if (!enableCache) { + return (null: any); + } + if (pooledCache !== null) { + return pooledCache; + } + // Create a fresh cache. + pooledCache = new Map(); + return pooledCache; +} + +export function getPooledCacheIfExists(): Cache | null { + return pooledCache; +} + +export function pushRootCachePool(root: FiberRoot) { + if (!enableCache) { + return; + } + // When we start rendering a tree, read the pooled cache for this render + // from `root.pooledCache`. If it's currently `null`, we will lazily + // initialize it the first type it's requested. However, we only mutate + // the root itself during the complete/unwind phase of the HostRoot. + pooledCache = root.pooledCache; +} + +export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) { + if (!enableCache) { + return; + } + // The `pooledCache` variable points to the cache that was used for new + // cache boundaries during this render, if any. Stash it on the root so that + // parallel transitions may share the same cache. We will clear this field + // once all the transitions that depend on it (which we track with + // `pooledCacheLanes`) have committed. 
+ root.pooledCache = pooledCache; + root.pooledCacheLanes |= renderLanes; +} + +export function pushCachePool(cacheInstance: PooledCacheInstance) { + if (!enableCache) { + return; + } + // This will temporarily override the pooled cache for this render, so that + // any new Cache boundaries in the subtree use this one. The previous value on + // the "stack" is stored on the cache instance. We will restore it during the // complete phase. // // The more straightforward way to do this would be to use the array-based // stack (push/pop). Maybe this is too clever. - const prevPooledCacheOnStack = root.pooledCache; - root.pooledCache = cacheInstance.cache; + const prevPooledCacheOnStack = pooledCache; + pooledCache = cacheInstance.cache; // This is never supposed to be null. I'm cheating. Sorry. It will be reset to // the correct type when we pop. cacheInstance.cache = ((prevPooledCacheOnStack: any): Cache); } -export function popCachePool( - root: FiberRoot, - cacheInstance: PooledCacheInstance, -) { - const retryCache: Cache = (root.pooledCache: any); +export function popCachePool(cacheInstance: PooledCacheInstance) { + if (!enableCache) { + return; + } + const retryCache: Cache = (pooledCache: any); if (__DEV__) { if (retryCache === null) { console.error('Expected to have a pooled cache. 
This is a bug in React.'); } } - root.pooledCache = cacheInstance.cache; + pooledCache = cacheInstance.cache; cacheInstance.cache = retryCache; } diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 7007cb2fb190d..d78e502fcb65c 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -148,7 +148,6 @@ import { popRenderLanes, getRenderTargetTime, subtreeRenderLanes, - getWorkInProgressRoot, } from './ReactFiberWorkLoop.new'; import {createFundamentalStateInstance} from './ReactFiberFundamental.new'; import { @@ -161,7 +160,11 @@ import { import {resetChildFibers} from './ReactChildFiber.new'; import {createScopeInstance} from './ReactFiberScope.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; -import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent.new'; +import { + popCacheProvider, + popRootCachePool, + popCachePool, +} from './ReactFiberCacheComponent.new'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. 
This turns a Placement into @@ -817,7 +820,10 @@ function completeWork( return null; } case HostRoot: { + const fiberRoot = (workInProgress.stateNode: FiberRoot); if (enableCache) { + popRootCachePool(fiberRoot, renderLanes); + const cacheInstance: CacheInstance = workInProgress.memoizedState.cacheInstance; popCacheProvider(workInProgress, cacheInstance); @@ -825,7 +831,6 @@ function completeWork( popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); - const fiberRoot = (workInProgress.stateNode: FiberRoot); if (fiberRoot.pendingContext) { fiberRoot.context = fiberRoot.pendingContext; fiberRoot.pendingContext = null; @@ -1503,13 +1508,7 @@ function completeWork( if (cacheInstance.provider !== null) { popCacheProvider(workInProgress, cacheInstance); } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - popCachePool(root, cacheInstance); + popCachePool(cacheInstance); } } } diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index 67301d481b4a2..e95b15d9987ce 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -148,7 +148,6 @@ import { popRenderLanes, getRenderTargetTime, subtreeRenderLanes, - getWorkInProgressRoot, } from './ReactFiberWorkLoop.old'; import {createFundamentalStateInstance} from './ReactFiberFundamental.old'; import { @@ -161,7 +160,11 @@ import { import {resetChildFibers} from './ReactChildFiber.old'; import {createScopeInstance} from './ReactFiberScope.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; -import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent.old'; +import { + popCacheProvider, + popRootCachePool, + popCachePool, +} from 
'./ReactFiberCacheComponent.old'; function markUpdate(workInProgress: Fiber) { // Tag the fiber with an update effect. This turns a Placement into @@ -817,7 +820,10 @@ function completeWork( return null; } case HostRoot: { + const fiberRoot = (workInProgress.stateNode: FiberRoot); if (enableCache) { + popRootCachePool(fiberRoot, renderLanes); + const cacheInstance: CacheInstance = workInProgress.memoizedState.cacheInstance; popCacheProvider(workInProgress, cacheInstance); @@ -825,7 +831,6 @@ function completeWork( popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); resetMutableSourceWorkInProgressVersions(); - const fiberRoot = (workInProgress.stateNode: FiberRoot); if (fiberRoot.pendingContext) { fiberRoot.context = fiberRoot.pendingContext; fiberRoot.pendingContext = null; @@ -1503,13 +1508,7 @@ function completeWork( if (cacheInstance.provider !== null) { popCacheProvider(workInProgress, cacheInstance); } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - popCachePool(root, cacheInstance); + popCachePool(cacheInstance); } } } diff --git a/packages/react-reconciler/src/ReactFiberLane.new.js b/packages/react-reconciler/src/ReactFiberLane.new.js index 3b078d4a3a9db..351035d0c341a 100644 --- a/packages/react-reconciler/src/ReactFiberLane.new.js +++ b/packages/react-reconciler/src/ReactFiberLane.new.js @@ -8,7 +8,6 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; -import type {Cache} from './ReactFiberCacheComponent.new'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. 
@@ -781,30 +780,6 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { } } -export function requestCacheFromPool( - root: FiberRoot, - renderLanes: Lanes, -): Cache { - if (!enableCache) { - return (null: any); - } - - root.pooledCacheLanes |= renderLanes; - - const pooledCache = root.pooledCache; - if (pooledCache !== null) { - return pooledCache; - } - - // Create a fresh cache. - const cache = new Map(); - - // This is now the pooled cache. - root.pooledCache = cache; - - return cache; -} - export function getBumpedLaneForHydration( root: FiberRoot, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberLane.old.js b/packages/react-reconciler/src/ReactFiberLane.old.js index 131daaad94c71..7a00fe095e6c6 100644 --- a/packages/react-reconciler/src/ReactFiberLane.old.js +++ b/packages/react-reconciler/src/ReactFiberLane.old.js @@ -8,7 +8,6 @@ */ import type {FiberRoot, ReactPriorityLevel} from './ReactInternalTypes'; -import type {Cache} from './ReactFiberCacheComponent.old'; // TODO: Ideally these types would be opaque but that doesn't work well with // our reconciler fork infra, since these leak into non-reconciler packages. @@ -781,30 +780,6 @@ export function markRootEntangled(root: FiberRoot, entangledLanes: Lanes) { } } -export function requestCacheFromPool( - root: FiberRoot, - renderLanes: Lanes, -): Cache { - if (!enableCache) { - return (null: any); - } - - root.pooledCacheLanes |= renderLanes; - - const pooledCache = root.pooledCache; - if (pooledCache !== null) { - return pooledCache; - } - - // Create a fresh cache. - const cache = new Map(); - - // This is now the pooled cache. 
- root.pooledCache = cache; - - return cache; -} - export function getBumpedLaneForHydration( root: FiberRoot, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index 5b1946a544c52..4815ca4279d97 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -8,7 +8,7 @@ */ import type {ReactContext} from 'shared/ReactTypes'; -import type {Fiber} from './ReactInternalTypes'; +import type {Fiber, FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.new'; import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; import type { @@ -46,8 +46,12 @@ import { popTopLevelContextObject as popTopLevelLegacyContextObject, } from './ReactFiberContext.new'; import {popProvider} from './ReactFiberNewContext.new'; -import {popRenderLanes, getWorkInProgressRoot} from './ReactFiberWorkLoop.new'; -import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent.new'; +import {popRenderLanes} from './ReactFiberWorkLoop.new'; +import { + popCacheProvider, + popRootCachePool, + popCachePool, +} from './ReactFiberCacheComponent.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; import invariant from 'shared/invariant'; @@ -74,6 +78,9 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { } case HostRoot: { if (enableCache) { + const root: FiberRoot = workInProgress.stateNode; + popRootCachePool(root, renderLanes); + const cacheInstance: CacheInstance = workInProgress.memoizedState.cacheInstance; popCacheProvider(workInProgress, cacheInstance); @@ -148,13 +155,7 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { if (cacheInstance.provider !== null) { popCacheProvider(workInProgress, cacheInstance); } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. 
This is a bug in React. Please ' + - 'file an issue.', - ); - popCachePool(root, cacheInstance); + popCachePool(cacheInstance); } } } @@ -172,7 +173,7 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { } } -function unwindInterruptedWork(interruptedWork: Fiber) { +function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { switch (interruptedWork.tag) { case ClassComponent: { const childContextTypes = interruptedWork.type.childContextTypes; @@ -183,6 +184,9 @@ function unwindInterruptedWork(interruptedWork: Fiber) { } case HostRoot: { if (enableCache) { + const root: FiberRoot = interruptedWork.stateNode; + popRootCachePool(root, renderLanes); + const cacheInstance: CacheInstance = interruptedWork.memoizedState.cacheInstance; popCacheProvider(interruptedWork, cacheInstance); @@ -221,13 +225,7 @@ function unwindInterruptedWork(interruptedWork: Fiber) { if (cacheInstance.provider !== null) { popCacheProvider(interruptedWork, cacheInstance); } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. 
Please ' + - 'file an issue.', - ); - popCachePool(root, cacheInstance); + popCachePool(cacheInstance); } } } diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 56359ad5d9846..4a55f7b1fd62f 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -8,7 +8,7 @@ */ import type {ReactContext} from 'shared/ReactTypes'; -import type {Fiber} from './ReactInternalTypes'; +import type {Fiber, FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.old'; import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; import type { @@ -46,8 +46,12 @@ import { popTopLevelContextObject as popTopLevelLegacyContextObject, } from './ReactFiberContext.old'; import {popProvider} from './ReactFiberNewContext.old'; -import {popRenderLanes, getWorkInProgressRoot} from './ReactFiberWorkLoop.old'; -import {popCacheProvider, popCachePool} from './ReactFiberCacheComponent.old'; +import {popRenderLanes} from './ReactFiberWorkLoop.old'; +import { + popCacheProvider, + popRootCachePool, + popCachePool, +} from './ReactFiberCacheComponent.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; import invariant from 'shared/invariant'; @@ -74,6 +78,9 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { } case HostRoot: { if (enableCache) { + const root: FiberRoot = workInProgress.stateNode; + popRootCachePool(root, renderLanes); + const cacheInstance: CacheInstance = workInProgress.memoizedState.cacheInstance; popCacheProvider(workInProgress, cacheInstance); @@ -148,13 +155,7 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { if (cacheInstance.provider !== null) { popCacheProvider(workInProgress, cacheInstance); } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. 
Please ' + - 'file an issue.', - ); - popCachePool(root, cacheInstance); + popCachePool(cacheInstance); } } } @@ -172,7 +173,7 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { } } -function unwindInterruptedWork(interruptedWork: Fiber) { +function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { switch (interruptedWork.tag) { case ClassComponent: { const childContextTypes = interruptedWork.type.childContextTypes; @@ -184,6 +185,9 @@ function unwindInterruptedWork(interruptedWork: Fiber) { case HostRoot: { popHostContainer(interruptedWork); if (enableCache) { + const root: FiberRoot = interruptedWork.stateNode; + popRootCachePool(root, renderLanes); + const cacheInstance: CacheInstance = interruptedWork.memoizedState.cacheInstance; popCacheProvider(interruptedWork, cacheInstance); @@ -221,13 +225,7 @@ function unwindInterruptedWork(interruptedWork: Fiber) { if (cacheInstance.provider !== null) { popCacheProvider(interruptedWork, cacheInstance); } else { - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. 
Please ' + - 'file an issue.', - ); - popCachePool(root, cacheInstance); + popCachePool(cacheInstance); } } } diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js index 347e0f5b11e51..646d728f62d9c 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js @@ -1372,7 +1372,7 @@ function prepareFreshStack(root: FiberRoot, lanes: Lanes) { if (workInProgress !== null) { let interruptedWork = workInProgress.return; while (interruptedWork !== null) { - unwindInterruptedWork(interruptedWork); + unwindInterruptedWork(interruptedWork, workInProgressRootRenderLanes); interruptedWork = interruptedWork.return; } } @@ -3051,7 +3051,7 @@ if (__DEV__ && replayFailedUnitOfWorkWithInvokeGuardedCallback) { // same fiber again. // Unwind the failed stack frame - unwindInterruptedWork(unitOfWork); + unwindInterruptedWork(unitOfWork, workInProgressRootRenderLanes); // Restore the original properties of the fiber. assignFiberPropertiesInDEV(unitOfWork, originalWorkInProgressCopy); diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js index 7da1423841f3b..b03fbbb05b9ee 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js @@ -1372,7 +1372,7 @@ function prepareFreshStack(root: FiberRoot, lanes: Lanes) { if (workInProgress !== null) { let interruptedWork = workInProgress.return; while (interruptedWork !== null) { - unwindInterruptedWork(interruptedWork); + unwindInterruptedWork(interruptedWork, workInProgressRootRenderLanes); interruptedWork = interruptedWork.return; } } @@ -3051,7 +3051,7 @@ if (__DEV__ && replayFailedUnitOfWorkWithInvokeGuardedCallback) { // same fiber again. 
// Unwind the failed stack frame - unwindInterruptedWork(unitOfWork); + unwindInterruptedWork(unitOfWork, workInProgressRootRenderLanes); // Restore the original properties of the fiber. assignFiberPropertiesInDEV(unitOfWork, originalWorkInProgressCopy); From 3cfe44ef47b14c1252d97c7f25585bbc62a1e559 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Thu, 17 Dec 2020 21:10:34 -0600 Subject: [PATCH 28/30] Remove provider fiber from cache context I originally put the provider fiber on the cache context because I expected the semantics to be that a refresh finds the root of the "freshness" boundary; that is, all the data that refreshed or appeared as part of the same transition. Refreshing is a tricky problem space that we're not done exploring; the better default behavior is to refresh the most local provider, without considering freshness. This also makes the implementation simpler because `refresh` no longer needs to be bound to the provider fiber. Instead I traverse up the fiber return path. 
--- .../src/ReactFiberBeginWork.new.js | 173 +++++++++--------- .../src/ReactFiberBeginWork.old.js | 173 +++++++++--------- .../src/ReactFiberCacheComponent.new.js | 64 +++---- .../src/ReactFiberCacheComponent.old.js | 64 +++---- .../src/ReactFiberCompleteWork.new.js | 34 ++-- .../src/ReactFiberCompleteWork.old.js | 34 ++-- .../src/ReactFiberHooks.new.js | 94 +++++----- .../src/ReactFiberHooks.old.js | 94 +++++----- .../src/ReactFiberOffscreenComponent.js | 7 +- .../src/ReactFiberRoot.new.js | 5 +- .../src/ReactFiberRoot.old.js | 5 +- .../src/ReactFiberUnwindWork.new.js | 62 +++---- .../src/ReactFiberUnwindWork.old.js | 62 +++---- 13 files changed, 432 insertions(+), 439 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index 38f77f3f19b6f..e516e20aea52d 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -25,8 +25,9 @@ import type { } from './ReactFiberOffscreenComponent'; import type { Cache, - CacheInstance, - PooledCacheInstance, + SuspendedCache, + SuspendedCacheFresh, + SuspendedCachePool, } from './ReactFiberCacheComponent.new'; import type {UpdateQueue} from './ReactUpdateQueue.new'; @@ -211,6 +212,8 @@ import { import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.new'; import { + SuspendedCacheFreshTag, + SuspendedCachePoolTag, pushFreshCacheProvider, pushStaleCacheProvider, hasFreshCacheProvider, @@ -599,7 +602,7 @@ function updateOffscreenComponent( // the previous render. We will push this to the cache context so that we can // resume in-flight requests. However, we don't do this if there's already a // fresh cache provider on the stack. 
- let cacheInstance: CacheInstance | PooledCacheInstance | null = null; + let suspendedCache: SuspendedCache | null = null; if ( nextProps.mode === 'hidden' || @@ -627,9 +630,14 @@ function updateOffscreenComponent( // Keep a reference to the in-flight cache so we can resume later. If // there's no fresh cache on the stack, there might be one from a // previous render. If so, reuse it. - cacheInstance = hasFreshCacheProvider() - ? getFreshCacheProviderIfExists() - : prevState.cache; + const freshCache = getFreshCacheProviderIfExists(); + suspendedCache = + freshCache !== null + ? ({ + tag: SuspendedCacheFreshTag, + cache: freshCache, + }: SuspendedCacheFresh) + : prevState.cache; // We don't need to push to the cache context because we're about to // bail out. There won't be a context mismatch because we only pop // the cache context if `updateQueue` is non-null. @@ -647,7 +655,7 @@ function updateOffscreenComponent( ); const nextState: OffscreenState = { baseLanes: nextBaseLanes, - cache: cacheInstance, + cache: suspendedCache, }; workInProgress.memoizedState = nextState; workInProgress.updateQueue = null; @@ -662,19 +670,21 @@ function updateOffscreenComponent( if (enableCache && !hasFreshCacheProvider() && prevState !== null) { // If there was a fresh cache during the render that spawned this one, // resume using it. - const prevCacheInstance = prevState.cache; - if (prevCacheInstance !== null) { - cacheInstance = prevCacheInstance; - const provider = cacheInstance.provider; + const prevSuspendedCache = prevState.cache; + if (prevSuspendedCache !== null) { + suspendedCache = prevSuspendedCache; // If the resumed cache has a provider, then it's a fresh cache. We // should push it to the stack. Otherwise, it's from the cache pool // and we should override the cache pool. 
- if (provider !== null) { - pushFreshCacheProvider(workInProgress, prevCacheInstance); + if (suspendedCache.tag === SuspendedCacheFreshTag) { + pushFreshCacheProvider( + workInProgress, + (suspendedCache: SuspendedCacheFresh).cache, + ); // This isn't a refresh, it's a continuation of a previous render. // So we don't need to propagate a context change. } else { - pushCachePool(prevCacheInstance); + pushCachePool((suspendedCache: SuspendedCachePool)); } } } @@ -701,18 +711,21 @@ function updateOffscreenComponent( if (enableCache && !hasFreshCacheProvider()) { // If there was a fresh cache during the render that spawned this one, // resume using it. - const prevCacheInstance = prevState.cache; - if (prevCacheInstance !== null) { - cacheInstance = prevCacheInstance; + const prevSuspendedCache = prevState.cache; + if (prevSuspendedCache !== null) { + suspendedCache = prevSuspendedCache; // If the resumed cache has a provider, then it's a fresh cache. We // should push it to the stack. Otherwise, it's from the cache pool // and we should override the cache pool. - if (cacheInstance.provider !== null) { - pushFreshCacheProvider(workInProgress, prevCacheInstance); + if (suspendedCache.tag === SuspendedCacheFreshTag) { + pushFreshCacheProvider( + workInProgress, + (suspendedCache: SuspendedCacheFresh).cache, + ); // This isn't a refresh, it's a continuation of a previous render. // So we don't need to propagate a context change. } else { - pushCachePool(prevCacheInstance); + pushCachePool((suspendedCache: SuspendedCachePool)); } } } @@ -729,10 +742,10 @@ function updateOffscreenComponent( } if (enableCache) { - // If we have a cache instance from a previous render attempt, then this will - // be non-null. We can use this to infer whether to push/pop the + // If we have a suspended cache from a previous render attempt, then this + // will be non-null. We can use this to infer whether to push/pop the // cache context. 
- workInProgress.updateQueue = cacheInstance; + workInProgress.updateQueue = suspendedCache; } reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -754,16 +767,12 @@ function updateCacheComponent( return null; } - let cacheInstance: CacheInstance | null = null; + let cache: Cache | null = null; if (current === null) { - let initialState; if (hasFreshCacheProvider()) { // Fast path. The parent Cache is either a new mount or a refresh. We can // inherit its cache. - cacheInstance = null; - initialState = { - cache: null, - }; + cache = null; } else { // This is a newly mounted component. Request a fresh cache. const root = getWorkInProgressRoot(); @@ -774,26 +783,19 @@ function updateCacheComponent( ); // This will always be different from the parent cache; otherwise we would // have detected a fresh cache provider in the earlier branch. - const cache = requestCacheFromPool(renderLanes); - cacheInstance = { - cache, - provider: workInProgress, - }; - initialState = { - cacheInstance, - }; - pushFreshCacheProvider(workInProgress, cacheInstance); + cache = requestCacheFromPool(renderLanes); + pushFreshCacheProvider(workInProgress, cache); // No need to propagate a refresh, because this is a new tree. } // Initialize an update queue. We use this for refreshes. - workInProgress.memoizedState = initialState; + workInProgress.memoizedState = {cache}; initializeUpdateQueue(workInProgress); } else { // This component already mounted. if (hasFreshCacheProvider()) { // Fast path. The parent Cache is either a new mount or a refresh. We can // inherit its cache. - cacheInstance = null; + cache = null; } else if (includesSomeLane(renderLanes, updateLanes)) { // A refresh was scheduled. If it was a refresh on this fiber, then we // will have an update in the queue. Otherwise, it must have been an @@ -802,20 +804,18 @@ function updateCacheComponent( // First check the update queue. 
cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, null, null, renderLanes); - const prevCacheInstance: CacheInstance = - current.memoizedState.cacheInstance; - const nextCacheInstance: CacheInstance = - workInProgress.memoizedState.cacheInstance; - if (nextCacheInstance !== prevCacheInstance) { + const prevCache: Cache = current.memoizedState.cache; + const nextCache: Cache = workInProgress.memoizedState.cache; + if (nextCache !== prevCache) { // Received a refresh. - cacheInstance = nextCacheInstance; - pushFreshCacheProvider(workInProgress, cacheInstance); + cache = nextCache; + pushFreshCacheProvider(workInProgress, cache); // Refreshes propagate through the entire subtree. The refreshed cache // will override nested caches. propagateCacheRefresh(workInProgress, renderLanes); } else { // A parent cache boundary refreshed. So we can use the cache context. - cacheInstance = null; + cache = null; // If the update queue is empty, disconnect the old cache from the tree // so it can be garbage collected. @@ -826,10 +826,10 @@ function updateCacheComponent( } } else { // Reuse the memoized cache. - cacheInstance = current.stateNode; - if (cacheInstance !== null) { + cache = current.stateNode; + if (cache !== null) { // There was no refresh, so no need to propagate to nested boundaries. - pushStaleCacheProvider(workInProgress, cacheInstance); + pushStaleCacheProvider(workInProgress, cache); } } } @@ -838,7 +838,7 @@ function updateCacheComponent( // point to a cache instance. Otherwise, a null instance indicates that this // CacheComponent inherits from a parent boundary. We can use this to infer // whether to push/pop the cache context. 
- workInProgress.stateNode = cacheInstance; + workInProgress.stateNode = cache; const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -1199,16 +1199,16 @@ function updateHostRoot(current, workInProgress, renderLanes) { if (enableCache) { pushRootCachePool(root); - const nextCacheInstance: CacheInstance = nextState.cacheInstance; - if (nextCacheInstance !== prevState.cacheInstance) { - pushFreshCacheProvider(workInProgress, nextCacheInstance); + const nextCache: Cache = nextState.cache; + if (nextCache !== prevState.cache) { + pushFreshCacheProvider(workInProgress, nextCache); propagateCacheRefresh(workInProgress, renderLanes); } else { if (prevChildren === null) { // If there are no children, this must be the initial render. - pushFreshCacheProvider(workInProgress, nextCacheInstance); + pushFreshCacheProvider(workInProgress, nextCache); } else { - pushStaleCacheProvider(workInProgress, nextCacheInstance); + pushStaleCacheProvider(workInProgress, nextCache); } } } @@ -1777,11 +1777,16 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { - let cacheInstance: CacheInstance | PooledCacheInstance | null = null; + let suspendedCache: SuspendedCache | null = null; if (enableCache) { // Keep a reference to the in-flight cache so we can resume later. - cacheInstance = getFreshCacheProviderIfExists(); - if (cacheInstance === null) { + const freshCache = getFreshCacheProviderIfExists(); + if (freshCache !== null) { + suspendedCache = ({ + tag: SuspendedCacheFreshTag, + cache: freshCache, + }: SuspendedCacheFresh); + } else { // If there's no cache on the stack, a nested Cache boundary may have // spawned a new one. Check the cache pool. const root = getWorkInProgressRoot(); @@ -1796,16 +1801,16 @@ function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { // cache that would have been claimed by any nested caches. 
const pooledCache = getPooledCacheIfExists(); if (pooledCache !== null) { - cacheInstance = { - cache: (pooledCache: Cache), - provider: null, - }; + suspendedCache = ({ + tag: SuspendedCachePoolTag, + cache: pooledCache, + }: SuspendedCachePool); } } } return { baseLanes: renderLanes, - cache: cacheInstance, + cache: suspendedCache, }; } @@ -1813,33 +1818,38 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { - let cacheInstance = null; + let suspendedCache: SuspendedCache | null = null; if (enableCache) { // Keep a reference to the in-flight cache so we can resume later. - cacheInstance = getFreshCacheProviderIfExists(); - if (cacheInstance === null) { + const freshCache = getFreshCacheProviderIfExists(); + if (freshCache !== null) { + suspendedCache = ({ + tag: SuspendedCacheFreshTag, + cache: freshCache, + }: SuspendedCacheFresh); + } else { // If there's no cache on the stack, check if there's a cache from the // previous render. This is what we would have used for new content // during the first pass when we attempted to unhide. - cacheInstance = prevOffscreenState.cache; - if (cacheInstance === null) { + suspendedCache = prevOffscreenState.cache; + if (suspendedCache === null) { // If a nested cache accessed the pool during this render, it will // returned by this function. It will also return a cache that was // accessed by a sibling tree, but that's also fine, since that's the // cache that would have been claimed by any nested caches. 
const pooledCache = getPooledCacheIfExists(); if (pooledCache !== null) { - cacheInstance = { - cache: (pooledCache: Cache), - provider: null, - }; + suspendedCache = ({ + tag: SuspendedCachePoolTag, + cache: pooledCache, + }: SuspendedCachePool); } } } } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), - cache: cacheInstance, + cache: suspendedCache, }; } @@ -3341,9 +3351,8 @@ function beginWork( const root: FiberRoot = workInProgress.stateNode; pushRootCachePool(root); - const nextCacheInstance: CacheInstance = - current.memoizedState.cacheInstance; - pushStaleCacheProvider(workInProgress, nextCacheInstance); + const nextCache: Cache = current.memoizedState.cache; + pushStaleCacheProvider(workInProgress, nextCache); } resetHydrationState(); break; @@ -3514,11 +3523,11 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const ownCacheInstance: CacheInstance | null = current.stateNode; - if (ownCacheInstance !== null) { - pushStaleCacheProvider(workInProgress, ownCacheInstance); + const cache: Cache | null = current.stateNode; + if (cache !== null) { + pushStaleCacheProvider(workInProgress, cache); } - workInProgress.stateNode = ownCacheInstance; + workInProgress.stateNode = cache; } break; } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index bee52816e7a42..138ddfa29d5b5 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -25,8 +25,9 @@ import type { } from './ReactFiberOffscreenComponent'; import type { Cache, - CacheInstance, - PooledCacheInstance, + SuspendedCache, + SuspendedCacheFresh, + SuspendedCachePool, } from './ReactFiberCacheComponent.old'; import type {UpdateQueue} from './ReactUpdateQueue.old'; @@ -211,6 +212,8 @@ import { import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from 
'./ReactMutableSource.old'; import { + SuspendedCacheFreshTag, + SuspendedCachePoolTag, pushFreshCacheProvider, pushStaleCacheProvider, hasFreshCacheProvider, @@ -599,7 +602,7 @@ function updateOffscreenComponent( // the previous render. We will push this to the cache context so that we can // resume in-flight requests. However, we don't do this if there's already a // fresh cache provider on the stack. - let cacheInstance: CacheInstance | PooledCacheInstance | null = null; + let suspendedCache: SuspendedCache | null = null; if ( nextProps.mode === 'hidden' || @@ -627,9 +630,14 @@ function updateOffscreenComponent( // Keep a reference to the in-flight cache so we can resume later. If // there's no fresh cache on the stack, there might be one from a // previous render. If so, reuse it. - cacheInstance = hasFreshCacheProvider() - ? getFreshCacheProviderIfExists() - : prevState.cache; + const freshCache = getFreshCacheProviderIfExists(); + suspendedCache = + freshCache !== null + ? ({ + tag: SuspendedCacheFreshTag, + cache: freshCache, + }: SuspendedCacheFresh) + : prevState.cache; // We don't need to push to the cache context because we're about to // bail out. There won't be a context mismatch because we only pop // the cache context if `updateQueue` is non-null. @@ -647,7 +655,7 @@ function updateOffscreenComponent( ); const nextState: OffscreenState = { baseLanes: nextBaseLanes, - cache: cacheInstance, + cache: suspendedCache, }; workInProgress.memoizedState = nextState; workInProgress.updateQueue = null; @@ -662,19 +670,21 @@ function updateOffscreenComponent( if (enableCache && !hasFreshCacheProvider() && prevState !== null) { // If there was a fresh cache during the render that spawned this one, // resume using it. 
- const prevCacheInstance = prevState.cache; - if (prevCacheInstance !== null) { - cacheInstance = prevCacheInstance; - const provider = cacheInstance.provider; + const prevSuspendedCache = prevState.cache; + if (prevSuspendedCache !== null) { + suspendedCache = prevSuspendedCache; // If the resumed cache has a provider, then it's a fresh cache. We // should push it to the stack. Otherwise, it's from the cache pool // and we should override the cache pool. - if (provider !== null) { - pushFreshCacheProvider(workInProgress, prevCacheInstance); + if (suspendedCache.tag === SuspendedCacheFreshTag) { + pushFreshCacheProvider( + workInProgress, + (suspendedCache: SuspendedCacheFresh).cache, + ); // This isn't a refresh, it's a continuation of a previous render. // So we don't need to propagate a context change. } else { - pushCachePool(prevCacheInstance); + pushCachePool((suspendedCache: SuspendedCachePool)); } } } @@ -701,18 +711,21 @@ function updateOffscreenComponent( if (enableCache && !hasFreshCacheProvider()) { // If there was a fresh cache during the render that spawned this one, // resume using it. - const prevCacheInstance = prevState.cache; - if (prevCacheInstance !== null) { - cacheInstance = prevCacheInstance; + const prevSuspendedCache = prevState.cache; + if (prevSuspendedCache !== null) { + suspendedCache = prevSuspendedCache; // If the resumed cache has a provider, then it's a fresh cache. We // should push it to the stack. Otherwise, it's from the cache pool // and we should override the cache pool. - if (cacheInstance.provider !== null) { - pushFreshCacheProvider(workInProgress, prevCacheInstance); + if (suspendedCache.tag === SuspendedCacheFreshTag) { + pushFreshCacheProvider( + workInProgress, + (suspendedCache: SuspendedCacheFresh).cache, + ); // This isn't a refresh, it's a continuation of a previous render. // So we don't need to propagate a context change. 
} else { - pushCachePool(prevCacheInstance); + pushCachePool((suspendedCache: SuspendedCachePool)); } } } @@ -729,10 +742,10 @@ function updateOffscreenComponent( } if (enableCache) { - // If we have a cache instance from a previous render attempt, then this will - // be non-null. We can use this to infer whether to push/pop the + // If we have a suspended cache from a previous render attempt, then this + // will be non-null. We can use this to infer whether to push/pop the // cache context. - workInProgress.updateQueue = cacheInstance; + workInProgress.updateQueue = suspendedCache; } reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -754,16 +767,12 @@ function updateCacheComponent( return null; } - let cacheInstance: CacheInstance | null = null; + let cache: Cache | null = null; if (current === null) { - let initialState; if (hasFreshCacheProvider()) { // Fast path. The parent Cache is either a new mount or a refresh. We can // inherit its cache. - cacheInstance = null; - initialState = { - cache: null, - }; + cache = null; } else { // This is a newly mounted component. Request a fresh cache. const root = getWorkInProgressRoot(); @@ -774,26 +783,19 @@ function updateCacheComponent( ); // This will always be different from the parent cache; otherwise we would // have detected a fresh cache provider in the earlier branch. - const cache = requestCacheFromPool(renderLanes); - cacheInstance = { - cache, - provider: workInProgress, - }; - initialState = { - cacheInstance, - }; - pushFreshCacheProvider(workInProgress, cacheInstance); + cache = requestCacheFromPool(renderLanes); + pushFreshCacheProvider(workInProgress, cache); // No need to propagate a refresh, because this is a new tree. } // Initialize an update queue. We use this for refreshes. - workInProgress.memoizedState = initialState; + workInProgress.memoizedState = {cache}; initializeUpdateQueue(workInProgress); } else { // This component already mounted. 
if (hasFreshCacheProvider()) { // Fast path. The parent Cache is either a new mount or a refresh. We can // inherit its cache. - cacheInstance = null; + cache = null; } else if (includesSomeLane(renderLanes, updateLanes)) { // A refresh was scheduled. If it was a refresh on this fiber, then we // will have an update in the queue. Otherwise, it must have been an @@ -802,20 +804,18 @@ function updateCacheComponent( // First check the update queue. cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, null, null, renderLanes); - const prevCacheInstance: CacheInstance = - current.memoizedState.cacheInstance; - const nextCacheInstance: CacheInstance = - workInProgress.memoizedState.cacheInstance; - if (nextCacheInstance !== prevCacheInstance) { + const prevCache: Cache = current.memoizedState.cache; + const nextCache: Cache = workInProgress.memoizedState.cache; + if (nextCache !== prevCache) { // Received a refresh. - cacheInstance = nextCacheInstance; - pushFreshCacheProvider(workInProgress, cacheInstance); + cache = nextCache; + pushFreshCacheProvider(workInProgress, cache); // Refreshes propagate through the entire subtree. The refreshed cache // will override nested caches. propagateCacheRefresh(workInProgress, renderLanes); } else { // A parent cache boundary refreshed. So we can use the cache context. - cacheInstance = null; + cache = null; // If the update queue is empty, disconnect the old cache from the tree // so it can be garbage collected. @@ -826,10 +826,10 @@ function updateCacheComponent( } } else { // Reuse the memoized cache. - cacheInstance = current.stateNode; - if (cacheInstance !== null) { + cache = current.stateNode; + if (cache !== null) { // There was no refresh, so no need to propagate to nested boundaries. - pushStaleCacheProvider(workInProgress, cacheInstance); + pushStaleCacheProvider(workInProgress, cache); } } } @@ -838,7 +838,7 @@ function updateCacheComponent( // point to a cache instance. 
Otherwise, a null instance indicates that this // CacheComponent inherits from a parent boundary. We can use this to infer // whether to push/pop the cache context. - workInProgress.stateNode = cacheInstance; + workInProgress.stateNode = cache; const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -1199,16 +1199,16 @@ function updateHostRoot(current, workInProgress, renderLanes) { if (enableCache) { pushRootCachePool(root); - const nextCacheInstance: CacheInstance = nextState.cacheInstance; - if (nextCacheInstance !== prevState.cacheInstance) { - pushFreshCacheProvider(workInProgress, nextCacheInstance); + const nextCache: Cache = nextState.cache; + if (nextCache !== prevState.cache) { + pushFreshCacheProvider(workInProgress, nextCache); propagateCacheRefresh(workInProgress, renderLanes); } else { if (prevChildren === null) { // If there are no children, this must be the initial render. - pushFreshCacheProvider(workInProgress, nextCacheInstance); + pushFreshCacheProvider(workInProgress, nextCache); } else { - pushStaleCacheProvider(workInProgress, nextCacheInstance); + pushStaleCacheProvider(workInProgress, nextCache); } } } @@ -1777,11 +1777,16 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { - let cacheInstance: CacheInstance | PooledCacheInstance | null = null; + let suspendedCache: SuspendedCache | null = null; if (enableCache) { // Keep a reference to the in-flight cache so we can resume later. - cacheInstance = getFreshCacheProviderIfExists(); - if (cacheInstance === null) { + const freshCache = getFreshCacheProviderIfExists(); + if (freshCache !== null) { + suspendedCache = ({ + tag: SuspendedCacheFreshTag, + cache: freshCache, + }: SuspendedCacheFresh); + } else { // If there's no cache on the stack, a nested Cache boundary may have // spawned a new one. Check the cache pool. 
const root = getWorkInProgressRoot(); @@ -1796,16 +1801,16 @@ function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { // cache that would have been claimed by any nested caches. const pooledCache = getPooledCacheIfExists(); if (pooledCache !== null) { - cacheInstance = { - cache: (pooledCache: Cache), - provider: null, - }; + suspendedCache = ({ + tag: SuspendedCachePoolTag, + cache: pooledCache, + }: SuspendedCachePool); } } } return { baseLanes: renderLanes, - cache: cacheInstance, + cache: suspendedCache, }; } @@ -1813,33 +1818,38 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { - let cacheInstance = null; + let suspendedCache: SuspendedCache | null = null; if (enableCache) { // Keep a reference to the in-flight cache so we can resume later. - cacheInstance = getFreshCacheProviderIfExists(); - if (cacheInstance === null) { + const freshCache = getFreshCacheProviderIfExists(); + if (freshCache !== null) { + suspendedCache = ({ + tag: SuspendedCacheFreshTag, + cache: freshCache, + }: SuspendedCacheFresh); + } else { // If there's no cache on the stack, check if there's a cache from the // previous render. This is what we would have used for new content // during the first pass when we attempted to unhide. - cacheInstance = prevOffscreenState.cache; - if (cacheInstance === null) { + suspendedCache = prevOffscreenState.cache; + if (suspendedCache === null) { // If a nested cache accessed the pool during this render, it will // returned by this function. It will also return a cache that was // accessed by a sibling tree, but that's also fine, since that's the // cache that would have been claimed by any nested caches. 
const pooledCache = getPooledCacheIfExists(); if (pooledCache !== null) { - cacheInstance = { - cache: (pooledCache: Cache), - provider: null, - }; + suspendedCache = ({ + tag: SuspendedCachePoolTag, + cache: pooledCache, + }: SuspendedCachePool); } } } } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), - cache: cacheInstance, + cache: suspendedCache, }; } @@ -3341,9 +3351,8 @@ function beginWork( const root: FiberRoot = workInProgress.stateNode; pushRootCachePool(root); - const nextCacheInstance: CacheInstance = - current.memoizedState.cacheInstance; - pushStaleCacheProvider(workInProgress, nextCacheInstance); + const nextCache: Cache = current.memoizedState.cache; + pushStaleCacheProvider(workInProgress, nextCache); } resetHydrationState(); break; @@ -3514,11 +3523,11 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const ownCacheInstance: CacheInstance | null = current.stateNode; - if (ownCacheInstance !== null) { - pushStaleCacheProvider(workInProgress, ownCacheInstance); + const cache: Cache | null = current.stateNode; + if (cache !== null) { + pushStaleCacheProvider(workInProgress, cache); } - workInProgress.stateNode = ownCacheInstance; + workInProgress.stateNode = cache; } break; } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.new.js b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js index f522ce29e4480..629211211405d 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.new.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js @@ -19,17 +19,22 @@ import {pushProvider, popProvider} from './ReactFiberNewContext.new'; export type Cache = Map<() => mixed, mixed>; -export type CacheInstance = {| +export type SuspendedCacheFresh = {| + tag: 0, cache: Cache, - provider: Fiber, |}; -export type PooledCacheInstance = {| +export type SuspendedCachePool = {| + tag: 1, cache: Cache, - provider: null, |}; -export const CacheContext: ReactContext = enableCache 
+export type SuspendedCache = SuspendedCacheFresh | SuspendedCachePool; + +export const SuspendedCacheFreshTag = 0; +export const SuspendedCachePoolTag = 1; + +export const CacheContext: ReactContext = enableCache ? { $$typeof: REACT_CONTEXT_TYPE, // We don't use Consumer/Provider for Cache components. So we'll cheat. @@ -50,38 +55,32 @@ if (__DEV__ && enableCache) { // A parent cache refresh always overrides any nested cache. So there will only // ever be a single fresh cache on the context stack. -let freshCacheInstance: CacheInstance | null = null; +let freshCache: Cache | null = null; // The cache that we retrived from the pool during this render, if any let pooledCache: Cache | null = null; -export function pushStaleCacheProvider( - workInProgress: Fiber, - cacheInstance: CacheInstance, -) { +export function pushStaleCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } if (__DEV__) { - if (freshCacheInstance !== null) { + if (freshCache !== null) { console.error( 'Already inside a fresh cache boundary. This is a bug in React.', ); } } - pushProvider(workInProgress, CacheContext, cacheInstance); + pushProvider(workInProgress, CacheContext, cache); } -export function pushFreshCacheProvider( - workInProgress: Fiber, - cacheInstance: CacheInstance, -) { +export function pushFreshCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } if (__DEV__) { if ( - freshCacheInstance !== null && + freshCache !== null && // TODO: Remove this exception for roots. There are a few tests that throw // in pushHostContainer, before the cache context is pushed. Not a huge // issue, but should still fix. 
@@ -92,25 +91,22 @@ export function pushFreshCacheProvider( ); } } - freshCacheInstance = cacheInstance; - pushProvider(workInProgress, CacheContext, cacheInstance); + freshCache = cache; + pushProvider(workInProgress, CacheContext, cache); } -export function popCacheProvider( - workInProgress: Fiber, - cacheInstance: CacheInstance, -) { +export function popCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } if (__DEV__) { - if (freshCacheInstance !== null && freshCacheInstance !== cacheInstance) { + if (freshCache !== null && freshCache !== cache) { console.error( 'Unexpected cache instance on context. This is a bug in React.', ); } } - freshCacheInstance = null; + freshCache = null; popProvider(CacheContext, workInProgress); } @@ -118,14 +114,14 @@ export function hasFreshCacheProvider() { if (!enableCache) { return false; } - return freshCacheInstance !== null; + return freshCache !== null; } -export function getFreshCacheProviderIfExists(): CacheInstance | null { +export function getFreshCacheProviderIfExists(): Cache | null { if (!enableCache) { return null; } - return freshCacheInstance; + return freshCache; } export function requestCacheFromPool(renderLanes: Lanes): Cache { @@ -168,7 +164,7 @@ export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) { root.pooledCacheLanes |= renderLanes; } -export function pushCachePool(cacheInstance: PooledCacheInstance) { +export function pushCachePool(suspendedCache: SuspendedCachePool) { if (!enableCache) { return; } @@ -180,13 +176,13 @@ export function pushCachePool(cacheInstance: PooledCacheInstance) { // The more straightforward way to do this would be to use the array-based // stack (push/pop). Maybe this is too clever. const prevPooledCacheOnStack = pooledCache; - pooledCache = cacheInstance.cache; + pooledCache = suspendedCache.cache; // This is never supposed to be null. I'm cheating. Sorry. It will be reset to // the correct type when we pop. 
- cacheInstance.cache = ((prevPooledCacheOnStack: any): Cache); + suspendedCache.cache = ((prevPooledCacheOnStack: any): Cache); } -export function popCachePool(cacheInstance: PooledCacheInstance) { +export function popCachePool(suspendedCache: SuspendedCachePool) { if (!enableCache) { return; } @@ -196,6 +192,6 @@ export function popCachePool(cacheInstance: PooledCacheInstance) { console.error('Expected to have a pooled cache. This is a bug in React.'); } } - pooledCache = cacheInstance.cache; - cacheInstance.cache = retryCache; + pooledCache = suspendedCache.cache; + suspendedCache.cache = retryCache; } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js index f2b69b658db5d..da5ed5b88de21 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js @@ -19,17 +19,22 @@ import {pushProvider, popProvider} from './ReactFiberNewContext.old'; export type Cache = Map<() => mixed, mixed>; -export type CacheInstance = {| +export type SuspendedCacheFresh = {| + tag: 0, cache: Cache, - provider: Fiber, |}; -export type PooledCacheInstance = {| +export type SuspendedCachePool = {| + tag: 1, cache: Cache, - provider: null, |}; -export const CacheContext: ReactContext = enableCache +export type SuspendedCache = SuspendedCacheFresh | SuspendedCachePool; + +export const SuspendedCacheFreshTag = 0; +export const SuspendedCachePoolTag = 1; + +export const CacheContext: ReactContext = enableCache ? { $$typeof: REACT_CONTEXT_TYPE, // We don't use Consumer/Provider for Cache components. So we'll cheat. @@ -50,38 +55,32 @@ if (__DEV__ && enableCache) { // A parent cache refresh always overrides any nested cache. So there will only // ever be a single fresh cache on the context stack. 
-let freshCacheInstance: CacheInstance | null = null; +let freshCache: Cache | null = null; // The cache that we retrived from the pool during this render, if any let pooledCache: Cache | null = null; -export function pushStaleCacheProvider( - workInProgress: Fiber, - cacheInstance: CacheInstance, -) { +export function pushStaleCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } if (__DEV__) { - if (freshCacheInstance !== null) { + if (freshCache !== null) { console.error( 'Already inside a fresh cache boundary. This is a bug in React.', ); } } - pushProvider(workInProgress, CacheContext, cacheInstance); + pushProvider(workInProgress, CacheContext, cache); } -export function pushFreshCacheProvider( - workInProgress: Fiber, - cacheInstance: CacheInstance, -) { +export function pushFreshCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } if (__DEV__) { if ( - freshCacheInstance !== null && + freshCache !== null && // TODO: Remove this exception for roots. There are a few tests that throw // in pushHostContainer, before the cache context is pushed. Not a huge // issue, but should still fix. @@ -92,25 +91,22 @@ export function pushFreshCacheProvider( ); } } - freshCacheInstance = cacheInstance; - pushProvider(workInProgress, CacheContext, cacheInstance); + freshCache = cache; + pushProvider(workInProgress, CacheContext, cache); } -export function popCacheProvider( - workInProgress: Fiber, - cacheInstance: CacheInstance, -) { +export function popCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } if (__DEV__) { - if (freshCacheInstance !== null && freshCacheInstance !== cacheInstance) { + if (freshCache !== null && freshCache !== cache) { console.error( 'Unexpected cache instance on context. 
This is a bug in React.', ); } } - freshCacheInstance = null; + freshCache = null; popProvider(CacheContext, workInProgress); } @@ -118,14 +114,14 @@ export function hasFreshCacheProvider() { if (!enableCache) { return false; } - return freshCacheInstance !== null; + return freshCache !== null; } -export function getFreshCacheProviderIfExists(): CacheInstance | null { +export function getFreshCacheProviderIfExists(): Cache | null { if (!enableCache) { return null; } - return freshCacheInstance; + return freshCache; } export function requestCacheFromPool(renderLanes: Lanes): Cache { @@ -168,7 +164,7 @@ export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) { root.pooledCacheLanes |= renderLanes; } -export function pushCachePool(cacheInstance: PooledCacheInstance) { +export function pushCachePool(suspendedCache: SuspendedCachePool) { if (!enableCache) { return; } @@ -180,13 +176,13 @@ export function pushCachePool(cacheInstance: PooledCacheInstance) { // The more straightforward way to do this would be to use the array-based // stack (push/pop). Maybe this is too clever. const prevPooledCacheOnStack = pooledCache; - pooledCache = cacheInstance.cache; + pooledCache = suspendedCache.cache; // This is never supposed to be null. I'm cheating. Sorry. It will be reset to // the correct type when we pop. - cacheInstance.cache = ((prevPooledCacheOnStack: any): Cache); + suspendedCache.cache = ((prevPooledCacheOnStack: any): Cache); } -export function popCachePool(cacheInstance: PooledCacheInstance) { +export function popCachePool(suspendedCache: SuspendedCachePool) { if (!enableCache) { return; } @@ -196,6 +192,6 @@ export function popCachePool(cacheInstance: PooledCacheInstance) { console.error('Expected to have a pooled cache. 
This is a bug in React.'); } } - pooledCache = cacheInstance.cache; - cacheInstance.cache = retryCache; + pooledCache = suspendedCache.cache; + suspendedCache.cache = retryCache; } diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index d78e502fcb65c..6f29a28b92c2b 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -29,8 +29,10 @@ import type { import type {SuspenseContext} from './ReactFiberSuspenseContext.new'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; import type { - CacheInstance, - PooledCacheInstance, + SuspendedCache, + SuspendedCacheFresh, + SuspendedCachePool, + Cache, } from './ReactFiberCacheComponent.new'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; @@ -161,6 +163,7 @@ import {resetChildFibers} from './ReactChildFiber.new'; import {createScopeInstance} from './ReactFiberScope.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; import { + SuspendedCacheFreshTag, popCacheProvider, popRootCachePool, popCachePool, @@ -824,9 +827,8 @@ function completeWork( if (enableCache) { popRootCachePool(fiberRoot, renderLanes); - const cacheInstance: CacheInstance = - workInProgress.memoizedState.cacheInstance; - popCacheProvider(workInProgress, cacheInstance); + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); @@ -1500,15 +1502,15 @@ function completeWork( } if (enableCache) { - const cacheInstance: - | CacheInstance - | PooledCacheInstance - | null = (workInProgress.updateQueue: any); - if (cacheInstance !== null) { - if (cacheInstance.provider !== null) { - popCacheProvider(workInProgress, cacheInstance); + const suspendedCache: SuspendedCache | 
null = (workInProgress.updateQueue: any); + if (suspendedCache !== null) { + if (suspendedCache.tag === SuspendedCacheFreshTag) { + popCacheProvider( + workInProgress, + (suspendedCache: SuspendedCacheFresh).cache, + ); } else { - popCachePool(cacheInstance); + popCachePool((suspendedCache: SuspendedCachePool)); } } } @@ -1517,10 +1519,10 @@ function completeWork( } case CacheComponent: { if (enableCache) { - const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; - if (ownCacheInstance !== null) { + const cache: Cache | null = workInProgress.stateNode; + if (cache !== null) { // This is a cache provider. - popCacheProvider(workInProgress, ownCacheInstance); + popCacheProvider(workInProgress, cache); } bubbleProperties(workInProgress); return null; diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index e95b15d9987ce..1e12c0ccad875 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -29,8 +29,10 @@ import type { import type {SuspenseContext} from './ReactFiberSuspenseContext.old'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; import type { - CacheInstance, - PooledCacheInstance, + SuspendedCache, + SuspendedCacheFresh, + SuspendedCachePool, + Cache, } from './ReactFiberCacheComponent.old'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; @@ -161,6 +163,7 @@ import {resetChildFibers} from './ReactChildFiber.old'; import {createScopeInstance} from './ReactFiberScope.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; import { + SuspendedCacheFreshTag, popCacheProvider, popRootCachePool, popCachePool, @@ -824,9 +827,8 @@ function completeWork( if (enableCache) { popRootCachePool(fiberRoot, renderLanes); - const cacheInstance: CacheInstance = - 
workInProgress.memoizedState.cacheInstance; - popCacheProvider(workInProgress, cacheInstance); + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); @@ -1500,15 +1502,15 @@ function completeWork( } if (enableCache) { - const cacheInstance: - | CacheInstance - | PooledCacheInstance - | null = (workInProgress.updateQueue: any); - if (cacheInstance !== null) { - if (cacheInstance.provider !== null) { - popCacheProvider(workInProgress, cacheInstance); + const suspendedCache: SuspendedCache | null = (workInProgress.updateQueue: any); + if (suspendedCache !== null) { + if (suspendedCache.tag === SuspendedCacheFreshTag) { + popCacheProvider( + workInProgress, + (suspendedCache: SuspendedCacheFresh).cache, + ); } else { - popCachePool(cacheInstance); + popCachePool((suspendedCache: SuspendedCachePool)); } } } @@ -1517,10 +1519,10 @@ function completeWork( } case CacheComponent: { if (enableCache) { - const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; - if (ownCacheInstance !== null) { + const cache: Cache | null = workInProgress.stateNode; + if (cache !== null) { // This is a cache provider. 
- popCacheProvider(workInProgress, ownCacheInstance); + popCacheProvider(workInProgress, cache); } bubbleProperties(workInProgress); return null; diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 3679f84220662..617b704bb78ef 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -19,7 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; -import type {CacheInstance} from './ReactFiberCacheComponent.new'; +import type {Cache} from './ReactFiberCacheComponent.new'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -47,6 +47,7 @@ import { DefaultLanePriority, } from './ReactFiberLane.new'; import {readContext} from './ReactFiberNewContext.new'; +import {HostRoot, CacheComponent} from './ReactWorkTags'; import { Update as UpdateEffect, Passive as PassiveEffect, @@ -1711,49 +1712,53 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { } function mountRefresh() { - const cacheInstance: CacheInstance = readContext(CacheContext); - return mountCallback(refreshCache.bind(null, cacheInstance), [cacheInstance]); + const hook = mountWorkInProgressHook(); + const refresh = (hook.memoizedState = refreshCache.bind( + null, + currentlyRenderingFiber, + )); + return refresh; } function updateRefresh() { - const cacheInstance: CacheInstance = readContext(CacheContext); - return updateCallback(refreshCache.bind(null, cacheInstance), [ - cacheInstance, - ]); + const hook = updateWorkInProgressHook(); + return hook.memoizedState; } -function refreshCache( - cacheInstance: CacheInstance, - seedKey: ?() => T, - seedValue: T, -) { - const provider = cacheInstance.provider; - +function refreshCache(fiber: Fiber, seedKey: ?() => 
T, seedValue: T) { // TODO: Consider warning if the refresh is at discrete priority, or if we // otherwise suspect that it wasn't batched properly. + let provider = fiber.return; + while (provider !== null) { + switch (provider.tag) { + case CacheComponent: + case HostRoot: { + const eventTime = requestEventTime(); + const lane = requestUpdateLane(provider); + // TODO: Does Cache work in legacy mode? Should decide and write a test. + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + const seededCache = new Map(); + if (seedKey !== null && seedKey !== undefined && root !== null) { + // Seed the cache with the value passed by the caller. This could be + // from a server mutation, or it could be a streaming response. + seededCache.set(seedKey, seedValue); + } - const eventTime = requestEventTime(); - const lane = requestUpdateLane(provider); - // TODO: Does Cache work in legacy mode? Should decide and write a test. - const root = scheduleUpdateOnFiber(provider, lane, eventTime); - - const seededCache = new Map(); - if (seedKey !== null && seedKey !== undefined && root !== null) { - // Seed the cache with the value passed by the caller. This could be from - // a server mutation, or it could be a streaming response. - seededCache.set(seedKey, seedValue); + // Schedule an update on the cache boundary to trigger a refresh. + const refreshUpdate = createUpdate(eventTime, lane); + const payload = { + cache: seededCache, + }; + refreshUpdate.payload = payload; + enqueueUpdate(provider, refreshUpdate); + return; + } + } + provider = provider.return; } - // Schedule an update on the cache boundary to trigger a refresh. - const refreshUpdate = createUpdate(eventTime, lane); - const payload = { - cacheInstance: { - provider: provider, - cache: seededCache, - }, - }; - refreshUpdate.payload = payload; - enqueueUpdate(provider, refreshUpdate); + // TODO: Warn if unmounted? 
} function dispatchAction( @@ -1870,24 +1875,13 @@ function getCacheForType(resourceType: () => T): T { if (!enableCache) { invariant(false, 'Not implemented.'); } - const cacheInstance: CacheInstance = readContext(CacheContext); - let cache = cacheInstance.cache; - if (cache === null) { - cache = cacheInstance.cache = new Map(); - // TODO: Warn if constructor returns undefined? Creates ambiguity with - // existence check above. (I don't want to use `has`. Two map lookups - // instead of one? Silly.) - const cacheForType = resourceType(); + const cache: Cache = readContext(CacheContext); + let cacheForType: T | void = (cache.get(resourceType): any); + if (cacheForType === undefined) { + cacheForType = resourceType(); cache.set(resourceType, cacheForType); - return cacheForType; - } else { - let cacheForType: T | void = (cache.get(resourceType): any); - if (cacheForType === undefined) { - cacheForType = resourceType(); - cache.set(resourceType, cacheForType); - } - return cacheForType; } + return cacheForType; } export const ContextOnlyDispatcher: Dispatcher = { diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 15ccfdf92c67d..922d6e793d09d 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -19,7 +19,7 @@ import type {HookFlags} from './ReactHookEffectTags'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {OpaqueIDType} from './ReactFiberHostConfig'; -import type {CacheInstance} from './ReactFiberCacheComponent.old'; +import type {Cache} from './ReactFiberCacheComponent.old'; import ReactSharedInternals from 'shared/ReactSharedInternals'; import { @@ -47,6 +47,7 @@ import { DefaultLanePriority, } from './ReactFiberLane.old'; import {readContext} from './ReactFiberNewContext.old'; +import {HostRoot, CacheComponent} from './ReactWorkTags'; 
import { Update as UpdateEffect, Passive as PassiveEffect, @@ -1711,49 +1712,53 @@ function rerenderOpaqueIdentifier(): OpaqueIDType | void { } function mountRefresh() { - const cacheInstance: CacheInstance = readContext(CacheContext); - return mountCallback(refreshCache.bind(null, cacheInstance), [cacheInstance]); + const hook = mountWorkInProgressHook(); + const refresh = (hook.memoizedState = refreshCache.bind( + null, + currentlyRenderingFiber, + )); + return refresh; } function updateRefresh() { - const cacheInstance: CacheInstance = readContext(CacheContext); - return updateCallback(refreshCache.bind(null, cacheInstance), [ - cacheInstance, - ]); + const hook = updateWorkInProgressHook(); + return hook.memoizedState; } -function refreshCache( - cacheInstance: CacheInstance, - seedKey: ?() => T, - seedValue: T, -) { - const provider = cacheInstance.provider; - +function refreshCache(fiber: Fiber, seedKey: ?() => T, seedValue: T) { // TODO: Consider warning if the refresh is at discrete priority, or if we // otherwise suspect that it wasn't batched properly. + let provider = fiber.return; + while (provider !== null) { + switch (provider.tag) { + case CacheComponent: + case HostRoot: { + const eventTime = requestEventTime(); + const lane = requestUpdateLane(provider); + // TODO: Does Cache work in legacy mode? Should decide and write a test. + const root = scheduleUpdateOnFiber(provider, lane, eventTime); + + const seededCache = new Map(); + if (seedKey !== null && seedKey !== undefined && root !== null) { + // Seed the cache with the value passed by the caller. This could be + // from a server mutation, or it could be a streaming response. + seededCache.set(seedKey, seedValue); + } - const eventTime = requestEventTime(); - const lane = requestUpdateLane(provider); - // TODO: Does Cache work in legacy mode? Should decide and write a test. 
- const root = scheduleUpdateOnFiber(provider, lane, eventTime); - - const seededCache = new Map(); - if (seedKey !== null && seedKey !== undefined && root !== null) { - // Seed the cache with the value passed by the caller. This could be from - // a server mutation, or it could be a streaming response. - seededCache.set(seedKey, seedValue); + // Schedule an update on the cache boundary to trigger a refresh. + const refreshUpdate = createUpdate(eventTime, lane); + const payload = { + cache: seededCache, + }; + refreshUpdate.payload = payload; + enqueueUpdate(provider, refreshUpdate); + return; + } + } + provider = provider.return; } - // Schedule an update on the cache boundary to trigger a refresh. - const refreshUpdate = createUpdate(eventTime, lane); - const payload = { - cacheInstance: { - provider: provider, - cache: seededCache, - }, - }; - refreshUpdate.payload = payload; - enqueueUpdate(provider, refreshUpdate); + // TODO: Warn if unmounted? } function dispatchAction( @@ -1870,24 +1875,13 @@ function getCacheForType(resourceType: () => T): T { if (!enableCache) { invariant(false, 'Not implemented.'); } - const cacheInstance: CacheInstance = readContext(CacheContext); - let cache = cacheInstance.cache; - if (cache === null) { - cache = cacheInstance.cache = new Map(); - // TODO: Warn if constructor returns undefined? Creates ambiguity with - // existence check above. (I don't want to use `has`. Two map lookups - // instead of one? Silly.) 
- const cacheForType = resourceType(); + const cache: Cache = readContext(CacheContext); + let cacheForType: T | void = (cache.get(resourceType): any); + if (cacheForType === undefined) { + cacheForType = resourceType(); cache.set(resourceType, cacheForType); - return cacheForType; - } else { - let cacheForType: T | void = (cache.get(resourceType): any); - if (cacheForType === undefined) { - cacheForType = resourceType(); - cache.set(resourceType, cacheForType); - } - return cacheForType; } + return cacheForType; } export const ContextOnlyDispatcher: Dispatcher = { diff --git a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js index c0ad97116c141..f190a9bbb7235 100644 --- a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js +++ b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js @@ -9,10 +9,7 @@ import type {ReactNodeList} from 'shared/ReactTypes'; import type {Lanes} from './ReactFiberLane.old'; -import type { - CacheInstance, - PooledCacheInstance, -} from './ReactFiberCacheComponent.new'; +import type {SuspendedCache} from './ReactFiberCacheComponent.new'; export type OffscreenProps = {| // TODO: Pick an API before exposing the Offscreen type. I've chosen an enum @@ -32,5 +29,5 @@ export type OffscreenState = {| // will represent the pending work that must be included in the render in // order to unhide the component. baseLanes: Lanes, - cache: CacheInstance | PooledCacheInstance | null, + cache: SuspendedCache | null, |}; diff --git a/packages/react-reconciler/src/ReactFiberRoot.new.js b/packages/react-reconciler/src/ReactFiberRoot.new.js index adc5d3b94df8f..036b7289d78fe 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.new.js +++ b/packages/react-reconciler/src/ReactFiberRoot.new.js @@ -109,10 +109,7 @@ export function createFiberRoot( // For the root cache, we won't bother to lazily initialize the map. Seed an // empty one. 
This saves use the trouble of having to initialize in an // updater function. - cacheInstance: { - cache: new Map(), - provider: uninitializedFiber, - }, + cache: new Map(), }; uninitializedFiber.memoizedState = initialState; } else { diff --git a/packages/react-reconciler/src/ReactFiberRoot.old.js b/packages/react-reconciler/src/ReactFiberRoot.old.js index be9166c828734..49d427b61788f 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.old.js +++ b/packages/react-reconciler/src/ReactFiberRoot.old.js @@ -109,10 +109,7 @@ export function createFiberRoot( // For the root cache, we won't bother to lazily initialize the map. Seed an // empty one. This saves use the trouble of having to initialize in an // updater function. - cacheInstance: { - cache: new Map(), - provider: uninitializedFiber, - }, + cache: new Map(), }; uninitializedFiber.memoizedState = initialState; } else { diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index 4815ca4279d97..f778f3366f0df 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -12,8 +12,10 @@ import type {Fiber, FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.new'; import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; import type { - CacheInstance, - PooledCacheInstance, + Cache, + SuspendedCache, + SuspendedCacheFresh, + SuspendedCachePool, } from './ReactFiberCacheComponent.new'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; @@ -48,6 +50,7 @@ import { import {popProvider} from './ReactFiberNewContext.new'; import {popRenderLanes} from './ReactFiberWorkLoop.new'; import { + SuspendedCacheFreshTag, popCacheProvider, popRootCachePool, popCachePool, @@ -81,9 +84,8 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { const root: 
FiberRoot = workInProgress.stateNode; popRootCachePool(root, renderLanes); - const cacheInstance: CacheInstance = - workInProgress.memoizedState.cacheInstance; - popCacheProvider(workInProgress, cacheInstance); + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); @@ -147,24 +149,24 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(workInProgress); if (enableCache) { - const cacheInstance: - | CacheInstance - | PooledCacheInstance - | null = (workInProgress.updateQueue: any); - if (cacheInstance !== null) { - if (cacheInstance.provider !== null) { - popCacheProvider(workInProgress, cacheInstance); + const suspendedCache: SuspendedCache | null = (workInProgress.updateQueue: any); + if (suspendedCache !== null) { + if (suspendedCache.tag === SuspendedCacheFreshTag) { + popCacheProvider( + workInProgress, + (suspendedCache: SuspendedCacheFresh).cache, + ); } else { - popCachePool(cacheInstance); + popCachePool((suspendedCache: SuspendedCachePool)); } } } return null; case CacheComponent: if (enableCache) { - const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; - if (ownCacheInstance !== null) { - popCacheProvider(workInProgress, ownCacheInstance); + const cache: Cache | null = workInProgress.stateNode; + if (cache !== null) { + popCacheProvider(workInProgress, cache); } } return null; @@ -187,9 +189,8 @@ function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { const root: FiberRoot = interruptedWork.stateNode; popRootCachePool(root, renderLanes); - const cacheInstance: CacheInstance = - interruptedWork.memoizedState.cacheInstance; - popCacheProvider(interruptedWork, cacheInstance); + const cache: Cache = interruptedWork.memoizedState.cache; + popCacheProvider(interruptedWork, cache); } popHostContainer(interruptedWork); 
popTopLevelLegacyContextObject(interruptedWork); @@ -217,25 +218,24 @@ function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(interruptedWork); if (enableCache) { - const cacheInstance: - | CacheInstance - | PooledCacheInstance - | null = (interruptedWork.updateQueue: any); - if (cacheInstance !== null) { - if (cacheInstance.provider !== null) { - popCacheProvider(interruptedWork, cacheInstance); + const suspendedCache: SuspendedCache | null = (interruptedWork.updateQueue: any); + if (suspendedCache !== null) { + if (suspendedCache.tag === SuspendedCacheFreshTag) { + popCacheProvider( + interruptedWork, + (suspendedCache: SuspendedCacheFresh).cache, + ); } else { - popCachePool(cacheInstance); + popCachePool((suspendedCache: SuspendedCachePool)); } } } break; case CacheComponent: if (enableCache) { - const ownCacheInstance: CacheInstance | null = - interruptedWork.stateNode; - if (ownCacheInstance !== null) { - popCacheProvider(interruptedWork, ownCacheInstance); + const cache: Cache | null = interruptedWork.stateNode; + if (cache !== null) { + popCacheProvider(interruptedWork, cache); } } break; diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 4a55f7b1fd62f..7fa2dabacfa62 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -12,8 +12,10 @@ import type {Fiber, FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.old'; import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; import type { - CacheInstance, - PooledCacheInstance, + Cache, + SuspendedCache, + SuspendedCacheFresh, + SuspendedCachePool, } from './ReactFiberCacheComponent.old'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; @@ -48,6 +50,7 @@ import { import 
{popProvider} from './ReactFiberNewContext.old'; import {popRenderLanes} from './ReactFiberWorkLoop.old'; import { + SuspendedCacheFreshTag, popCacheProvider, popRootCachePool, popCachePool, @@ -81,9 +84,8 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { const root: FiberRoot = workInProgress.stateNode; popRootCachePool(root, renderLanes); - const cacheInstance: CacheInstance = - workInProgress.memoizedState.cacheInstance; - popCacheProvider(workInProgress, cacheInstance); + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); } popHostContainer(workInProgress); popTopLevelLegacyContextObject(workInProgress); @@ -147,24 +149,24 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(workInProgress); if (enableCache) { - const cacheInstance: - | CacheInstance - | PooledCacheInstance - | null = (workInProgress.updateQueue: any); - if (cacheInstance !== null) { - if (cacheInstance.provider !== null) { - popCacheProvider(workInProgress, cacheInstance); + const suspendedCache: SuspendedCache | null = (workInProgress.updateQueue: any); + if (suspendedCache !== null) { + if (suspendedCache.tag === SuspendedCacheFreshTag) { + popCacheProvider( + workInProgress, + (suspendedCache: SuspendedCacheFresh).cache, + ); } else { - popCachePool(cacheInstance); + popCachePool((suspendedCache: SuspendedCachePool)); } } } return null; case CacheComponent: if (enableCache) { - const ownCacheInstance: CacheInstance | null = workInProgress.stateNode; - if (ownCacheInstance !== null) { - popCacheProvider(workInProgress, ownCacheInstance); + const cache: Cache | null = workInProgress.stateNode; + if (cache !== null) { + popCacheProvider(workInProgress, cache); } } return null; @@ -188,9 +190,8 @@ function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { const root: FiberRoot = interruptedWork.stateNode; popRootCachePool(root, renderLanes); - const 
cacheInstance: CacheInstance = - interruptedWork.memoizedState.cacheInstance; - popCacheProvider(interruptedWork, cacheInstance); + const cache: Cache = interruptedWork.memoizedState.cache; + popCacheProvider(interruptedWork, cache); } popTopLevelLegacyContextObject(interruptedWork); resetMutableSourceWorkInProgressVersions(); @@ -217,25 +218,24 @@ function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(interruptedWork); if (enableCache) { - const cacheInstance: - | CacheInstance - | PooledCacheInstance - | null = (interruptedWork.updateQueue: any); - if (cacheInstance !== null) { - if (cacheInstance.provider !== null) { - popCacheProvider(interruptedWork, cacheInstance); + const suspendedCache: SuspendedCache | null = (interruptedWork.updateQueue: any); + if (suspendedCache !== null) { + if (suspendedCache.tag === SuspendedCacheFreshTag) { + popCacheProvider( + interruptedWork, + (suspendedCache: SuspendedCacheFresh).cache, + ); } else { - popCachePool(cacheInstance); + popCachePool((suspendedCache: SuspendedCachePool)); } } } break; case CacheComponent: if (enableCache) { - const ownCacheInstance: CacheInstance | null = - interruptedWork.stateNode; - if (ownCacheInstance !== null) { - popCacheProvider(interruptedWork, ownCacheInstance); + const cache: Cache | null = interruptedWork.stateNode; + if (cache !== null) { + popCacheProvider(interruptedWork, cache); } } break; From 4bb19dac3b1197e70a20c7a401eeb251092d6ce9 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Fri, 18 Dec 2020 03:37:04 -0600 Subject: [PATCH 29/30] Detect refreshes by comparing to previous parent Removes the fresh/stale distinction from the context stack and instead detects refreshes by comparing the previous and next parent cache. This is closer to one of the earlier implementation drafts, and it's essentially how you'd implement this in userspace using context. 
I had moved away from this when I got off on a tangent thinking about how the cache pool should work; once that fell into place, it became more clear what the relationship is between the context stack, which you use for updates ("Here"), and the cache pool, which you use for newly mounted content ("There"). The only thing we're doing internally that can't really be achieved in userspace is transfering the cache across Suspense retries. Kinda neat. --- .../src/ReactFiberBeginWork.new.js | 340 +++++++----------- .../src/ReactFiberBeginWork.old.js | 340 +++++++----------- .../src/ReactFiberCacheComponent.new.js | 193 +++++----- .../src/ReactFiberCacheComponent.old.js | 193 +++++----- .../src/ReactFiberCompleteWork.new.js | 28 +- .../src/ReactFiberCompleteWork.old.js | 28 +- .../src/ReactFiberHooks.new.js | 5 +- .../src/ReactFiberHooks.old.js | 5 +- .../src/ReactFiberNewContext.new.js | 119 +----- .../src/ReactFiberNewContext.old.js | 119 +----- .../src/ReactFiberOffscreenComponent.js | 4 +- .../src/ReactFiberRoot.new.js | 7 +- .../src/ReactFiberRoot.old.js | 7 +- .../src/ReactFiberUnwindWork.new.js | 47 +-- .../src/ReactFiberUnwindWork.old.js | 47 +-- .../src/__tests__/ReactCache-test.js | 4 +- scripts/error-codes/codes.json | 3 +- 17 files changed, 507 insertions(+), 982 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.new.js b/packages/react-reconciler/src/ReactFiberBeginWork.new.js index e516e20aea52d..52e6216d442eb 100644 --- a/packages/react-reconciler/src/ReactFiberBeginWork.new.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.new.js @@ -25,9 +25,8 @@ import type { } from './ReactFiberOffscreenComponent'; import type { Cache, - SuspendedCache, - SuspendedCacheFresh, - SuspendedCachePool, + CacheComponentState, + SpawnedCachePool, } from './ReactFiberCacheComponent.new'; import type {UpdateQueue} from './ReactUpdateQueue.new'; @@ -138,6 +137,7 @@ import { isSuspenseInstanceFallback, registerSuspenseInstanceRetry, 
supportsHydration, + isPrimaryRenderer, } from './ReactFiberHostConfig'; import type {SuspenseInstance} from './ReactFiberHostConfig'; import {shouldSuspend} from './ReactFiberReconciler'; @@ -156,7 +156,6 @@ import {findFirstSuspended} from './ReactFiberSuspenseComponent.new'; import { pushProvider, propagateContextChange, - propagateCacheRefresh, readContext, prepareToReadContext, calculateChangedBits, @@ -212,17 +211,15 @@ import { import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.new'; import { - SuspendedCacheFreshTag, - SuspendedCachePoolTag, - pushFreshCacheProvider, - pushStaleCacheProvider, - hasFreshCacheProvider, requestCacheFromPool, + pushCacheProvider, pushRootCachePool, - pushCachePool, - getFreshCacheProviderIfExists, - getPooledCacheIfExists, + CacheContext, + getSuspendedCachePool, + restoreSpawnedCachePool, + getOffscreenDeferredCachePool, } from './ReactFiberCacheComponent.new'; +import {MAX_SIGNED_31_BIT_INT} from './MaxInts'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -598,11 +595,10 @@ function updateOffscreenComponent( const prevState: OffscreenState | null = current !== null ? current.memoizedState : null; - // If this is not null, this is a cache instance that was carried over from - // the previous render. We will push this to the cache context so that we can - // resume in-flight requests. However, we don't do this if there's already a - // fresh cache provider on the stack. - let suspendedCache: SuspendedCache | null = null; + // If this is not null, this is a cache pool that was carried over from the + // previous render. We will push this to the cache pool context so that we can + // resume in-flight requests. + let spawnedCachePool: SpawnedCachePool | null = null; if ( nextProps.mode === 'hidden' || @@ -614,7 +610,7 @@ function updateOffscreenComponent( // TODO: Figure out what we should do in Blocking mode. 
const nextState: OffscreenState = { baseLanes: NoLanes, - cache: null, + cachePool: null, }; workInProgress.memoizedState = nextState; pushRenderLanes(workInProgress, renderLanes); @@ -625,22 +621,12 @@ function updateOffscreenComponent( if (prevState !== null) { const prevBaseLanes = prevState.baseLanes; nextBaseLanes = mergeLanes(prevBaseLanes, renderLanes); - if (enableCache) { - // Keep a reference to the in-flight cache so we can resume later. If - // there's no fresh cache on the stack, there might be one from a - // previous render. If so, reuse it. - const freshCache = getFreshCacheProviderIfExists(); - suspendedCache = - freshCache !== null - ? ({ - tag: SuspendedCacheFreshTag, - cache: freshCache, - }: SuspendedCacheFresh) - : prevState.cache; - // We don't need to push to the cache context because we're about to + // Save the cache pool so we can resume later. + spawnedCachePool = getOffscreenDeferredCachePool(); + // We don't need to push to the cache pool because we're about to // bail out. There won't be a context mismatch because we only pop - // the cache context if `updateQueue` is non-null. + // the cache pool if `updateQueue` is non-null. } } else { nextBaseLanes = renderLanes; @@ -655,7 +641,7 @@ function updateOffscreenComponent( ); const nextState: OffscreenState = { baseLanes: nextBaseLanes, - cache: suspendedCache, + cachePool: spawnedCachePool, }; workInProgress.memoizedState = nextState; workInProgress.updateQueue = null; @@ -667,32 +653,23 @@ function updateOffscreenComponent( // This is the second render. The surrounding visible content has already // committed. Now we resume rendering the hidden tree. - if (enableCache && !hasFreshCacheProvider() && prevState !== null) { - // If there was a fresh cache during the render that spawned this one, - // resume using it. 
- const prevSuspendedCache = prevState.cache; - if (prevSuspendedCache !== null) { - suspendedCache = prevSuspendedCache; - // If the resumed cache has a provider, then it's a fresh cache. We - // should push it to the stack. Otherwise, it's from the cache pool - // and we should override the cache pool. - if (suspendedCache.tag === SuspendedCacheFreshTag) { - pushFreshCacheProvider( - workInProgress, - (suspendedCache: SuspendedCacheFresh).cache, - ); - // This isn't a refresh, it's a continuation of a previous render. - // So we don't need to propagate a context change. - } else { - pushCachePool((suspendedCache: SuspendedCachePool)); - } + if (enableCache && prevState !== null) { + // If the render that spawned this one accessed the cache pool, resume + // using the same cache. Unless the parent changed, since that means + // there was a refresh. + const prevCachePool = prevState.cachePool; + if (prevCachePool !== null) { + spawnedCachePool = restoreSpawnedCachePool( + workInProgress, + prevCachePool, + ); } } // Rendering at offscreen, so we can clear the base lanes. const nextState: OffscreenState = { baseLanes: NoLanes, - cache: null, + cachePool: null, }; workInProgress.memoizedState = nextState; // Push the lanes that were skipped when we bailed out. @@ -708,25 +685,16 @@ function updateOffscreenComponent( subtreeRenderLanes = mergeLanes(prevState.baseLanes, renderLanes); - if (enableCache && !hasFreshCacheProvider()) { - // If there was a fresh cache during the render that spawned this one, - // resume using it. - const prevSuspendedCache = prevState.cache; - if (prevSuspendedCache !== null) { - suspendedCache = prevSuspendedCache; - // If the resumed cache has a provider, then it's a fresh cache. We - // should push it to the stack. Otherwise, it's from the cache pool - // and we should override the cache pool. 
- if (suspendedCache.tag === SuspendedCacheFreshTag) { - pushFreshCacheProvider( - workInProgress, - (suspendedCache: SuspendedCacheFresh).cache, - ); - // This isn't a refresh, it's a continuation of a previous render. - // So we don't need to propagate a context change. - } else { - pushCachePool((suspendedCache: SuspendedCachePool)); - } + if (enableCache) { + // If the render that spawned this one accessed the cache pool, resume + // using the same cache. Unless the parent changed, since that means + // there was a refresh. + const prevCachePool = prevState.cachePool; + if (prevCachePool !== null) { + spawnedCachePool = restoreSpawnedCachePool( + workInProgress, + prevCachePool, + ); } } @@ -742,10 +710,9 @@ function updateOffscreenComponent( } if (enableCache) { - // If we have a suspended cache from a previous render attempt, then this - // will be non-null. We can use this to infer whether to push/pop the - // cache context. - workInProgress.updateQueue = suspendedCache; + // If we have a cache pool from a previous render attempt, then this will be + // non-null. We use this to infer whether to push/pop the cache context. + workInProgress.updateQueue = spawnedCachePool; } reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -767,79 +734,64 @@ function updateCacheComponent( return null; } - let cache: Cache | null = null; + prepareToReadContext(workInProgress, renderLanes); + const parentCache = readContext(CacheContext); + if (current === null) { - if (hasFreshCacheProvider()) { - // Fast path. The parent Cache is either a new mount or a refresh. We can - // inherit its cache. - cache = null; - } else { - // This is a newly mounted component. Request a fresh cache. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. 
Please ' + - 'file an issue.', - ); - // This will always be different from the parent cache; otherwise we would - // have detected a fresh cache provider in the earlier branch. - cache = requestCacheFromPool(renderLanes); - pushFreshCacheProvider(workInProgress, cache); - // No need to propagate a refresh, because this is a new tree. - } - // Initialize an update queue. We use this for refreshes. - workInProgress.memoizedState = {cache}; + // Initial mount. Request a fresh cache from the pool. + const freshCache = requestCacheFromPool(renderLanes); + const initialState: CacheComponentState = { + parent: parentCache, + cache: freshCache, + }; + workInProgress.memoizedState = initialState; initializeUpdateQueue(workInProgress); + pushCacheProvider(workInProgress, freshCache); } else { - // This component already mounted. - if (hasFreshCacheProvider()) { - // Fast path. The parent Cache is either a new mount or a refresh. We can - // inherit its cache. - cache = null; - } else if (includesSomeLane(renderLanes, updateLanes)) { - // A refresh was scheduled. If it was a refresh on this fiber, then we - // will have an update in the queue. Otherwise, it must have been an - // update on a parent, propagated via context. - - // First check the update queue. + // Check for updates + if (includesSomeLane(renderLanes, updateLanes)) { cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, null, null, renderLanes); - const prevCache: Cache = current.memoizedState.cache; - const nextCache: Cache = workInProgress.memoizedState.cache; - if (nextCache !== prevCache) { - // Received a refresh. - cache = nextCache; - pushFreshCacheProvider(workInProgress, cache); - // Refreshes propagate through the entire subtree. The refreshed cache - // will override nested caches. - propagateCacheRefresh(workInProgress, renderLanes); - } else { - // A parent cache boundary refreshed. So we can use the cache context. 
- cache = null; - - // If the update queue is empty, disconnect the old cache from the tree - // so it can be garbage collected. - if (workInProgress.lanes === NoLanes) { - const updateQueue: UpdateQueue = (workInProgress.updateQueue: any); - workInProgress.memoizedState = updateQueue.baseState = {cache: null}; - } + } + const prevState: CacheComponentState = current.memoizedState; + const nextState: CacheComponentState = workInProgress.memoizedState; + + // Compare the new parent cache to the previous to see detect there was + // a refresh. + if (prevState.parent !== parentCache) { + // Refresh in parent. Update the parent. + const derivedState: CacheComponentState = { + parent: parentCache, + cache: parentCache, + }; + + // Copied from getDerivedStateFromProps implementation. Once the update + // queue is empty, persist the derived state onto the base state. + workInProgress.memoizedState = derivedState; + if (workInProgress.lanes === NoLanes) { + const updateQueue: UpdateQueue = (workInProgress.updateQueue: any); + workInProgress.memoizedState = updateQueue.baseState = derivedState; } + + pushCacheProvider(workInProgress, parentCache); + // No need to propagate a context change because the refreshed parent + // already did. } else { - // Reuse the memoized cache. - cache = current.stateNode; - if (cache !== null) { - // There was no refresh, so no need to propagate to nested boundaries. - pushStaleCacheProvider(workInProgress, cache); + // The parent didn't refresh. Now check if this cache did. + const nextCache = nextState.cache; + pushCacheProvider(workInProgress, nextCache); + if (nextCache !== prevState.cache) { + // This cache refreshed. Propagate a context change. + propagateContextChange( + workInProgress, + CacheContext, + MAX_SIGNED_31_BIT_INT, + renderLanes, + ); } } } - // If this CacheComponent is the root of its tree, then `stateNode` will - // point to a cache instance. 
Otherwise, a null instance indicates that this - // CacheComponent inherits from a parent boundary. We can use this to infer - // whether to push/pop the cache context. - workInProgress.stateNode = cache; - const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; @@ -1197,19 +1149,17 @@ function updateHostRoot(current, workInProgress, renderLanes) { const root: FiberRoot = workInProgress.stateNode; if (enableCache) { - pushRootCachePool(root); - const nextCache: Cache = nextState.cache; + pushRootCachePool(root); + pushCacheProvider(workInProgress, nextCache); if (nextCache !== prevState.cache) { - pushFreshCacheProvider(workInProgress, nextCache); - propagateCacheRefresh(workInProgress, renderLanes); - } else { - if (prevChildren === null) { - // If there are no children, this must be the initial render. - pushFreshCacheProvider(workInProgress, nextCache); - } else { - pushStaleCacheProvider(workInProgress, nextCache); - } + // The root cache refreshed. + propagateContextChange( + workInProgress, + CacheContext, + MAX_SIGNED_31_BIT_INT, + renderLanes, + ); } } @@ -1777,40 +1727,9 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { - let suspendedCache: SuspendedCache | null = null; - if (enableCache) { - // Keep a reference to the in-flight cache so we can resume later. - const freshCache = getFreshCacheProviderIfExists(); - if (freshCache !== null) { - suspendedCache = ({ - tag: SuspendedCacheFreshTag, - cache: freshCache, - }: SuspendedCacheFresh); - } else { - // If there's no cache on the stack, a nested Cache boundary may have - // spawned a new one. Check the cache pool. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. 
Please ' + - 'file an issue.', - ); - // If a nested cache accessed the pool during this render, it will - // returned by this function. It will also return a cache that was - // accessed by a sibling tree, but that's also fine, since that's the - // cache that would have been claimed by any nested caches. - const pooledCache = getPooledCacheIfExists(); - if (pooledCache !== null) { - suspendedCache = ({ - tag: SuspendedCachePoolTag, - cache: pooledCache, - }: SuspendedCachePool); - } - } - } return { baseLanes: renderLanes, - cache: suspendedCache, + cachePool: getSuspendedCachePool(), }; } @@ -1818,38 +1737,33 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { - let suspendedCache: SuspendedCache | null = null; + let cachePool: SpawnedCachePool | null = null; if (enableCache) { - // Keep a reference to the in-flight cache so we can resume later. - const freshCache = getFreshCacheProviderIfExists(); - if (freshCache !== null) { - suspendedCache = ({ - tag: SuspendedCacheFreshTag, - cache: freshCache, - }: SuspendedCacheFresh); - } else { - // If there's no cache on the stack, check if there's a cache from the - // previous render. This is what we would have used for new content - // during the first pass when we attempted to unhide. - suspendedCache = prevOffscreenState.cache; - if (suspendedCache === null) { - // If a nested cache accessed the pool during this render, it will - // returned by this function. It will also return a cache that was - // accessed by a sibling tree, but that's also fine, since that's the - // cache that would have been claimed by any nested caches. 
- const pooledCache = getPooledCacheIfExists(); - if (pooledCache !== null) { - suspendedCache = ({ - tag: SuspendedCachePoolTag, - cache: pooledCache, - }: SuspendedCachePool); - } + const prevCachePool: SpawnedCachePool | null = prevOffscreenState.cachePool; + if (prevCachePool !== null) { + const parentCache = isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2; + if (prevCachePool.parent !== parentCache) { + // Detected a refresh in the parent. This overrides any previously + // suspended cache. + cachePool = { + parent: parentCache, + pool: parentCache, + }; + } else { + // We can reuse the cache from last time. The only thing that would have + // overridden it is a parent refresh, which we checked for above. + cachePool = prevCachePool; } + } else { + // If there's no previous cache pool, grab the current one. + cachePool = getSuspendedCachePool(); } } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), - cache: suspendedCache, + cachePool, }; } @@ -3349,10 +3263,9 @@ function beginWork( pushHostRootContext(workInProgress); if (enableCache) { const root: FiberRoot = workInProgress.stateNode; + const cache: Cache = current.memoizedState.cache; + pushCacheProvider(workInProgress, cache); pushRootCachePool(root); - - const nextCache: Cache = current.memoizedState.cache; - pushStaleCacheProvider(workInProgress, nextCache); } resetHydrationState(); break; @@ -3523,11 +3436,8 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const cache: Cache | null = current.stateNode; - if (cache !== null) { - pushStaleCacheProvider(workInProgress, cache); - } - workInProgress.stateNode = cache; + const cache: Cache = current.memoizedState.cache; + pushCacheProvider(workInProgress, cache); } break; } diff --git a/packages/react-reconciler/src/ReactFiberBeginWork.old.js b/packages/react-reconciler/src/ReactFiberBeginWork.old.js index 138ddfa29d5b5..5a25d0a9cd65f 100644 --- 
a/packages/react-reconciler/src/ReactFiberBeginWork.old.js +++ b/packages/react-reconciler/src/ReactFiberBeginWork.old.js @@ -25,9 +25,8 @@ import type { } from './ReactFiberOffscreenComponent'; import type { Cache, - SuspendedCache, - SuspendedCacheFresh, - SuspendedCachePool, + CacheComponentState, + SpawnedCachePool, } from './ReactFiberCacheComponent.old'; import type {UpdateQueue} from './ReactUpdateQueue.old'; @@ -138,6 +137,7 @@ import { isSuspenseInstanceFallback, registerSuspenseInstanceRetry, supportsHydration, + isPrimaryRenderer, } from './ReactFiberHostConfig'; import type {SuspenseInstance} from './ReactFiberHostConfig'; import {shouldSuspend} from './ReactFiberReconciler'; @@ -156,7 +156,6 @@ import {findFirstSuspended} from './ReactFiberSuspenseComponent.old'; import { pushProvider, propagateContextChange, - propagateCacheRefresh, readContext, prepareToReadContext, calculateChangedBits, @@ -212,17 +211,15 @@ import { import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import {setWorkInProgressVersion} from './ReactMutableSource.old'; import { - SuspendedCacheFreshTag, - SuspendedCachePoolTag, - pushFreshCacheProvider, - pushStaleCacheProvider, - hasFreshCacheProvider, requestCacheFromPool, + pushCacheProvider, pushRootCachePool, - pushCachePool, - getFreshCacheProviderIfExists, - getPooledCacheIfExists, + CacheContext, + getSuspendedCachePool, + restoreSpawnedCachePool, + getOffscreenDeferredCachePool, } from './ReactFiberCacheComponent.old'; +import {MAX_SIGNED_31_BIT_INT} from './MaxInts'; import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; @@ -598,11 +595,10 @@ function updateOffscreenComponent( const prevState: OffscreenState | null = current !== null ? current.memoizedState : null; - // If this is not null, this is a cache instance that was carried over from - // the previous render. We will push this to the cache context so that we can - // resume in-flight requests. 
However, we don't do this if there's already a - // fresh cache provider on the stack. - let suspendedCache: SuspendedCache | null = null; + // If this is not null, this is a cache pool that was carried over from the + // previous render. We will push this to the cache pool context so that we can + // resume in-flight requests. + let spawnedCachePool: SpawnedCachePool | null = null; if ( nextProps.mode === 'hidden' || @@ -614,7 +610,7 @@ function updateOffscreenComponent( // TODO: Figure out what we should do in Blocking mode. const nextState: OffscreenState = { baseLanes: NoLanes, - cache: null, + cachePool: null, }; workInProgress.memoizedState = nextState; pushRenderLanes(workInProgress, renderLanes); @@ -625,22 +621,12 @@ function updateOffscreenComponent( if (prevState !== null) { const prevBaseLanes = prevState.baseLanes; nextBaseLanes = mergeLanes(prevBaseLanes, renderLanes); - if (enableCache) { - // Keep a reference to the in-flight cache so we can resume later. If - // there's no fresh cache on the stack, there might be one from a - // previous render. If so, reuse it. - const freshCache = getFreshCacheProviderIfExists(); - suspendedCache = - freshCache !== null - ? ({ - tag: SuspendedCacheFreshTag, - cache: freshCache, - }: SuspendedCacheFresh) - : prevState.cache; - // We don't need to push to the cache context because we're about to + // Save the cache pool so we can resume later. + spawnedCachePool = getOffscreenDeferredCachePool(); + // We don't need to push to the cache pool because we're about to // bail out. There won't be a context mismatch because we only pop - // the cache context if `updateQueue` is non-null. + // the cache pool if `updateQueue` is non-null. 
} } else { nextBaseLanes = renderLanes; @@ -655,7 +641,7 @@ function updateOffscreenComponent( ); const nextState: OffscreenState = { baseLanes: nextBaseLanes, - cache: suspendedCache, + cachePool: spawnedCachePool, }; workInProgress.memoizedState = nextState; workInProgress.updateQueue = null; @@ -667,32 +653,23 @@ function updateOffscreenComponent( // This is the second render. The surrounding visible content has already // committed. Now we resume rendering the hidden tree. - if (enableCache && !hasFreshCacheProvider() && prevState !== null) { - // If there was a fresh cache during the render that spawned this one, - // resume using it. - const prevSuspendedCache = prevState.cache; - if (prevSuspendedCache !== null) { - suspendedCache = prevSuspendedCache; - // If the resumed cache has a provider, then it's a fresh cache. We - // should push it to the stack. Otherwise, it's from the cache pool - // and we should override the cache pool. - if (suspendedCache.tag === SuspendedCacheFreshTag) { - pushFreshCacheProvider( - workInProgress, - (suspendedCache: SuspendedCacheFresh).cache, - ); - // This isn't a refresh, it's a continuation of a previous render. - // So we don't need to propagate a context change. - } else { - pushCachePool((suspendedCache: SuspendedCachePool)); - } + if (enableCache && prevState !== null) { + // If the render that spawned this one accessed the cache pool, resume + // using the same cache. Unless the parent changed, since that means + // there was a refresh. + const prevCachePool = prevState.cachePool; + if (prevCachePool !== null) { + spawnedCachePool = restoreSpawnedCachePool( + workInProgress, + prevCachePool, + ); } } // Rendering at offscreen, so we can clear the base lanes. const nextState: OffscreenState = { baseLanes: NoLanes, - cache: null, + cachePool: null, }; workInProgress.memoizedState = nextState; // Push the lanes that were skipped when we bailed out. 
@@ -708,25 +685,16 @@ function updateOffscreenComponent( subtreeRenderLanes = mergeLanes(prevState.baseLanes, renderLanes); - if (enableCache && !hasFreshCacheProvider()) { - // If there was a fresh cache during the render that spawned this one, - // resume using it. - const prevSuspendedCache = prevState.cache; - if (prevSuspendedCache !== null) { - suspendedCache = prevSuspendedCache; - // If the resumed cache has a provider, then it's a fresh cache. We - // should push it to the stack. Otherwise, it's from the cache pool - // and we should override the cache pool. - if (suspendedCache.tag === SuspendedCacheFreshTag) { - pushFreshCacheProvider( - workInProgress, - (suspendedCache: SuspendedCacheFresh).cache, - ); - // This isn't a refresh, it's a continuation of a previous render. - // So we don't need to propagate a context change. - } else { - pushCachePool((suspendedCache: SuspendedCachePool)); - } + if (enableCache) { + // If the render that spawned this one accessed the cache pool, resume + // using the same cache. Unless the parent changed, since that means + // there was a refresh. + const prevCachePool = prevState.cachePool; + if (prevCachePool !== null) { + spawnedCachePool = restoreSpawnedCachePool( + workInProgress, + prevCachePool, + ); } } @@ -742,10 +710,9 @@ function updateOffscreenComponent( } if (enableCache) { - // If we have a suspended cache from a previous render attempt, then this - // will be non-null. We can use this to infer whether to push/pop the - // cache context. - workInProgress.updateQueue = suspendedCache; + // If we have a cache pool from a previous render attempt, then this will be + // non-null. We use this to infer whether to push/pop the cache context. 
+ workInProgress.updateQueue = spawnedCachePool; } reconcileChildren(current, workInProgress, nextChildren, renderLanes); @@ -767,79 +734,64 @@ function updateCacheComponent( return null; } - let cache: Cache | null = null; + prepareToReadContext(workInProgress, renderLanes); + const parentCache = readContext(CacheContext); + if (current === null) { - if (hasFreshCacheProvider()) { - // Fast path. The parent Cache is either a new mount or a refresh. We can - // inherit its cache. - cache = null; - } else { - // This is a newly mounted component. Request a fresh cache. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - // This will always be different from the parent cache; otherwise we would - // have detected a fresh cache provider in the earlier branch. - cache = requestCacheFromPool(renderLanes); - pushFreshCacheProvider(workInProgress, cache); - // No need to propagate a refresh, because this is a new tree. - } - // Initialize an update queue. We use this for refreshes. - workInProgress.memoizedState = {cache}; + // Initial mount. Request a fresh cache from the pool. + const freshCache = requestCacheFromPool(renderLanes); + const initialState: CacheComponentState = { + parent: parentCache, + cache: freshCache, + }; + workInProgress.memoizedState = initialState; initializeUpdateQueue(workInProgress); + pushCacheProvider(workInProgress, freshCache); } else { - // This component already mounted. - if (hasFreshCacheProvider()) { - // Fast path. The parent Cache is either a new mount or a refresh. We can - // inherit its cache. - cache = null; - } else if (includesSomeLane(renderLanes, updateLanes)) { - // A refresh was scheduled. If it was a refresh on this fiber, then we - // will have an update in the queue. Otherwise, it must have been an - // update on a parent, propagated via context. - - // First check the update queue. 
+ // Check for updates + if (includesSomeLane(renderLanes, updateLanes)) { cloneUpdateQueue(current, workInProgress); processUpdateQueue(workInProgress, null, null, renderLanes); - const prevCache: Cache = current.memoizedState.cache; - const nextCache: Cache = workInProgress.memoizedState.cache; - if (nextCache !== prevCache) { - // Received a refresh. - cache = nextCache; - pushFreshCacheProvider(workInProgress, cache); - // Refreshes propagate through the entire subtree. The refreshed cache - // will override nested caches. - propagateCacheRefresh(workInProgress, renderLanes); - } else { - // A parent cache boundary refreshed. So we can use the cache context. - cache = null; - - // If the update queue is empty, disconnect the old cache from the tree - // so it can be garbage collected. - if (workInProgress.lanes === NoLanes) { - const updateQueue: UpdateQueue = (workInProgress.updateQueue: any); - workInProgress.memoizedState = updateQueue.baseState = {cache: null}; - } + } + const prevState: CacheComponentState = current.memoizedState; + const nextState: CacheComponentState = workInProgress.memoizedState; + + // Compare the new parent cache to the previous to see detect there was + // a refresh. + if (prevState.parent !== parentCache) { + // Refresh in parent. Update the parent. + const derivedState: CacheComponentState = { + parent: parentCache, + cache: parentCache, + }; + + // Copied from getDerivedStateFromProps implementation. Once the update + // queue is empty, persist the derived state onto the base state. + workInProgress.memoizedState = derivedState; + if (workInProgress.lanes === NoLanes) { + const updateQueue: UpdateQueue = (workInProgress.updateQueue: any); + workInProgress.memoizedState = updateQueue.baseState = derivedState; } + + pushCacheProvider(workInProgress, parentCache); + // No need to propagate a context change because the refreshed parent + // already did. } else { - // Reuse the memoized cache. 
- cache = current.stateNode; - if (cache !== null) { - // There was no refresh, so no need to propagate to nested boundaries. - pushStaleCacheProvider(workInProgress, cache); + // The parent didn't refresh. Now check if this cache did. + const nextCache = nextState.cache; + pushCacheProvider(workInProgress, nextCache); + if (nextCache !== prevState.cache) { + // This cache refreshed. Propagate a context change. + propagateContextChange( + workInProgress, + CacheContext, + MAX_SIGNED_31_BIT_INT, + renderLanes, + ); } } } - // If this CacheComponent is the root of its tree, then `stateNode` will - // point to a cache instance. Otherwise, a null instance indicates that this - // CacheComponent inherits from a parent boundary. We can use this to infer - // whether to push/pop the cache context. - workInProgress.stateNode = cache; - const nextChildren = workInProgress.pendingProps.children; reconcileChildren(current, workInProgress, nextChildren, renderLanes); return workInProgress.child; @@ -1197,19 +1149,17 @@ function updateHostRoot(current, workInProgress, renderLanes) { const root: FiberRoot = workInProgress.stateNode; if (enableCache) { - pushRootCachePool(root); - const nextCache: Cache = nextState.cache; + pushRootCachePool(root); + pushCacheProvider(workInProgress, nextCache); if (nextCache !== prevState.cache) { - pushFreshCacheProvider(workInProgress, nextCache); - propagateCacheRefresh(workInProgress, renderLanes); - } else { - if (prevChildren === null) { - // If there are no children, this must be the initial render. - pushFreshCacheProvider(workInProgress, nextCache); - } else { - pushStaleCacheProvider(workInProgress, nextCache); - } + // The root cache refreshed. 
+ propagateContextChange( + workInProgress, + CacheContext, + MAX_SIGNED_31_BIT_INT, + renderLanes, + ); } } @@ -1777,40 +1727,9 @@ const SUSPENDED_MARKER: SuspenseState = { }; function mountSuspenseOffscreenState(renderLanes: Lanes): OffscreenState { - let suspendedCache: SuspendedCache | null = null; - if (enableCache) { - // Keep a reference to the in-flight cache so we can resume later. - const freshCache = getFreshCacheProviderIfExists(); - if (freshCache !== null) { - suspendedCache = ({ - tag: SuspendedCacheFreshTag, - cache: freshCache, - }: SuspendedCacheFresh); - } else { - // If there's no cache on the stack, a nested Cache boundary may have - // spawned a new one. Check the cache pool. - const root = getWorkInProgressRoot(); - invariant( - root !== null, - 'Expected a work-in-progress root. This is a bug in React. Please ' + - 'file an issue.', - ); - // If a nested cache accessed the pool during this render, it will - // returned by this function. It will also return a cache that was - // accessed by a sibling tree, but that's also fine, since that's the - // cache that would have been claimed by any nested caches. - const pooledCache = getPooledCacheIfExists(); - if (pooledCache !== null) { - suspendedCache = ({ - tag: SuspendedCachePoolTag, - cache: pooledCache, - }: SuspendedCachePool); - } - } - } return { baseLanes: renderLanes, - cache: suspendedCache, + cachePool: getSuspendedCachePool(), }; } @@ -1818,38 +1737,33 @@ function updateSuspenseOffscreenState( prevOffscreenState: OffscreenState, renderLanes: Lanes, ): OffscreenState { - let suspendedCache: SuspendedCache | null = null; + let cachePool: SpawnedCachePool | null = null; if (enableCache) { - // Keep a reference to the in-flight cache so we can resume later. 
- const freshCache = getFreshCacheProviderIfExists(); - if (freshCache !== null) { - suspendedCache = ({ - tag: SuspendedCacheFreshTag, - cache: freshCache, - }: SuspendedCacheFresh); - } else { - // If there's no cache on the stack, check if there's a cache from the - // previous render. This is what we would have used for new content - // during the first pass when we attempted to unhide. - suspendedCache = prevOffscreenState.cache; - if (suspendedCache === null) { - // If a nested cache accessed the pool during this render, it will - // returned by this function. It will also return a cache that was - // accessed by a sibling tree, but that's also fine, since that's the - // cache that would have been claimed by any nested caches. - const pooledCache = getPooledCacheIfExists(); - if (pooledCache !== null) { - suspendedCache = ({ - tag: SuspendedCachePoolTag, - cache: pooledCache, - }: SuspendedCachePool); - } + const prevCachePool: SpawnedCachePool | null = prevOffscreenState.cachePool; + if (prevCachePool !== null) { + const parentCache = isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2; + if (prevCachePool.parent !== parentCache) { + // Detected a refresh in the parent. This overrides any previously + // suspended cache. + cachePool = { + parent: parentCache, + pool: parentCache, + }; + } else { + // We can reuse the cache from last time. The only thing that would have + // overridden it is a parent refresh, which we checked for above. + cachePool = prevCachePool; } + } else { + // If there's no previous cache pool, grab the current one. 
+ cachePool = getSuspendedCachePool(); } } return { baseLanes: mergeLanes(prevOffscreenState.baseLanes, renderLanes), - cache: suspendedCache, + cachePool, }; } @@ -3349,10 +3263,9 @@ function beginWork( pushHostRootContext(workInProgress); if (enableCache) { const root: FiberRoot = workInProgress.stateNode; + const cache: Cache = current.memoizedState.cache; + pushCacheProvider(workInProgress, cache); pushRootCachePool(root); - - const nextCache: Cache = current.memoizedState.cache; - pushStaleCacheProvider(workInProgress, nextCache); } resetHydrationState(); break; @@ -3523,11 +3436,8 @@ function beginWork( } case CacheComponent: { if (enableCache) { - const cache: Cache | null = current.stateNode; - if (cache !== null) { - pushStaleCacheProvider(workInProgress, cache); - } - workInProgress.stateNode = cache; + const cache: Cache = current.memoizedState.cache; + pushCacheProvider(workInProgress, cache); } break; } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.new.js b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js index 629211211405d..b5ae3ccc611a8 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.new.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.new.js @@ -10,30 +10,27 @@ import type {ReactContext} from 'shared/ReactTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.new'; +import type {StackCursor} from './ReactFiberStack.new'; import {enableCache} from 'shared/ReactFeatureFlags'; import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; -import {HostRoot} from './ReactWorkTags'; +import {isPrimaryRenderer} from './ReactFiberHostConfig'; +import {createCursor, push, pop} from './ReactFiberStack.new'; import {pushProvider, popProvider} from './ReactFiberNewContext.new'; export type Cache = Map<() => mixed, mixed>; -export type SuspendedCacheFresh = {| - tag: 0, - cache: Cache, +export type CacheComponentState = {| + +parent: Cache, + +cache: 
Cache, |}; -export type SuspendedCachePool = {| - tag: 1, - cache: Cache, +export type SpawnedCachePool = {| + +parent: Cache, + +pool: Cache, |}; -export type SuspendedCache = SuspendedCacheFresh | SuspendedCachePool; - -export const SuspendedCacheFreshTag = 0; -export const SuspendedCachePoolTag = 1; - export const CacheContext: ReactContext = enableCache ? { $$typeof: REACT_CONTEXT_TYPE, @@ -53,45 +50,18 @@ if (__DEV__ && enableCache) { CacheContext._currentRenderer2 = null; } -// A parent cache refresh always overrides any nested cache. So there will only -// ever be a single fresh cache on the context stack. -let freshCache: Cache | null = null; - -// The cache that we retrived from the pool during this render, if any +// The cache that newly mounted Cache boundaries should use. It's either +// retrieved from the cache pool, or the result of a refresh. let pooledCache: Cache | null = null; -export function pushStaleCacheProvider(workInProgress: Fiber, cache: Cache) { - if (!enableCache) { - return; - } - if (__DEV__) { - if (freshCache !== null) { - console.error( - 'Already inside a fresh cache boundary. This is a bug in React.', - ); - } - } - pushProvider(workInProgress, CacheContext, cache); -} +// When retrying a Suspense/Offscreen boundary, we override pooledCache with the +// cache from the render that suspended. +const prevFreshCacheOnStack: StackCursor = createCursor(null); -export function pushFreshCacheProvider(workInProgress: Fiber, cache: Cache) { +export function pushCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } - if (__DEV__) { - if ( - freshCache !== null && - // TODO: Remove this exception for roots. There are a few tests that throw - // in pushHostContainer, before the cache context is pushed. Not a huge - // issue, but should still fix. - workInProgress.tag !== HostRoot - ) { - console.error( - 'Already inside a fresh cache boundary. 
This is a bug in React.', - ); - } - } - freshCache = cache; pushProvider(workInProgress, CacheContext, cache); } @@ -99,31 +69,9 @@ export function popCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } - if (__DEV__) { - if (freshCache !== null && freshCache !== cache) { - console.error( - 'Unexpected cache instance on context. This is a bug in React.', - ); - } - } - freshCache = null; popProvider(CacheContext, workInProgress); } -export function hasFreshCacheProvider() { - if (!enableCache) { - return false; - } - return freshCache !== null; -} - -export function getFreshCacheProviderIfExists(): Cache | null { - if (!enableCache) { - return null; - } - return freshCache; -} - export function requestCacheFromPool(renderLanes: Lanes): Cache { if (!enableCache) { return (null: any); @@ -136,10 +84,6 @@ export function requestCacheFromPool(renderLanes: Lanes): Cache { return pooledCache; } -export function getPooledCacheIfExists(): Cache | null { - return pooledCache; -} - export function pushRootCachePool(root: FiberRoot) { if (!enableCache) { return; @@ -161,37 +105,100 @@ export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) { // once all the transitions that depend on it (which we track with // `pooledCacheLanes`) have committed. root.pooledCache = pooledCache; - root.pooledCacheLanes |= renderLanes; + if (pooledCache !== null) { + root.pooledCacheLanes |= renderLanes; + } } -export function pushCachePool(suspendedCache: SuspendedCachePool) { +export function restoreSpawnedCachePool( + offscreenWorkInProgress: Fiber, + prevCachePool: SpawnedCachePool, +): SpawnedCachePool | null { if (!enableCache) { - return; + return (null: any); + } + const nextParentCache = isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2; + if (nextParentCache !== prevCachePool.parent) { + // There was a refresh. Don't bother restoring anything since the refresh + // will override it. 
+ return null; + } else { + // No refresh. Resume with the previous cache. This will override the cache + // pool so that any new Cache boundaries in the subtree use this one instead + // of requesting a fresh one. + push(prevFreshCacheOnStack, pooledCache, offscreenWorkInProgress); + pooledCache = prevCachePool.pool; + + // Return the cache pool to signal that we did in fact push it. We will + // assign this to the field on the fiber so we know to pop the context. + return prevCachePool; } - // This will temporarily override the pooled cache for this render, so that - // any new Cache boundaries in the subtree use this one. The previous value on - // the "stack" is stored on the cache instance. We will restore it during the - // complete phase. - // - // The more straightforward way to do this would be to use the array-based - // stack (push/pop). Maybe this is too clever. - const prevPooledCacheOnStack = pooledCache; - pooledCache = suspendedCache.cache; - // This is never supposed to be null. I'm cheating. Sorry. It will be reset to - // the correct type when we pop. - suspendedCache.cache = ((prevPooledCacheOnStack: any): Cache); } -export function popCachePool(suspendedCache: SuspendedCachePool) { +// Note: Ideally, `popCachePool` would return this value, and then we would pass +// it to `getSuspendedCachePool`. But factoring reasons, those two functions are +// in different phases/files. They are always called in sequence, though, so we +// can stash the value here temporarily. +let _suspendedPooledCache: Cache | null = null; + +export function popCachePool(workInProgress: Fiber) { if (!enableCache) { return; } - const retryCache: Cache = (pooledCache: any); - if (__DEV__) { - if (retryCache === null) { - console.error('Expected to have a pooled cache. 
This is a bug in React.'); + _suspendedPooledCache = pooledCache; + pooledCache = prevFreshCacheOnStack.current; + pop(prevFreshCacheOnStack, workInProgress); +} + +export function getSuspendedCachePool(): SpawnedCachePool | null { + if (!enableCache) { + return null; + } + + // We check the cache on the stack first, since that's the one any new Caches + // would have accessed. + let pool = pooledCache; + if (pool === null) { + // There's no pooled cache above us in the stack. However, a child in the + // suspended tree may have requested a fresh cache pool. If so, we would + // have unwound it with `popCachePool`. + if (_suspendedPooledCache !== null) { + pool = _suspendedPooledCache; + _suspendedPooledCache = null; + } else { + // There's no suspended cache pool. + return null; } } - pooledCache = suspendedCache.cache; - suspendedCache.cache = retryCache; + + return { + // We must also save the parent, so that when we resume we can detect + // a refresh. + parent: isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2, + pool, + }; +} + +export function getOffscreenDeferredCachePool(): SpawnedCachePool | null { + if (!enableCache) { + return null; + } + + if (pooledCache === null) { + // There's no deferred cache pool. + return null; + } + + return { + // We must also store the parent, so that when we resume we can detect + // a refresh. + parent: isPrimaryRenderer + ? 
CacheContext._currentValue + : CacheContext._currentValue2, + pool: pooledCache, + }; } diff --git a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js index da5ed5b88de21..8882f7dbd2d48 100644 --- a/packages/react-reconciler/src/ReactFiberCacheComponent.old.js +++ b/packages/react-reconciler/src/ReactFiberCacheComponent.old.js @@ -10,30 +10,27 @@ import type {ReactContext} from 'shared/ReactTypes'; import type {FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.old'; +import type {StackCursor} from './ReactFiberStack.old'; import {enableCache} from 'shared/ReactFeatureFlags'; import {REACT_CONTEXT_TYPE} from 'shared/ReactSymbols'; -import {HostRoot} from './ReactWorkTags'; +import {isPrimaryRenderer} from './ReactFiberHostConfig'; +import {createCursor, push, pop} from './ReactFiberStack.old'; import {pushProvider, popProvider} from './ReactFiberNewContext.old'; export type Cache = Map<() => mixed, mixed>; -export type SuspendedCacheFresh = {| - tag: 0, - cache: Cache, +export type CacheComponentState = {| + +parent: Cache, + +cache: Cache, |}; -export type SuspendedCachePool = {| - tag: 1, - cache: Cache, +export type SpawnedCachePool = {| + +parent: Cache, + +pool: Cache, |}; -export type SuspendedCache = SuspendedCacheFresh | SuspendedCachePool; - -export const SuspendedCacheFreshTag = 0; -export const SuspendedCachePoolTag = 1; - export const CacheContext: ReactContext = enableCache ? { $$typeof: REACT_CONTEXT_TYPE, @@ -53,45 +50,18 @@ if (__DEV__ && enableCache) { CacheContext._currentRenderer2 = null; } -// A parent cache refresh always overrides any nested cache. So there will only -// ever be a single fresh cache on the context stack. -let freshCache: Cache | null = null; - -// The cache that we retrived from the pool during this render, if any +// The cache that newly mounted Cache boundaries should use. 
It's either +// retrieved from the cache pool, or the result of a refresh. let pooledCache: Cache | null = null; -export function pushStaleCacheProvider(workInProgress: Fiber, cache: Cache) { - if (!enableCache) { - return; - } - if (__DEV__) { - if (freshCache !== null) { - console.error( - 'Already inside a fresh cache boundary. This is a bug in React.', - ); - } - } - pushProvider(workInProgress, CacheContext, cache); -} +// When retrying a Suspense/Offscreen boundary, we override pooledCache with the +// cache from the render that suspended. +const prevFreshCacheOnStack: StackCursor = createCursor(null); -export function pushFreshCacheProvider(workInProgress: Fiber, cache: Cache) { +export function pushCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } - if (__DEV__) { - if ( - freshCache !== null && - // TODO: Remove this exception for roots. There are a few tests that throw - // in pushHostContainer, before the cache context is pushed. Not a huge - // issue, but should still fix. - workInProgress.tag !== HostRoot - ) { - console.error( - 'Already inside a fresh cache boundary. This is a bug in React.', - ); - } - } - freshCache = cache; pushProvider(workInProgress, CacheContext, cache); } @@ -99,31 +69,9 @@ export function popCacheProvider(workInProgress: Fiber, cache: Cache) { if (!enableCache) { return; } - if (__DEV__) { - if (freshCache !== null && freshCache !== cache) { - console.error( - 'Unexpected cache instance on context. 
This is a bug in React.', - ); - } - } - freshCache = null; popProvider(CacheContext, workInProgress); } -export function hasFreshCacheProvider() { - if (!enableCache) { - return false; - } - return freshCache !== null; -} - -export function getFreshCacheProviderIfExists(): Cache | null { - if (!enableCache) { - return null; - } - return freshCache; -} - export function requestCacheFromPool(renderLanes: Lanes): Cache { if (!enableCache) { return (null: any); @@ -136,10 +84,6 @@ export function requestCacheFromPool(renderLanes: Lanes): Cache { return pooledCache; } -export function getPooledCacheIfExists(): Cache | null { - return pooledCache; -} - export function pushRootCachePool(root: FiberRoot) { if (!enableCache) { return; @@ -161,37 +105,100 @@ export function popRootCachePool(root: FiberRoot, renderLanes: Lanes) { // once all the transitions that depend on it (which we track with // `pooledCacheLanes`) have committed. root.pooledCache = pooledCache; - root.pooledCacheLanes |= renderLanes; + if (pooledCache !== null) { + root.pooledCacheLanes |= renderLanes; + } } -export function pushCachePool(suspendedCache: SuspendedCachePool) { +export function restoreSpawnedCachePool( + offscreenWorkInProgress: Fiber, + prevCachePool: SpawnedCachePool, +): SpawnedCachePool | null { if (!enableCache) { - return; + return (null: any); + } + const nextParentCache = isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2; + if (nextParentCache !== prevCachePool.parent) { + // There was a refresh. Don't bother restoring anything since the refresh + // will override it. + return null; + } else { + // No refresh. Resume with the previous cache. This will override the cache + // pool so that any new Cache boundaries in the subtree use this one instead + // of requesting a fresh one. + push(prevFreshCacheOnStack, pooledCache, offscreenWorkInProgress); + pooledCache = prevCachePool.pool; + + // Return the cache pool to signal that we did in fact push it. 
We will + // assign this to the field on the fiber so we know to pop the context. + return prevCachePool; } - // This will temporarily override the pooled cache for this render, so that - // any new Cache boundaries in the subtree use this one. The previous value on - // the "stack" is stored on the cache instance. We will restore it during the - // complete phase. - // - // The more straightforward way to do this would be to use the array-based - // stack (push/pop). Maybe this is too clever. - const prevPooledCacheOnStack = pooledCache; - pooledCache = suspendedCache.cache; - // This is never supposed to be null. I'm cheating. Sorry. It will be reset to - // the correct type when we pop. - suspendedCache.cache = ((prevPooledCacheOnStack: any): Cache); } -export function popCachePool(suspendedCache: SuspendedCachePool) { +// Note: Ideally, `popCachePool` would return this value, and then we would pass +// it to `getSuspendedCachePool`. But factoring reasons, those two functions are +// in different phases/files. They are always called in sequence, though, so we +// can stash the value here temporarily. +let _suspendedPooledCache: Cache | null = null; + +export function popCachePool(workInProgress: Fiber) { if (!enableCache) { return; } - const retryCache: Cache = (pooledCache: any); - if (__DEV__) { - if (retryCache === null) { - console.error('Expected to have a pooled cache. This is a bug in React.'); + _suspendedPooledCache = pooledCache; + pooledCache = prevFreshCacheOnStack.current; + pop(prevFreshCacheOnStack, workInProgress); +} + +export function getSuspendedCachePool(): SpawnedCachePool | null { + if (!enableCache) { + return null; + } + + // We check the cache on the stack first, since that's the one any new Caches + // would have accessed. + let pool = pooledCache; + if (pool === null) { + // There's no pooled cache above us in the stack. However, a child in the + // suspended tree may have requested a fresh cache pool. 
If so, we would + // have unwound it with `popCachePool`. + if (_suspendedPooledCache !== null) { + pool = _suspendedPooledCache; + _suspendedPooledCache = null; + } else { + // There's no suspended cache pool. + return null; } } - pooledCache = suspendedCache.cache; - suspendedCache.cache = retryCache; + + return { + // We must also save the parent, so that when we resume we can detect + // a refresh. + parent: isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2, + pool, + }; +} + +export function getOffscreenDeferredCachePool(): SpawnedCachePool | null { + if (!enableCache) { + return null; + } + + if (pooledCache === null) { + // There's no deferred cache pool. + return null; + } + + return { + // We must also store the parent, so that when we resume we can detect + // a refresh. + parent: isPrimaryRenderer + ? CacheContext._currentValue + : CacheContext._currentValue2, + pool: pooledCache, + }; } diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js index 6f29a28b92c2b..92fa3eeacb603 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.new.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.new.js @@ -28,12 +28,7 @@ import type { } from './ReactFiberSuspenseComponent.new'; import type {SuspenseContext} from './ReactFiberSuspenseContext.new'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; -import type { - SuspendedCache, - SuspendedCacheFresh, - SuspendedCachePool, - Cache, -} from './ReactFiberCacheComponent.new'; +import type {Cache, SpawnedCachePool} from './ReactFiberCacheComponent.new'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; @@ -163,7 +158,6 @@ import {resetChildFibers} from './ReactChildFiber.new'; import {createScopeInstance} from './ReactFiberScope.new'; import {transferActualDuration} from './ReactProfilerTimer.new'; import { - 
SuspendedCacheFreshTag, popCacheProvider, popRootCachePool, popCachePool, @@ -1502,16 +1496,9 @@ function completeWork( } if (enableCache) { - const suspendedCache: SuspendedCache | null = (workInProgress.updateQueue: any); - if (suspendedCache !== null) { - if (suspendedCache.tag === SuspendedCacheFreshTag) { - popCacheProvider( - workInProgress, - (suspendedCache: SuspendedCacheFresh).cache, - ); - } else { - popCachePool((suspendedCache: SuspendedCachePool)); - } + const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(workInProgress); } } @@ -1519,11 +1506,8 @@ function completeWork( } case CacheComponent: { if (enableCache) { - const cache: Cache | null = workInProgress.stateNode; - if (cache !== null) { - // This is a cache provider. - popCacheProvider(workInProgress, cache); - } + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); bubbleProperties(workInProgress); return null; } diff --git a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js index 1e12c0ccad875..a44029c8347c4 100644 --- a/packages/react-reconciler/src/ReactFiberCompleteWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCompleteWork.old.js @@ -28,12 +28,7 @@ import type { } from './ReactFiberSuspenseComponent.old'; import type {SuspenseContext} from './ReactFiberSuspenseContext.old'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; -import type { - SuspendedCache, - SuspendedCacheFresh, - SuspendedCachePool, - Cache, -} from './ReactFiberCacheComponent.old'; +import type {Cache, SpawnedCachePool} from './ReactFiberCacheComponent.old'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; @@ -163,7 +158,6 @@ import {resetChildFibers} from './ReactChildFiber.old'; import {createScopeInstance} from 
'./ReactFiberScope.old'; import {transferActualDuration} from './ReactProfilerTimer.old'; import { - SuspendedCacheFreshTag, popCacheProvider, popRootCachePool, popCachePool, @@ -1502,16 +1496,9 @@ function completeWork( } if (enableCache) { - const suspendedCache: SuspendedCache | null = (workInProgress.updateQueue: any); - if (suspendedCache !== null) { - if (suspendedCache.tag === SuspendedCacheFreshTag) { - popCacheProvider( - workInProgress, - (suspendedCache: SuspendedCacheFresh).cache, - ); - } else { - popCachePool((suspendedCache: SuspendedCachePool)); - } + const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(workInProgress); } } @@ -1519,11 +1506,8 @@ function completeWork( } case CacheComponent: { if (enableCache) { - const cache: Cache | null = workInProgress.stateNode; - if (cache !== null) { - // This is a cache provider. - popCacheProvider(workInProgress, cache); - } + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); bubbleProperties(workInProgress); return null; } diff --git a/packages/react-reconciler/src/ReactFiberHooks.new.js b/packages/react-reconciler/src/ReactFiberHooks.new.js index 617b704bb78ef..d5ad5048926ea 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.new.js +++ b/packages/react-reconciler/src/ReactFiberHooks.new.js @@ -1726,6 +1726,7 @@ function updateRefresh() { } function refreshCache(fiber: Fiber, seedKey: ?() => T, seedValue: T) { + // TODO: Does Cache work in legacy mode? Should decide and write a test. // TODO: Consider warning if the refresh is at discrete priority, or if we // otherwise suspect that it wasn't batched properly. 
let provider = fiber.return; @@ -1733,9 +1734,8 @@ function refreshCache(fiber: Fiber, seedKey: ?() => T, seedValue: T) { switch (provider.tag) { case CacheComponent: case HostRoot: { - const eventTime = requestEventTime(); const lane = requestUpdateLane(provider); - // TODO: Does Cache work in legacy mode? Should decide and write a test. + const eventTime = requestEventTime(); const root = scheduleUpdateOnFiber(provider, lane, eventTime); const seededCache = new Map(); @@ -1757,7 +1757,6 @@ function refreshCache(fiber: Fiber, seedKey: ?() => T, seedValue: T) { } provider = provider.return; } - // TODO: Warn if unmounted? } diff --git a/packages/react-reconciler/src/ReactFiberHooks.old.js b/packages/react-reconciler/src/ReactFiberHooks.old.js index 922d6e793d09d..45ff2b50665cc 100644 --- a/packages/react-reconciler/src/ReactFiberHooks.old.js +++ b/packages/react-reconciler/src/ReactFiberHooks.old.js @@ -1726,6 +1726,7 @@ function updateRefresh() { } function refreshCache(fiber: Fiber, seedKey: ?() => T, seedValue: T) { + // TODO: Does Cache work in legacy mode? Should decide and write a test. // TODO: Consider warning if the refresh is at discrete priority, or if we // otherwise suspect that it wasn't batched properly. let provider = fiber.return; @@ -1733,9 +1734,8 @@ function refreshCache(fiber: Fiber, seedKey: ?() => T, seedValue: T) { switch (provider.tag) { case CacheComponent: case HostRoot: { - const eventTime = requestEventTime(); const lane = requestUpdateLane(provider); - // TODO: Does Cache work in legacy mode? Should decide and write a test. + const eventTime = requestEventTime(); const root = scheduleUpdateOnFiber(provider, lane, eventTime); const seededCache = new Map(); @@ -1757,7 +1757,6 @@ function refreshCache(fiber: Fiber, seedKey: ?() => T, seedValue: T) { } provider = provider.return; } - // TODO: Warn if unmounted? 
} diff --git a/packages/react-reconciler/src/ReactFiberNewContext.new.js b/packages/react-reconciler/src/ReactFiberNewContext.new.js index f20bed4b23dfd..2a084390dcc75 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.new.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.new.js @@ -19,7 +19,6 @@ import { ContextProvider, ClassComponent, DehydratedFragment, - CacheComponent, } from './ReactWorkTags'; import { NoLanes, @@ -34,11 +33,7 @@ import invariant from 'shared/invariant'; import is from 'shared/objectIs'; import {createUpdate, enqueueUpdate, ForceUpdate} from './ReactUpdateQueue.new'; import {markWorkInProgressReceivedUpdate} from './ReactFiberBeginWork.new'; -import {CacheContext} from './ReactFiberCacheComponent.new'; -import { - enableSuspenseServerRenderer, - enableCache, -} from 'shared/ReactFeatureFlags'; +import {enableSuspenseServerRenderer} from 'shared/ReactFeatureFlags'; const valueCursor: StackCursor = createCursor(null); @@ -186,9 +181,9 @@ export function scheduleWorkOnParentPath( } } -export function propagateContextChange( +export function propagateContextChange( workInProgress: Fiber, - context: ReactContext, + context: ReactContext, changedBits: number, renderLanes: Lanes, ): void { @@ -301,114 +296,6 @@ export function propagateContextChange( } } -export function propagateCacheRefresh( - workInProgress: Fiber, - renderLanes: Lanes, -): void { - if (!enableCache) { - return; - } - - let fiber = workInProgress.child; - if (fiber !== null) { - // Set the return pointer of the child to the work-in-progress fiber. - fiber.return = workInProgress; - } - while (fiber !== null) { - let nextFiber; - - // Visit this fiber. - const list = fiber.dependencies; - if (list !== null) { - nextFiber = fiber.child; - - let dependency = list.firstContext; - while (dependency !== null) { - // Check if the context matches. - if (dependency.context === CacheContext) { - // Match! Schedule an update on this fiber. 
- - if (fiber.tag === ClassComponent) { - // Schedule a force update on the work-in-progress. - const update = createUpdate( - NoTimestamp, - pickArbitraryLane(renderLanes), - ); - update.tag = ForceUpdate; - // TODO: Because we don't have a work-in-progress, this will add the - // update to the current fiber, too, which means it will persist even if - // this render is thrown away. Since it's a race condition, not sure it's - // worth fixing. - enqueueUpdate(fiber, update); - } - fiber.lanes = mergeLanes(fiber.lanes, renderLanes); - const alternate = fiber.alternate; - if (alternate !== null) { - alternate.lanes = mergeLanes(alternate.lanes, renderLanes); - } - scheduleWorkOnParentPath(fiber.return, renderLanes); - - // Mark the updated lanes on the list, too. - list.lanes = mergeLanes(list.lanes, renderLanes); - - // Since we already found a match, we can stop traversing the - // dependency list. - break; - } - dependency = dependency.next; - } - } else if (fiber.tag === CacheComponent) { - const nestedCache = fiber.memoizedState; - if (nestedCache !== null) { - // Found a nested cache boundary with its own cache. The parent refresh - // should override it. Mark it with an update. - fiber.lanes = mergeLanes(fiber.lanes, renderLanes); - const alternate = fiber.alternate; - if (alternate !== null) { - alternate.lanes = mergeLanes(alternate.lanes, renderLanes); - } - scheduleWorkOnParentPath(fiber.return, renderLanes); - } - - // Unlike propagateContextChange, we don't stop traversing when we reach a - // nested cache boundary; refreshes propagate through the entire subtree. - // The refreshed cache will override nested caches. - // - // We also don't need to do anything special with DehydratedFragments, - // since the Fast Boot renderer is not allowed to fetch data. - nextFiber = fiber.child; - } else { - // Traverse down. - nextFiber = fiber.child; - } - - if (nextFiber !== null) { - // Set the return pointer of the child to the work-in-progress fiber. 
- nextFiber.return = fiber; - } else { - // No child. Traverse to next sibling. - nextFiber = fiber; - while (nextFiber !== null) { - if (nextFiber === workInProgress) { - // We're back to the root of this subtree. Exit. - nextFiber = null; - break; - } - const sibling = nextFiber.sibling; - if (sibling !== null) { - // Set the return pointer of the sibling to the work-in-progress fiber. - sibling.return = nextFiber.return; - nextFiber = sibling; - break; - } - // No more siblings. Traverse up. - nextFiber = nextFiber.return; - } - } - fiber = nextFiber; - } -} - export function prepareToReadContext( workInProgress: Fiber, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberNewContext.old.js b/packages/react-reconciler/src/ReactFiberNewContext.old.js index b7e677bc3b6e0..934bc6b3fcd5c 100644 --- a/packages/react-reconciler/src/ReactFiberNewContext.old.js +++ b/packages/react-reconciler/src/ReactFiberNewContext.old.js @@ -19,7 +19,6 @@ import { ContextProvider, ClassComponent, DehydratedFragment, - CacheComponent, } from './ReactWorkTags'; import { NoLanes, @@ -34,11 +33,7 @@ import invariant from 'shared/invariant'; import is from 'shared/objectIs'; import {createUpdate, enqueueUpdate, ForceUpdate} from './ReactUpdateQueue.old'; import {markWorkInProgressReceivedUpdate} from './ReactFiberBeginWork.old'; -import {CacheContext} from './ReactFiberCacheComponent.old'; -import { - enableSuspenseServerRenderer, - enableCache, -} from 'shared/ReactFeatureFlags'; +import {enableSuspenseServerRenderer} from 'shared/ReactFeatureFlags'; const valueCursor: StackCursor = createCursor(null); @@ -186,9 +181,9 @@ export function scheduleWorkOnParentPath( } } -export function propagateContextChange( +export function propagateContextChange( workInProgress: Fiber, - context: ReactContext, + context: ReactContext, changedBits: number, renderLanes: Lanes, ): void { @@ -301,114 +296,6 @@ export function propagateContextChange( } } -export function 
propagateCacheRefresh( - workInProgress: Fiber, - renderLanes: Lanes, -): void { - if (!enableCache) { - return; - } - - let fiber = workInProgress.child; - if (fiber !== null) { - // Set the return pointer of the child to the work-in-progress fiber. - fiber.return = workInProgress; - } - while (fiber !== null) { - let nextFiber; - - // Visit this fiber. - const list = fiber.dependencies; - if (list !== null) { - nextFiber = fiber.child; - - let dependency = list.firstContext; - while (dependency !== null) { - // Check if the context matches. - if (dependency.context === CacheContext) { - // Match! Schedule an update on this fiber. - - if (fiber.tag === ClassComponent) { - // Schedule a force update on the work-in-progress. - const update = createUpdate( - NoTimestamp, - pickArbitraryLane(renderLanes), - ); - update.tag = ForceUpdate; - // TODO: Because we don't have a work-in-progress, this will add the - // update to the current fiber, too, which means it will persist even if - // this render is thrown away. Since it's a race condition, not sure it's - // worth fixing. - enqueueUpdate(fiber, update); - } - fiber.lanes = mergeLanes(fiber.lanes, renderLanes); - const alternate = fiber.alternate; - if (alternate !== null) { - alternate.lanes = mergeLanes(alternate.lanes, renderLanes); - } - scheduleWorkOnParentPath(fiber.return, renderLanes); - - // Mark the updated lanes on the list, too. - list.lanes = mergeLanes(list.lanes, renderLanes); - - // Since we already found a match, we can stop traversing the - // dependency list. - break; - } - dependency = dependency.next; - } - } else if (fiber.tag === CacheComponent) { - const nestedCache = fiber.memoizedState; - if (nestedCache !== null) { - // Found a nested cache boundary with its own cache. The parent refresh - // should override it. Mark it with an update. 
- fiber.lanes = mergeLanes(fiber.lanes, renderLanes); - const alternate = fiber.alternate; - if (alternate !== null) { - alternate.lanes = mergeLanes(alternate.lanes, renderLanes); - } - scheduleWorkOnParentPath(fiber.return, renderLanes); - } - - // Unlike propagateContextChange, we don't stop traversing when we reach a - // nested cache boundary; refreshes propagate through the entire subtree. - // The refreshed cache will override nested caches. - // - // We also don't need to do anything special with DehydratedFragments, - // since the Fast Boot renderer is not allowed to fetch data. - nextFiber = fiber.child; - } else { - // Traverse down. - nextFiber = fiber.child; - } - - if (nextFiber !== null) { - // Set the return pointer of the child to the work-in-progress fiber. - nextFiber.return = fiber; - } else { - // No child. Traverse to next sibling. - nextFiber = fiber; - while (nextFiber !== null) { - if (nextFiber === workInProgress) { - // We're back to the root of this subtree. Exit. - nextFiber = null; - break; - } - const sibling = nextFiber.sibling; - if (sibling !== null) { - // Set the return pointer of the sibling to the work-in-progress fiber. - sibling.return = nextFiber.return; - nextFiber = sibling; - break; - } - // No more siblings. Traverse up. 
- nextFiber = nextFiber.return; - } - } - fiber = nextFiber; - } -} - export function prepareToReadContext( workInProgress: Fiber, renderLanes: Lanes, diff --git a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js index f190a9bbb7235..acae8d206194b 100644 --- a/packages/react-reconciler/src/ReactFiberOffscreenComponent.js +++ b/packages/react-reconciler/src/ReactFiberOffscreenComponent.js @@ -9,7 +9,7 @@ import type {ReactNodeList} from 'shared/ReactTypes'; import type {Lanes} from './ReactFiberLane.old'; -import type {SuspendedCache} from './ReactFiberCacheComponent.new'; +import type {SpawnedCachePool} from './ReactFiberCacheComponent.new'; export type OffscreenProps = {| // TODO: Pick an API before exposing the Offscreen type. I've chosen an enum @@ -29,5 +29,5 @@ export type OffscreenState = {| // will represent the pending work that must be included in the render in // order to unhide the component. baseLanes: Lanes, - cache: SuspendedCache | null, + cachePool: SpawnedCachePool | null, |}; diff --git a/packages/react-reconciler/src/ReactFiberRoot.new.js b/packages/react-reconciler/src/ReactFiberRoot.new.js index 036b7289d78fe..9057137ec61a8 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.new.js +++ b/packages/react-reconciler/src/ReactFiberRoot.new.js @@ -104,12 +104,11 @@ export function createFiberRoot( uninitializedFiber.stateNode = root; if (enableCache) { + const initialCache = new Map(); + root.pooledCache = initialCache; const initialState = { element: null, - // For the root cache, we won't bother to lazily initialize the map. Seed an - // empty one. This saves use the trouble of having to initialize in an - // updater function. 
- cache: new Map(), + cache: initialCache, }; uninitializedFiber.memoizedState = initialState; } else { diff --git a/packages/react-reconciler/src/ReactFiberRoot.old.js b/packages/react-reconciler/src/ReactFiberRoot.old.js index 49d427b61788f..768bc56398d2a 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.old.js +++ b/packages/react-reconciler/src/ReactFiberRoot.old.js @@ -104,12 +104,11 @@ export function createFiberRoot( uninitializedFiber.stateNode = root; if (enableCache) { + const initialCache = new Map(); + root.pooledCache = initialCache; const initialState = { element: null, - // For the root cache, we won't bother to lazily initialize the map. Seed an - // empty one. This saves use the trouble of having to initialize in an - // updater function. - cache: new Map(), + cache: initialCache, }; uninitializedFiber.memoizedState = initialState; } else { diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js index f778f3366f0df..152837286f5d2 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.new.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.new.js @@ -11,12 +11,7 @@ import type {ReactContext} from 'shared/ReactTypes'; import type {Fiber, FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.new'; import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; -import type { - Cache, - SuspendedCache, - SuspendedCacheFresh, - SuspendedCachePool, -} from './ReactFiberCacheComponent.new'; +import type {Cache, SpawnedCachePool} from './ReactFiberCacheComponent.new'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.new'; import { @@ -50,7 +45,6 @@ import { import {popProvider} from './ReactFiberNewContext.new'; import {popRenderLanes} from './ReactFiberWorkLoop.new'; import { - SuspendedCacheFreshTag, popCacheProvider, popRootCachePool, popCachePool, @@ -149,25 
+143,16 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(workInProgress); if (enableCache) { - const suspendedCache: SuspendedCache | null = (workInProgress.updateQueue: any); - if (suspendedCache !== null) { - if (suspendedCache.tag === SuspendedCacheFreshTag) { - popCacheProvider( - workInProgress, - (suspendedCache: SuspendedCacheFresh).cache, - ); - } else { - popCachePool((suspendedCache: SuspendedCachePool)); - } + const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(workInProgress); } } return null; case CacheComponent: if (enableCache) { - const cache: Cache | null = workInProgress.stateNode; - if (cache !== null) { - popCacheProvider(workInProgress, cache); - } + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); } return null; default: @@ -218,25 +203,17 @@ function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(interruptedWork); if (enableCache) { - const suspendedCache: SuspendedCache | null = (interruptedWork.updateQueue: any); - if (suspendedCache !== null) { - if (suspendedCache.tag === SuspendedCacheFreshTag) { - popCacheProvider( - interruptedWork, - (suspendedCache: SuspendedCacheFresh).cache, - ); - } else { - popCachePool((suspendedCache: SuspendedCachePool)); - } + const spawnedCachePool: SpawnedCachePool | null = (interruptedWork.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(interruptedWork); } } + break; case CacheComponent: if (enableCache) { - const cache: Cache | null = interruptedWork.stateNode; - if (cache !== null) { - popCacheProvider(interruptedWork, cache); - } + const cache: Cache = interruptedWork.memoizedState.cache; + popCacheProvider(interruptedWork, cache); } break; default: diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js 
b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 7fa2dabacfa62..453a4bc2988a0 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -11,12 +11,7 @@ import type {ReactContext} from 'shared/ReactTypes'; import type {Fiber, FiberRoot} from './ReactInternalTypes'; import type {Lanes} from './ReactFiberLane.old'; import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; -import type { - Cache, - SuspendedCache, - SuspendedCacheFresh, - SuspendedCachePool, -} from './ReactFiberCacheComponent.old'; +import type {Cache, SpawnedCachePool} from './ReactFiberCacheComponent.old'; import {resetWorkInProgressVersions as resetMutableSourceWorkInProgressVersions} from './ReactMutableSource.old'; import { @@ -50,7 +45,6 @@ import { import {popProvider} from './ReactFiberNewContext.old'; import {popRenderLanes} from './ReactFiberWorkLoop.old'; import { - SuspendedCacheFreshTag, popCacheProvider, popRootCachePool, popCachePool, @@ -149,25 +143,16 @@ function unwindWork(workInProgress: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(workInProgress); if (enableCache) { - const suspendedCache: SuspendedCache | null = (workInProgress.updateQueue: any); - if (suspendedCache !== null) { - if (suspendedCache.tag === SuspendedCacheFreshTag) { - popCacheProvider( - workInProgress, - (suspendedCache: SuspendedCacheFresh).cache, - ); - } else { - popCachePool((suspendedCache: SuspendedCachePool)); - } + const spawnedCachePool: SpawnedCachePool | null = (workInProgress.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(workInProgress); } } return null; case CacheComponent: if (enableCache) { - const cache: Cache | null = workInProgress.stateNode; - if (cache !== null) { - popCacheProvider(workInProgress, cache); - } + const cache: Cache = workInProgress.memoizedState.cache; + popCacheProvider(workInProgress, cache); } return null; default: @@ 
-218,25 +203,17 @@ function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { case LegacyHiddenComponent: popRenderLanes(interruptedWork); if (enableCache) { - const suspendedCache: SuspendedCache | null = (interruptedWork.updateQueue: any); - if (suspendedCache !== null) { - if (suspendedCache.tag === SuspendedCacheFreshTag) { - popCacheProvider( - interruptedWork, - (suspendedCache: SuspendedCacheFresh).cache, - ); - } else { - popCachePool((suspendedCache: SuspendedCachePool)); - } + const spawnedCachePool: SpawnedCachePool | null = (interruptedWork.updateQueue: any); + if (spawnedCachePool !== null) { + popCachePool(interruptedWork); } } + break; case CacheComponent: if (enableCache) { - const cache: Cache | null = interruptedWork.stateNode; - if (cache !== null) { - popCacheProvider(interruptedWork, cache); - } + const cache: Cache = interruptedWork.memoizedState.cache; + popCacheProvider(interruptedWork, cache); } break; default: diff --git a/packages/react-reconciler/src/__tests__/ReactCache-test.js b/packages/react-reconciler/src/__tests__/ReactCache-test.js index 9d2991859952d..1574fc222898f 100644 --- a/packages/react-reconciler/src/__tests__/ReactCache-test.js +++ b/packages/react-reconciler/src/__tests__/ReactCache-test.js @@ -218,7 +218,7 @@ describe('ReactCache', () => { 'nested cache boundaries share the same cache as the root during ' + 'the initial render', async () => { - function App({text}) { + function App() { return ( }> @@ -231,7 +231,7 @@ describe('ReactCache', () => { const root = ReactNoop.createRoot(); await ReactNoop.act(async () => { - root.render(); + root.render(); }); // Even though there are two new trees, they should share the same // data cache. So there should be only a single cache miss for A. 
diff --git a/scripts/error-codes/codes.json b/scripts/error-codes/codes.json index 2c2e5175a5eb2..12a8db733b0ce 100644 --- a/scripts/error-codes/codes.json +++ b/scripts/error-codes/codes.json @@ -372,6 +372,5 @@ "381": "This feature is not supported by ReactSuspenseTestUtils.", "382": "This query has received more parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", "383": "This query has received fewer parameters than the last time the same query was used. Always pass the exact number of parameters that the query needs.", - "384": "Internal React error: Should always have a cache.", - "385": "Refreshing the cache is not supported in Server Components." + "384": "Refreshing the cache is not supported in Server Components." } From 7c7d2ba9fc0466f08c10479b7417766e38555091 Mon Sep 17 00:00:00 2001 From: Andrew Clark Date: Fri, 18 Dec 2020 12:05:46 -0600 Subject: [PATCH 30/30] Fix cross-fork discrepancy I missed a few lines when syncing an earlier step. Usually I would find which one and patch it but I'm about to squash and merge so meh. 
--- packages/react-reconciler/src/ReactFiberCommitWork.old.js | 1 + packages/react-reconciler/src/ReactFiberRoot.old.js | 2 +- packages/react-reconciler/src/ReactFiberUnwindWork.old.js | 2 +- packages/react-reconciler/src/ReactFiberWorkLoop.new.js | 1 + packages/react-reconciler/src/ReactFiberWorkLoop.old.js | 1 + 5 files changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/react-reconciler/src/ReactFiberCommitWork.old.js b/packages/react-reconciler/src/ReactFiberCommitWork.old.js index 31c91974577b0..95e48b1e4ae4f 100644 --- a/packages/react-reconciler/src/ReactFiberCommitWork.old.js +++ b/packages/react-reconciler/src/ReactFiberCommitWork.old.js @@ -24,6 +24,7 @@ import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.old'; import type {Wakeable} from 'shared/ReactTypes'; import type {ReactPriorityLevel} from './ReactInternalTypes'; import type {OffscreenState} from './ReactFiberOffscreenComponent'; +import type {HookFlags} from './ReactHookEffectTags'; import {unstable_wrap as Schedule_tracing_wrap} from 'scheduler/tracing'; import { diff --git a/packages/react-reconciler/src/ReactFiberRoot.old.js b/packages/react-reconciler/src/ReactFiberRoot.old.js index 768bc56398d2a..92ec811dd5589 100644 --- a/packages/react-reconciler/src/ReactFiberRoot.old.js +++ b/packages/react-reconciler/src/ReactFiberRoot.old.js @@ -54,8 +54,8 @@ function FiberRootNode(containerInfo, tag, hydrate) { this.entanglements = createLaneMap(NoLanes); if (enableCache) { - this.caches = createLaneMap(null); this.pooledCache = null; + this.pooledCacheLanes = NoLanes; } if (supportsHydration) { diff --git a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js index 453a4bc2988a0..88861db778be3 100644 --- a/packages/react-reconciler/src/ReactFiberUnwindWork.old.js +++ b/packages/react-reconciler/src/ReactFiberUnwindWork.old.js @@ -170,7 +170,6 @@ function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: 
Lanes) { break; } case HostRoot: { - popHostContainer(interruptedWork); if (enableCache) { const root: FiberRoot = interruptedWork.stateNode; popRootCachePool(root, renderLanes); @@ -178,6 +177,7 @@ function unwindInterruptedWork(interruptedWork: Fiber, renderLanes: Lanes) { const cache: Cache = interruptedWork.memoizedState.cache; popCacheProvider(interruptedWork, cache); } + popHostContainer(interruptedWork); popTopLevelLegacyContextObject(interruptedWork); resetMutableSourceWorkInProgressVersions(); break; diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js index 646d728f62d9c..03831988e2b2b 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.new.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.new.js @@ -15,6 +15,7 @@ import type {Interaction} from 'scheduler/src/Tracing'; import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; import type {Effect as HookEffect} from './ReactFiberHooks.new'; import type {StackCursor} from './ReactFiberStack.new'; +import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.new'; import { warnAboutDeprecatedLifecycles, diff --git a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js index b03fbbb05b9ee..6966472e0d16f 100644 --- a/packages/react-reconciler/src/ReactFiberWorkLoop.old.js +++ b/packages/react-reconciler/src/ReactFiberWorkLoop.old.js @@ -15,6 +15,7 @@ import type {Interaction} from 'scheduler/src/Tracing'; import type {SuspenseState} from './ReactFiberSuspenseComponent.old'; import type {Effect as HookEffect} from './ReactFiberHooks.old'; import type {StackCursor} from './ReactFiberStack.old'; +import type {FunctionComponentUpdateQueue} from './ReactFiberHooks.old'; import { warnAboutDeprecatedLifecycles,