Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Error handling for hanging promises in "use cache" closures #74750

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions packages/next/src/server/app-render/dynamic-rendering.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ import {
VIEWPORT_BOUNDARY_NAME,
OUTLET_BOUNDARY_NAME,
} from '../../lib/metadata/metadata-constants'
import { scheduleOnNextTick } from '../../lib/scheduler'

const hasPostpone = typeof React.unstable_postpone === 'function'

Expand Down Expand Up @@ -518,6 +519,35 @@ export function createPostponedAbortSignal(reason: string): AbortSignal {
return controller.signal
}

/**
 * Creates an abort signal for encoding inputs that may never settle during a
 * prerender — e.g. promises stalled on connection() or waiting on dynamic
 * data. Encoding of such arguments has to be aborted, since they will never
 * complete on their own.
 */
export function createHangingInputAbortSignal(
  workUnitStore: PrerenderStoreModern
): AbortSignal {
  const abortController = new AbortController()
  const { cacheSignal } = workUnitStore

  if (cacheSignal) {
    // A cacheSignal indicates a prospective render. Inputs that originate
    // from another cache are worth waiting for, so that this cache entry can
    // be resolved as well.
    cacheSignal.inputReady().then(() => abortController.abort())
  } else {
    // This is the final render, so all caches should already be filled. Some
    // microtasks might still be pending though, hence the abort is deferred
    // by one tick. After giving up, the content of this cache is still
    // rendered as deeply as possible, allowing suspension as deep in the
    // tree as possible — or no suspension at all if the input is never
    // actually awaited.
    scheduleOnNextTick(() => abortController.abort())
  }

  return abortController.signal
}

export function annotateDynamicAccess(
expression: string,
prerenderStore: PrerenderStoreModern
Expand Down
74 changes: 65 additions & 9 deletions packages/next/src/server/app-render/encryption.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,16 @@ import {
getRenderResumeDataCache,
workUnitAsyncStorage,
} from './work-unit-async-storage.external'
import { createHangingInputAbortSignal } from './dynamic-rendering'

const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge'

const textEncoder = new TextEncoder()
const textDecoder = new TextDecoder()

/**
* Decrypt the serialized string with the action id as the salt.
*/
async function decodeActionBoundArg(actionId: string, arg: string) {
const key = await getActionEncryptionKey()
if (typeof key === 'undefined') {
Expand Down Expand Up @@ -81,17 +85,29 @@ async function encodeActionBoundArg(actionId: string, arg: string) {
export async function encryptActionBoundArgs(actionId: string, args: any[]) {
const { clientModules } = getClientReferenceManifestForRsc()

// Create an error before any asynchrounous calls, to capture the original
// Create an error before any asynchronous calls, to capture the original
// call stack in case we need it when the serialization errors.
const error = new Error()
Error.captureStackTrace(error, encryptActionBoundArgs)

let didCatchError = false

const workUnitStore = workUnitAsyncStorage.getStore()

const hangingInputAbortSignal =
workUnitStore?.type === 'prerender'
? createHangingInputAbortSignal(workUnitStore)
: undefined

// Using Flight to serialize the args into a string.
const serialized = await streamToString(
renderToReadableStream(args, clientModules, {
signal: hangingInputAbortSignal,
onError(err) {
if (hangingInputAbortSignal?.aborted) {
return
}

// We're only reporting one error at a time, starting with the first.
if (didCatchError) {
return
Expand All @@ -103,7 +119,11 @@ export async function encryptActionBoundArgs(actionId: string, args: any[]) {
// stack, because err.stack is a useless Flight Server call stack.
error.message = err instanceof Error ? err.message : String(err)
},
})
}),
// We pass the abort signal to `streamToString` so that no chunks are
// included that are emitted after the signal was already aborted. This
// ensures that we can encode hanging promises.
hangingInputAbortSignal
)

if (didCatchError) {
Expand All @@ -117,8 +137,6 @@ export async function encryptActionBoundArgs(actionId: string, args: any[]) {
throw error
}

const workUnitStore = workUnitAsyncStorage.getStore()

if (!workUnitStore) {
return encodeActionBoundArg(actionId, serialized)
}
Expand Down Expand Up @@ -151,20 +169,58 @@ export async function encryptActionBoundArgs(actionId: string, args: any[]) {
// Decrypts the action's bound args from the encrypted string.
export async function decryptActionBoundArgs(
actionId: string,
encrypted: Promise<string>
encryptedPromise: Promise<string>
) {
const encrypted = await encryptedPromise
const workUnitStore = workUnitAsyncStorage.getStore()

let decrypted: string | undefined

if (workUnitStore) {
const cacheSignal =
workUnitStore.type === 'prerender' ? workUnitStore.cacheSignal : undefined

const prerenderResumeDataCache = getPrerenderResumeDataCache(workUnitStore)
const renderResumeDataCache = getRenderResumeDataCache(workUnitStore)

decrypted =
prerenderResumeDataCache?.decryptedBoundArgs.get(encrypted) ??
renderResumeDataCache?.decryptedBoundArgs.get(encrypted)

if (!decrypted) {
cacheSignal?.beginRead()
decrypted = await decodeActionBoundArg(actionId, encrypted)
cacheSignal?.endRead()
prerenderResumeDataCache?.decryptedBoundArgs.set(encrypted, decrypted)
}
} else {
decrypted = await decodeActionBoundArg(actionId, encrypted)
}

const { edgeRscModuleMapping, rscModuleMapping } =
getClientReferenceManifestForRsc()

// Decrypt the serialized string with the action id as the salt.
const decrypted = await decodeActionBoundArg(actionId, await encrypted)

// Using Flight to deserialize the args from the string.
const deserialized = await createFromReadableStream(
new ReadableStream({
start(controller) {
controller.enqueue(textEncoder.encode(decrypted))
controller.close()

if (workUnitStore?.type === 'prerender') {
// Explicitly don't close the stream here (until prerendering is
// complete) so that hanging promises are not rejected.
if (workUnitStore.renderSignal.aborted) {
controller.close()
} else {
workUnitStore.renderSignal.addEventListener(
'abort',
() => controller.close(),
{ once: true }
)
}
} else {
controller.close()
}
},
}),
{
Expand Down
33 changes: 17 additions & 16 deletions packages/next/src/server/app-render/postponed-state.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,22 +36,23 @@ describe('getDynamicHTMLPostponedState', () => {

const parsed = parsePostponedState(state, { slug: '123' })
expect(parsed).toMatchInlineSnapshot(`
{
"data": {
"123": "123",
"nested": {
"123": "123",
},
},
"renderResumeDataCache": {
"cache": Map {
"1" => Promise {},
},
"encryptedBoundArgs": Map {},
"fetch": Map {},
},
"type": 2,
}
{
"data": {
"123": "123",
"nested": {
"123": "123",
},
},
"renderResumeDataCache": {
"cache": Map {
"1" => Promise {},
},
"decryptedBoundArgs": Map {},
"encryptedBoundArgs": Map {},
"fetch": Map {},
},
"type": 2,
}
`)

const value = await parsed.renderResumeDataCache.cache.get('1')
Expand Down
6 changes: 6 additions & 0 deletions packages/next/src/server/resume-data-cache/cache-store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,12 @@ export type FetchCacheStore = CacheStore<CachedFetchValue>
*/
export type EncryptedBoundArgsCacheStore = CacheStore<string>

/**
* An in-memory-only cache store for decrypted bound args of inline server
* functions.
*/
export type DecryptedBoundArgsCacheStore = CacheStore<string>

/**
* Serialized format for "use cache" entries
*/
Expand Down
20 changes: 20 additions & 0 deletions packages/next/src/server/resume-data-cache/resume-data-cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import {
type EncryptedBoundArgsCacheStore,
serializeUseCacheCacheStore,
parseUseCacheCacheStore,
type DecryptedBoundArgsCacheStore,
} from './cache-store'

/**
Expand All @@ -29,6 +30,14 @@ export interface RenderResumeDataCache {
* The 'set' operation is omitted to enforce immutability.
*/
readonly encryptedBoundArgs: Omit<EncryptedBoundArgsCacheStore, 'set'>

/**
* A read-only Map store for decrypted bound args of inline server functions.
* This is only intended for in-memory usage during pre-rendering, and must
* not be persisted in the resume store. The 'set' operation is omitted to
* enforce immutability.
*/
readonly decryptedBoundArgs: Omit<DecryptedBoundArgsCacheStore, 'set'>
}

/**
Expand Down Expand Up @@ -56,6 +65,14 @@ export interface PrerenderResumeDataCache {
* pre-rendering.
*/
readonly encryptedBoundArgs: EncryptedBoundArgsCacheStore

/**
* A mutable Map store for decrypted bound args of inline server functions.
* This is only intended for in-memory usage during pre-rendering, and must
* not be persisted in the resume store. Supports both 'get' and 'set'
* operations to build the cache during pre-rendering.
*/
readonly decryptedBoundArgs: DecryptedBoundArgsCacheStore
}

type ResumeStoreSerialized = {
Expand Down Expand Up @@ -125,6 +142,7 @@ export function createPrerenderResumeDataCache(): PrerenderResumeDataCache {
cache: new Map(),
fetch: new Map(),
encryptedBoundArgs: new Map(),
decryptedBoundArgs: new Map(),
}
}

Expand Down Expand Up @@ -162,6 +180,7 @@ export function createRenderResumeDataCache(
cache: new Map(),
fetch: new Map(),
encryptedBoundArgs: new Map(),
decryptedBoundArgs: new Map(),
}
}

Expand All @@ -182,6 +201,7 @@ export function createRenderResumeDataCache(
encryptedBoundArgs: new Map(
Object.entries(json.store.encryptedBoundArgs)
),
decryptedBoundArgs: new Map(),
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -101,12 +101,17 @@ export async function streamToBuffer(
}

export async function streamToString(
stream: ReadableStream<Uint8Array>
stream: ReadableStream<Uint8Array>,
signal?: AbortSignal
): Promise<string> {
const decoder = new TextDecoder('utf-8', { fatal: true })
let string = ''

for await (const chunk of stream) {
if (signal?.aborted) {
return string
}

string += decoder.decode(chunk, { stream: true })
}

Expand Down
32 changes: 7 additions & 25 deletions packages/next/src/server/use-cache/use-cache-wrapper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ import { InvariantError } from '../../shared/lib/invariant-error'
import { getDigestForWellKnownError } from '../app-render/create-error-handler'
import { cacheHandlerGlobal, DYNAMIC_EXPIRE } from './constants'
import { UseCacheTimeoutError } from './use-cache-errors'
import { createHangingInputAbortSignal } from '../app-render/dynamic-rendering'

const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge'

Expand Down Expand Up @@ -312,7 +313,7 @@ async function generateCacheEntryImpl(
{ temporaryReferences }
)

// Track the timestamp when we started copmuting the result.
// Track the timestamp when we started computing the result.
const startTime = performance.timeOrigin + performance.now()
// Invoke the inner function to load a new result.
const result = fn.apply(null, args)
Expand Down Expand Up @@ -506,29 +507,10 @@ export function cache(
// the implementation.
const buildId = workStore.buildId

let abortHangingInputSignal: undefined | AbortSignal
if (workUnitStore && workUnitStore.type === 'prerender') {
// In a prerender, we may end up with hanging Promises as inputs due them stalling
// on connection() or because they're loading dynamic data. In that case we need to
// abort the encoding of the arguments since they'll never complete.
const controller = new AbortController()
abortHangingInputSignal = controller.signal
if (workUnitStore.cacheSignal) {
// If we have a cacheSignal it means we're in a prospective render. If the input
// we're waiting on is coming from another cache, we do want to wait for it so that
// we can resolve this cache entry too.
workUnitStore.cacheSignal.inputReady().then(() => {
controller.abort()
})
} else {
// Otherwise we're in the final render and we should already have all our caches
// filled. We might still be waiting on some microtasks so we wait one tick before
// giving up. When we give up, we still want to render the content of this cache
// as deeply as we can so that we can suspend as deeply as possible in the tree
// or not at all if we don't end up waiting for the input.
process.nextTick(() => controller.abort())
}
}
const hangingInputAbortSignal =
workUnitStore?.type === 'prerender'
? createHangingInputAbortSignal(workUnitStore)
: undefined

if (boundArgsLength > 0) {
if (args.length === 0) {
Expand Down Expand Up @@ -558,7 +540,7 @@ export function cache(
const temporaryReferences = createClientTemporaryReferenceSet()
const encodedArguments: FormData | string = await encodeReply(
[buildId, id, args],
{ temporaryReferences, signal: abortHangingInputSignal }
{ temporaryReferences, signal: hangingInputAbortSignal }
)

const serializedCacheKey =
Expand Down
7 changes: 4 additions & 3 deletions test/e2e/app-dir/dynamic-io/dynamic-io.server-action.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,10 @@ describe('dynamic-io', () => {
})

if (process.env.__NEXT_EXPERIMENTAL_PPR && isNextDev) {
// TODO: Remove this branch for PPR in dev mode when the issue is resolved
// where the inclusion of server timings in the RSC payload makes the
// serialized bound args not suitable to be used as a cache key.
// TODO(react-time-info): Remove this branch for PPR in dev mode when the
// issue is resolved where the inclusion of server timings in the RSC
// payload makes the serialized bound args not suitable to be used as a
// cache key.
expect(next.cliOutput).toMatch('Error: Route "/server-action-inline"')
} else {
expect(next.cliOutput).not.toMatch('Error: Route "/server-action-inline"')
Expand Down
Loading
Loading