diff --git a/packages/next/src/server/app-render/dynamic-rendering.ts b/packages/next/src/server/app-render/dynamic-rendering.ts
index 45c9501a08be6..06b958e5a3e5a 100644
--- a/packages/next/src/server/app-render/dynamic-rendering.ts
+++ b/packages/next/src/server/app-render/dynamic-rendering.ts
@@ -41,6 +41,7 @@ import {
   VIEWPORT_BOUNDARY_NAME,
   OUTLET_BOUNDARY_NAME,
 } from '../../lib/metadata/metadata-constants'
+import { scheduleOnNextTick } from '../../lib/scheduler'
 
 const hasPostpone = typeof React.unstable_postpone === 'function'
 
@@ -518,6 +519,35 @@ export function createPostponedAbortSignal(reason: string): AbortSignal {
   return controller.signal
 }
 
+/**
+ * In a prerender, we may end up with hanging Promises as inputs due to them
+ * stalling on connection() or because they're loading dynamic data. In that
+ * case we need to abort the encoding of arguments since they'll never complete.
+ */
+export function createHangingInputAbortSignal(
+  workUnitStore: PrerenderStoreModern
+): AbortSignal {
+  const controller = new AbortController()
+
+  if (workUnitStore.cacheSignal) {
+    // If we have a cacheSignal it means we're in a prospective render. If the input
+    // we're waiting on is coming from another cache, we do want to wait for it so that
+    // we can resolve this cache entry too.
+    workUnitStore.cacheSignal.inputReady().then(() => {
+      controller.abort()
+    })
+  } else {
+    // Otherwise we're in the final render and we should already have all our caches
+    // filled. We might still be waiting on some microtasks so we wait one tick before
+    // giving up. When we give up, we still want to render the content of this cache
+    // as deeply as we can so that we can suspend as deeply as possible in the tree
+    // or not at all if we don't end up waiting for the input.
+    scheduleOnNextTick(() => controller.abort())
+  }
+
+  return controller.signal
+}
+
 export function annotateDynamicAccess(
   expression: string,
   prerenderStore: PrerenderStoreModern
diff --git a/packages/next/src/server/app-render/encryption.ts b/packages/next/src/server/app-render/encryption.ts
index 83b799673c57b..8e5ecaf4a0311 100644
--- a/packages/next/src/server/app-render/encryption.ts
+++ b/packages/next/src/server/app-render/encryption.ts
@@ -21,12 +21,16 @@ import {
   getRenderResumeDataCache,
   workUnitAsyncStorage,
 } from './work-unit-async-storage.external'
+import { createHangingInputAbortSignal } from './dynamic-rendering'
 
 const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge'
 
 const textEncoder = new TextEncoder()
 const textDecoder = new TextDecoder()
 
+/**
+ * Decrypt the serialized string with the action id as the salt.
+ */
 async function decodeActionBoundArg(actionId: string, arg: string) {
   const key = await getActionEncryptionKey()
   if (typeof key === 'undefined') {
@@ -81,17 +85,29 @@ export async function encryptActionBoundArgs(actionId: string, args: any[]) {
   const { clientModules } = getClientReferenceManifestForRsc()
 
-  // Create an error before any asynchrounous calls, to capture the original
+  // Create an error before any asynchronous calls, to capture the original
   // call stack in case we need it when the serialization errors.
   const error = new Error()
   Error.captureStackTrace(error, encryptActionBoundArgs)
 
   let didCatchError = false
 
+  const workUnitStore = workUnitAsyncStorage.getStore()
+
+  const hangingInputAbortSignal =
+    workUnitStore?.type === 'prerender'
+      ? createHangingInputAbortSignal(workUnitStore)
+      : undefined
+
   // Using Flight to serialize the args into a string.
   const serialized = await streamToString(
     renderToReadableStream(args, clientModules, {
+      signal: hangingInputAbortSignal,
       onError(err) {
+        if (hangingInputAbortSignal?.aborted) {
+          return
+        }
+
         // We're only reporting one error at a time, starting with the first.
         if (didCatchError) {
           return
@@ -103,7 +119,11 @@ export async function encryptActionBoundArgs(actionId: string, args: any[]) {
         // stack, because err.stack is a useless Flight Server call stack.
         error.message = err instanceof Error ? err.message : String(err)
       },
-    })
+    }),
+    // We pass the abort signal to `streamToString` so that no chunks are
+    // included that are emitted after the signal was already aborted. This
+    // ensures that we can encode hanging promises.
+    hangingInputAbortSignal
   )
 
   if (didCatchError) {
@@ -117,8 +137,6 @@ export async function encryptActionBoundArgs(actionId: string, args: any[]) {
     throw error
   }
 
-  const workUnitStore = workUnitAsyncStorage.getStore()
-
   if (!workUnitStore) {
     return encodeActionBoundArg(actionId, serialized)
   }
@@ -151,20 +169,58 @@ export async function encryptActionBoundArgs(actionId: string, args: any[]) {
 // Decrypts the action's bound args from the encrypted string.
 export async function decryptActionBoundArgs(
   actionId: string,
-  encrypted: Promise<string>
+  encryptedPromise: Promise<string>
 ) {
+  const encrypted = await encryptedPromise
+  const workUnitStore = workUnitAsyncStorage.getStore()
+
+  let decrypted: string | undefined
+
+  if (workUnitStore) {
+    const cacheSignal =
+      workUnitStore.type === 'prerender' ? workUnitStore.cacheSignal : undefined
+
+    const prerenderResumeDataCache = getPrerenderResumeDataCache(workUnitStore)
+    const renderResumeDataCache = getRenderResumeDataCache(workUnitStore)
+
+    decrypted =
+      prerenderResumeDataCache?.decryptedBoundArgs.get(encrypted) ??
+      renderResumeDataCache?.decryptedBoundArgs.get(encrypted)
+
+    if (!decrypted) {
+      cacheSignal?.beginRead()
+      decrypted = await decodeActionBoundArg(actionId, encrypted)
+      cacheSignal?.endRead()
+      prerenderResumeDataCache?.decryptedBoundArgs.set(encrypted, decrypted)
+    }
+  } else {
+    decrypted = await decodeActionBoundArg(actionId, encrypted)
+  }
+
   const { edgeRscModuleMapping, rscModuleMapping } =
     getClientReferenceManifestForRsc()
 
-  // Decrypt the serialized string with the action id as the salt.
-  const decrypted = await decodeActionBoundArg(actionId, await encrypted)
-
   // Using Flight to deserialize the args from the string.
   const deserialized = await createFromReadableStream(
     new ReadableStream({
       start(controller) {
         controller.enqueue(textEncoder.encode(decrypted))
-        controller.close()
+
+        if (workUnitStore?.type === 'prerender') {
+          // Explicitly don't close the stream here (until prerendering is
+          // complete) so that hanging promises are not rejected.
+          if (workUnitStore.renderSignal.aborted) {
+            controller.close()
+          } else {
+            workUnitStore.renderSignal.addEventListener(
+              'abort',
+              () => controller.close(),
+              { once: true }
+            )
+          }
+        } else {
+          controller.close()
+        }
       },
     }),
     {
diff --git a/packages/next/src/server/app-render/postponed-state.test.ts b/packages/next/src/server/app-render/postponed-state.test.ts
index 6ec47afb14a7c..f94fc5563a873 100644
--- a/packages/next/src/server/app-render/postponed-state.test.ts
+++ b/packages/next/src/server/app-render/postponed-state.test.ts
@@ -36,22 +36,23 @@ describe('getDynamicHTMLPostponedState', () => {
     const parsed = parsePostponedState(state, { slug: '123' })
 
     expect(parsed).toMatchInlineSnapshot(`
-      {
-        "data": {
-          "123": "123",
-          "nested": {
-            "123": "123",
-          },
-        },
-        "renderResumeDataCache": {
-          "cache": Map {
-            "1" => Promise {},
-          },
-          "encryptedBoundArgs": Map {},
-          "fetch": Map {},
-        },
-        "type": 2,
-      }
+      {
+        "data": {
+          "123": "123",
+          "nested": {
+            "123": "123",
+          },
+        },
+        "renderResumeDataCache": {
+          "cache": Map {
+            "1" => Promise {},
+          },
+          "decryptedBoundArgs": Map {},
+          "encryptedBoundArgs": Map {},
+          "fetch": Map {},
+        },
+        "type": 2,
+      }
     `)
 
     const value = await parsed.renderResumeDataCache.cache.get('1')
diff --git a/packages/next/src/server/resume-data-cache/cache-store.ts b/packages/next/src/server/resume-data-cache/cache-store.ts
index 0616b7f447ec2..c01a2aa0b95e9 100644
--- a/packages/next/src/server/resume-data-cache/cache-store.ts
+++ b/packages/next/src/server/resume-data-cache/cache-store.ts
@@ -20,6 +20,12 @@ export type FetchCacheStore = CacheStore
  */
 export type EncryptedBoundArgsCacheStore = CacheStore<string>
 
+/**
+ * An in-memory-only cache store for decrypted bound args of inline server
+ * functions.
+ */
+export type DecryptedBoundArgsCacheStore = CacheStore<string>
+
 /**
  * Serialized format for "use cache" entries
  */
diff --git a/packages/next/src/server/resume-data-cache/resume-data-cache.ts b/packages/next/src/server/resume-data-cache/resume-data-cache.ts
index 2c0db5b72bc91..27c71757f7533 100644
--- a/packages/next/src/server/resume-data-cache/resume-data-cache.ts
+++ b/packages/next/src/server/resume-data-cache/resume-data-cache.ts
@@ -5,6 +5,7 @@ import {
   type EncryptedBoundArgsCacheStore,
   serializeUseCacheCacheStore,
   parseUseCacheCacheStore,
+  type DecryptedBoundArgsCacheStore,
 } from './cache-store'
 
 /**
@@ -29,6 +30,14 @@ export interface RenderResumeDataCache {
    * The 'set' operation is omitted to enforce immutability.
    */
   readonly encryptedBoundArgs: Omit<EncryptedBoundArgsCacheStore, 'set'>
+
+  /**
+   * A read-only Map store for decrypted bound args of inline server functions.
+   * This is only intended for in-memory usage during pre-rendering, and must
+   * not be persisted in the resume store. The 'set' operation is omitted to
+   * enforce immutability.
+   */
+  readonly decryptedBoundArgs: Omit<DecryptedBoundArgsCacheStore, 'set'>
 }
 
 /**
@@ -56,6 +65,14 @@ export interface PrerenderResumeDataCache {
    * pre-rendering.
    */
   readonly encryptedBoundArgs: EncryptedBoundArgsCacheStore
+
+  /**
+   * A mutable Map store for decrypted bound args of inline server functions.
+   * This is only intended for in-memory usage during pre-rendering, and must
+   * not be persisted in the resume store. Supports both 'get' and 'set'
+   * operations to build the cache during pre-rendering.
+   */
+  readonly decryptedBoundArgs: DecryptedBoundArgsCacheStore
 }
 
 type ResumeStoreSerialized = {
@@ -125,6 +142,7 @@ export function createPrerenderResumeDataCache(): PrerenderResumeDataCache {
     cache: new Map(),
     fetch: new Map(),
     encryptedBoundArgs: new Map(),
+    decryptedBoundArgs: new Map(),
   }
 }
 
@@ -162,6 +180,7 @@ export function createRenderResumeDataCache(
     cache: new Map(),
     fetch: new Map(),
     encryptedBoundArgs: new Map(),
+    decryptedBoundArgs: new Map(),
   }
 }
 
@@ -182,6 +201,7 @@ export function createRenderResumeDataCache(
       encryptedBoundArgs: new Map(
        Object.entries(json.store.encryptedBoundArgs)
       ),
+      decryptedBoundArgs: new Map(),
     }
   }
 }
diff --git a/packages/next/src/server/stream-utils/node-web-streams-helper.ts b/packages/next/src/server/stream-utils/node-web-streams-helper.ts
index 4e07dd231c6b0..98e389e264581 100644
--- a/packages/next/src/server/stream-utils/node-web-streams-helper.ts
+++ b/packages/next/src/server/stream-utils/node-web-streams-helper.ts
@@ -101,12 +101,17 @@ export async function streamToBuffer(
 }
 
 export async function streamToString(
-  stream: ReadableStream<Uint8Array>
+  stream: ReadableStream<Uint8Array>,
+  signal?: AbortSignal
 ): Promise<string> {
   const decoder = new TextDecoder('utf-8', { fatal: true })
   let string = ''
 
   for await (const chunk of stream) {
+    if (signal?.aborted) {
+      return string
+    }
+
     string += decoder.decode(chunk, { stream: true })
   }
 
diff --git a/packages/next/src/server/use-cache/use-cache-wrapper.ts b/packages/next/src/server/use-cache/use-cache-wrapper.ts
index 7b84de7102c33..84015b7bef743 100644
--- a/packages/next/src/server/use-cache/use-cache-wrapper.ts
+++ b/packages/next/src/server/use-cache/use-cache-wrapper.ts
@@ -41,6 +41,7 @@ import { InvariantError } from '../../shared/lib/invariant-error'
 import { getDigestForWellKnownError } from '../app-render/create-error-handler'
 import { cacheHandlerGlobal, DYNAMIC_EXPIRE } from './constants'
 import { UseCacheTimeoutError } from './use-cache-errors'
+import { createHangingInputAbortSignal } from '../app-render/dynamic-rendering'
 
 const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge'
 
@@ -312,7 +313,7 @@ async function generateCacheEntryImpl(
     { temporaryReferences }
   )
 
-  // Track the timestamp when we started copmuting the result.
+  // Track the timestamp when we started computing the result.
   const startTime = performance.timeOrigin + performance.now()
   // Invoke the inner function to load a new result.
   const result = fn.apply(null, args)
@@ -506,29 +507,10 @@ export function cache(
     // the implementation.
     const buildId = workStore.buildId
 
-    let abortHangingInputSignal: undefined | AbortSignal
-    if (workUnitStore && workUnitStore.type === 'prerender') {
-      // In a prerender, we may end up with hanging Promises as inputs due them stalling
-      // on connection() or because they're loading dynamic data. In that case we need to
-      // abort the encoding of the arguments since they'll never complete.
-      const controller = new AbortController()
-      abortHangingInputSignal = controller.signal
-      if (workUnitStore.cacheSignal) {
-        // If we have a cacheSignal it means we're in a prospective render. If the input
-        // we're waiting on is coming from another cache, we do want to wait for it so that
-        // we can resolve this cache entry too.
-        workUnitStore.cacheSignal.inputReady().then(() => {
-          controller.abort()
-        })
-      } else {
-        // Otherwise we're in the final render and we should already have all our caches
-        // filled. We might still be waiting on some microtasks so we wait one tick before
-        // giving up. When we give up, we still want to render the content of this cache
-        // as deeply as we can so that we can suspend as deeply as possible in the tree
-        // or not at all if we don't end up waiting for the input.
-        process.nextTick(() => controller.abort())
-      }
-    }
+    const hangingInputAbortSignal =
+      workUnitStore?.type === 'prerender'
+        ? createHangingInputAbortSignal(workUnitStore)
+        : undefined
 
     if (boundArgsLength > 0) {
       if (args.length === 0) {
@@ -558,7 +540,7 @@
     const temporaryReferences = createClientTemporaryReferenceSet()
     const encodedArguments: FormData | string = await encodeReply(
       [buildId, id, args],
-      { temporaryReferences, signal: abortHangingInputSignal }
+      { temporaryReferences, signal: hangingInputAbortSignal }
     )
 
     const serializedCacheKey =
diff --git a/test/e2e/app-dir/dynamic-io/dynamic-io.server-action.test.ts b/test/e2e/app-dir/dynamic-io/dynamic-io.server-action.test.ts
index 690eedc2b1e34..724c7ed28bb6a 100644
--- a/test/e2e/app-dir/dynamic-io/dynamic-io.server-action.test.ts
+++ b/test/e2e/app-dir/dynamic-io/dynamic-io.server-action.test.ts
@@ -26,9 +26,10 @@ describe('dynamic-io', () => {
    })
 
    if (process.env.__NEXT_EXPERIMENTAL_PPR && isNextDev) {
-      // TODO: Remove this branch for PPR in dev mode when the issue is resolved
-      // where the inclusion of server timings in the RSC payload makes the
-      // serialized bound args not suitable to be used as a cache key.
+      // TODO(react-time-info): Remove this branch for PPR in dev mode when the
+      // issue is resolved where the inclusion of server timings in the RSC
+      // payload makes the serialized bound args not suitable to be used as a
+      // cache key.
      expect(next.cliOutput).toMatch('Error: Route "/server-action-inline"')
    } else {
      expect(next.cliOutput).not.toMatch('Error: Route "/server-action-inline"')
diff --git a/test/e2e/app-dir/use-cache-hanging-inputs/app/bound-args/page.tsx b/test/e2e/app-dir/use-cache-hanging-inputs/app/bound-args/page.tsx
new file mode 100644
index 0000000000000..8e87c1578bd44
--- /dev/null
+++ b/test/e2e/app-dir/use-cache-hanging-inputs/app/bound-args/page.tsx
@@ -0,0 +1,25 @@
+import { connection } from 'next/server'
+import React from 'react'
+
+async function fetchUncachedData() {
+  await connection()
+
+  return Math.random()
+}
+
+export default async function Page() {
+  const uncachedDataPromise = fetchUncachedData()
+
+  const Foo = async () => {
+    'use cache'
+
+    return (
+      <>
+        <p>{await uncachedDataPromise}</p>
+        <p>{Math.random()}</p>
+      </>
+    )
+  }
+
+  return <Foo />
+}
diff --git a/test/e2e/app-dir/use-cache-hanging-inputs/next.config.js b/test/e2e/app-dir/use-cache-hanging-inputs/next.config.js
index 3dac20d4703cf..df2126c94163a 100644
--- a/test/e2e/app-dir/use-cache-hanging-inputs/next.config.js
+++ b/test/e2e/app-dir/use-cache-hanging-inputs/next.config.js
@@ -5,6 +5,7 @@ const nextConfig = {
   experimental: {
     dynamicIO: true,
     prerenderEarlyExit: false,
+    ppr: process.env.__NEXT_EXPERIMENTAL_PPR === 'true',
   },
 }
 
diff --git a/test/e2e/app-dir/use-cache-hanging-inputs/use-cache-hanging-inputs.test.ts b/test/e2e/app-dir/use-cache-hanging-inputs/use-cache-hanging-inputs.test.ts
index b81ad36324ec3..fa586fe958a28 100644
--- a/test/e2e/app-dir/use-cache-hanging-inputs/use-cache-hanging-inputs.test.ts
+++ b/test/e2e/app-dir/use-cache-hanging-inputs/use-cache-hanging-inputs.test.ts
@@ -134,6 +134,57 @@ describe('use-cache-hanging-inputs', () => {
    }, 180_000)
  })
 
+  describe('when a "use cache" function is closing over an uncached promise', () => {
+    it('should show an error toast after a timeout', async () => {
+      const outputIndex = next.cliOutput.length
+      const browser = await next.browser('/bound-args')
+
+      // The request is pending while we stall on the hanging inputs, and
+      // playwright will wait for the load even before continuing. So we don't
+      // need to wait for the "use cache" timeout of 50 seconds here.
+
+      await openRedbox(browser)
+
+      const errorDescription = await getRedboxDescription(browser)
+      const errorSource = await getRedboxSource(browser)
+
+      if (process.env.__NEXT_EXPERIMENTAL_PPR) {
+        // TODO(react-time-info): Remove this branch for PPR when the issue is
+        // resolved where the inclusion of server timings in the RSC payload
+        // makes the serialized bound args not suitable to be used as a cache
+        // key.
+
+        const expectedErrorMessagePpr =
+          'Error: Route "/bound-args": A component accessed data, headers, params, searchParams, or a short-lived cache without a Suspense boundary nor a "use cache" above it. We don\'t have the exact line number added to error messages yet but you can see which component in the stack below. See more info: https://nextjs.org/docs/messages/next-prerender-missing-suspense'
+
+        expect(errorDescription).toBe(`[ Server ] ${expectedErrorMessagePpr}`)
+
+        const cliOutput = stripAnsi(next.cliOutput.slice(outputIndex))
+
+        expect(cliOutput).toContain(
+          `${expectedErrorMessagePpr}
+    at Page [Server] (<anonymous>)`
+        )
+      } else {
+        expect(errorDescription).toBe(`[ Cache ] ${expectedErrorMessage}`)
+
+        // TODO(veil): This should have an error source if the source mapping works.
+        expect(errorSource).toBe(null)
+
+        const cliOutput = stripAnsi(next.cliOutput.slice(outputIndex))
+
+        // TODO(veil): Should include properly source mapped stack frames.
+        expect(cliOutput).toContain(
+          isTurbopack
+            ? `${expectedErrorMessage}
+    at [project]/app/bound-args/page.tsx [app-rsc] (ecmascript)`
+            : `${expectedErrorMessage}
+    at eval (webpack-internal:///(rsc)/./app/bound-args/page.tsx:25:97)`
+        )
+      }
+    }, 180_000)
+  })
+
  describe('when an error is thrown', () => {
    it('should show an error overlay with only one error', async () => {
      const browser = await next.browser('/error')
@@ -157,6 +208,10 @@
 
      expect(cliOutput).toInclude(expectedErrorMessage)
 
+      expect(cliOutput).toInclude(
+        'Error occurred prerendering page "/bound-args"'
+      )
+
      expect(cliOutput).toInclude(
        'Error occurred prerendering page "/search-params"'
      )
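
Below is a minimal sketch of the hanging-input handling that this diff centralizes in createHangingInputAbortSignal and threads through streamToString. The types and helpers here (CacheSignal, inputReady, collectStream) are simplified stand-ins, not the actual Next.js implementation.

// Sketch only (assumed simplified types): abort serialization once all cache
// reads have settled (prospective render) or after one tick (final render),
// and stop collecting chunks as soon as the signal fires so that hanging
// promises still yield a stable, truncated encoding.

type CacheSignal = { inputReady(): Promise<void> }

function createAbortSignalForHangingInputs(
  cacheSignal?: CacheSignal
): AbortSignal {
  const controller = new AbortController()

  if (cacheSignal) {
    // Prospective render: another cache may still resolve this input.
    cacheSignal.inputReady().then(() => controller.abort())
  } else {
    // Final render: caches should already be filled; give up after one tick.
    process.nextTick(() => controller.abort())
  }

  return controller.signal
}

async function collectStream(
  stream: ReadableStream<Uint8Array>,
  signal?: AbortSignal
): Promise<string> {
  const decoder = new TextDecoder('utf-8', { fatal: true })
  let result = ''

  for await (const chunk of stream) {
    // Ignore chunks emitted after the abort, mirroring the streamToString
    // change in this diff.
    if (signal?.aborted) return result
    result += decoder.decode(chunk, { stream: true })
  }

  return result + decoder.decode()
}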
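
A minimal sketch, assuming a plain Map and a hypothetical decode() helper, of the get-or-decrypt memoization that decryptActionBoundArgs now performs against the resume data cache, including the cacheSignal read bracketing. The names getOrDecrypt and ReadTracker are illustrative only.

// Sketch only: memoize decrypted bound args keyed by the encrypted payload.
// `decode` stands in for decodeActionBoundArg; `ReadTracker` is a simplified
// stand-in for the cacheSignal used during a prospective prerender.

type ReadTracker = { beginRead(): void; endRead(): void }

async function getOrDecrypt(
  encrypted: string,
  store: Map<string, string>,
  decode: (encrypted: string) => Promise<string>,
  cacheSignal?: ReadTracker
): Promise<string> {
  const cached = store.get(encrypted)
  if (cached !== undefined) return cached

  // Mark the read as in flight so a prospective prerender doesn't consider
  // its caches settled while decryption is still pending.
  cacheSignal?.beginRead()
  try {
    const decrypted = await decode(encrypted)
    store.set(encrypted, decrypted)
    return decrypted
  } finally {
    cacheSignal?.endRead()
  }
}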
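
A sketch of the deferred stream close used when deserializing during a prerender. The renderSignal parameter mirrors the store field referenced in the encryption.ts change; everything else is simplified. Keeping the stream open lets promises that were encoded as hanging stay pending instead of rejecting with a closed-stream error.

// Sketch only: enqueue the payload but defer close() until the prerender's
// renderSignal aborts, so deserialized hanging promises remain pending.

function createDeferredCloseStream(
  payload: Uint8Array,
  renderSignal?: AbortSignal
): ReadableStream<Uint8Array> {
  return new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(payload)

      if (!renderSignal) {
        // Not prerendering: close right away.
        controller.close()
      } else if (renderSignal.aborted) {
        controller.close()
      } else {
        renderSignal.addEventListener('abort', () => controller.close(), {
          once: true,
        })
      }
    },
  })
}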