Skip to content

Commit

Permalink
Restore and enhance error handling for hanging inputs in "use cache" (
Browse files Browse the repository at this point in the history
vercel#74652)

Uncached and forever hanging promises are currently not supported as input for `"use cache"` functions. One prominent example of this is the use of `searchParams` in a page with `"use cache"`. Until Next.js version 15.1.1, the usage of such inputs led to an error during build after a timeout of 50 seconds. This behaviour relied on a bug in `decodeReply` though, and was hence broken after React fixed the bug. A build will now fail early with `Error: Connection closed.`.

With this PR, we are restoring the timeout error behaviour by switching to the new `decodeReplyFromAsyncIterable` API, which allows us to keep an open stream when decoding the cached function's arguments.

In addition, in dev mode, we are now also propagating the error to the dev overlay.
  • Loading branch information
unstubbable authored Jan 22, 2025
1 parent b6a4b0d commit 338eb99
Show file tree
Hide file tree
Showing 13 changed files with 396 additions and 48 deletions.
22 changes: 18 additions & 4 deletions packages/next/src/server/app-render/app-render.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -184,6 +184,7 @@ import {
} from '../resume-data-cache/resume-data-cache'
import type { MetadataErrorType } from '../../lib/metadata/resolve-metadata'
import isError from '../../lib/is-error'
import { isUseCacheTimeoutError } from '../use-cache/use-cache-errors'

export type GetDynamicParamFromSegment = (
// [slug] / [[slug]] / [...slug]
Expand Down Expand Up @@ -610,7 +611,7 @@ async function generateDynamicFlightRenderResult(
ctx.clientReferenceManifest,
ctx.workStore.route,
requestStore
).catch(resolveValidation) // avoid unhandled rejections and a forever hanging promise
)
}

// For app dir, use the bundled version of Flight server renderer (renderToReadableStream)
Expand Down Expand Up @@ -1796,7 +1797,7 @@ async function renderToStream(
clientReferenceManifest,
workStore.route,
requestStore
).catch(resolveValidation) // avoid unhandled rejections and a forever hanging promise
)

reactServerResult = new ReactServerResult(reactServerStream)
} else {
Expand Down Expand Up @@ -2356,6 +2357,10 @@ async function spawnDynamicValidationInDev(
clientReferenceManifest.clientModules,
{
onError: (err) => {
if (isUseCacheTimeoutError(err)) {
return err.digest
}

if (
finalServerController.signal.aborted &&
isPrerenderInterruptedError(err)
Expand Down Expand Up @@ -2392,6 +2397,12 @@ async function spawnDynamicValidationInDev(
{
signal: finalClientController.signal,
onError: (err, errorInfo) => {
if (isUseCacheTimeoutError(err)) {
dynamicValidation.dynamicErrors.push(err)

return
}

if (
isPrerenderInterruptedError(err) ||
finalClientController.signal.aborted
Expand Down Expand Up @@ -2427,8 +2438,11 @@ async function spawnDynamicValidationInDev(
) {
// we don't have a root because the abort errored in the root. We can just ignore this error
} else {
// This error is something else and should bubble up
throw err
// If an error is thrown in the root before prerendering is aborted, we
// don't want to rethrow it here, otherwise this would lead to a hanging
// response and unhandled rejection. We also don't want to log it, because
// it's most likely already logged as part of the normal render. So we
// just fall through here, to make sure `resolveValidation` is called.
}
}

Expand Down
3 changes: 1 addition & 2 deletions packages/next/src/server/lib/cache-handlers/default.ts
Original file line number Diff line number Diff line change
Expand Up @@ -94,8 +94,7 @@ const DefaultCacheHandler: CacheHandler = {
errorRetryCount: 0,
size,
})
} catch (err) {
console.error(`Error while saving cache key: ${cacheKey}`, err)
} catch {
// TODO: store partial buffer with error after we retry 3 times
} finally {
resolvePending()
Expand Down
26 changes: 26 additions & 0 deletions packages/next/src/server/use-cache/use-cache-errors.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
// Stable digest string used to identify "use cache" timeout errors, including
// after they have crossed a serialization boundary where only `digest` survives.
const USE_CACHE_TIMEOUT_ERROR_CODE = 'USE_CACHE_TIMEOUT'

const TIMEOUT_ERROR_MESSAGE =
  'Filling a cache during prerender timed out, likely because request-specific arguments such as params, searchParams, cookies() or dynamic data were used inside "use cache".'

/**
 * Error thrown when filling a cache entry during prerendering times out,
 * typically because a hanging, request-specific input was passed into a
 * `"use cache"` function.
 */
export class UseCacheTimeoutError extends Error {
  digest: typeof USE_CACHE_TIMEOUT_ERROR_CODE

  constructor() {
    super(TIMEOUT_ERROR_MESSAGE)
    this.digest = USE_CACHE_TIMEOUT_ERROR_CODE
  }
}

/**
 * Type guard for {@link UseCacheTimeoutError}. Matches on the `digest` value
 * rather than the prototype, so it also recognizes errors that were
 * re-created from their digest.
 */
export function isUseCacheTimeoutError(
  err: unknown
): err is UseCacheTimeoutError {
  return (
    typeof err === 'object' &&
    err !== null &&
    'digest' in err &&
    typeof err.digest === 'string' &&
    err.digest === USE_CACHE_TIMEOUT_ERROR_CODE
  )
}
118 changes: 76 additions & 42 deletions packages/next/src/server/use-cache/use-cache-wrapper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import type { DeepReadonly } from '../../shared/lib/deep-readonly'
import {
renderToReadableStream,
decodeReply,
decodeReplyFromAsyncIterable,
createTemporaryReferenceSet as createServerTemporaryReferenceSet,
} from 'react-server-dom-webpack/server.edge'
/* eslint-disable import/no-extraneous-dependencies */
Expand Down Expand Up @@ -39,6 +40,7 @@ import { decryptActionBoundArgs } from '../app-render/encryption'
import { InvariantError } from '../../shared/lib/invariant-error'
import { getDigestForWellKnownError } from '../app-render/create-error-handler'
import { cacheHandlerGlobal, DYNAMIC_EXPIRE } from './constants'
import { UseCacheTimeoutError } from './use-cache-errors'

const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge'

Expand All @@ -49,7 +51,8 @@ function generateCacheEntry(
outerWorkUnitStore: WorkUnitStore | undefined,
clientReferenceManifest: DeepReadonly<ClientReferenceManifestForRsc>,
encodedArguments: FormData | string,
fn: any
fn: any,
timeoutError: UseCacheTimeoutError
): Promise<[ReadableStream, Promise<CacheEntry>]> {
// We need to run this inside a clean AsyncLocalStorage snapshot so that the cache
// generation cannot read anything from the context we're currently executing which
Expand All @@ -62,7 +65,8 @@ function generateCacheEntry(
outerWorkUnitStore,
clientReferenceManifest,
encodedArguments,
fn
fn,
timeoutError
)
}

Expand All @@ -71,7 +75,8 @@ function generateCacheEntryWithRestoredWorkStore(
outerWorkUnitStore: WorkUnitStore | undefined,
clientReferenceManifest: DeepReadonly<ClientReferenceManifestForRsc>,
encodedArguments: FormData | string,
fn: any
fn: any,
timeoutError: UseCacheTimeoutError
) {
// Since we cleared the AsyncLocalStorage we need to restore the workStore.
// Note: We explicitly don't restore the RequestStore nor the PrerenderStore.
Expand All @@ -87,7 +92,8 @@ function generateCacheEntryWithRestoredWorkStore(
outerWorkUnitStore,
clientReferenceManifest,
encodedArguments,
fn
fn,
timeoutError
)
}

Expand All @@ -96,7 +102,8 @@ function generateCacheEntryWithCacheContext(
outerWorkUnitStore: WorkUnitStore | undefined,
clientReferenceManifest: DeepReadonly<ClientReferenceManifestForRsc>,
encodedArguments: FormData | string,
fn: any
fn: any,
timeoutError: UseCacheTimeoutError
) {
if (!workStore.cacheLifeProfiles) {
throw new Error(
Expand Down Expand Up @@ -135,12 +142,12 @@ function generateCacheEntryWithCacheContext(
return workUnitAsyncStorage.run(
cacheStore,
generateCacheEntryImpl,
workStore,
outerWorkUnitStore,
cacheStore,
clientReferenceManifest,
encodedArguments,
fn
fn,
timeoutError
)
}

Expand Down Expand Up @@ -261,22 +268,49 @@ async function collectResult(
}

async function generateCacheEntryImpl(
workStore: WorkStore,
outerWorkUnitStore: WorkUnitStore | undefined,
innerCacheStore: UseCacheStore,
clientReferenceManifest: DeepReadonly<ClientReferenceManifestForRsc>,
encodedArguments: FormData | string,
fn: any
fn: any,
timeoutError: UseCacheTimeoutError
): Promise<[ReadableStream, Promise<CacheEntry>]> {
const temporaryReferences = createServerTemporaryReferenceSet()

const [, , args] = await decodeReply<any[]>(
encodedArguments,
getServerModuleMap(),
{
temporaryReferences,
}
)
const [, , args] =
typeof encodedArguments === 'string'
? await decodeReply<any[]>(encodedArguments, getServerModuleMap(), {
temporaryReferences,
})
: await decodeReplyFromAsyncIterable<any[]>(
{
async *[Symbol.asyncIterator]() {
for (const entry of encodedArguments) {
yield entry
}

// The encoded arguments might contain hanging promises. In this
// case we don't want to reject with "Error: Connection closed.",
// so we intentionally keep the iterable alive. This is similar to
// the halting trick that we do while rendering.
if (outerWorkUnitStore?.type === 'prerender') {
await new Promise<void>((resolve) => {
if (outerWorkUnitStore.renderSignal.aborted) {
resolve()
} else {
outerWorkUnitStore.renderSignal.addEventListener(
'abort',
() => resolve(),
{ once: true }
)
}
})
}
},
},
getServerModuleMap(),
{ temporaryReferences }
)

// Track the timestamp when we started computing the result.
const startTime = performance.timeOrigin + performance.now()
Expand All @@ -287,17 +321,12 @@ async function generateCacheEntryImpl(

let timer = undefined
const controller = new AbortController()
if (workStore.isStaticGeneration) {
if (outerWorkUnitStore?.type === 'prerender') {
// If we're prerendering, we give you 50 seconds to fill a cache entry. Otherwise
// we assume you stalled on hanging input and deopt. This needs to be lower than
// just the general timeout of 60 seconds.
timer = setTimeout(() => {
controller.abort(
new Error(
'Filling a cache during prerender timed out, likely because request-specific arguments such as ' +
'params, searchParams, cookies() or dynamic data were used inside "use cache".'
)
)
controller.abort(timeoutError)
}, 50000)
}

Expand All @@ -319,10 +348,19 @@ async function generateCacheEntryImpl(
return digest
}

// TODO: For now we're also reporting the error here, because in
// production, the "Server" environment will only get the obfuscated
// error (created by the Flight Client in the cache wrapper).
console.error(error)
if (process.env.NODE_ENV !== 'development') {
// TODO: For now we're also reporting the error here, because in
// production, the "Server" environment will only get the obfuscated
// error (created by the Flight Client in the cache wrapper).
console.error(error)
}

if (error === timeoutError) {
// The timeout error already aborted the whole stream. We don't need
// to also push this error into the `errors` array.
return timeoutError.digest
}

errors.push(error)
},
}
Expand Down Expand Up @@ -441,6 +479,11 @@ export function cache(
if (cacheHandler === undefined) {
throw new Error('Unknown cache handler: ' + kind)
}

// Capture the timeout error here to ensure a useful stack.
const timeoutError = new UseCacheTimeoutError()
Error.captureStackTrace(timeoutError, cache)

const name = fn.name
const cachedFn = {
[name]: async function (...args: any[]) {
Expand All @@ -463,7 +506,7 @@ export function cache(
// the implementation.
const buildId = workStore.buildId

let abortHangingInputSignal: null | AbortSignal = null
let abortHangingInputSignal: undefined | AbortSignal
if (workUnitStore && workUnitStore.type === 'prerender') {
// In a prerender, we may end up with hanging Promises as inputs due them stalling
// on connection() or because they're loading dynamic data. In that case we need to
Expand Down Expand Up @@ -515,18 +558,7 @@ export function cache(
const temporaryReferences = createClientTemporaryReferenceSet()
const encodedArguments: FormData | string = await encodeReply(
[buildId, id, args],
// Right now this is enough to cause the input to generate hanging Promises
// but that's really due to what is probably a React bug in decodeReply.
// If that's fixed we may need a different strategy. We can also just skip
// the serialization/cache in this scenario and pass-through raw objects.
abortHangingInputSignal
? {
temporaryReferences,
signal: abortHangingInputSignal,
}
: {
temporaryReferences,
}
{ temporaryReferences, signal: abortHangingInputSignal }
)

const serializedCacheKey =
Expand Down Expand Up @@ -656,7 +688,8 @@ export function cache(
workUnitStore,
clientReferenceManifest,
encodedArguments,
fn
fn,
timeoutError
)

let savedCacheEntry
Expand Down Expand Up @@ -715,7 +748,8 @@ export function cache(
undefined, // This is not running within the context of this unit.
clientReferenceManifest,
encodedArguments,
fn
fn,
timeoutError
)

let savedCacheEntry: Promise<CacheEntry>
Expand Down
7 changes: 7 additions & 0 deletions packages/next/types/$$compiled.internal.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,13 @@ declare module 'react-server-dom-webpack/server.edge' {
temporaryReferences?: TemporaryReferenceSet
}
): Promise<T>
/**
 * Decodes a serialized reply whose form entries arrive incrementally as an
 * async iterable of `[fieldName, value]` pairs, rather than as a fully
 * buffered string or FormData. Because the iterable can be kept open by the
 * producer, decoding does not fail with "Connection closed." when some inputs
 * are hanging promises.
 *
 * NOTE(review): ambient declaration only — signature is assumed to mirror
 * `decodeReply` above; confirm against the react-server-dom-webpack build in
 * use.
 */
export function decodeReplyFromAsyncIterable<T>(
iterable: AsyncIterable<[string, string | File]>,
webpackMap: ServerManifest,
options?: {
temporaryReferences?: TemporaryReferenceSet
}
): Promise<T>
export function decodeAction<T>(
body: FormData,
serverManifest: ServerManifest
Expand Down
7 changes: 7 additions & 0 deletions test/e2e/app-dir/use-cache-hanging-inputs/app/error/page.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
'use cache'

export default async function Page() {
  // Test fixture: this page always throws so that error handling inside a
  // "use cache" scope can be exercised end-to-end.
  const failure = new Error('kaputt!')
  throw failure

  return null
}
8 changes: 8 additions & 0 deletions test/e2e/app-dir/use-cache-hanging-inputs/app/layout.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
import { ReactNode } from 'react'
export default function Root(props: { children: ReactNode }) {
  // Minimal root layout: wraps the rendered page in <html>/<body>.
  return (
    <html>
      <body>{props.children}</body>
    </html>
  )
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
'use cache'

export default async function Page(props: {
  searchParams: Promise<{ n: string }>
}) {
  // Test fixture: receives the searchParams promise but deliberately never
  // awaits or reads it — the rendered output says as much.
  return <p>search params not used</p>
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
'use cache'

export default async function Page(props: {
  searchParams: Promise<{ n: string }>
}) {
  // Test fixture: awaits the request-specific searchParams promise and
  // renders one of its values.
  const params = await props.searchParams

  return <p>search param: {params.n}</p>
}
Loading

0 comments on commit 338eb99

Please sign in to comment.