diff --git a/.changeset/few-ducks-listen.md b/.changeset/few-ducks-listen.md
new file mode 100644
index 00000000..21decb8a
--- /dev/null
+++ b/.changeset/few-ducks-listen.md
@@ -0,0 +1,9 @@
+---
+"@opennextjs/cloudflare": patch
+---
+
+fix: make sure that fetch cache `set`s are properly awaited
+
+Next.js does not await the promise that updates the incremental cache for fetch requests.
+In our runtime that promise must be kept alive, otherwise the cache update gets lost, so this
+change makes sure it is properly awaited via `waitUntil`.
diff --git a/examples/e2e/app-router/e2e/ssr.test.ts b/examples/e2e/app-router/e2e/ssr.test.ts
index c2156714..ec6f0bfe 100644
--- a/examples/e2e/app-router/e2e/ssr.test.ts
+++ b/examples/e2e/app-router/e2e/ssr.test.ts
@@ -28,7 +28,7 @@ test.skip("Server Side Render and loading.tsx", async ({ page }) => {
   }
 });
 
-test.skip("Fetch cache properly cached", async ({ page }) => {
+test("Fetch cache properly cached", async ({ page }) => {
   await page.goto("/ssr");
   const originalDate = await page.getByText("Cached fetch:").textContent();
   await page.waitForTimeout(2000);
diff --git a/packages/cloudflare/src/cli/build/bundle-server.ts b/packages/cloudflare/src/cli/build/bundle-server.ts
index 025c1453..e4cc837e 100644
--- a/packages/cloudflare/src/cli/build/bundle-server.ts
+++ b/packages/cloudflare/src/cli/build/bundle-server.ts
@@ -10,6 +10,7 @@ import { patchVercelOgLibrary } from "./patches/ast/patch-vercel-og-library.js";
 import { patchWebpackRuntime } from "./patches/ast/webpack-runtime.js";
 import * as patches from "./patches/index.js";
 import { ContentUpdater } from "./patches/plugins/content-updater.js";
+import { patchFetchCacheSetMissingWaitUntil } from "./patches/plugins/fetch-cache-wait-until.js";
 import { patchLoadInstrumentation } from "./patches/plugins/load-instrumentation.js";
 import { handleOptionalDependencies } from "./patches/plugins/optional-deps.js";
 import { fixRequire } from "./patches/plugins/require.js";
@@ -87,6 +88,7 @@ export async function bundleServer(buildOpts: BuildOptions): Promise<void> {
       fixRequire(updater),
       handleOptionalDependencies(optionalDependencies),
       patchLoadInstrumentation(updater),
+      patchFetchCacheSetMissingWaitUntil(updater),
       // Apply updater updaters, must be the last plugin
       updater.plugin,
     ],
diff --git a/packages/cloudflare/src/cli/build/patches/plugins/content-updater.ts b/packages/cloudflare/src/cli/build/patches/plugins/content-updater.ts
index 5687d833..964c69e9 100644
--- a/packages/cloudflare/src/cli/build/patches/plugins/content-updater.ts
+++ b/packages/cloudflare/src/cli/build/patches/plugins/content-updater.ts
@@ -62,10 +62,10 @@ export class ContentUpdater {
         if (namespace !== undefined && args.namespace !== namespace) {
           continue;
         }
-        if (!filter.test(args.path)) {
+        if (!args.path.match(filter)) {
           continue;
         }
-        if (!contentFilter.test(contents)) {
+        if (!contents.match(contentFilter)) {
           continue;
         }
         contents = (await callback({ contents, path: args.path })) ??
contents; diff --git a/packages/cloudflare/src/cli/build/patches/plugins/fetch-cache-wait-until.spec.ts b/packages/cloudflare/src/cli/build/patches/plugins/fetch-cache-wait-until.spec.ts new file mode 100644 index 00000000..f51a110f --- /dev/null +++ b/packages/cloudflare/src/cli/build/patches/plugins/fetch-cache-wait-until.spec.ts @@ -0,0 +1,460 @@ +import { describe, expect, test } from "vitest"; + +import { patchCode } from "../ast/util.js"; +import { rule } from "./fetch-cache-wait-until.js"; + +describe("patchFetchCacheSetMissingWaitUntil", () => { + test("on minified code", () => { + const code = ` +{ + let [o4, a2] = (0, d2.cloneResponse)(e3); + return o4.arrayBuffer().then(async (e4) => { + var a3; + let i4 = Buffer.from(e4), s3 = { headers: Object.fromEntries(o4.headers.entries()), body: i4.toString("base64"), status: o4.status, url: o4.url }; + null == $ || null == (a3 = $.serverComponentsHmrCache) || a3.set(n2, s3), F && await H.set(n2, { kind: c2.CachedRouteKind.FETCH, data: s3, revalidate: t5 }, { fetchCache: true, revalidate: r4, fetchUrl: _, fetchIdx: q, tags: A2 }); + }).catch((e4) => console.warn("Failed to set fetch cache", u4, e4)).finally(X), a2; +}`; + + expect(patchCode(code, rule)).toMatchInlineSnapshot(` + "{ + let [o4, a2] = (0, d2.cloneResponse)(e3); + return globalThis.__openNextAls?.getStore()?.waitUntil?.(o4.arrayBuffer().then(async (e4) => { + var a3; + let i4 = Buffer.from(e4), s3 = { headers: Object.fromEntries(o4.headers.entries()), body: i4.toString("base64"), status: o4.status, url: o4.url }; + null == $ || null == (a3 = $.serverComponentsHmrCache) || a3.set(n2, s3), F && await H.set(n2, { kind: c2.CachedRouteKind.FETCH, data: s3, revalidate: t5 }, { fetchCache: true, revalidate: r4, fetchUrl: _, fetchIdx: q, tags: A2 }); + }).catch((e4) => console.warn("Failed to set fetch cache", u4, e4)).finally(X)) + , a2; + }" + `); + }); + + describe("on non-minified code", () => { + test("15.1.0", () => { + // source: https://github.com/vercel/next.js/blob/fe45b74fdac83d3/packages/next/src/server/lib/patch-fetch.ts#L627-L732 + const code = `if ( + res.status === 200 && + incrementalCache && + cacheKey && + (isCacheableRevalidate || + useCacheOrRequestStore?.serverComponentsHmrCache) + ) { + const normalizedRevalidate = + finalRevalidate >= INFINITE_CACHE + ? CACHE_ONE_YEAR + : finalRevalidate + const externalRevalidate = + finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate + + if (workUnitStore && workUnitStore.type === 'prerender') { + // We are prerendering at build time or revalidate time with dynamicIO so we need to + // buffer the response so we can guarantee it can be read in a microtask + const bodyBuffer = await res.arrayBuffer() + + const fetchedData = { + headers: Object.fromEntries(res.headers.entries()), + body: Buffer.from(bodyBuffer).toString('base64'), + status: res.status, + url: res.url, + } + + // We can skip checking the serverComponentsHmrCache because we aren't in + // dev mode. + + await incrementalCache.set( + cacheKey, + { + kind: CachedRouteKind.FETCH, + data: fetchedData, + revalidate: normalizedRevalidate, + }, + { + fetchCache: true, + revalidate: externalRevalidate, + fetchUrl, + fetchIdx, + tags, + } + ) + await handleUnlock() + + // We return a new Response to the caller. + return new Response(bodyBuffer, { + headers: res.headers, + status: res.status, + statusText: res.statusText, + }) + } else { + // We're cloning the response using this utility because there + // exists a bug in the undici library around response cloning. 
+ // See the following pull request for more details: + // https://github.com/vercel/next.js/pull/73274 + + const [cloned1, cloned2] = cloneResponse(res) + + // We are dynamically rendering including dev mode. We want to return + // the response to the caller as soon as possible because it might stream + // over a very long time. + cloned1 + .arrayBuffer() + .then(async (arrayBuffer) => { + const bodyBuffer = Buffer.from(arrayBuffer) + + const fetchedData = { + headers: Object.fromEntries(cloned1.headers.entries()), + body: bodyBuffer.toString('base64'), + status: cloned1.status, + url: cloned1.url, + } + + useCacheOrRequestStore?.serverComponentsHmrCache?.set( + cacheKey, + fetchedData + ) + + if (isCacheableRevalidate) { + await incrementalCache.set( + cacheKey, + { + kind: CachedRouteKind.FETCH, + data: fetchedData, + revalidate: normalizedRevalidate, + }, + { + fetchCache: true, + revalidate: externalRevalidate, + fetchUrl, + fetchIdx, + tags, + } + ) + } + }) + .catch((error) => + console.warn(\`Failed to set fetch cache\`, input, error) + ) + .finally(handleUnlock) + + return cloned2 + } + } + `; + + expect(patchCode(code, rule)).toMatchInlineSnapshot(` + "if ( + res.status === 200 && + incrementalCache && + cacheKey && + (isCacheableRevalidate || + useCacheOrRequestStore?.serverComponentsHmrCache) + ) { + const normalizedRevalidate = + finalRevalidate >= INFINITE_CACHE + ? CACHE_ONE_YEAR + : finalRevalidate + const externalRevalidate = + finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate + + if (workUnitStore && workUnitStore.type === 'prerender') { + // We are prerendering at build time or revalidate time with dynamicIO so we need to + // buffer the response so we can guarantee it can be read in a microtask + const bodyBuffer = await res.arrayBuffer() + + const fetchedData = { + headers: Object.fromEntries(res.headers.entries()), + body: Buffer.from(bodyBuffer).toString('base64'), + status: res.status, + url: res.url, + } + + // We can skip checking the serverComponentsHmrCache because we aren't in + // dev mode. + + await incrementalCache.set( + cacheKey, + { + kind: CachedRouteKind.FETCH, + data: fetchedData, + revalidate: normalizedRevalidate, + }, + { + fetchCache: true, + revalidate: externalRevalidate, + fetchUrl, + fetchIdx, + tags, + } + ) + await handleUnlock() + + // We return a new Response to the caller. + return new Response(bodyBuffer, { + headers: res.headers, + status: res.status, + statusText: res.statusText, + }) + } else { + // We're cloning the response using this utility because there + // exists a bug in the undici library around response cloning. + // See the following pull request for more details: + // https://github.com/vercel/next.js/pull/73274 + + const [cloned1, cloned2] = cloneResponse(res) + + // We are dynamically rendering including dev mode. We want to return + // the response to the caller as soon as possible because it might stream + // over a very long time. 
+ globalThis.__openNextAls?.getStore()?.waitUntil?.(cloned1 + .arrayBuffer() + .then(async (arrayBuffer) => { + const bodyBuffer = Buffer.from(arrayBuffer) + + const fetchedData = { + headers: Object.fromEntries(cloned1.headers.entries()), + body: bodyBuffer.toString('base64'), + status: cloned1.status, + url: cloned1.url, + } + + useCacheOrRequestStore?.serverComponentsHmrCache?.set( + cacheKey, + fetchedData + ) + + if (isCacheableRevalidate) { + await incrementalCache.set( + cacheKey, + { + kind: CachedRouteKind.FETCH, + data: fetchedData, + revalidate: normalizedRevalidate, + }, + { + fetchCache: true, + revalidate: externalRevalidate, + fetchUrl, + fetchIdx, + tags, + } + ) + } + }) + .catch((error) => + console.warn(\`Failed to set fetch cache\`, input, error) + ) + .finally(handleUnlock)) + + + return cloned2 + } + } + " + `); + }); + + test("Next.js 15.0.4", () => { + // source: https://github.com/vercel/next.js/blob/d6a6aa14069/packages/next/src/server/lib/patch-fetch.ts#L627-L725 + const code = `if ( + res.status === 200 && + incrementalCache && + cacheKey && + (isCacheableRevalidate || requestStore?.serverComponentsHmrCache) + ) { + const normalizedRevalidate = + finalRevalidate >= INFINITE_CACHE + ? CACHE_ONE_YEAR + : finalRevalidate + const externalRevalidate = + finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate + + if (workUnitStore && workUnitStore.type === 'prerender') { + // We are prerendering at build time or revalidate time with dynamicIO so we need to + // buffer the response so we can guarantee it can be read in a microtask + const bodyBuffer = await res.arrayBuffer() + + const fetchedData = { + headers: Object.fromEntries(res.headers.entries()), + body: Buffer.from(bodyBuffer).toString('base64'), + status: res.status, + url: res.url, + } + + // We can skip checking the serverComponentsHmrCache because we aren't in + // dev mode. + + await incrementalCache.set( + cacheKey, + { + kind: CachedRouteKind.FETCH, + data: fetchedData, + revalidate: normalizedRevalidate, + }, + { + fetchCache: true, + revalidate: externalRevalidate, + fetchUrl, + fetchIdx, + tags, + } + ) + await handleUnlock() + + // We we return a new Response to the caller. + return new Response(bodyBuffer, { + headers: res.headers, + status: res.status, + statusText: res.statusText, + }) + } else { + // We are dynamically rendering including dev mode. We want to return + // the response to the caller as soon as possible because it might stream + // over a very long time. + res + .clone() + .arrayBuffer() + .then(async (arrayBuffer) => { + const bodyBuffer = Buffer.from(arrayBuffer) + + const fetchedData = { + headers: Object.fromEntries(res.headers.entries()), + body: bodyBuffer.toString('base64'), + status: res.status, + url: res.url, + } + + requestStore?.serverComponentsHmrCache?.set( + cacheKey, + fetchedData + ) + + if (isCacheableRevalidate) { + await incrementalCache.set( + cacheKey, + { + kind: CachedRouteKind.FETCH, + data: fetchedData, + revalidate: normalizedRevalidate, + }, + { + fetchCache: true, + revalidate: externalRevalidate, + fetchUrl, + fetchIdx, + tags, + } + ) + } + }) + .catch((error) => + console.warn(\`Failed to set fetch cache\`, input, error) + ) + .finally(handleUnlock) + + return res + } + }`; + + expect(patchCode(code, rule)).toMatchInlineSnapshot(` + "if ( + res.status === 200 && + incrementalCache && + cacheKey && + (isCacheableRevalidate || requestStore?.serverComponentsHmrCache) + ) { + const normalizedRevalidate = + finalRevalidate >= INFINITE_CACHE + ? 
CACHE_ONE_YEAR + : finalRevalidate + const externalRevalidate = + finalRevalidate >= INFINITE_CACHE ? false : finalRevalidate + + if (workUnitStore && workUnitStore.type === 'prerender') { + // We are prerendering at build time or revalidate time with dynamicIO so we need to + // buffer the response so we can guarantee it can be read in a microtask + const bodyBuffer = await res.arrayBuffer() + + const fetchedData = { + headers: Object.fromEntries(res.headers.entries()), + body: Buffer.from(bodyBuffer).toString('base64'), + status: res.status, + url: res.url, + } + + // We can skip checking the serverComponentsHmrCache because we aren't in + // dev mode. + + await incrementalCache.set( + cacheKey, + { + kind: CachedRouteKind.FETCH, + data: fetchedData, + revalidate: normalizedRevalidate, + }, + { + fetchCache: true, + revalidate: externalRevalidate, + fetchUrl, + fetchIdx, + tags, + } + ) + await handleUnlock() + + // We we return a new Response to the caller. + return new Response(bodyBuffer, { + headers: res.headers, + status: res.status, + statusText: res.statusText, + }) + } else { + // We are dynamically rendering including dev mode. We want to return + // the response to the caller as soon as possible because it might stream + // over a very long time. + globalThis.__openNextAls?.getStore()?.waitUntil?.(res + .clone() + .arrayBuffer() + .then(async (arrayBuffer) => { + const bodyBuffer = Buffer.from(arrayBuffer) + + const fetchedData = { + headers: Object.fromEntries(res.headers.entries()), + body: bodyBuffer.toString('base64'), + status: res.status, + url: res.url, + } + + requestStore?.serverComponentsHmrCache?.set( + cacheKey, + fetchedData + ) + + if (isCacheableRevalidate) { + await incrementalCache.set( + cacheKey, + { + kind: CachedRouteKind.FETCH, + data: fetchedData, + revalidate: normalizedRevalidate, + }, + { + fetchCache: true, + revalidate: externalRevalidate, + fetchUrl, + fetchIdx, + tags, + } + ) + } + }) + .catch((error) => + console.warn(\`Failed to set fetch cache\`, input, error) + ) + .finally(handleUnlock)) + + + return res + } + }" + `); + }); + }); +}); diff --git a/packages/cloudflare/src/cli/build/patches/plugins/fetch-cache-wait-until.ts b/packages/cloudflare/src/cli/build/patches/plugins/fetch-cache-wait-until.ts new file mode 100644 index 00000000..bd37aa2c --- /dev/null +++ b/packages/cloudflare/src/cli/build/patches/plugins/fetch-cache-wait-until.ts @@ -0,0 +1,51 @@ +import { getCrossPlatformPathRegex } from "@opennextjs/aws/utils/regex.js"; + +import { patchCode } from "../ast/util.js"; +import type { ContentUpdater } from "./content-updater.js"; + +/** + * The following Next.js code sets values in the incremental cache for fetch calls: + * https://github.com/vercel/next.js/blob/e5fc495e3d4/packages/next/src/server/lib/patch-fetch.ts#L690-L728 + * + * The issue here is that this promise is never awaited in the Next.js code (since in a standard node.js server + * the promise will eventually simply just run) but we do need to run it inside `waitUntil` (so that the worker + * is not killed before the promise is fully executed), without that this promise gets discarded and values + * don't get saved in the incremental cache. + * + * This function wraps the promise in a `waitUntil` call (retrieved from `globalThis.__openNextAls.getStore()`). 
+ */
+export function patchFetchCacheSetMissingWaitUntil(updater: ContentUpdater) {
+  return updater.updateContent(
+    "patch-fetch-cache-set-missing-wait-until",
+    {
+      filter: getCrossPlatformPathRegex(
+        String.raw`(server/chunks/.*\.js|.*\.runtime\..*\.js|patch-fetch\.js)$`,
+        { escape: false }
+      ),
+      contentFilter: /arrayBuffer\(\)\s*\.then/,
+    },
+    ({ contents }) => patchCode(contents, rule)
+  );
+}
+
+export const rule = `
+rule:
+  kind: call_expression
+  pattern: $PROMISE
+  all:
+    - has: { pattern: $_.arrayBuffer().then, stopBy: end }
+    - has: { pattern: "Buffer.from", stopBy: end }
+    - any:
+        - inside:
+            kind: sequence_expression
+            inside:
+              kind: return_statement
+        - inside:
+            kind: expression_statement
+            precedes:
+              kind: return_statement
+    - has: { pattern: $_.FETCH, stopBy: end }
+
+fix: |
+  globalThis.__openNextAls?.getStore()?.waitUntil?.($PROMISE)
+`;
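
Reviewer note (not part of the diff): the injected `fix` relies on OpenNext exposing an AsyncLocalStorage-like object on `globalThis.__openNextAls` whose per-request store carries a `waitUntil` hook. The sketch below illustrates that contract under stated assumptions; `OpenNextStore` and `keepCacheUpdateAlive` are hypothetical names, and the real store shape is defined by `@opennextjs/aws`.

```ts
import { AsyncLocalStorage } from "node:async_hooks";

// Assumed store shape: only `waitUntil` matters here; the real store carries
// more per-request context.
type OpenNextStore = { waitUntil?: (promise: Promise<unknown>) => void };

declare global {
  // eslint-disable-next-line no-var
  var __openNextAls: AsyncLocalStorage<OpenNextStore> | undefined;
}

// Hypothetical helper mirroring what the injected `fix` does: hand the
// un-awaited fetch-cache update to the runtime's waitUntil so the Worker is
// not terminated before the incremental cache `set` completes.
function keepCacheUpdateAlive(cacheUpdate: Promise<unknown>): void {
  globalThis.__openNextAls?.getStore()?.waitUntil?.(cacheUpdate);
}

// Usage: the `arrayBuffer().then(...)` promise chain in patch-fetch would be
// passed here instead of being left dangling.
keepCacheUpdateAlive(Promise.resolve());
```

Every link in the chain is optional-chained, so when no ALS store (or no `waitUntil`) is available the patched code falls back to the original fire-and-forget behaviour.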