Skip to content

Commit

Permalink
Support dynamicIO in middleware routes and generateStaticParams
Browse files Browse the repository at this point in the history
route.ts files (and other routes like metadata routes) still need dynamicIO semantics when running in the edge runtime. This change adds support for configuring dynamicIO for edge routes. It is hard to test properly because edge routes never statically generate, and at the moment there are no other observable semantics. If we introduce new semantics that are distinct to dynamicIO and that affect dynamic rendering, we should update these tests to assert them.

Similarly, generateStaticParams also needs dynamicIO semantics when configured. Right now it's not quite possible to assert this because there are no observable semantics. One candidate is that fetchCache should not be configurable with dynamicIO on; however, that isn't implemented yet. This change adds tests, but they will need to be updated once we update the fetchCache behavior.
  • Loading branch information
gnoff committed Sep 26, 2024
1 parent e6b1232 commit 8a9d641
Show file tree
Hide file tree
Showing 23 changed files with 629 additions and 46 deletions.
2 changes: 1 addition & 1 deletion packages/next/src/build/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1422,7 +1422,7 @@ export async function buildAppStaticPaths({
isRevalidate: false,
experimental: {
after: false,
dynamicIO: false,
dynamicIO,
},
},
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,7 @@ export function getDefineEnv({
config.devIndicators.appIsrStatus
),
'process.env.__NEXT_PPR': checkIsAppPPREnabled(config.experimental.ppr),
'process.env.__NEXT_DYNAMIC_IO': config.experimental.dynamicIO ?? false,
'process.env.__NEXT_AFTER': config.experimental.after ?? false,
'process.env.NEXT_DEPLOYMENT_ID': config.deploymentId || false,
'process.env.__NEXT_FETCH_CACHE_KEY_PREFIX': fetchCacheKeyPrefix ?? '',
Expand Down
2 changes: 1 addition & 1 deletion packages/next/src/server/web/edge-route-module-wrapper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ export class EdgeRouteModuleWrapper {
: undefined,
experimental: {
after: isAfterEnabled,
dynamicIO: false,
dynamicIO: !!process.env.__NEXT_DYNAMIC_IO,
},
},
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
// TODO once we make fetchCache inert with dynamicIO this test is expected
// to start failing. Right now the force cache causes the fetches to be identical
// and we get only one prebuilt route. once we remove the caching behavior of fetchCache
// when dynamicIO is on we will get more than one route.
// The ideal test wouldn't even use fetchCache but at the moment the default caching for fetch
// is to not cache and so we can't rely on the default to produce a differentiating result.
export const fetchCache = 'default-cache'

export async function generateStaticParams() {
  // Use a typed Set<string> to dedupe: with fetchCache forcing caching,
  // the two identical fetches return the same value and we end up with a
  // single prebuilt route. (Typing the Set removes the need for the
  // previous `'' + value` coercion before slicing.)
  const set = new Set<string>()
  set.add(await fetchRandom('a'))
  set.add(await fetchRandom('a'))

  return Array.from(set).map((value) => {
    return {
      // slice(2) presumably strips a leading "0." from the random decimal
      // string returned by the endpoint — TODO confirm payload shape.
      slug: value.slice(2),
    }
  })
}

export default async function Layout({ children, params }) {
  // `params` is a Promise under dynamicIO (sibling routes in this suite
  // receive `params: Promise<{ slug: string }>`). The original
  // `await params.slug` awaited the (undefined) `.slug` property of the
  // promise object itself; await the promise first, then read the param.
  const { slug } = await params
  return (
    <>
      <h1>{slug}</h1>
      <section>{children}</section>
    </>
  )
}

// Fetches a random value from the shared test endpoint; `entropy` varies
// the URL so distinct logical calls can produce distinct cache entries.
const fetchRandom = async (entropy: string) => {
  const url = 'https://next-data-api-endpoint.vercel.app/api/random?b=' + entropy
  const response = await fetch(url)
  return response.text()
}
27 changes: 27 additions & 0 deletions test/e2e/app-dir/dynamic-io/app/routes/-edge/[dyn]/async/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import type { NextRequest } from 'next/server'

import { getSentinelValue } from '../../../../getSentinelValue'

export const runtime = 'edge'

// Prebuild exactly one value for the `dyn` route param.
export async function generateStaticParams() {
  const params: Array<{ dyn: string }> = [{ dyn: '1' }]
  return params
}

// Edge route that awaits the params promise (async access pattern) and
// echoes the resolved `dyn` param alongside the build/runtime sentinel.
export async function GET(
  request: NextRequest,
  props: { params: Promise<{ dyn: string }> }
) {
  const params = await props.params
  const body = JSON.stringify({
    value: getSentinelValue(),
    type: 'dynamic params',
    param: params.dyn,
  })
  return new Response(body)
}
29 changes: 29 additions & 0 deletions test/e2e/app-dir/dynamic-io/app/routes/-edge/[dyn]/sync/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import type { NextRequest, UnsafeUnwrappedParams } from 'next/server'

import { getSentinelValue } from '../../../../getSentinelValue'

export const runtime = 'edge'

// Prebuild exactly one value for the `dyn` route param.
export async function generateStaticParams() {
  const params: Array<{ dyn: string }> = [{ dyn: '1' }]
  return params
}

// Edge route that deliberately reads the params promise synchronously via
// the UnsafeUnwrappedParams cast — exercises the legacy sync access
// pattern under dynamicIO.
export async function GET(
  request: NextRequest,
  props: { params: Promise<{ dyn: string }> }
) {
  const syncParams = props.params as unknown as UnsafeUnwrappedParams<
    typeof props.params
  >
  const body = JSON.stringify({
    value: getSentinelValue(),
    type: 'dynamic params',
    param: syncParams.dyn,
  })
  return new Response(body)
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import type { NextRequest } from 'next/server'

import { cookies } from 'next/headers'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route that reads the `x-sentinel` request cookie via the async
// cookies() API and echoes it back with the build/runtime sentinel value.
export async function GET(request: NextRequest) {
  const cookieStore = await cookies()
  const sentinel = cookieStore.get('x-sentinel')
  const payload = {
    value: getSentinelValue(),
    type: 'cookies',
    'x-sentinel': sentinel,
  }
  return new Response(JSON.stringify(payload))
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import type { NextRequest } from 'next/server'

import { headers } from 'next/headers'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route that reads the `x-sentinel` request header via the async
// headers() API and echoes it back with the build/runtime sentinel value.
export async function GET(request: NextRequest) {
  const headerStore = await headers()
  const sentinel = headerStore.get('x-sentinel')
  const payload = {
    value: getSentinelValue(),
    type: 'headers',
    'x-sentinel': sentinel,
  }
  return new Response(JSON.stringify(payload))
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import type { NextRequest } from 'next/server'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route that streams its JSON body in two chunks, yielding to a
// macrotask between them so the response is observably dynamic.
export async function GET(request: NextRequest) {
  const payload = JSON.stringify({
    value: getSentinelValue(),
    message: 'dynamic stream',
  })
  const mid = payload.length / 2
  const encoder = new TextEncoder()
  const chunks = [
    encoder.encode(payload.slice(0, mid)),
    encoder.encode(payload.slice(mid)),
  ]

  let nextChunk = 0
  const stream = new ReadableStream({
    async pull(controller) {
      controller.enqueue(chunks[nextChunk])
      nextChunk += 1
      // 1ms timer forces at least one macrotask between chunks.
      await new Promise((resolve) => setTimeout(resolve, 1))
      if (nextChunk === chunks.length) {
        controller.close()
      }
    },
  })
  return new Response(stream)
}
15 changes: 15 additions & 0 deletions test/e2e/app-dir/dynamic-io/app/routes/-edge/dynamic-url/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import type { NextRequest } from 'next/server'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route that echoes the request's query string (a dynamic input)
// alongside the build/runtime sentinel value.
export async function GET(request: NextRequest) {
  const { search } = request.nextUrl
  const body = JSON.stringify({
    value: getSentinelValue(),
    search,
  })
  return new Response(body)
}
25 changes: 25 additions & 0 deletions test/e2e/app-dir/dynamic-io/app/routes/-edge/fetch-cached/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import type { NextRequest } from 'next/server'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route that performs two force-cached fetches (different entropy)
// and returns both results with the build/runtime sentinel value.
export async function GET(request: NextRequest) {
  const first = await fetchRandomCached('a')
  const second = await fetchRandomCached('b')
  const body = JSON.stringify({
    value: getSentinelValue(),
    random1: first,
    random2: second,
  })
  return new Response(body)
}

// Fetches a random value with explicit force-cache semantics; `entropy`
// differentiates otherwise-identical cache entries.
const fetchRandomCached = async (entropy: string) => {
  const url = 'https://next-data-api-endpoint.vercel.app/api/random?b=' + entropy
  const response = await fetch(url, { cache: 'force-cache' })
  return response.text()
}
32 changes: 32 additions & 0 deletions test/e2e/app-dir/dynamic-io/app/routes/-edge/fetch-mixed/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import type { NextRequest } from 'next/server'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route that mixes one force-cached fetch and one uncached fetch and
// returns both results with the build/runtime sentinel value.
export async function GET(request: NextRequest) {
  const cached = await fetchRandomCached('a')
  const uncached = await fetchRandomUncached('b')
  const body = JSON.stringify({
    value: getSentinelValue(),
    random1: cached,
    random2: uncached,
  })
  return new Response(body)
}

// Fetches a random value with explicit force-cache semantics; `entropy`
// differentiates otherwise-identical cache entries.
const fetchRandomCached = async (entropy: string) => {
  const url = 'https://next-data-api-endpoint.vercel.app/api/random?b=' + entropy
  const response = await fetch(url, { cache: 'force-cache' })
  return response.text()
}

// Fetches a random value with the default (uncached) fetch semantics.
const fetchRandomUncached = async (entropy: string) => {
  const url = 'https://next-data-api-endpoint.vercel.app/api/random?b=' + entropy
  const response = await fetch(url)
  return response.text()
}
27 changes: 27 additions & 0 deletions test/e2e/app-dir/dynamic-io/app/routes/-edge/io-cached/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import type { NextRequest } from 'next/server'

import { unstable_cache as cache } from 'next/cache'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route that awaits two cached message computations (fast and slow)
// and returns both with the build/runtime sentinel value.
export async function GET(request: NextRequest) {
  const message1 = await getCachedMessage('hello cached fast', 0)
  const message2 = await getCachedMessage('hello cached slow', 20)
  const body = JSON.stringify({
    value: getSentinelValue(),
    message1,
    message2,
  })
  return new Response(body)
}

// Produces `<random hex tag>:<echo>` after waiting `delay` ms; the random
// tag makes repeated (uncached) invocations observably distinct.
async function getMessage(echo, delay) {
  const tag = Math.floor(Math.random() * 10000).toString(16)
  await new Promise((resolve) => setTimeout(resolve, delay))
  return `${tag}:${echo}`
}

// Wrap getMessage in unstable_cache so repeated calls with the same
// arguments return the cached result (random tag included) instead of
// recomputing it.
const getCachedMessage = cache(getMessage)
27 changes: 27 additions & 0 deletions test/e2e/app-dir/dynamic-io/app/routes/-edge/io-mixed/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import type { NextRequest } from 'next/server'

import { unstable_cache as cache } from 'next/cache'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

export async function GET(request: NextRequest) {
const messagea = await getCachedMessage('hello cached fast', 0)
const messageb = await getMessage('hello uncached slow', 20)
return new Response(
JSON.stringify({
value: getSentinelValue(),
message1: messagea,
message2: messageb,
})
)
}

// Produces `<random hex tag>:<echo>` after waiting `delay` ms; the random
// tag makes repeated (uncached) invocations observably distinct.
async function getMessage(echo, delay) {
  const tag = Math.floor(Math.random() * 10000).toString(16)
  await new Promise((resolve) => setTimeout(resolve, delay))
  return `${tag}:${echo}`
}

// Wrap getMessage in unstable_cache so repeated calls with the same
// arguments return the cached result (random tag included) instead of
// recomputing it.
const getCachedMessage = cache(getMessage)
14 changes: 14 additions & 0 deletions test/e2e/app-dir/dynamic-io/app/routes/-edge/microtask/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import type { NextRequest } from 'next/server'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route that defers by exactly one microtask before responding with
// the build/runtime sentinel value.
export async function GET(request: NextRequest) {
  await Promise.resolve()
  const payload = {
    value: getSentinelValue(),
    message: 'microtask',
  }
  return new Response(JSON.stringify(payload))
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import type { NextRequest } from 'next/server'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route (async variant) that streams its JSON body in two chunks
// with no intervening async work.
export async function GET(request: NextRequest) {
  const payload = JSON.stringify({
    value: getSentinelValue(),
    message: 'stream response',
  })
  const mid = Math.floor(payload.length / 2)
  const encoder = new TextEncoder()
  const chunks = [
    encoder.encode(payload.slice(0, mid)),
    encoder.encode(payload.slice(mid)),
  ]

  let nextChunk = 0
  const stream = new ReadableStream({
    pull(controller) {
      controller.enqueue(chunks[nextChunk])
      nextChunk += 1
      if (nextChunk === chunks.length) {
        controller.close()
      }
    },
  })
  return new Response(stream)
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import type { NextRequest } from 'next/server'

import { getSentinelValue } from '../../../getSentinelValue'

export const runtime = 'edge'

// Edge route (sync variant) that streams its JSON body in two chunks
// with no intervening async work.
export function GET(request: NextRequest) {
  const payload = JSON.stringify({
    value: getSentinelValue(),
    message: 'stream response',
  })
  const mid = Math.floor(payload.length / 2)
  const encoder = new TextEncoder()
  const chunks = [
    encoder.encode(payload.slice(0, mid)),
    encoder.encode(payload.slice(mid)),
  ]

  let nextChunk = 0
  const stream = new ReadableStream({
    pull(controller) {
      controller.enqueue(chunks[nextChunk])
      nextChunk += 1
      if (nextChunk === chunks.length) {
        controller.close()
      }
    },
  })
  return new Response(stream)
}
Loading

0 comments on commit 8a9d641

Please sign in to comment.