This commit is contained in:
Kismet Hasanaj
2026-05-02 20:07:02 +02:00
parent ce8672e283
commit 34dc9aec52
9428 changed files with 1733330 additions and 0 deletions
@@ -0,0 +1,2 @@
// Type declarations for the shared ActionAsyncStorage instance module.
import type { ActionAsyncStorage } from './action-async-storage.external';
// Singleton AsyncLocalStorage instance used to track server-action state.
export declare const actionAsyncStorageInstance: ActionAsyncStorage;
@@ -0,0 +1,14 @@
"use strict";
// Compiled CommonJS module: creates the singleton `actionAsyncStorageInstance`
// and re-exports it through a live getter so importers always see the
// hoisted binding below.
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "actionAsyncStorageInstance", {
    enumerable: true,
    get: function() {
        return actionAsyncStorageInstance;
    }
});
const _asynclocalstorage = require("./async-local-storage");
// One AsyncLocalStorage instance shared by every importer of this module.
const actionAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();
//# sourceMappingURL=action-async-storage-instance.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/action-async-storage-instance.ts"],"sourcesContent":["import type { ActionAsyncStorage } from './action-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const actionAsyncStorageInstance: ActionAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["actionAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAF2B;AAEjC,MAAMA,6BACXC,IAAAA,0CAAuB","ignoreList":[0]}
@@ -0,0 +1,8 @@
import type { AsyncLocalStorage } from 'async_hooks';
import { actionAsyncStorageInstance } from './action-async-storage-instance';
export interface ActionStore {
readonly isAction?: boolean;
readonly isAppRoute?: boolean;
}
export type ActionAsyncStorage = AsyncLocalStorage<ActionStore>;
export { actionAsyncStorageInstance as actionAsyncStorage };
@@ -0,0 +1,13 @@
"use strict";
// Compiled CommonJS module: re-exports the shared instance module's
// `actionAsyncStorageInstance` under the public name `actionAsyncStorage`.
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "actionAsyncStorage", {
    enumerable: true,
    // Live getter: always resolves to the instance module's current export.
    get: function() {
        return _actionasyncstorageinstance.actionAsyncStorageInstance;
    }
});
const _actionasyncstorageinstance = require("./action-async-storage-instance");
//# sourceMappingURL=action-async-storage.external.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/action-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { actionAsyncStorageInstance } from './action-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nexport interface ActionStore {\n readonly isAction?: boolean\n readonly isAppRoute?: boolean\n}\n\nexport type ActionAsyncStorage = AsyncLocalStorage<ActionStore>\n\nexport { actionAsyncStorageInstance as actionAsyncStorage }\n"],"names":["actionAsyncStorage","actionAsyncStorageInstance"],"mappings":";;;;+BAWuCA;;;eAA9BC,sDAA0B;;;4CARQ","ignoreList":[0]}
+42
View File
@@ -0,0 +1,42 @@
// Type declarations for the server-action request handler.
import type { IncomingHttpHeaders } from 'node:http';
import type { SizeLimit } from '../../types';
import type { RequestStore } from '../app-render/work-unit-async-storage.external';
import type { AppRenderContext, GenerateFlight } from './app-render';
import type { AppPageModule } from '../route-modules/app-page/module';
import type { BaseNextRequest, BaseNextResponse } from '../base-http';
import RenderResult, { type AppPageRenderResultMetadata } from '../render-result';
import type { WorkStore } from '../app-render/work-async-storage.external';
// Which request header the host value was taken from.
declare const enum HostType {
    XForwardedHost = "x-forwarded-host",
    Host = "host"
}
/**
 * Extracts the request's host from `x-forwarded-host` or `host`, returning
 * both the header it came from and its value, or `undefined` when absent.
 * `originDomain` is presumably used for origin matching — confirm in the
 * implementation.
 */
export declare function parseHostHeader(headers: IncomingHttpHeaders, originDomain?: string): {
    type: HostType;
    value: string;
} | undefined;
// Server-action settings surfaced from next.config.
type ServerActionsConfig = {
    bodySizeLimit?: SizeLimit;
    allowedOrigins?: string[];
};
type HandleActionResult = {
    /** An MPA action threw notFound(), and we need to render the appropriate HTML */
    type: 'not-found';
} | {
    type: 'done';
    result: RenderResult | undefined;
    formState?: any;
}
/** The request turned out not to be a server action. */
 | null;
/**
 * Handles an incoming request that may be a server-action invocation.
 * Resolves to `null` when the request is not an action; otherwise to a
 * `'done'` result (with the render result and optional form state) or a
 * `'not-found'` marker.
 */
export declare function handleAction({ req, res, ComponentMod, generateFlight, workStore, requestStore, serverActions, ctx, metadata, }: {
    req: BaseNextRequest;
    res: BaseNextResponse;
    ComponentMod: AppPageModule;
    generateFlight: GenerateFlight;
    workStore: WorkStore;
    requestStore: RequestStore;
    serverActions?: ServerActionsConfig;
    ctx: AppRenderContext;
    metadata: AppPageRenderResultMetadata;
}): Promise<HandleActionResult>;
export {};
File diff suppressed because it is too large Load Diff
File diff suppressed because one or more lines are too long
@@ -0,0 +1,2 @@
// Type declarations for the shared AfterTaskAsyncStorage instance module.
import type { AfterTaskAsyncStorage } from './after-task-async-storage.external';
// Singleton AsyncLocalStorage instance used to track `after()` task state.
export declare const afterTaskAsyncStorageInstance: AfterTaskAsyncStorage;
@@ -0,0 +1,14 @@
"use strict";
// Compiled CommonJS module: creates the singleton
// `afterTaskAsyncStorageInstance` and re-exports it through a live getter.
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "afterTaskAsyncStorageInstance", {
    enumerable: true,
    get: function() {
        return afterTaskAsyncStorageInstance;
    }
});
const _asynclocalstorage = require("./async-local-storage");
// One AsyncLocalStorage instance shared by every importer of this module.
const afterTaskAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();
//# sourceMappingURL=after-task-async-storage-instance.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/after-task-async-storage-instance.ts"],"sourcesContent":["import type { AfterTaskAsyncStorage } from './after-task-async-storage.external'\nimport { createAsyncLocalStorage } from './async-local-storage'\n\nexport const afterTaskAsyncStorageInstance: AfterTaskAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["afterTaskAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAF2B;AAEjC,MAAMA,gCACXC,IAAAA,0CAAuB","ignoreList":[0]}
@@ -0,0 +1,13 @@
// Type declarations for the `after()`-task async storage: defines the store
// shape and re-exports the shared singleton instance.
import type { AsyncLocalStorage } from 'async_hooks';
import { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance';
import type { WorkUnitStore } from './work-unit-async-storage.external';
export interface AfterTaskStore {
    /** The phase in which the topmost `after` was called.
     *
     * NOTE: Can be undefined when running `generateStaticParams`,
     * where we only have a `workStore`, no `workUnitStore`.
     */
    readonly rootTaskSpawnPhase: WorkUnitStore['phase'] | undefined;
}
export type AfterTaskAsyncStorage = AsyncLocalStorage<AfterTaskStore>;
export { afterTaskAsyncStorage };
@@ -0,0 +1,13 @@
"use strict";
// Compiled CommonJS module: re-exports the shared instance module's
// `afterTaskAsyncStorageInstance` under the public name `afterTaskAsyncStorage`.
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "afterTaskAsyncStorage", {
    enumerable: true,
    // Live getter: always resolves to the instance module's current export.
    get: function() {
        return _aftertaskasyncstorageinstance.afterTaskAsyncStorageInstance;
    }
});
const _aftertaskasyncstorageinstance = require("./after-task-async-storage-instance");
//# sourceMappingURL=after-task-async-storage.external.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/after-task-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { afterTaskAsyncStorageInstance as afterTaskAsyncStorage } from './after-task-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\nimport type { WorkUnitStore } from './work-unit-async-storage.external'\n\nexport interface AfterTaskStore {\n /** The phase in which the topmost `after` was called.\n *\n * NOTE: Can be undefined when running `generateStaticParams`,\n * where we only have a `workStore`, no `workUnitStore`.\n */\n readonly rootTaskSpawnPhase: WorkUnitStore['phase'] | undefined\n}\n\nexport type AfterTaskAsyncStorage = AsyncLocalStorage<AfterTaskStore>\n\nexport { afterTaskAsyncStorage }\n"],"names":["afterTaskAsyncStorage"],"mappings":";;;;+BAiBSA;;;eAAAA,4DAAqB;;;+CAdyC","ignoreList":[0]}
@@ -0,0 +1,26 @@
/**
 * Wraps a live React server ReadableStream. `tee()` forks the stream (keeping
 * one branch internally); `consume()` hands over ownership exactly once —
 * both throw after consumption (see the implementation's error codes).
 */
export declare class ReactServerResult {
    private _stream;
    constructor(stream: ReadableStream<Uint8Array>);
    tee(): ReadableStream<Uint8Array<ArrayBufferLike>>;
    consume(): ReadableStream<Uint8Array<ArrayBufferLike>>;
}
export type ReactServerPrerenderResolveToType = {
    prelude: ReadableStream<Uint8Array>;
};
/** Buffers the awaited `prelude` stream fully into a ReactServerPrerenderResult. */
export declare function createReactServerPrerenderResult(underlying: Promise<ReactServerPrerenderResolveToType>): Promise<ReactServerPrerenderResult>;
/** Buffers a render stream fully into a ReactServerPrerenderResult. */
export declare function createReactServerPrerenderResultFromRender(underlying: ReadableStream<Uint8Array>): Promise<ReactServerPrerenderResult>;
/**
 * Holds a fully-buffered prerender output as an array of chunks; the buffer
 * can be re-read via the `as*Stream()` methods until a `consume*` call
 * releases it.
 */
export declare class ReactServerPrerenderResult {
    private _chunks;
    private assertChunks;
    private consumeChunks;
    consume(): void;
    constructor(chunks: Array<Uint8Array>);
    asUnclosingStream(): ReadableStream<Uint8Array>;
    consumeAsUnclosingStream(): ReadableStream<Uint8Array>;
    asStream(): ReadableStream<Uint8Array>;
    consumeAsStream(): ReadableStream<Uint8Array>;
}
/**
 * Peeks at the prelude stream (via `tee`) to determine whether it produced
 * any output, returning the untouched branch plus an emptiness flag.
 */
export declare function processPrelude(unprocessedPrelude: ReadableStream<Uint8Array>): Promise<{
    prelude: ReadableStream<Uint8Array<ArrayBufferLike>>;
    preludeIsEmpty: boolean;
}>;
+167
View File
@@ -0,0 +1,167 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Dead code (`0 && …`): never executed at runtime, but lets tools that
// statically scan `module.exports` discover this module's named exports.
0 && (module.exports = {
    ReactServerPrerenderResult: null,
    ReactServerResult: null,
    createReactServerPrerenderResult: null,
    createReactServerPrerenderResultFromRender: null,
    processPrelude: null
});
// Defines each entry of `all` on `target` as an enumerable live getter.
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    ReactServerPrerenderResult: function() {
        return ReactServerPrerenderResult;
    },
    ReactServerResult: function() {
        return ReactServerResult;
    },
    createReactServerPrerenderResult: function() {
        return createReactServerPrerenderResult;
    },
    createReactServerPrerenderResultFromRender: function() {
        return createReactServerPrerenderResultFromRender;
    },
    processPrelude: function() {
        return processPrelude;
    }
});
const _invarianterror = require("../../shared/lib/invariant-error");
// Owns a live React server ReadableStream. `tee()` forks it while keeping one
// branch internally; `consume()` transfers ownership exactly once. Both throw
// a coded error once the stream has been consumed.
class ReactServerResult {
    constructor(stream){
        this._stream = stream;
    }
    tee() {
        if (this._stream === null) {
            const error = new Error('Cannot tee a ReactServerResult that has already been consumed');
            throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
                value: "E106",
                enumerable: false,
                configurable: true
            });
        }
        // Fork: retain one branch for ourselves, hand the other to the caller.
        const [retained, forked] = this._stream.tee();
        this._stream = retained;
        return forked;
    }
    consume() {
        if (this._stream === null) {
            const error = new Error('Cannot consume a ReactServerResult that has already been consumed');
            throw Object.defineProperty(error, "__NEXT_ERROR_CODE", {
                value: "E470",
                enumerable: false,
                configurable: true
            });
        }
        // Transfer ownership: null out our reference before returning it.
        const released = this._stream;
        this._stream = null;
        return released;
    }
}
// Awaits the prerender outcome, then drains its `prelude` stream entirely
// into memory, returning the buffered chunks as a ReactServerPrerenderResult.
async function createReactServerPrerenderResult(underlying) {
    const { prelude } = await underlying;
    const collected = [];
    const reader = prelude.getReader();
    for(;;){
        const next = await reader.read();
        if (next.done) {
            return new ReactServerPrerenderResult(collected);
        }
        collected.push(next.value);
    }
}
// Drains a render stream entirely into memory and wraps the buffered chunks
// in a ReactServerPrerenderResult.
async function createReactServerPrerenderResultFromRender(underlying) {
    const collected = [];
    const reader = underlying.getReader();
    let next = await reader.read();
    while(!next.done){
        collected.push(next.value);
        next = await reader.read();
    }
    return new ReactServerPrerenderResult(collected);
}
// Holds a fully-buffered prerender output as an array of chunks. The buffer
// can be re-read via the `as*Stream()` methods until `consume()` (directly or
// via a `consume*` method) releases it, after which access throws E593.
class ReactServerPrerenderResult {
    constructor(chunks){
        this._chunks = chunks;
    }
    // Releases the buffer so later accesses fail loudly.
    consume() {
        this._chunks = null;
    }
    // Returns the chunk buffer, or throws an InvariantError (E593) if it was
    // already consumed; `expression` names the offending call in the message.
    assertChunks(expression) {
        const chunks = this._chunks;
        if (chunks === null) {
            const invariant = new _invarianterror.InvariantError(`Cannot \`${expression}\` on a ReactServerPrerenderResult that has already been consumed.`);
            throw Object.defineProperty(invariant, "__NEXT_ERROR_CODE", {
                value: "E593",
                enumerable: false,
                configurable: true
            });
        }
        return chunks;
    }
    // Like `assertChunks`, but also releases the buffer.
    consumeChunks(expression) {
        const retrieved = this.assertChunks(expression);
        this.consume();
        return retrieved;
    }
    asUnclosingStream() {
        return createUnclosingStream(this.assertChunks('asUnclosingStream()'));
    }
    consumeAsUnclosingStream() {
        return createUnclosingStream(this.consumeChunks('consumeAsUnclosingStream()'));
    }
    asStream() {
        return createClosingStream(this.assertChunks('asStream()'));
    }
    consumeAsStream() {
        return createClosingStream(this.consumeChunks('consumeAsStream()'));
    }
}
// Serves `chunks` one at a time but never closes the stream; the consumer is
// expected to stop reading on its own.
function createUnclosingStream(chunks) {
    let next = 0;
    return new ReadableStream({
        async pull (controller) {
            if (next < chunks.length) {
                controller.enqueue(chunks[next]);
                next += 1;
            }
            // we intentionally keep the stream open. The consumer will clear
            // out chunks once finished and the remaining memory will be GC'd
            // when this object goes out of scope
        }
    });
}
// Serves `chunks` in order and closes the stream once they are exhausted.
function createClosingStream(chunks) {
    let next = 0;
    return new ReadableStream({
        async pull (controller) {
            if (next >= chunks.length) {
                controller.close();
            } else {
                controller.enqueue(chunks[next]);
                next += 1;
            }
        }
    });
}
// Forks the prelude so we can peek at its first chunk without disturbing the
// branch handed back to the caller. An immediately-done peek branch means the
// prelude produced no output at all.
async function processPrelude(unprocessedPrelude) {
    const [prelude, peek] = unprocessedPrelude.tee();
    const peekReader = peek.getReader();
    const first = await peekReader.read();
    // Release the peek branch; we only needed the first read result.
    peekReader.cancel();
    return {
        prelude,
        preludeIsEmpty: first.done === true
    };
}
//# sourceMappingURL=app-render-prerender-utils.js.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,8 @@
/**
* This is a utility function to make scheduling sequential tasks that run back to back easier.
* We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.
*
* The first function runs in the first task. Each subsequent function runs in its own task.
* The returned promise resolves after the last task completes.
*/
export declare function runInSequentialTasks<R>(first: () => R, ...rest: Array<() => void>): Promise<Awaited<R>>;
+76
View File
@@ -0,0 +1,76 @@
"use strict";
// Compiled CommonJS module for `runInSequentialTasks` (documented below).
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "runInSequentialTasks", {
    enumerable: true,
    // Live getter so the export always resolves to the hoisted function below.
    get: function() {
        return runInSequentialTasks;
    }
});
const _invarianterror = require("../../shared/lib/invariant-error");
const _apprenderscheduling = require("./app-render-scheduling");
const _fastsetimmediateexternal = require("../node-environment-extensions/fast-set-immediate.external");
const _isthenable = require("../../shared/lib/is-thenable");
// Shared no-op used below to suppress unhandled rejections on thenables.
function noop() {}
/**
 * Runs `first` in one macrotask and each function in `rest` in its own
 * subsequent macrotask. All timers are scheduled up-front through an atomic
 * timer group (see `createAtomicTimerGroup`) so no other timers can
 * interleave. Resolves one task after the last callback with the result of
 * `first`; rejects with the first callback error and cancels the remaining
 * scheduled tasks. Throws E1054 when called in the edge runtime.
 */ function runInSequentialTasks(first, ...rest) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new _invarianterror.InvariantError('`runInSequentialTasks` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
            value: "E1054",
            enumerable: false,
            configurable: true
        });
    } else {
        return new Promise((resolve, reject)=>{
            const scheduleTimeout = (0, _apprenderscheduling.createAtomicTimerGroup)();
            const ids = [];
            let result;
            ids.push(scheduleTimeout(()=>{
                try {
                    (0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
                    result = first();
                    // If the first function returns a thenable, suppress unhandled
                    // rejections. A later task in the sequence (e.g. an abort) may
                    // cause the promise to reject, and we don't want that to surface
                    // as an unhandled rejection — the caller will observe the
                    // rejection when they await the returned promise.
                    if ((0, _isthenable.isThenable)(result)) {
                        result.then(noop, noop);
                    }
                } catch (err) {
                    // Start at 1: index 0 is this (already-running) timer.
                    for(let i = 1; i < ids.length; i++){
                        clearTimeout(ids[i]);
                    }
                    reject(err);
                }
            }));
            for(let i = 0; i < rest.length; i++){
                const fn = rest[i];
                // Position of this timer within `ids` at scheduling time.
                let index = ids.length;
                ids.push(scheduleTimeout(()=>{
                    try {
                        (0, _fastsetimmediateexternal.DANGEROUSLY_runPendingImmediatesAfterCurrentTask)();
                        fn();
                    } catch (err) {
                        // clear remaining timeouts
                        while(++index < ids.length){
                            clearTimeout(ids[index]);
                        }
                        reject(err);
                    }
                }));
            }
            // We wait a task before resolving
            ids.push(scheduleTimeout(()=>{
                try {
                    (0, _fastsetimmediateexternal.expectNoPendingImmediates)();
                    resolve(result);
                } catch (err) {
                    reject(err);
                }
            }));
        });
    }
}
//# sourceMappingURL=app-render-render-utils.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/app-render-render-utils.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\nimport { createAtomicTimerGroup } from './app-render-scheduling'\nimport {\n DANGEROUSLY_runPendingImmediatesAfterCurrentTask,\n expectNoPendingImmediates,\n} from '../node-environment-extensions/fast-set-immediate.external'\nimport { isThenable } from '../../shared/lib/is-thenable'\n\nfunction noop() {}\n\n/**\n * This is a utility function to make scheduling sequential tasks that run back to back easier.\n * We schedule on the same queue (setTimeout) at the same time to ensure no other events can sneak in between.\n *\n * The first function runs in the first task. Each subsequent function runs in its own task.\n * The returned promise resolves after the last task completes.\n */\nexport function runInSequentialTasks<R>(\n first: () => R,\n ...rest: Array<() => void>\n): Promise<Awaited<R>> {\n if (process.env.NEXT_RUNTIME === 'edge') {\n throw new InvariantError(\n '`runInSequentialTasks` should not be called in edge runtime.'\n )\n } else {\n return new Promise((resolve, reject) => {\n const scheduleTimeout = createAtomicTimerGroup()\n const ids: ReturnType<typeof scheduleTimeout>[] = []\n\n let result: R\n ids.push(\n scheduleTimeout(() => {\n try {\n DANGEROUSLY_runPendingImmediatesAfterCurrentTask()\n result = first()\n // If the first function returns a thenable, suppress unhandled\n // rejections. A later task in the sequence (e.g. 
an abort) may\n // cause the promise to reject, and we don't want that to surface\n // as an unhandled rejection — the caller will observe the\n // rejection when they await the returned promise.\n if (isThenable(result)) {\n result.then(noop, noop)\n }\n } catch (err) {\n for (let i = 1; i < ids.length; i++) {\n clearTimeout(ids[i])\n }\n reject(err)\n }\n })\n )\n\n for (let i = 0; i < rest.length; i++) {\n const fn = rest[i]\n let index = ids.length\n\n ids.push(\n scheduleTimeout(() => {\n try {\n DANGEROUSLY_runPendingImmediatesAfterCurrentTask()\n fn()\n } catch (err) {\n // clear remaining timeouts\n while (++index < ids.length) {\n clearTimeout(ids[index])\n }\n reject(err)\n }\n })\n )\n }\n\n // We wait a task before resolving\n ids.push(\n scheduleTimeout(() => {\n try {\n expectNoPendingImmediates()\n resolve(result as Awaited<R>)\n } catch (err) {\n reject(err)\n }\n })\n )\n })\n }\n}\n"],"names":["runInSequentialTasks","noop","first","rest","process","env","NEXT_RUNTIME","InvariantError","Promise","resolve","reject","scheduleTimeout","createAtomicTimerGroup","ids","result","push","DANGEROUSLY_runPendingImmediatesAfterCurrentTask","isThenable","then","err","i","length","clearTimeout","fn","index","expectNoPendingImmediates"],"mappings":";;;;+BAiBgBA;;;eAAAA;;;gCAjBe;qCACQ;0CAIhC;4BACoB;AAE3B,SAASC,QAAQ;AASV,SAASD,qBACdE,KAAc,EACd,GAAGC,IAAuB;IAE1B,IAAIC,QAAQC,GAAG,CAACC,YAAY,KAAK,QAAQ;QACvC,MAAM,qBAEL,CAFK,IAAIC,8BAAc,CACtB,iEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF,OAAO;QACL,OAAO,IAAIC,QAAQ,CAACC,SAASC;YAC3B,MAAMC,kBAAkBC,IAAAA,2CAAsB;YAC9C,MAAMC,MAA4C,EAAE;YAEpD,IAAIC;YACJD,IAAIE,IAAI,CACNJ,gBAAgB;gBACd,IAAI;oBACFK,IAAAA,0EAAgD;oBAChDF,SAASZ;oBACT,+DAA+D;oBAC/D,+DAA+D;oBAC/D,iEAAiE;oBACjE,0DAA0D;oBAC1D,kDAAkD;oBAClD,IAAIe,IAAAA,sBAAU,EAACH,SAAS;wBACtBA,OAAOI,IAAI,CAACjB,MAAMA;oBACpB;gBACF,EAAE,OAAOkB,KAAK;oBACZ,IAAK,IAAIC,IAAI,GAAGA,IAAIP,IAAIQ,MAAM,EAAED,IAAK;wBACnCE,aAAaT,GAAG,CAACO,EAAE;oBACrB;oBACAV,OAAOS;gBACT;YACF;YAGF,IAAK,IAAIC,IAAI,GAAGA,IAAIjB
,KAAKkB,MAAM,EAAED,IAAK;gBACpC,MAAMG,KAAKpB,IAAI,CAACiB,EAAE;gBAClB,IAAII,QAAQX,IAAIQ,MAAM;gBAEtBR,IAAIE,IAAI,CACNJ,gBAAgB;oBACd,IAAI;wBACFK,IAAAA,0EAAgD;wBAChDO;oBACF,EAAE,OAAOJ,KAAK;wBACZ,2BAA2B;wBAC3B,MAAO,EAAEK,QAAQX,IAAIQ,MAAM,CAAE;4BAC3BC,aAAaT,GAAG,CAACW,MAAM;wBACzB;wBACAd,OAAOS;oBACT;gBACF;YAEJ;YAEA,kCAAkC;YAClCN,IAAIE,IAAI,CACNJ,gBAAgB;gBACd,IAAI;oBACFc,IAAAA,mDAAyB;oBACzBhB,QAAQK;gBACV,EAAE,OAAOK,KAAK;oBACZT,OAAOS;gBACT;YACF;QAEJ;IACF;AACF","ignoreList":[0]}
+8
View File
@@ -0,0 +1,8 @@
/**
 * Allows scheduling multiple timers (equivalent to `setTimeout(cb, delayMs)`)
 * that are guaranteed to run in the same iteration of the event loop.
 *
 * @param delayMs - the delay to pass to `setTimeout`. (default: 0)
 * @returns a `scheduleTimeout(callback)` function; calling it after the
 * group's first timer has already fired throws (see the implementation).
 *
 * */
export declare function createAtomicTimerGroup(delayMs?: number): (callback: () => void) => NodeJS.Timeout;
+188
View File
@@ -0,0 +1,188 @@
"use strict";
// Compiled CommonJS module for `createAtomicTimerGroup` (documented below).
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "createAtomicTimerGroup", {
    enumerable: true,
    // Live getter so the export always resolves to the hoisted function below.
    get: function() {
        return createAtomicTimerGroup;
    }
});
const _invarianterror = require("../../shared/lib/invariant-error");
const _fastsetimmediateexternal = require("../node-environment-extensions/fast-set-immediate.external");
/*
==========================
| Background |
==========================
Node.js does not guarantee that two timers scheduled back to back will run
on the same iteration of the event loop:
```ts
setTimeout(one, 0)
setTimeout(two, 0)
```
Internally, each timer is assigned a `_idleStart` property that holds
an internal libuv timestamp in millisecond resolution.
This will be used to determine if the timer is already "expired" and should be executed.
However, even in sync code, it's possible for two timers to get different `_idleStart` values.
This can cause one of the timers to be executed, and the other to be delayed until the next timer phase.
The delaying happens [here](https://github.com/nodejs/node/blob/c208ffc66bb9418ff026c4e3fa82e5b4387bd147/lib/internal/timers.js#L556-L564).
and can be debugged by running node with `NODE_DEBUG=timer`.
The easiest way to observe it is to run this program in a loop until it exits with status 1:
```
// test.js
let immediateRan = false
const t1 = setTimeout(() => {
console.log('timeout 1')
setImmediate(() => {
console.log('immediate 1')
immediateRan = true
})
})
const t2 = setTimeout(() => {
console.log('timeout 2')
if (immediateRan) {
console.log('immediate ran before the second timeout!')
console.log(
`t1._idleStart: ${t1._idleStart}, t2_idleStart: ${t2._idleStart}`
);
process.exit(1)
}
})
```
```bash
#!/usr/bin/env bash
i=1;
while true; do
output="$(NODE_DEBUG=timer node test.js 2>&1)";
if [ "$?" -eq 1 ]; then
echo "failed after $i iterations";
echo "$output";
break;
fi;
i=$((i+1));
done
```
If `t2` is deferred to the next iteration of the event loop,
then the immediate scheduled from inside `t1` will run first.
When this occurs, `_idleStart` is reliably different between `t1` and `t2`.
==========================
| Solution |
==========================
We can guarantee that multiple timers (with the same delay, usually `0`)
run together without any delays by making sure that their `_idleStart`s are the same,
because that's what's used to determine if a timer should be deferred or not.
Luckily, this property is currently exposed to userland and mutable,
so we can patch it.
Another related trick we could potentially apply is making
a timer immediately be considered expired by doing `timer._idleStart -= 2`.
(the value must be more than `1`, the delay that actually gets set for `setTimeout(cb, 0)`).
This makes node view this timer as "a 1ms timer scheduled 2ms ago",
meaning that it should definitely run in the next timer phase.
However, I'm not confident we know all the side effects of doing this,
so for now, simply ensuring coordination is enough.
*/ // Module-wide switch: once `_idleStart` patching fails or is detected to be
// ineffective, we stop attempting it for all future timer groups.
let shouldAttemptPatching = true;
// Emitted once patching is abandoned; the same-task guarantee for grouped
// timers can no longer be ensured on this runtime.
function warnAboutTimers() {
    console.warn("Next.js cannot guarantee that Cache Components will run as expected due to the current runtime's implementation of `setTimeout()`.\nPlease report a github issue here: https://github.com/vercel/next.js/issues/new/");
}
/**
 * Returns a `scheduleTimeout(callback)` function. All callbacks scheduled
 * through one group are forced into the same timer phase by copying the first
 * timer's internal `_idleStart` onto subsequent timers (see the background
 * comment above). Throws E934 in the edge runtime and E935 when scheduling
 * after the group's first timer has already fired.
 */ function createAtomicTimerGroup(delayMs = 0) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new _invarianterror.InvariantError('createAtomicTimerGroup cannot be called in the edge runtime'), "__NEXT_ERROR_CODE", {
            value: "E934",
            enumerable: false,
            configurable: true
        });
    } else {
        let isFirstCallback = true;
        let firstTimerIdleStart = null;
        let didFirstTimerRun = false;
        // As a sanity check, we schedule an immediate from the first timeout
        // to check if the execution was interrupted (i.e. if it ran between the timeouts).
        // Note that we're deliberately bypassing the "fast setImmediate" patch here --
        // otherwise, this check would always fail, because the immediate
        // would always run before the second timeout.
        let didImmediateRun = false;
        function runFirstCallback(callback) {
            didFirstTimerRun = true;
            if (shouldAttemptPatching) {
                (0, _fastsetimmediateexternal.unpatchedSetImmediate)(()=>{
                    didImmediateRun = true;
                });
            }
            return callback();
        }
        function runSubsequentCallback(callback) {
            if (shouldAttemptPatching) {
                if (didImmediateRun) {
                    // If the immediate managed to run between the timers, then we're not
                    // able to provide the guarantees that we're supposed to
                    shouldAttemptPatching = false;
                    warnAboutTimers();
                }
            }
            return callback();
        }
        return function scheduleTimeout(callback) {
            if (didFirstTimerRun) {
                throw Object.defineProperty(new _invarianterror.InvariantError('Cannot schedule more timers into a group that already executed'), "__NEXT_ERROR_CODE", {
                    value: "E935",
                    enumerable: false,
                    configurable: true
                });
            }
            const timer = setTimeout(isFirstCallback ? runFirstCallback : runSubsequentCallback, delayMs, callback);
            isFirstCallback = false;
            if (!shouldAttemptPatching) {
                // We already tried patching some timers, and it didn't work.
                // No point trying again.
                return timer;
            }
            // NodeJS timers have a `_idleStart` property, but it doesn't exist e.g. in Bun.
            // If it's not present, we'll warn and try to continue.
            try {
                if ('_idleStart' in timer && typeof timer._idleStart === 'number') {
                    // If this is the first timer that was scheduled, save its `_idleStart`.
                    // We'll copy it onto subsequent timers to guarantee that they'll all be
                    // considered expired in the same iteration of the event loop
                    // and thus will all be executed in the same timer phase.
                    if (firstTimerIdleStart === null) {
                        firstTimerIdleStart = timer._idleStart;
                    } else {
                        timer._idleStart = firstTimerIdleStart;
                    }
                } else {
                    shouldAttemptPatching = false;
                    warnAboutTimers();
                }
            } catch (err) {
                // This should never fail in current Node, but it might start failing in the future.
                // We might be okay even without tweaking the timers, so warn and try to continue.
                console.error(Object.defineProperty(new _invarianterror.InvariantError('An unexpected error occurred while adjusting `_idleStart` on an atomic timer', {
                    cause: err
                }), "__NEXT_ERROR_CODE", {
                    value: "E933",
                    enumerable: false,
                    configurable: true
                }));
                shouldAttemptPatching = false;
                warnAboutTimers();
            }
            return timer;
        };
    }
}
//# sourceMappingURL=app-render-scheduling.js.map
File diff suppressed because one or more lines are too long
+90
View File
@@ -0,0 +1,90 @@
// Type declarations for the App Router render entry point and its context.
import type { RenderOpts, PreloadCallbacks } from './types';
import type { ActionResult, DynamicParamTypesShort, DynamicSegmentTuple, FlightRouterState, CacheNodeSeedData } from '../../shared/lib/app-router-types';
import { type WorkStore } from '../app-render/work-async-storage.external';
import type { RequestStore } from '../app-render/work-unit-async-storage.external';
import type { NextParsedUrlQuery } from '../request-meta';
import type { LoaderTree } from '../lib/app-dir-module';
import type { AppPageModule } from '../route-modules/app-page/module';
import type { BaseNextRequest, BaseNextResponse } from '../base-http';
import RenderResult, { type AppPageRenderResultMetadata } from '../render-result';
import { type ImplicitTags } from '../lib/implicit-tags';
import { parseRelativeUrl } from '../../shared/lib/router/utils/parse-relative-url';
import type { ServerComponentsHmrCache } from '../response-cache';
import { type OpaqueFallbackRouteParams } from '../request/fallback-params';
import type { Params } from '../request/params';
export type GetDynamicParamFromSegment = (loaderTree: LoaderTree) => DynamicParam | null;
export type DynamicParam = {
    param: string;
    value: string | string[] | null;
    treeSegment: DynamicSegmentTuple;
    type: DynamicParamTypesShort;
};
export type GenerateFlight = typeof generateDynamicFlightRenderResult;
export type AppSharedContext = {
    buildId: string;
    deploymentId: string;
    clientAssetToken: string;
};
// Per-request bag of everything the app renderer needs; constructed once per
// render and threaded through the rendering pipeline.
export type AppRenderContext = {
    sharedContext: AppSharedContext;
    workStore: WorkStore;
    url: ReturnType<typeof parseRelativeUrl>;
    componentMod: AppPageModule;
    renderOpts: RenderOpts;
    parsedRequestHeaders: ParsedRequestHeaders;
    getDynamicParamFromSegment: GetDynamicParamFromSegment;
    interpolatedParams: Params;
    query: NextParsedUrlQuery;
    isPrefetch: boolean;
    isPossibleServerAction: boolean;
    requestTimestamp: number;
    appUsingSizeAdjustment: boolean;
    flightRouterState?: FlightRouterState;
    requestId: string;
    htmlRequestId: string;
    pagePath: string;
    assetPrefix: string;
    isNotFoundPath: boolean;
    nonce: string | undefined;
    res: BaseNextResponse;
    /**
     * For now, the implicit tags are common for the whole route. If we ever start
     * rendering/revalidating segments independently, they need to move to the
     * work unit store.
     */
    implicitTags: ImplicitTags;
};
// Request-derived flags parsed from headers before rendering begins.
interface ParsedRequestHeaders {
    /**
     * Router state provided from the client-side router. Used to handle rendering
     * from the common layout down. This value will be undefined if the request is
     * not a client-side navigation request, or if the request is a prefetch
     * request.
     */
    readonly flightRouterState: FlightRouterState | undefined;
    readonly isPrefetchRequest: boolean;
    readonly isRuntimePrefetchRequest: boolean;
    readonly isRouteTreePrefetchRequest: boolean;
    readonly isHmrRefresh: boolean;
    readonly isRSCRequest: boolean;
    readonly nonce: string | undefined;
    readonly previouslyRevalidatedTags: string[];
    readonly requestId: string | undefined;
    readonly htmlRequestId: string | undefined;
}
/**
 * Produces a RenderResult containing the Flight data for the given request. See
 * `generateDynamicRSCPayload` for information on the contents of the render result.
 */
declare function generateDynamicFlightRenderResult(req: BaseNextRequest, ctx: AppRenderContext, requestStore: RequestStore, options?: {
    actionResult: ActionResult;
    skipPageRendering: boolean;
    componentTree?: CacheNodeSeedData;
    preloadCallbacks?: PreloadCallbacks;
    temporaryReferences?: WeakMap<any, string>;
    waitUntil?: Promise<unknown>;
}): Promise<RenderResult>;
export type BinaryStreamOf<T> = ReadableStream<Uint8Array>;
// Signature of the main render entry point (HTML or Flight output).
export type AppPageRender = (req: BaseNextRequest, res: BaseNextResponse, pagePath: string, query: NextParsedUrlQuery, fallbackRouteParams: OpaqueFallbackRouteParams | null, renderOpts: RenderOpts, serverComponentsHmrCache: ServerComponentsHmrCache | undefined, sharedContext: AppSharedContext) => Promise<RenderResult<AppPageRenderResultMetadata>>;
export declare const renderToHTMLOrFlight: AppPageRender;
export {};
File diff suppressed because it is too large Load Diff
File diff suppressed because one or more lines are too long
+4
View File
@@ -0,0 +1,4 @@
// Type declarations for the AsyncLocalStorage compatibility wrapper.
import type { AsyncLocalStorage } from 'async_hooks';
// Creates a real AsyncLocalStorage when the runtime provides one globally,
// otherwise a fake whose `getStore()` always returns `undefined`.
export declare function createAsyncLocalStorage<Store extends {}>(): AsyncLocalStorage<Store>;
// Binds `fn` to the current async context (identity when ALS is unavailable).
export declare function bindSnapshot<T>(fn: T): T;
// Captures the current async context; the returned function runs its argument
// inside that captured context (direct invocation when ALS is unavailable).
export declare function createSnapshot(): <R, TArgs extends any[]>(fn: (...args: TArgs) => R, ...args: TArgs) => R;
+76
View File
@@ -0,0 +1,76 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Dead code (`0 && …`): never executed at runtime, but lets tools that
// statically scan `module.exports` discover this module's named exports.
0 && (module.exports = {
    bindSnapshot: null,
    createAsyncLocalStorage: null,
    createSnapshot: null
});
// Defines each entry of `all` on `target` as an enumerable live getter.
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    bindSnapshot: function() {
        return bindSnapshot;
    },
    createAsyncLocalStorage: function() {
        return createAsyncLocalStorage;
    },
    createSnapshot: function() {
        return createSnapshot;
    }
});
const sharedAsyncLocalStorageNotAvailableError = Object.defineProperty(new Error('Invariant: AsyncLocalStorage accessed in runtime where it is not available'), "__NEXT_ERROR_CODE", {
value: "E504",
enumerable: false,
configurable: true
});
class FakeAsyncLocalStorage {
disable() {
throw sharedAsyncLocalStorageNotAvailableError;
}
getStore() {
// This fake implementation of AsyncLocalStorage always returns `undefined`.
return undefined;
}
run() {
throw sharedAsyncLocalStorageNotAvailableError;
}
exit() {
throw sharedAsyncLocalStorageNotAvailableError;
}
enterWith() {
throw sharedAsyncLocalStorageNotAvailableError;
}
static bind(fn) {
return fn;
}
}
// The real AsyncLocalStorage constructor when the runtime exposes one on
// `globalThis`; otherwise falsy, which selects the fake fallback below.
const maybeGlobalAsyncLocalStorage = typeof globalThis !== 'undefined' && globalThis.AsyncLocalStorage;
// Constructs an AsyncLocalStorage instance, preferring the runtime's real
// implementation and falling back to the throwing/no-op stub otherwise.
function createAsyncLocalStorage() {
    return maybeGlobalAsyncLocalStorage ? new maybeGlobalAsyncLocalStorage() : new FakeAsyncLocalStorage();
}
// Binds `fn` so it later runs inside the async context that is active right
// now. Falls back to the stub's identity bind when ALS is unavailable.
// WARNING: Don't pass a named function to this argument!
// See: https://github.com/facebook/react/pull/34911
function bindSnapshot(fn) {
    return maybeGlobalAsyncLocalStorage ? maybeGlobalAsyncLocalStorage.bind(fn) : FakeAsyncLocalStorage.bind(fn);
}
// Captures the current async context. The returned wrapper invokes
// `fn(...args)` inside the captured context; without AsyncLocalStorage it
// degrades to a plain call.
function createSnapshot() {
    if (!maybeGlobalAsyncLocalStorage) {
        return (fn, ...args)=>fn(...args);
    }
    return maybeGlobalAsyncLocalStorage.snapshot();
}
//# sourceMappingURL=async-local-storage.js.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/async-local-storage.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\nconst sharedAsyncLocalStorageNotAvailableError = new Error(\n 'Invariant: AsyncLocalStorage accessed in runtime where it is not available'\n)\n\nclass FakeAsyncLocalStorage<Store extends {}>\n implements AsyncLocalStorage<Store>\n{\n disable(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n getStore(): Store | undefined {\n // This fake implementation of AsyncLocalStorage always returns `undefined`.\n return undefined\n }\n\n run<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n exit<R>(): R {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n enterWith(): void {\n throw sharedAsyncLocalStorageNotAvailableError\n }\n\n static bind<T>(fn: T): T {\n return fn\n }\n}\n\nconst maybeGlobalAsyncLocalStorage =\n typeof globalThis !== 'undefined' && (globalThis as any).AsyncLocalStorage\n\nexport function createAsyncLocalStorage<\n Store extends {},\n>(): AsyncLocalStorage<Store> {\n if (maybeGlobalAsyncLocalStorage) {\n return new maybeGlobalAsyncLocalStorage()\n }\n return new FakeAsyncLocalStorage()\n}\n\nexport function bindSnapshot<T>(\n // WARNING: Don't pass a named function to this argument! 
See: https://github.com/facebook/react/pull/34911\n fn: T\n): T {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.bind(fn)\n }\n return FakeAsyncLocalStorage.bind(fn)\n}\n\nexport function createSnapshot(): <R, TArgs extends any[]>(\n fn: (...args: TArgs) => R,\n ...args: TArgs\n) => R {\n if (maybeGlobalAsyncLocalStorage) {\n return maybeGlobalAsyncLocalStorage.snapshot()\n }\n return function (fn: any, ...args: any[]) {\n return fn(...args)\n }\n}\n"],"names":["bindSnapshot","createAsyncLocalStorage","createSnapshot","sharedAsyncLocalStorageNotAvailableError","Error","FakeAsyncLocalStorage","disable","getStore","undefined","run","exit","enterWith","bind","fn","maybeGlobalAsyncLocalStorage","globalThis","AsyncLocalStorage","snapshot","args"],"mappings":";;;;;;;;;;;;;;;;IA+CgBA,YAAY;eAAZA;;IATAC,uBAAuB;eAAvBA;;IAmBAC,cAAc;eAAdA;;;AAvDhB,MAAMC,2CAA2C,qBAEhD,CAFgD,IAAIC,MACnD,+EAD+C,qBAAA;WAAA;gBAAA;kBAAA;AAEjD;AAEA,MAAMC;IAGJC,UAAgB;QACd,MAAMH;IACR;IAEAI,WAA8B;QAC5B,4EAA4E;QAC5E,OAAOC;IACT;IAEAC,MAAY;QACV,MAAMN;IACR;IAEAO,OAAa;QACX,MAAMP;IACR;IAEAQ,YAAkB;QAChB,MAAMR;IACR;IAEA,OAAOS,KAAQC,EAAK,EAAK;QACvB,OAAOA;IACT;AACF;AAEA,MAAMC,+BACJ,OAAOC,eAAe,eAAe,AAACA,WAAmBC,iBAAiB;AAErE,SAASf;IAGd,IAAIa,8BAA8B;QAChC,OAAO,IAAIA;IACb;IACA,OAAO,IAAIT;AACb;AAEO,SAASL,aACd,2GAA2G;AAC3Ga,EAAK;IAEL,IAAIC,8BAA8B;QAChC,OAAOA,6BAA6BF,IAAI,CAACC;IAC3C;IACA,OAAOR,sBAAsBO,IAAI,CAACC;AACpC;AAEO,SAASX;IAId,IAAIY,8BAA8B;QAChC,OAAOA,6BAA6BG,QAAQ;IAC9C;IACA,OAAO,SAAUJ,EAAO,EAAE,GAAGK,IAAW;QACtC,OAAOL,MAAMK;IACf;AACF","ignoreList":[0]}
+34
View File
@@ -0,0 +1,34 @@
/**
 * This class is used to detect when all cache reads for a given render are settled.
 * We do this to allow for cache warming the prerender without having to continue rendering
 * the remainder of the page. This feature is really only useful when the cacheComponents flag is on
 * and should only be used in codepaths gated with this feature.
 */
export declare class CacheSignal {
    /** Number of cache reads currently in flight. */
    private count;
    /** Pending resolvers for `inputReady()` promises. */
    private earlyListeners;
    /** Pending resolvers for `cacheReady()` promises. */
    private listeners;
    /** Guards against scheduling more than one settle-check tick at a time. */
    private tickPending;
    /** Cancels the currently scheduled immediate/timeout settle check, if any. */
    private pendingTimeoutCleanup;
    /** Other CacheSignals mirroring this signal's begin/end read activity. */
    private subscribedSignals;
    constructor();
    private noMorePendingCaches;
    private invokeListenersIfNoPendingReads;
    /**
     * This promise waits until there are no more in progress cache reads but no later.
     * This allows for adding more cache reads after to delay cacheReady.
     */
    inputReady(): Promise<void>;
    /**
     * If there are inflight cache reads this Promise can resolve in a microtask however
     * if there are no inflight cache reads then we wait at least one task to allow initial
     * cache reads to be initiated.
     */
    cacheReady(): Promise<void>;
    /** Marks the start of a cache read (mirrored to subscribed signals). */
    beginRead(): void;
    /** Marks the end of a cache read; throws if unbalanced with `beginRead()`. */
    endRead(): void;
    /** True while at least one cache read is in flight. */
    hasPendingReads(): boolean;
    /** Tracks `promise` as a read: `beginRead()` now, `endRead()` on settle. Returns the same promise. */
    trackRead<T>(promise: Promise<T>): Promise<T>;
    /** Mirrors this signal's reads onto `subscriber`; returns an unsubscribe function. */
    subscribeToReads(subscriber: CacheSignal): () => void;
    /** Stops mirroring reads onto `subscriber`. */
    unsubscribeFromReads(subscriber: CacheSignal): void;
}
+181
View File
@@ -0,0 +1,181 @@
/**
 * This class is used to detect when all cache reads for a given render are settled.
 * We do this to allow for cache warming the prerender without having to continue rendering
 * the remainder of the page. This feature is really only useful when the cacheComponents flag is on
 * and should only be used in codepaths gated with this feature.
 */ "use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// CommonJS export wiring: exposes the CacheSignal class via a lazy getter.
Object.defineProperty(exports, "CacheSignal", {
    enumerable: true,
    get: function() {
        return CacheSignal;
    }
});
const _invarianterror = require("../../shared/lib/invariant-error");
// Tracks in-flight cache reads via a counter plus listener queues; see the
// header comment above for why. The settle checks are timed carefully around
// the Node event loop (microtask -> nextTick -> immediate -> timeout), so the
// statement order below is load-bearing.
class CacheSignal {
    constructor(){
        // Number of cache reads currently in flight.
        this.count = 0;
        // Resolvers for inputReady() / cacheReady(), respectively.
        this.earlyListeners = [];
        this.listeners = [];
        this.tickPending = false;
        this.pendingTimeoutCleanup = null;
        this.subscribedSignals = null;
        // Resolves all cacheReady() listeners, but only if no new read started
        // while the immediate/timeout pair was pending.
        this.invokeListenersIfNoPendingReads = ()=>{
            this.pendingTimeoutCleanup = null;
            if (this.count === 0) {
                for(let i = 0; i < this.listeners.length; i++){
                    this.listeners[i]();
                }
                this.listeners.length = 0;
            }
        };
        if (process.env.NEXT_RUNTIME === 'edge') {
            // we rely on `process.nextTick`, which is not supported in edge
            throw Object.defineProperty(new _invarianterror.InvariantError('CacheSignal cannot be used in the edge runtime, because `cacheComponents` does not support it.'), "__NEXT_ERROR_CODE", {
                value: "E728",
                enumerable: false,
                configurable: true
            });
        }
    }
    // Called whenever the read count may have reached zero. Resolves the
    // "early" (inputReady) listeners after a microtask + nextTick, and
    // schedules the regular (cacheReady) listeners for after a full trip
    // around the event loop.
    noMorePendingCaches() {
        if (!this.tickPending) {
            this.tickPending = true;
            queueMicrotask(()=>process.nextTick(()=>{
                this.tickPending = false;
                if (this.count === 0) {
                    for(let i = 0; i < this.earlyListeners.length; i++){
                        this.earlyListeners[i]();
                    }
                    this.earlyListeners.length = 0;
                }
            }));
        }
        // After a cache resolves, React will schedule new rendering work:
        // - in a microtask (when prerendering)
        // - in setImmediate (when rendering)
        // To cover both of these, we have to make sure that we let immediates execute at least once after each cache resolved.
        // We don't know when the pending timeout was scheduled (and if it's about to resolve),
        // so by scheduling a new one, we can be sure that we'll go around the event loop at least once.
        if (this.pendingTimeoutCleanup) {
            // We cancel the timeout in beginRead, so this shouldn't ever be active here,
            // but we still cancel it defensively.
            this.pendingTimeoutCleanup();
        }
        this.pendingTimeoutCleanup = scheduleImmediateAndTimeoutWithCleanup(this.invokeListenersIfNoPendingReads);
    }
    /**
     * This promise waits until there are no more in progress cache reads but no later.
     * This allows for adding more cache reads after to delay cacheReady.
     */ inputReady() {
        return new Promise((resolve)=>{
            this.earlyListeners.push(resolve);
            if (this.count === 0) {
                this.noMorePendingCaches();
            }
        });
    }
    /**
     * If there are inflight cache reads this Promise can resolve in a microtask however
     * if there are no inflight cache reads then we wait at least one task to allow initial
     * cache reads to be initiated.
     */ cacheReady() {
        return new Promise((resolve)=>{
            this.listeners.push(resolve);
            if (this.count === 0) {
                this.noMorePendingCaches();
            }
        });
    }
    // Marks the start of a cache read and mirrors it to any subscribed signals.
    beginRead() {
        this.count++;
        // There's a new pending cache, so if there's a `noMorePendingCaches` timeout running,
        // we should cancel it.
        if (this.pendingTimeoutCleanup) {
            this.pendingTimeoutCleanup();
            this.pendingTimeoutCleanup = null;
        }
        if (this.subscribedSignals !== null) {
            for (const subscriber of this.subscribedSignals){
                subscriber.beginRead();
            }
        }
    }
    // Marks the end of a cache read; throws if called more times than
    // beginRead(). Mirrors the end to any subscribed signals.
    endRead() {
        if (this.count === 0) {
            throw Object.defineProperty(new _invarianterror.InvariantError('CacheSignal got more endRead() calls than beginRead() calls'), "__NEXT_ERROR_CODE", {
                value: "E678",
                enumerable: false,
                configurable: true
            });
        }
        // If this is the last read we need to wait a task before we can claim the cache is settled.
        // The cache read will likely ping a Server Component which can read from the cache again and this
        // will play out in a microtask so we need to only resolve pending listeners if we're still at 0
        // after at least one task.
        // We only want one task scheduled at a time so when we hit count 1 we don't decrement the counter immediately.
        // If intervening reads happen before the scheduled task runs they will never observe count 1 preventing reentrency
        this.count--;
        if (this.count === 0) {
            this.noMorePendingCaches();
        }
        if (this.subscribedSignals !== null) {
            for (const subscriber of this.subscribedSignals){
                subscriber.endRead();
            }
        }
    }
    // True while at least one read is in flight.
    hasPendingReads() {
        return this.count > 0;
    }
    // Tracks `promise` as a read: beginRead() now, endRead() when it settles
    // (fulfilled or rejected). Returns the same promise unchanged.
    trackRead(promise) {
        this.beginRead();
        // `promise.finally()` still rejects, so don't use it here to avoid unhandled rejections
        const onFinally = this.endRead.bind(this);
        promise.then(onFinally, onFinally);
        return promise;
    }
    // Mirrors every begin/end on this signal onto `subscriber`. Returns an
    // unsubscribe function. Self-subscription would recurse, so it throws.
    subscribeToReads(subscriber) {
        if (subscriber === this) {
            throw Object.defineProperty(new _invarianterror.InvariantError('A CacheSignal cannot subscribe to itself'), "__NEXT_ERROR_CODE", {
                value: "E679",
                enumerable: false,
                configurable: true
            });
        }
        if (this.subscribedSignals === null) {
            this.subscribedSignals = new Set();
        }
        this.subscribedSignals.add(subscriber);
        // we'll notify the subscriber of each endRead() on this signal,
        // so we need to give it a corresponding beginRead() for each read we have in flight now.
        for(let i = 0; i < this.count; i++){
            subscriber.beginRead();
        }
        return this.unsubscribeFromReads.bind(this, subscriber);
    }
    // Stops mirroring reads onto `subscriber`. Safe to call when never subscribed.
    unsubscribeFromReads(subscriber) {
        if (!this.subscribedSignals) {
            return;
        }
        this.subscribedSignals.delete(subscriber);
        // we don't need to set the set back to `null` if it's empty --
        // if other signals are subscribing to this one, it'll likely get more subscriptions later,
        // so we'd have to allocate a fresh set again when that happens.
    }
}
// Runs `cb` after one full trip around the event loop: first an immediate,
// then a 0ms timeout. Returns a cleanup function that cancels whichever of
// the two handles is still pending at the time it's called.
function scheduleImmediateAndTimeoutWithCleanup(cb) {
    // Reassigned when the immediate fires so cleanup always targets the
    // currently pending handle (immediate first, then timeout).
    let cancelCurrent;
    const immediateHandle = setImmediate(()=>{
        const timeoutHandle = setTimeout(cb, 0);
        cancelCurrent = ()=>clearTimeout(timeoutHandle);
    });
    cancelCurrent = ()=>clearImmediate(immediateHandle);
    return ()=>cancelCurrent();
}
//# sourceMappingURL=cache-signal.js.map
File diff suppressed because one or more lines are too long
+71
View File
@@ -0,0 +1,71 @@
import type { DynamicParamTypesShort, PrefetchHints } from '../../shared/lib/app-router-types';
import type { ManifestNode } from '../../build/webpack/plugins/flight-manifest-plugin';
import { type SegmentRequestKey } from '../../shared/lib/segment-cache/segment-value-encoding';
/** Top-level payload of the route-tree prefetch response. */
export type RootTreePrefetch = {
    buildId?: string;
    tree: TreePrefetch;
    staleTime: number;
};
/** Dynamic-param metadata attached to a segment in the prefetched route tree. */
export type TreePrefetchParam = {
    type: DynamicParamTypesShort;
    key: string | null;
    siblings: readonly string[] | null;
};
/** One node of the prefetched route tree; `slots` holds children keyed by parallel route. */
export type TreePrefetch = {
    name: string;
    param: TreePrefetchParam | null;
    slots: null | {
        [parallelRouteKey: string]: TreePrefetch;
    };
    /** Bitmask of PrefetchHint flags for this segment and its subtree */
    prefetchHints: number;
};
/** Prefetch response for a single segment (the head is encoded with this shape too). */
export type SegmentPrefetch = {
    buildId?: string;
    rsc: React.ReactNode | null;
    isPartial: boolean;
    staleTime: number;
    /**
     * The set of params that this segment's output depends on. Used by the client
     * cache to determine which entries can be reused across different param
     * values.
     * - `null` means vary params were not tracked (conservative: assume all
     *   params matter)
     * - Empty set means no params were accessed (segment is reusable for any
     *   param values)
     */
    varyParams: Set<string> | null;
};
/**
 * A node in the inlined prefetch tree. Wraps a SegmentPrefetch with child
 * slots so all segments for a route can be bundled into a single response.
 *
 * This is a separate type from SegmentPrefetch because the inlined flow is
 * still gated behind a feature flag. Eventually inlining will always be
 * enabled, and the per-segment and inlined paths will merge.
 */
export type InlinedSegmentPrefetch = {
    segment: SegmentPrefetch;
    slots: null | {
        [parallelRouteKey: string]: InlinedSegmentPrefetch;
    };
};
/**
 * The response shape for the /_inlined prefetch endpoint. Contains all segment
 * data for a route bundled into a single tree structure, plus the head segment.
 */
export type InlinedPrefetchResponse = {
    tree: InlinedSegmentPrefetch;
    head: SegmentPrefetch;
};
/**
 * Generates a prefetch response for each segment of a prerendered page by
 * re-encoding slices of `fullPageDataBuffer`. Returns a map from segment
 * request key to the encoded response buffer.
 */
export declare function collectSegmentData(isCacheComponentsEnabled: boolean, fullPageDataBuffer: Buffer, staleTime: number, clientModules: ManifestNode, serverConsumerManifest: any, prefetchInlining: boolean, hints: PrefetchHints | null): Promise<Map<SegmentRequestKey, Buffer>>;
/**
 * Compute prefetch hints for a route by measuring segment sizes and deciding
 * which segments should be inlined. Only runs at build time. The results are
 * written to prefetch-hints.json and loaded at server startup.
 *
 * This is a separate pass from collectSegmentData so that the inlining
 * decisions can be fed back into collectSegmentData to control which segments
 * are output as separate entries vs. inlined into their parent.
 */
export declare function collectPrefetchHints(fullPageDataBuffer: Buffer, staleTime: number, clientModules: ManifestNode, serverConsumerManifest: any, maxSize: number, maxBundleSize: number): Promise<PrefetchHints>;
+600
View File
@@ -0,0 +1,600 @@
/* eslint-disable @next/internal/no-ambiguous-jsx -- Bundled in entry-base so it gets the right JSX runtime. */ "use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Dead code (`0 && …` never executes): statically lists the named exports,
// presumably so static analyzers (e.g. cjs-module-lexer) can discover them.
0 && (module.exports = {
    collectPrefetchHints: null,
    collectSegmentData: null
});
// Defines a lazy, enumerable getter on `target` for every entry in `all`.
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    collectPrefetchHints: function() {
        return collectPrefetchHints;
    },
    collectSegmentData: function() {
        return collectSegmentData;
    }
});
const _jsxruntime = require("react/jsx-runtime");
const _approutertypes = require("../../shared/lib/app-router-types");
const _varyparamsdecoding = require("../../shared/lib/segment-cache/vary-params-decoding");
const _segment = require("../../shared/lib/segment");
const _client = require("react-server-dom-webpack/client");
const _static = require("react-server-dom-webpack/static");
const _nodewebstreamshelper = require("../stream-utils/node-web-streams-helper");
const _scheduler = require("../../lib/scheduler");
const _segmentvalueencoding = require("../../shared/lib/segment-cache/segment-value-encoding");
const _createerrorhandler = require("./create-error-handler");
const _prospectiverenderutils = require("./prospective-render-utils");
const _workasyncstorageexternal = require("./work-async-storage.external");
// Source-map helpers are only loaded outside production builds.
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
// Error handler passed to the segment prerenders. Well-known errors map to a
// digest (returned so React can attach it); everything else was already
// logged when the original full-page Flight stream was generated, so it is
// only re-printed in debug/verbose builds.
function onSegmentPrerenderError(error) {
    const knownDigest = (0, _createerrorhandler.getDigestForWellKnownError)(error);
    if (knownDigest) return knownDigest;
    const shouldDebugLog = process.env.NEXT_DEBUG_BUILD || process.env.__NEXT_VERBOSE_LOGGING;
    if (!shouldDebugLog) return;
    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
    const route = (workStore == null ? void 0 : workStore.route) ?? 'unknown route';
    (0, _prospectiverenderutils.printDebugThrownValueForProspectiveRender)(error, route, _prospectiverenderutils.Phase.SegmentCollection);
}
/**
 * Extract the FlightRouterState, seed data, and head from a prerendered
 * InitialRSCPayload. Returns null if the payload doesn't match the expected
 * shape (single path with 3 elements).
 */ function extractFlightData(initialRSCPayload) {
    const flightDataPaths = initialRSCPayload.f;
    // FlightDataPath is an unsound type, hence the additional checks.
    // NOTE: this must be `||`, not `&&`: the payload is malformed if EITHER
    // there is more than one path OR the single path doesn't have exactly 3
    // elements. The previous `&&` let a single path of the wrong length fall
    // through to the destructuring below, and crashed on an empty `f` array
    // because `flightDataPaths[0].length` was read before returning null.
    if (flightDataPaths.length !== 1 || flightDataPaths[0].length !== 3) {
        console.error('Internal Next.js error: InitialRSCPayload does not match the expected ' + 'shape for a prerendered page during segment prefetch generation.');
        return null;
    }
    return {
        buildId: initialRSCPayload.b,
        flightRouterState: flightDataPaths[0][0],
        seedData: flightDataPaths[0][1],
        head: flightDataPaths[0][2]
    };
}
/**
 * Generates prefetch responses for a prerendered page by re-encoding the
 * cached full-page Flight data. Returns a map from segment request key to
 * response buffer, including the special `/_tree` (route tree) and `/_full`
 * (entire page data) entries. The per-segment tasks are spawned while the
 * tree prerender below walks the route tree, then awaited at the end.
 */ async function collectSegmentData(isCacheComponentsEnabled, fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest, prefetchInlining, hints) {
    // Traverse the router tree and generate a prefetch response for each segment.
    // A mutable map to collect the results as we traverse the route tree.
    const resultMap = new Map();
    // Before we start, warm up the module cache by decoding the page data once.
    // Then we can assume that any remaining async tasks that occur the next time
    // are due to hanging promises caused by dynamic data access. Note we only
    // have to do this once per page, not per individual segment.
    //
    try {
        await (0, _client.createFromReadableStream)((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer), {
            findSourceMapURL,
            serverConsumerManifest
        });
        await (0, _scheduler.waitAtLeastOneReactRenderTask)();
    } catch {}
    // Create an abort controller that we'll use to stop the stream.
    const abortController = new AbortController();
    const onCompletedProcessingRouteTree = async ()=>{
        // Since all we're doing is decoding and re-encoding a cached prerender, if
        // serializing the stream takes longer than a microtask, it must because of
        // hanging promises caused by dynamic data.
        await (0, _scheduler.waitAtLeastOneReactRenderTask)();
        abortController.abort();
    };
    // Generate a stream for the route tree prefetch. While we're walking the
    // tree, we'll also spawn additional tasks to generate the segment prefetches.
    // The promises for these tasks are pushed to a mutable array that we will
    // await once the route tree is fully rendered.
    const segmentTasks = [];
    const { prelude: treeStream } = await (0, _static.prerender)(// RootTreePrefetch is not a valid return type for a React component, but
    // we need to use a component so that when we decode the original stream
    // inside of it, the side effects are transferred to the new stream.
    // @ts-expect-error
    /*#__PURE__*/ (0, _jsxruntime.jsx)(PrefetchTreeData, {
        isClientParamParsingEnabled: isCacheComponentsEnabled,
        fullPageDataBuffer: fullPageDataBuffer,
        serverConsumerManifest: serverConsumerManifest,
        clientModules: clientModules,
        staleTime: staleTime,
        segmentTasks: segmentTasks,
        onCompletedProcessingRouteTree: onCompletedProcessingRouteTree,
        prefetchInlining: prefetchInlining,
        hints: hints
    }), clientModules, {
        filterStackFrame,
        signal: abortController.signal,
        onError: onSegmentPrerenderError
    });
    // Write the route tree to a special `/_tree` segment.
    const treeBuffer = await (0, _nodewebstreamshelper.streamToBuffer)(treeStream);
    resultMap.set('/_tree', treeBuffer);
    // Also output the entire full page data response
    resultMap.set('/_full', fullPageDataBuffer);
    // Now that we've finished rendering the route tree, all the segment tasks
    // should have been spawned. Await them in parallel and write the segment
    // prefetches to the result map.
    for (const [segmentPath, buffer] of (await Promise.all(segmentTasks))){
        resultMap.set(segmentPath, buffer);
    }
    return resultMap;
}
/**
 * Build-time pass that computes inlining hints for a route. Measures the
 * gzip size of each segment (and the head) and walks the route tree via
 * collectPrefetchHintsImpl to decide which segments should be inlined into
 * a child's response. Returns the root hint node.
 */ async function collectPrefetchHints(fullPageDataBuffer, staleTime, clientModules, serverConsumerManifest, maxSize, maxBundleSize) {
    // Warm up the module cache, same as collectSegmentData.
    try {
        await (0, _client.createFromReadableStream)((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer), {
            findSourceMapURL,
            serverConsumerManifest
        });
        await (0, _scheduler.waitAtLeastOneReactRenderTask)();
    } catch {}
    // Decode the Flight data to walk the route tree.
    const initialRSCPayload = await (0, _client.createFromReadableStream)(createUnclosingPrefetchStream((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer)), {
        findSourceMapURL,
        serverConsumerManifest
    });
    const flightData = extractFlightData(initialRSCPayload);
    if (flightData === null) {
        // Malformed payload: return an empty hint tree rather than failing the build.
        return {
            hints: 0,
            slots: null
        };
    }
    const { buildId, flightRouterState, seedData, head } = flightData;
    // Measure the head (metadata/viewport) gzip size so the main traversal
    // can decide whether to inline it into a page's bundle.
    const headVaryParamsThenable = initialRSCPayload.h;
    const headVaryParams = headVaryParamsThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(headVaryParamsThenable) : null;
    const [, headBuffer] = await renderSegmentPrefetch(buildId, staleTime, head, _segmentvalueencoding.HEAD_REQUEST_KEY, headVaryParams, clientModules);
    const headGzipSize = await getGzipSize(headBuffer);
    // Mutable accumulator: the first page leaf that can fit the head sets
    // this to true. Once set, subsequent leaves skip the check.
    const headInlineState = {
        inlined: false
    };
    // Walk the tree with the parent-first, child-decides algorithm.
    const { node } = await collectPrefetchHintsImpl(flightRouterState, buildId, staleTime, seedData, clientModules, _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY, null, maxSize, maxBundleSize, headGzipSize, headInlineState);
    if (!headInlineState.inlined) {
        // No page could accept the head. Set HeadOutlined on the root so the
        // client knows to fetch the head separately.
        node.hints |= _approutertypes.PrefetchHint.HeadOutlined;
    }
    return node;
}
// Measure a segment's gzip size and decide whether it should be inlined.
//
// These hints are computed once during build and never change for the
// lifetime of that deployment. The client can assume that hints delivered as
// part of one request will be the same during a subsequent request, given
// the same build ID. There's no skew to worry about as long as the build
// itself is consistent.
//
// In the Segment Cache, we split page prefetches into multiple requests so
// that each one can be cached and deduped independently. However, some
// segments are small enough that the potential caching benefits are not worth
// the additional network overhead. For these, we inline a parent's data into
// one of its children's responses, avoiding a separate request. The parent
// is inlined into the child (not the other way around) because the parent's
// response is more likely to be shared across multiple pages. The child's
// response is already page-specific, so adding the parent's data there
// doesn't meaningfully reduce deduplication. It's similar to how JS bundlers
// decide whether to inline a module into a chunk.
//
// The algorithm is parent-first, child-decides: the parent measures itself
// and passes its gzip size down. Each child decides whether to accept. A
// child rejects if the parent exceeds maxSize or if accepting would push
// the cumulative inlined bytes past maxBundleSize. This produces
// both ParentInlinedIntoSelf (on the child) and InlinedIntoChild (on the
// parent) in a single pass.
//
// Returns `{ node, inlinedBytes }`: the hint-tree node for this segment and
// the cumulative gzipped bytes inlined along this branch (see below).
async function collectPrefetchHintsImpl(route, buildId, staleTime, seedData, clientModules, // TODO: Consider persisting the computed requestKey into the hints output
// so it doesn't need to be recomputed during the build. This might also
// suggest renaming prefetch-hints.json to something like
// segment-manifest.json, since it would contain more than just hints.
requestKey, parentGzipSize, maxSize, maxBundleSize, headGzipSize, headInlineState) {
    // Render current segment and measure its gzip size.
    let currentGzipSize = null;
    if (seedData !== null) {
        const varyParamsThenable = seedData[4];
        const varyParams = varyParamsThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(varyParamsThenable) : null;
        const [, buffer] = await renderSegmentPrefetch(buildId, staleTime, seedData[0], requestKey, varyParams, clientModules);
        currentGzipSize = await getGzipSize(buffer);
    }
    // Only offer this segment to its children for inlining if its gzip size
    // is below maxSize. Segments above this get their own response.
    const sizeToInline = currentGzipSize !== null && currentGzipSize < maxSize ? currentGzipSize : null;
    // Process children serially (not in parallel) to ensure deterministic
    // results. Since this only runs at build time and the rendering is just
    // re-encoding cached prerenders, this won't impact build times. Each child
    // receives our gzip size and decides whether to inline us. Once a child
    // accepts, we stop offering to remaining siblings — the parent is only
    // inlined into one child. In parallel routes, this avoids duplicating the
    // parent's data across multiple sibling responses.
    const children = route[1];
    const seedDataChildren = seedData !== null ? seedData[1] : null;
    let slots = null;
    let didInlineIntoChild = false;
    let acceptingChildInlinedBytes = 0;
    // Track the smallest inlinedBytes across all children so we know how much
    // budget remains along the best path. When our own parent asks whether we
    // can accept its data, the parent's bytes would flow through to the child
    // with the most remaining headroom.
    let smallestChildInlinedBytes = Infinity;
    let hasChildren = false;
    for(const parallelRouteKey in children){
        hasChildren = true;
        const childRoute = children[parallelRouteKey];
        const childSegment = childRoute[0];
        const childSeedData = seedDataChildren !== null ? seedDataChildren[parallelRouteKey] : null;
        const childRequestKey = (0, _segmentvalueencoding.appendSegmentRequestKeyPart)(requestKey, parallelRouteKey, (0, _segmentvalueencoding.createSegmentRequestKeyPart)(childSegment));
        const childResult = await collectPrefetchHintsImpl(childRoute, buildId, staleTime, childSeedData, clientModules, childRequestKey, // Once a child has accepted us, stop offering to remaining siblings.
        didInlineIntoChild ? null : sizeToInline, maxSize, maxBundleSize, headGzipSize, headInlineState);
        if (slots === null) {
            slots = {};
        }
        slots[parallelRouteKey] = childResult.node;
        if (childResult.node.hints & _approutertypes.PrefetchHint.ParentInlinedIntoSelf) {
            // This child accepted our data — it will include our segment's
            // response in its own. No need to track headroom anymore since
            // we already know which child we're inlined into.
            didInlineIntoChild = true;
            acceptingChildInlinedBytes = childResult.inlinedBytes;
        } else if (!didInlineIntoChild) {
            // Track the child with the most remaining headroom. Used below
            // when deciding whether to accept our own parent's data.
            if (childResult.inlinedBytes < smallestChildInlinedBytes) {
                smallestChildInlinedBytes = childResult.inlinedBytes;
            }
        }
    }
    // Leaf segment: no children have consumed any budget yet.
    if (!hasChildren) {
        smallestChildInlinedBytes = 0;
    }
    // Mark this segment as InlinedIntoChild if one of its children accepted.
    // This means this segment doesn't need its own prefetch response — its
    // data is included in the accepting child's response instead.
    let hints = 0;
    if (didInlineIntoChild) {
        hints |= _approutertypes.PrefetchHint.InlinedIntoChild;
    }
    // inlinedBytes represents the total gzipped bytes of parent data inlined
    // into the deepest "inlining target" along this branch. It starts at 0 at
    // the leaves and grows as parents are inlined going back up the tree. If a
    // child accepted us, our size is already counted in that child's value.
    let inlinedBytes = didInlineIntoChild ? acceptingChildInlinedBytes : smallestChildInlinedBytes;
    // At leaf nodes (pages), try to inline the head (metadata/viewport) into
    // this page's response. The head is treated like an additional inlined
    // entry — it counts against the same total budget. Only the first page
    // that has room gets the head; subsequent pages skip via the shared
    // headInlineState accumulator.
    if (!hasChildren && !headInlineState.inlined) {
        if (inlinedBytes + headGzipSize < maxBundleSize) {
            hints |= _approutertypes.PrefetchHint.HeadInlinedIntoSelf;
            inlinedBytes += headGzipSize;
            headInlineState.inlined = true;
        }
    }
    // Decide whether to accept our own parent's data. Two conditions:
    //
    // 1. The parent offered us a size (parentGzipSize is not null). It's null
    //    when the parent is too large to inline or when this is the root.
    //
    // 2. The total inlined bytes along this branch wouldn't exceed the budget.
    //    Even if each segment is individually small, at some point it no
    //    longer makes sense to keep adding bytes because the combined response
    //    is unique per URL and can't be deduped.
    //
    // A node can be both InlinedIntoChild and ParentInlinedIntoSelf. This
    // happens in multi-level chains: GP → P → C where all are small. C
    // accepts P (P is InlinedIntoChild), then P also accepts GP (P is
    // ParentInlinedIntoSelf). The result: C's response includes both P's
    // and GP's data. The parent's data flows through to the deepest
    // accepting descendant.
    if (parentGzipSize !== null) {
        if (inlinedBytes + parentGzipSize < maxBundleSize) {
            hints |= _approutertypes.PrefetchHint.ParentInlinedIntoSelf;
            inlinedBytes += parentGzipSize;
        }
    }
    return {
        node: {
            hints,
            slots
        },
        inlinedBytes
    };
}
// Measures the gzipped size of `buffer`. Gzip size is used instead of raw
// byte length because it better approximates real transfer cost: inlining is
// a trade-off between the overhead of an extra HTTP request (connection
// setup, headers, round trip) and the dedupe value of a standalone cacheable
// response. Below some compressed size the request overhead dominates and
// inlining wins; above it, keeping the segment separate wins.
async function getGzipSize(buffer) {
    const bytes = new Uint8Array(buffer);
    const compressor = new CompressionStream('gzip');
    const compressed = new Blob([bytes]).stream().pipeThrough(compressor);
    const compressedBlob = await new Response(compressed).blob();
    return compressedBlob.size;
}
/**
 * Server component that renders the `/_tree` route-tree prefetch. Decoding
 * the original full-page stream inside a component transfers that stream's
 * side effects (e.g. Float preloads) onto the tree-prefetch stream. While
 * walking the tree it also pushes the per-segment (or single /_inlined)
 * prefetch tasks into the mutable `segmentTasks` array, then signals
 * `onCompletedProcessingRouteTree` so the caller can abort hanging promises.
 */ async function PrefetchTreeData({ isClientParamParsingEnabled, fullPageDataBuffer, serverConsumerManifest, clientModules, staleTime, segmentTasks, onCompletedProcessingRouteTree, prefetchInlining, hints }) {
    // We're currently rendering a Flight response for the route tree prefetch.
    // Inside this component, decode the Flight stream for the whole page. This is
    // a hack to transfer the side effects from the original Flight stream (e.g.
    // Float preloads) onto the Flight stream for the tree prefetch.
    // TODO: React needs a better way to do this. Needed for Server Actions, too.
    const initialRSCPayload = await (0, _client.createFromReadableStream)(createUnclosingPrefetchStream((0, _nodewebstreamshelper.streamFromBuffer)(fullPageDataBuffer)), {
        findSourceMapURL,
        serverConsumerManifest
    });
    const flightData = extractFlightData(initialRSCPayload);
    if (flightData === null) {
        return null;
    }
    const { buildId, flightRouterState, seedData, head } = flightData;
    // Extract the head vary params from the decoded response.
    // The head vary params thenable should be fulfilled by now; if not, treat
    // as unknown (null).
    const headVaryParamsThenable = initialRSCPayload.h;
    const headVaryParams = headVaryParamsThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(headVaryParamsThenable) : null;
    // Compute the route metadata tree by traversing the FlightRouterState. As we
    // walk the tree, we will also spawn a task to produce a prefetch response for
    // each segment (unless prefetch inlining is enabled, in which case all
    // segments are bundled into a single /_inlined response).
    const tree = collectSegmentDataImpl(isClientParamParsingEnabled, flightRouterState, buildId, staleTime, seedData, clientModules, _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY, segmentTasks, prefetchInlining, hints);
    if (prefetchInlining) {
        // When prefetch inlining is enabled, bundle all segment data into a single
        // /_inlined response instead of individual per-segment responses. The head
        // is also included in the inlined response.
        segmentTasks.push((0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderInlinedPrefetchResponse(flightRouterState, buildId, staleTime, seedData, head, headVaryParams, clientModules)));
    } else {
        // Also spawn a task to produce a prefetch response for the "head" segment.
        // The head contains metadata, like the title; it's not really a route
        // segment, but it contains RSC data, so it's treated like a segment by
        // the client cache.
        segmentTasks.push((0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderSegmentPrefetch(buildId, staleTime, head, _segmentvalueencoding.HEAD_REQUEST_KEY, headVaryParams, clientModules)));
    }
    // Notify the abort controller that we're done processing the route tree.
    // Anything async that happens after this point must be due to hanging
    // promises in the original stream.
    onCompletedProcessingRouteTree();
    // Render the route tree to a special `/_tree` segment.
    const treePrefetch = {
        tree,
        staleTime
    };
    if (buildId) {
        treePrefetch.buildId = buildId;
    }
    return treePrefetch;
}
function collectSegmentDataImpl(isClientParamParsingEnabled, route, buildId, staleTime, seedData, clientModules, requestKey, segmentTasks, prefetchInlining, hintTree) {
    // Recursively walk the FlightRouterState, producing the metadata tree
    // that is sent as part of the `/_tree` prefetch. Unless inlining is
    // enabled, a render task is also queued for every segment with seed data.
    const parallelRoutes = route[1];
    const childSeedDataMap = seedData !== null ? seedData[1] : null;
    const hintSlots = hintTree !== null ? hintTree.slots : null;
    // Child metadata keyed by parallel route; stays null when childless.
    let slots = null;
    for(const key in parallelRoutes){
        const childRoute = parallelRoutes[key];
        const childKeyPart = (0, _segmentvalueencoding.createSegmentRequestKeyPart)(childRoute[0]);
        if (slots === null) {
            slots = {};
        }
        slots[key] = collectSegmentDataImpl(isClientParamParsingEnabled, childRoute, buildId, staleTime, childSeedDataMap !== null ? childSeedDataMap[key] : null, clientModules, (0, _segmentvalueencoding.appendSegmentRequestKeyPart)(requestKey, key, childKeyPart), segmentTasks, prefetchInlining, hintSlots !== null ? hintSlots[key] ?? null : null);
    }
    // Union the hints already embedded in the FlightRouterState with the
    // separately-computed build-time hints. During the initial build, the
    // FlightRouterState was produced before collectPrefetchHints ran, so
    // inlining hints won't be in route[4] yet. On subsequent renders the
    // hints are already there, so the union is idempotent.
    const prefetchHints = (route[4] ?? 0) | (hintTree !== null ? hintTree.hints : 0);
    // Read the vary-params thenable from the seed data. It should be settled
    // by now; absent or unresolved decodes to null, meaning "unknown" (cache
    // entries can't be shared across param values).
    const varyThenable = seedData !== null ? seedData[4] : null;
    const varyParams = varyThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(varyThenable) : null;
    // Per-segment responses are only produced when inlining is off; with
    // inlining, everything ships in the single /_inlined response instead.
    // Segments without seed data are still listed in the tree but get no
    // prefetch response of their own.
    // TODO: We should encode in the route tree whether a segment is missing
    // so we don't attempt to fetch it for no reason. As of now this shouldn't
    // ever happen in practice, though.
    if (!prefetchInlining && seedData !== null) {
        // Wait one task first to escape the current rendering context before
        // starting a new Flight render.
        segmentTasks.push((0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>renderSegmentPrefetch(buildId, staleTime, seedData[0], requestKey, varyParams, clientModules)));
    }
    // A string segment is static; otherwise it's a dynamic-param tuple.
    const segment = route[0];
    const isStatic = typeof segment === 'string';
    // Metadata about this segment, sent to the client in the tree prefetch.
    return {
        name: isStatic ? segment : segment[0],
        param: isStatic ? null : {
            type: segment[2],
            // This value is omitted from the prefetch response when
            // cacheComponents is enabled.
            key: isClientParamParsingEnabled ? null : segment[1],
            siblings: segment[3]
        },
        prefetchHints,
        slots
    };
}
async function renderSegmentPrefetch(buildId, staleTime, rsc, requestKey, varyParams, clientModules) {
    // Encode a single segment's data as its own Flight stream and return a
    // [responseKey, buffer] pair. The root segment is published under the
    // special '/_index' key.
    const payload = {
        rsc,
        isPartial: await isPartialRSCData(rsc, clientModules),
        staleTime,
        varyParams
    };
    if (buildId) {
        payload.buildId = buildId;
    }
    // Re-encoding a cached prerender should finish within a microtask; if it
    // takes longer, hanging promises from dynamic data are the cause, so the
    // stream is aborted at the end of the current task.
    const controller = new AbortController();
    (0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>controller.abort());
    const { prelude } = await (0, _static.prerender)(payload, clientModules, {
        filterStackFrame,
        signal: controller.signal,
        onError: onSegmentPrerenderError
    });
    const buffer = await (0, _nodewebstreamshelper.streamToBuffer)(prelude);
    const responseKey = requestKey === _segmentvalueencoding.ROOT_SEGMENT_REQUEST_KEY ? '/_index' : requestKey;
    return [
        responseKey,
        buffer
    ];
}
async function renderInlinedPrefetchResponse(route, buildId, staleTime, seedData, head, headVaryParams, clientModules) {
    // Bundle the data for every segment, plus the head, into one Flight
    // response. Used instead of per-segment responses when prefetch inlining
    // is enabled; published under '/' + PAGE_SEGMENT_KEY.
    const tree = await buildInlinedSegmentPrefetch(route, buildId, staleTime, seedData, clientModules);
    const headPrefetch = {
        rsc: head,
        isPartial: await isPartialRSCData(head, clientModules),
        staleTime,
        varyParams: headVaryParams
    };
    if (buildId) {
        headPrefetch.buildId = buildId;
    }
    // As with per-segment responses, anything that outlives the current task
    // must be a hanging promise from dynamic data, so abort at that point.
    const controller = new AbortController();
    (0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>controller.abort());
    const { prelude } = await (0, _static.prerender)({
        tree,
        head: headPrefetch
    }, clientModules, {
        filterStackFrame,
        signal: controller.signal,
        onError: onSegmentPrerenderError
    });
    return [
        '/' + _segment.PAGE_SEGMENT_KEY,
        await (0, _nodewebstreamshelper.streamToBuffer)(prelude)
    ];
}
async function buildInlinedSegmentPrefetch(route, buildId, staleTime, seedData, clientModules) {
    // Depth-first walk of the route, mirroring its shape as nested
    // { segment, slots } records for the inlined prefetch response.
    const parallelRoutes = route[1];
    const childSeedDataMap = seedData !== null ? seedData[1] : null;
    let slots = null;
    for(const key in parallelRoutes){
        if (slots === null) {
            slots = {};
        }
        slots[key] = await buildInlinedSegmentPrefetch(parallelRoutes[key], buildId, staleTime, childSeedDataMap !== null ? childSeedDataMap[key] : null, clientModules);
    }
    const rsc = seedData !== null ? seedData[0] : null;
    const varyThenable = seedData !== null ? seedData[4] : null;
    const varyParams = varyThenable !== null ? (0, _varyparamsdecoding.readVaryParams)(varyThenable) : null;
    // A segment with no seed data is marked partial: there is no static data
    // to serve for it.
    const segment = {
        rsc,
        isPartial: rsc === null ? true : await isPartialRSCData(rsc, clientModules),
        staleTime,
        varyParams
    };
    if (buildId) {
        segment.buildId = buildId;
    }
    return {
        segment,
        slots
    };
}
async function isPartialRSCData(rsc, clientModules) {
    // Detect whether a segment contains dynamic holes. Dynamic data is
    // encoded as promises that never resolve, so a fully static segment
    // encodes within the current task while a partial one is still pending
    // when the next React render task runs. This must be a separate Flight
    // prerender from the one that generates the real prefetch stream, because
    // `isPartial` needs to be embedded in that stream.
    const controller = new AbortController();
    let timedOut = false;
    (0, _scheduler.waitAtLeastOneReactRenderTask)().then(()=>{
        // Still encoding after a full task: dynamic data must have been
        // reached, so record that and cut the render short.
        timedOut = true;
        controller.abort();
    });
    await (0, _static.prerender)(rsc, clientModules, {
        filterStackFrame,
        signal: controller.signal,
        onError () {}
    });
    return timedOut;
}
function createUnclosingPrefetchStream(originalFlightStream) {
    // PPR prefetch streams encode dynamic data as references that never
    // resolve; the Flight client reifies those into hanging promises that
    // suspend during render, which is the desired behavior (the UI resolves
    // once it switches to the dynamic stream via
    // useDeferredValue(dynamic, static)).
    //
    // However, Flight currently errors if the server closes the response
    // while references are still unresolved. As a workaround, forward every
    // chunk into a wrapper stream that is deliberately never closed, so that
    // error can't be surfaced.
    const reader = originalFlightStream.getReader();
    return new ReadableStream({
        async pull (controller) {
            for(;;){
                const result = await reader.read();
                if (result.done) {
                    // Source finished; leave the wrapper open on purpose.
                    return;
                }
                // Forward the chunk and keep draining the source.
                controller.enqueue(result.value);
            }
        }
    });
}
//# sourceMappingURL=collect-segment-data.js.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,2 @@
import type { ConsoleAsyncStorage } from './console-async-storage.external';
/** Shared AsyncLocalStorage instance backing the console store. */
export declare const consoleAsyncStorageInstance: ConsoleAsyncStorage;
@@ -0,0 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "consoleAsyncStorageInstance", {
enumerable: true,
get: function() {
return consoleAsyncStorageInstance;
}
});
const _asynclocalstorage = require("./async-local-storage");
const consoleAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();
//# sourceMappingURL=console-async-storage-instance.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/console-async-storage-instance.ts"],"sourcesContent":["import { createAsyncLocalStorage } from './async-local-storage'\nimport type { ConsoleAsyncStorage } from './console-async-storage.external'\n\nexport const consoleAsyncStorageInstance: ConsoleAsyncStorage =\n createAsyncLocalStorage()\n"],"names":["consoleAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAH2B;AAGjC,MAAMA,8BACXC,IAAAA,0CAAuB","ignoreList":[0]}
@@ -0,0 +1,12 @@
import type { AsyncLocalStorage } from 'async_hooks';
import { consoleAsyncStorageInstance } from './console-async-storage-instance';
export interface ConsoleStore {
    /**
     * if true the color of logs output will be dimmed to indicate the log is
     * from a repeat or validation render that is not typically relevant to
     * the primary action the server is taking.
     */
    readonly dim: boolean;
}
/** AsyncLocalStorage carrying the ConsoleStore for the current async context. */
export type ConsoleAsyncStorage = AsyncLocalStorage<ConsoleStore>;
// Re-export the shared instance under its public name.
export { consoleAsyncStorageInstance as consoleAsyncStorage };
@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "consoleAsyncStorage", {
enumerable: true,
get: function() {
return _consoleasyncstorageinstance.consoleAsyncStorageInstance;
}
});
const _consoleasyncstorageinstance = require("./console-async-storage-instance");
//# sourceMappingURL=console-async-storage.external.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/console-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { consoleAsyncStorageInstance } from './console-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\n\nexport interface ConsoleStore {\n /**\n * if true the color of logs output will be dimmed to indicate the log is\n * from a repeat or validation render that is not typically relevant to\n * the primary action the server is taking.\n */\n readonly dim: boolean\n}\n\nexport type ConsoleAsyncStorage = AsyncLocalStorage<ConsoleStore>\n\nexport { consoleAsyncStorageInstance as consoleAsyncStorage }\n"],"names":["consoleAsyncStorage","consoleAsyncStorageInstance"],"mappings":";;;;+BAgBwCA;;;eAA/BC,wDAA2B;;;6CAbQ","ignoreList":[0]}
@@ -0,0 +1,8 @@
import type { AppRenderContext } from './app-render';
/**
 * Resolves a segment module and gathers its associated CSS/JS assets.
 *
 * @returns a tuple of [Component, style nodes, script nodes]; per the
 * implementation, the script entry is null when the segment has no scripts.
 */
export declare function createComponentStylesAndScripts({ filePath, getComponent, injectedCSS, injectedJS, ctx, }: {
    filePath: string;
    getComponent: () => any;
    injectedCSS: Set<string>;
    injectedJS: Set<string>;
    ctx: AppRenderContext;
}): Promise<[React.ComponentType<any>, React.ReactNode, React.ReactNode]>;
@@ -0,0 +1,33 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "createComponentStylesAndScripts", {
enumerable: true,
get: function() {
return createComponentStylesAndScripts;
}
});
const _interopdefault = require("./interop-default");
const _getcssinlinedlinktags = require("./get-css-inlined-link-tags");
const _getassetquerystring = require("./get-asset-query-string");
const _encodeuripath = require("../../shared/lib/encode-uri-path");
const _rendercssresource = require("./render-css-resource");
// Loads a segment module and gathers its CSS/JS assets, returning
// [Component, styleNodes, scriptNodes]. Compiled output; the source lives in
// create-component-styles-and-scripts.tsx.
async function createComponentStylesAndScripts({ filePath, getComponent, injectedCSS, injectedJS, ctx }) {
    const { componentMod: { createElement } } = ctx;
    // Assets not yet injected by an ancestor segment, keyed off this file path.
    const { styles: entryCssFiles, scripts: jsHrefs } = (0, _getcssinlinedlinktags.getLinkAndScriptTags)(filePath, injectedCSS, injectedJS);
    const styles = (0, _rendercssresource.renderCssResource)(entryCssFiles, ctx);
    // One async <script> element per JS href; null when there are no scripts.
    const scripts = jsHrefs ? jsHrefs.map((href, index)=>createElement('script', {
        src: `${ctx.assetPrefix}/_next/${(0, _encodeuripath.encodeURIPath)(href)}${(0, _getassetquerystring.getAssetQueryString)(ctx, true)}`,
        async: true,
        key: `script-${index}`
    })) : null;
    // Unwrap the module's default export (interop for CJS/ESM shapes).
    const Comp = (0, _interopdefault.interopDefault)(await getComponent());
    return [
        Comp,
        styles,
        scripts
    ];
}
//# sourceMappingURL=create-component-styles-and-scripts.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/create-component-styles-and-scripts.tsx"],"sourcesContent":["import { interopDefault } from './interop-default'\nimport { getLinkAndScriptTags } from './get-css-inlined-link-tags'\nimport type { AppRenderContext } from './app-render'\nimport { getAssetQueryString } from './get-asset-query-string'\nimport { encodeURIPath } from '../../shared/lib/encode-uri-path'\nimport { renderCssResource } from './render-css-resource'\n\nexport async function createComponentStylesAndScripts({\n filePath,\n getComponent,\n injectedCSS,\n injectedJS,\n ctx,\n}: {\n filePath: string\n getComponent: () => any\n injectedCSS: Set<string>\n injectedJS: Set<string>\n ctx: AppRenderContext\n}): Promise<[React.ComponentType<any>, React.ReactNode, React.ReactNode]> {\n const {\n componentMod: { createElement },\n } = ctx\n const { styles: entryCssFiles, scripts: jsHrefs } = getLinkAndScriptTags(\n filePath,\n injectedCSS,\n injectedJS\n )\n\n const styles = renderCssResource(entryCssFiles, ctx)\n\n const scripts = jsHrefs\n ? 
jsHrefs.map((href, index) =>\n createElement('script', {\n src: `${ctx.assetPrefix}/_next/${encodeURIPath(href)}${getAssetQueryString(ctx, true)}`,\n async: true,\n key: `script-${index}`,\n })\n )\n : null\n\n const Comp = interopDefault(await getComponent())\n\n return [Comp, styles, scripts]\n}\n"],"names":["createComponentStylesAndScripts","filePath","getComponent","injectedCSS","injectedJS","ctx","componentMod","createElement","styles","entryCssFiles","scripts","jsHrefs","getLinkAndScriptTags","renderCssResource","map","href","index","src","assetPrefix","encodeURIPath","getAssetQueryString","async","key","Comp","interopDefault"],"mappings":";;;;+BAOsBA;;;eAAAA;;;gCAPS;uCACM;qCAED;+BACN;mCACI;AAE3B,eAAeA,gCAAgC,EACpDC,QAAQ,EACRC,YAAY,EACZC,WAAW,EACXC,UAAU,EACVC,GAAG,EAOJ;IACC,MAAM,EACJC,cAAc,EAAEC,aAAa,EAAE,EAChC,GAAGF;IACJ,MAAM,EAAEG,QAAQC,aAAa,EAAEC,SAASC,OAAO,EAAE,GAAGC,IAAAA,2CAAoB,EACtEX,UACAE,aACAC;IAGF,MAAMI,SAASK,IAAAA,oCAAiB,EAACJ,eAAeJ;IAEhD,MAAMK,UAAUC,UACZA,QAAQG,GAAG,CAAC,CAACC,MAAMC,QACjBT,cAAc,UAAU;YACtBU,KAAK,GAAGZ,IAAIa,WAAW,CAAC,OAAO,EAAEC,IAAAA,4BAAa,EAACJ,QAAQK,IAAAA,wCAAmB,EAACf,KAAK,OAAO;YACvFgB,OAAO;YACPC,KAAK,CAAC,OAAO,EAAEN,OAAO;QACxB,MAEF;IAEJ,MAAMO,OAAOC,IAAAA,8BAAc,EAAC,MAAMtB;IAElC,OAAO;QAACqB;QAAMf;QAAQE;KAAQ;AAChC","ignoreList":[0]}
+25
View File
@@ -0,0 +1,25 @@
import type { ComponentType } from 'react';
import type { CacheNodeSeedData } from '../../shared/lib/app-router-types';
import type { PreloadCallbacks } from './types';
import type { LoaderTree } from '../lib/app-dir-module';
import type { AppRenderContext, GetDynamicParamFromSegment } from './app-render';
import type { Params } from '../request/params';
/**
 * Use the provided loader tree to create the React Component tree.
 */
export declare function createComponentTree(props: {
    loaderTree: LoaderTree;
    parentParams: Params;
    parentOptionalCatchAllParamName: string | null;
    parentRuntimePrefetchable: false;
    rootLayoutIncluded: boolean;
    injectedCSS: Set<string>;
    injectedJS: Set<string>;
    injectedFontPreloadTags: Set<string>;
    ctx: AppRenderContext;
    missingSlots?: Set<string>;
    preloadCallbacks: PreloadCallbacks;
    authInterrupts: boolean;
    MetadataOutlet: ComponentType;
}): Promise<CacheNodeSeedData>;
/**
 * NOTE(review): implementation not visible here — the name and signature
 * suggest it resolves params contributed by root segments of the loader tree;
 * confirm against create-component-tree.tsx.
 */
export declare function getRootParams(loaderTree: LoaderTree, getDynamicParamFromSegment: GetDynamicParamFromSegment): Params;
+840
View File
@@ -0,0 +1,840 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
createComponentTree: null,
getRootParams: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
// Wire up the live-binding getters for this module's named exports.
_export(exports, {
    createComponentTree: function() {
        return createComponentTree;
    },
    getRootParams: function() {
        return getRootParams;
    }
});
const _clientandserverreferences = require("../../lib/client-and-server-references");
const _appdirmodule = require("../lib/app-dir-module");
const _interopdefault = require("./interop-default");
const _parseloadertree = require("../../shared/lib/router/utils/parse-loader-tree");
const _createcomponentstylesandscripts = require("./create-component-styles-and-scripts");
const _getlayerassets = require("./get-layer-assets");
const _hasloadingcomponentintree = require("./has-loading-component-in-tree");
const _patchfetch = require("../lib/patch-fetch");
const _default = require("../../client/components/builtin/default");
const _tracer = require("../lib/trace/tracer");
const _constants = require("../lib/trace/constants");
const _staticgenerationbailout = require("../../client/components/static-generation-bailout");
const _workunitasyncstorageexternal = require("./work-unit-async-storage.external");
const _varyparams = require("./vary-params");
const _segment = require("../../shared/lib/segment");
const _segmentexplorerpath = require("./segment-explorer-path");
const _stagedrendering = require("./staged-rendering");
function createComponentTree(props) {
    // Public entry point: wraps the recursive tree construction in a tracer
    // span so the work appears as 'build component tree' in traces.
    return (0, _tracer.getTracer)().trace(_constants.NextNodeServerSpan.createComponentTree, {
        spanName: 'build component tree'
    }, ()=>createComponentTreeInternal(props, true));
}
function errorMissingDefaultExport(pagePath, convention) {
const normalizedPagePath = pagePath === '/' ? '' : pagePath;
throw Object.defineProperty(new Error(`The default export is not a React Component in "${normalizedPagePath}/${convention}"`), "__NEXT_ERROR_CODE", {
value: "E45",
enumerable: false,
configurable: true
});
}
const cacheNodeKey = 'c';
async function createComponentTreeInternal({ loaderTree: tree, parentParams, parentOptionalCatchAllParamName, parentRuntimePrefetchable, rootLayoutIncluded, injectedCSS, injectedJS, injectedFontPreloadTags, ctx, missingSlots, preloadCallbacks, authInterrupts, MetadataOutlet }, isRoot) {
const { renderOpts: { nextConfigOutput, experimental, cacheComponents }, workStore, componentMod: { createElement, Fragment, SegmentViewNode, HTTPAccessFallbackBoundary, LayoutRouter, RenderFromTemplateContext, ClientPageRoot, ClientSegmentRoot, createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage, createServerParamsForServerSegment, createPrerenderParamsForClientSegment, serverHooks: { DynamicServerError }, Postpone }, pagePath, getDynamicParamFromSegment, isPrefetch, query } = ctx;
const { page, conventionPath, segment, modules, parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(tree);
const { layout, template, error, loading, 'not-found': notFound, forbidden, unauthorized } = modules;
const injectedCSSWithCurrentLayout = new Set(injectedCSS);
const injectedJSWithCurrentLayout = new Set(injectedJS);
const injectedFontPreloadTagsWithCurrentLayout = new Set(injectedFontPreloadTags);
const layerAssets = (0, _getlayerassets.getLayerAssets)({
preloadCallbacks,
ctx,
layoutOrPagePath: conventionPath,
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout,
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout
});
const [Template, templateStyles, templateScripts] = template ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: template[1],
getComponent: template[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [
Fragment
];
const [ErrorComponent, errorStyles, errorScripts] = error ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: error[1],
getComponent: error[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const [Loading, loadingStyles, loadingScripts] = loading ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: loading[1],
getComponent: loading[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const isLayout = typeof layout !== 'undefined';
const isPage = typeof page !== 'undefined';
const { mod: layoutOrPageMod, modType } = await (0, _tracer.getTracer)().trace(_constants.NextNodeServerSpan.getLayoutOrPageModule, {
hideSpan: !(isLayout || isPage),
spanName: 'resolve segment modules',
attributes: {
'next.segment': segment
}
}, ()=>(0, _appdirmodule.getLayoutOrPageModule)(tree));
/**
* Checks if the current segment is a root layout.
*/ const rootLayoutAtThisLevel = isLayout && !rootLayoutIncluded;
/**
* Checks if the current segment or any level above it has a root layout.
*/ const rootLayoutIncludedAtThisLevelOrAbove = rootLayoutIncluded || rootLayoutAtThisLevel;
const [NotFound, notFoundStyles] = notFound ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: notFound[1],
getComponent: notFound[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const instantConfig = layoutOrPageMod ? layoutOrPageMod.unstable_instant : undefined;
const hasRuntimePrefetch = instantConfig && typeof instantConfig === 'object' ? instantConfig.prefetch === 'runtime' : false;
const isRuntimePrefetchable = hasRuntimePrefetch || parentRuntimePrefetchable;
const [Forbidden, forbiddenStyles] = authInterrupts && forbidden ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: forbidden[1],
getComponent: forbidden[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
const [Unauthorized, unauthorizedStyles] = authInterrupts && unauthorized ? await (0, _createcomponentstylesandscripts.createComponentStylesAndScripts)({
ctx,
filePath: unauthorized[1],
getComponent: unauthorized[0],
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout
}) : [];
let dynamic = layoutOrPageMod == null ? void 0 : layoutOrPageMod.dynamic;
if (nextConfigOutput === 'export') {
if (!dynamic || dynamic === 'auto') {
dynamic = 'error';
} else if (dynamic === 'force-dynamic') {
// force-dynamic is always incompatible with 'export'. We must interrupt the build
throw Object.defineProperty(new _staticgenerationbailout.StaticGenBailoutError(`Page with \`dynamic = "force-dynamic"\` couldn't be exported. \`output: "export"\` requires all pages be renderable statically because there is no runtime server to dynamically render routes in this output format. Learn more: https://nextjs.org/docs/app/building-your-application/deploying/static-exports`), "__NEXT_ERROR_CODE", {
value: "E527",
enumerable: false,
configurable: true
});
}
}
if (typeof dynamic === 'string') {
// the nested most config wins so we only force-static
// if it's configured above any parent that configured
// otherwise
if (dynamic === 'error') {
workStore.dynamicShouldError = true;
} else if (dynamic === 'force-dynamic') {
workStore.forceDynamic = true;
// TODO: (PPR) remove this bailout once PPR is the default
if (workStore.isStaticGeneration && !experimental.isRoutePPREnabled) {
// If the postpone API isn't available, we can't postpone the render and
// therefore we can't use the dynamic API.
const err = Object.defineProperty(new DynamicServerError(`Page with \`dynamic = "force-dynamic"\` won't be rendered statically.`), "__NEXT_ERROR_CODE", {
value: "E585",
enumerable: false,
configurable: true
});
workStore.dynamicUsageDescription = err.message;
workStore.dynamicUsageStack = err.stack;
throw err;
}
} else {
workStore.dynamicShouldError = false;
workStore.forceStatic = dynamic === 'force-static';
}
}
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache) === 'string') {
workStore.fetchCache = layoutOrPageMod == null ? void 0 : layoutOrPageMod.fetchCache;
}
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) !== 'undefined') {
(0, _patchfetch.validateRevalidate)(layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate, workStore.route);
}
if (typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.revalidate) === 'number') {
const defaultRevalidate = layoutOrPageMod.revalidate;
const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
if (workUnitStore) {
switch(workUnitStore.type){
case 'prerender':
case 'prerender-runtime':
case 'prerender-legacy':
case 'prerender-ppr':
if (workUnitStore.revalidate > defaultRevalidate) {
workUnitStore.revalidate = defaultRevalidate;
}
break;
case 'request':
break;
// createComponentTree is not called for these stores:
case 'cache':
case 'private-cache':
case 'prerender-client':
case 'validation-client':
case 'unstable-cache':
case 'generate-static-params':
break;
default:
workUnitStore;
}
}
if (!workStore.forceStatic && workStore.isStaticGeneration && defaultRevalidate === 0 && // If the postpone API isn't available, we can't postpone the render and
// therefore we can't use the dynamic API.
!experimental.isRoutePPREnabled) {
const dynamicUsageDescription = `revalidate: 0 configured ${segment}`;
workStore.dynamicUsageDescription = dynamicUsageDescription;
throw Object.defineProperty(new DynamicServerError(dynamicUsageDescription), "__NEXT_ERROR_CODE", {
value: "E1005",
enumerable: false,
configurable: true
});
}
}
// Read unstable_dynamicStaleTime from page modules (not layouts) and track it on
// the store's stale field. This affects the segment cache stale time via
// the StaleTimeIterable.
if (isPage && typeof (layoutOrPageMod == null ? void 0 : layoutOrPageMod.unstable_dynamicStaleTime) === 'number') {
const pageStaleTime = layoutOrPageMod.unstable_dynamicStaleTime;
const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
if (workUnitStore) {
switch(workUnitStore.type){
case 'prerender':
case 'prerender-runtime':
case 'prerender-legacy':
case 'prerender-ppr':
if (workUnitStore.stale > pageStaleTime) {
workUnitStore.stale = pageStaleTime;
}
break;
case 'request':
if (workUnitStore.stale === undefined || workUnitStore.stale > pageStaleTime) {
workUnitStore.stale = pageStaleTime;
}
break;
// createComponentTree is not called for these stores:
case 'cache':
case 'private-cache':
case 'prerender-client':
case 'validation-client':
case 'unstable-cache':
case 'generate-static-params':
break;
default:
workUnitStore;
}
}
}
const isStaticGeneration = workStore.isStaticGeneration;
// Assume the segment we're rendering contains only partial data if PPR is
// enabled and this is a statically generated response. This is used by the
// client Segment Cache after a prefetch to determine if it can skip the
// second request to fill in the dynamic data.
//
// It's OK for this to be `true` when the data is actually fully static, but
// it's not OK for this to be `false` when the data possibly contains holes.
// Although the value here is overly pessimistic, for prefetches, it will be
// replaced by a more specific value when the data is later processed into
// per-segment responses (see collect-segment-data.tsx)
//
// For dynamic requests, this must always be `false` because dynamic responses
// are never partial.
const isPossiblyPartialResponse = isStaticGeneration && experimental.isRoutePPREnabled === true;
const LayoutOrPage = layoutOrPageMod ? (0, _interopdefault.interopDefault)(layoutOrPageMod) : undefined;
/**
* The React Component to render.
*/ let MaybeComponent = LayoutOrPage;
if (process.env.NODE_ENV === 'development' || isStaticGeneration) {
const { isValidElementType } = require('next/dist/compiled/react-is');
if (typeof MaybeComponent !== 'undefined' && !isValidElementType(MaybeComponent)) {
errorMissingDefaultExport(pagePath, modType ?? 'page');
}
if (typeof ErrorComponent !== 'undefined' && !isValidElementType(ErrorComponent)) {
errorMissingDefaultExport(pagePath, 'error');
}
if (typeof Loading !== 'undefined' && !isValidElementType(Loading)) {
errorMissingDefaultExport(pagePath, 'loading');
}
if (typeof NotFound !== 'undefined' && !isValidElementType(NotFound)) {
errorMissingDefaultExport(pagePath, 'not-found');
}
if (typeof Forbidden !== 'undefined' && !isValidElementType(Forbidden)) {
errorMissingDefaultExport(pagePath, 'forbidden');
}
if (typeof Unauthorized !== 'undefined' && !isValidElementType(Unauthorized)) {
errorMissingDefaultExport(pagePath, 'unauthorized');
}
}
// Handle dynamic segment params.
const segmentParam = getDynamicParamFromSegment(tree);
// Create object holding the parent params and current params
let currentParams = parentParams;
if (segmentParam && segmentParam.value !== null) {
currentParams = {
...parentParams,
[segmentParam.param]: segmentParam.value
};
}
// Track optional catch-all params with no value (e.g., [[...slug]] at /).
// These params won't exist as properties on the params object, so vary
// params tracking needs to use a Proxy to detect access. We propagate this
// through the tree so that child segments (like __PAGE__) also know about
// the missing param. In practice, this only gets passed down one level —
// from the optional catch-all layout segment to the page segment — so it's
// always very close to the leaf of the tree.
const optionalCatchAllParamName = (segmentParam == null ? void 0 : segmentParam.type) === 'oc' && segmentParam.value === null ? segmentParam.param : parentOptionalCatchAllParamName;
// Resolve the segment param
const isSegmentViewEnabled = !!process.env.__NEXT_DEV_SERVER;
const dir = (process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir) || '';
const [notFoundElement, notFoundFilePath] = await createBoundaryConventionElement({
ctx,
conventionName: 'not-found',
Component: NotFound,
styles: notFoundStyles,
tree
});
const [forbiddenElement] = await createBoundaryConventionElement({
ctx,
conventionName: 'forbidden',
Component: Forbidden,
styles: forbiddenStyles,
tree
});
const [unauthorizedElement] = await createBoundaryConventionElement({
ctx,
conventionName: 'unauthorized',
Component: Unauthorized,
styles: unauthorizedStyles,
tree
});
// TODO: Combine this `map` traversal with the loop below that turns the array
// into an object.
const parallelRouteMap = await Promise.all(Object.keys(parallelRoutes).map(async (parallelRouteKey)=>{
const isChildrenRouteKey = parallelRouteKey === 'children';
const parallelRoute = parallelRoutes[parallelRouteKey];
const notFoundComponent = isChildrenRouteKey ? notFoundElement : undefined;
const forbiddenComponent = isChildrenRouteKey ? forbiddenElement : undefined;
const unauthorizedComponent = isChildrenRouteKey ? unauthorizedElement : undefined;
// if we're prefetching and that there's a Loading component, we bail out
// otherwise we keep rendering for the prefetch.
// We also want to bail out if there's no Loading component in the tree.
let childCacheNodeSeedData = null;
if (// Before PPR, the way instant navigations work in Next.js is we
// prefetch everything up to the first route segment that defines a
// loading.tsx boundary. (We do the same if there's no loading
// boundary in the entire tree, because we don't want to prefetch too
// much) The rest of the tree is deferred until the actual navigation.
// It does not take into account whether the data is dynamic — even if
// the tree is completely static, it will still defer everything
// inside the loading boundary.
//
// This behavior predates PPR and is only relevant if the
// PPR flag is not enabled.
isPrefetch && (Loading || !(0, _hasloadingcomponentintree.hasLoadingComponentInTree)(parallelRoute)) && // The approach with PPR is different — loading.tsx behaves like a
// regular Suspense boundary and has no special behavior.
//
// With PPR, we prefetch as deeply as possible, and only defer when
// dynamic data is accessed. If so, we only defer the nearest parent
// Suspense boundary of the dynamic data access, regardless of whether
// the boundary is defined by loading.tsx or a normal <Suspense>
// component in userspace.
//
// NOTE: In practice this usually means we'll end up prefetching more
// than we were before PPR, which may or may not be considered a
// performance regression by some apps. The plan is to address this
// before General Availability of PPR by introducing granular
// per-segment fetching, so we can reuse as much of the tree as
// possible during both prefetches and dynamic navigations. But during
// the beta period, we should be clear about this trade off in our
// communications.
!experimental.isRoutePPREnabled) {
// Don't prefetch this child. This will trigger a lazy fetch by the
// client router.
} else {
// Create the child component
if (process.env.NODE_ENV === 'development' && missingSlots) {
var _parsedTree_conventionPath;
// When we detect the default fallback (which triggers a 404), we collect the missing slots
// to provide more helpful debug information during development mode.
const parsedTree = (0, _parseloadertree.parseLoaderTree)(parallelRoute);
if ((_parsedTree_conventionPath = parsedTree.conventionPath) == null ? void 0 : _parsedTree_conventionPath.endsWith(_default.PARALLEL_ROUTE_DEFAULT_PATH)) {
missingSlots.add(parallelRouteKey);
}
}
const seedData = await createComponentTreeInternal({
loaderTree: parallelRoute,
parentParams: currentParams,
parentOptionalCatchAllParamName: optionalCatchAllParamName,
parentRuntimePrefetchable: isRuntimePrefetchable,
rootLayoutIncluded: rootLayoutIncludedAtThisLevelOrAbove,
injectedCSS: injectedCSSWithCurrentLayout,
injectedJS: injectedJSWithCurrentLayout,
injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout,
ctx,
missingSlots,
preloadCallbacks,
authInterrupts,
// `StreamingMetadataOutlet` is used to conditionally throw. In the case of parallel routes we will have more than one page
// but we only want to throw on the first one.
MetadataOutlet: isChildrenRouteKey ? MetadataOutlet : null
}, false);
childCacheNodeSeedData = seedData;
}
const templateNode = createElement(Template, null, createElement(RenderFromTemplateContext, null));
const templateFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'template');
const errorFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'error');
const loadingFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'loading');
const globalErrorFilePath = isRoot ? (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'global-error') : undefined;
const wrappedErrorStyles = isSegmentViewEnabled && errorFilePath ? createElement(SegmentViewNode, {
type: 'error',
pagePath: errorFilePath
}, errorStyles) : errorStyles;
// Add a suffix to avoid conflict with the segment view node representing rendered file.
// existence: not-found.tsx@boundary
// rendered: not-found.tsx
const fileNameSuffix = _segmentexplorerpath.BOUNDARY_SUFFIX;
const segmentViewBoundaries = isSegmentViewEnabled ? createElement(Fragment, null, notFoundFilePath && createElement(SegmentViewNode, {
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}not-found`,
pagePath: notFoundFilePath + fileNameSuffix
}), loadingFilePath && createElement(SegmentViewNode, {
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}loading`,
pagePath: loadingFilePath + fileNameSuffix
}), errorFilePath && createElement(SegmentViewNode, {
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}error`,
pagePath: errorFilePath + fileNameSuffix
}), globalErrorFilePath && createElement(SegmentViewNode, {
type: `${_segmentexplorerpath.BOUNDARY_PREFIX}global-error`,
pagePath: (0, _segmentexplorerpath.isNextjsBuiltinFilePath)(globalErrorFilePath) ? `${_segmentexplorerpath.BUILTIN_PREFIX}global-error.js${fileNameSuffix}` : globalErrorFilePath
})) : null;
return [
parallelRouteKey,
createElement(LayoutRouter, {
parallelRouterKey: parallelRouteKey,
error: ErrorComponent,
errorStyles: wrappedErrorStyles,
errorScripts: errorScripts,
template: isSegmentViewEnabled && templateFilePath ? createElement(SegmentViewNode, {
type: 'template',
pagePath: templateFilePath
}, templateNode) : templateNode,
templateStyles: templateStyles,
templateScripts: templateScripts,
notFound: notFoundComponent,
forbidden: forbiddenComponent,
unauthorized: unauthorizedComponent,
...isSegmentViewEnabled && {
segmentViewBoundaries
}
}),
childCacheNodeSeedData
];
}));
// Convert the parallel route map into an object after all promises have been resolved.
let parallelRouteProps = {};
let parallelRouteCacheNodeSeedData = {};
for (const parallelRoute of parallelRouteMap){
const [parallelRouteKey, parallelRouteProp, flightData] = parallelRoute;
parallelRouteProps[parallelRouteKey] = parallelRouteProp;
parallelRouteCacheNodeSeedData[parallelRouteKey] = flightData;
}
let loadingElement = Loading ? createElement(Loading, {
key: 'l'
}) : null;
const loadingFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'loading');
if (isSegmentViewEnabled && loadingElement) {
if (loadingFilePath) {
loadingElement = createElement(SegmentViewNode, {
key: cacheNodeKey + '-loading',
type: 'loading',
pagePath: loadingFilePath
}, loadingElement);
}
}
const loadingData = loadingElement ? [
loadingElement,
loadingStyles,
loadingScripts
] : null;
// When the segment does not have a layout or page we still have to add the layout router to ensure the path holds the loading component
if (!MaybeComponent) {
return createSeedData(ctx, createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, parallelRouteProps.children), parallelRouteCacheNodeSeedData, loadingData, isPossiblyPartialResponse, isRuntimePrefetchable, // No user-provided component, so no params will be accessed. Use the
// pre-resolved empty tracker.
_varyparams.emptyVaryParamsAccumulator);
}
const Component = MaybeComponent;
// If force-dynamic is used and the current render supports postponing, we
// replace it with a node that will postpone the render. This ensures that the
// postpone is invoked during the react render phase and not during the next
// render phase.
// @TODO this does not actually do what it seems like it would or should do. The idea is that
// if we are rendering in a force-dynamic mode and we can postpone we should only make the segments
// that ask for force-dynamic to be dynamic, allowing other segments to still prerender. However
// because this comes after the children traversal and the static generation store is mutated every segment
// along the parent path of a force-dynamic segment will hit this condition effectively making the entire
// render force-dynamic. We should refactor this function so that we can correctly track which segments
// need to be dynamic
if (workStore.isStaticGeneration && workStore.forceDynamic && experimental.isRoutePPREnabled) {
return createSeedData(ctx, createElement(Fragment, {
key: cacheNodeKey
}, createElement(Postpone, {
reason: 'dynamic = "force-dynamic" was used',
route: workStore.route
}), layerAssets), parallelRouteCacheNodeSeedData, loadingData, true, isRuntimePrefetchable, // force-dynamic postpones without rendering the component, so no params
// are accessed. The vary params are empty.
_varyparams.emptyVaryParamsAccumulator);
}
const isClientComponent = (0, _clientandserverreferences.isClientReference)(layoutOrPageMod);
const varyParamsAccumulator = isClientComponent && cacheComponents ? // from the server, so they have an empty vary params set.
_varyparams.emptyVaryParamsAccumulator : (0, _varyparams.createVaryParamsAccumulator)();
if (process.env.NODE_ENV === 'development' && 'params' in parallelRouteProps) {
// @TODO consider making this an error and running the check in build as well
console.error(`"params" is a reserved prop in Layouts and Pages and cannot be used as the name of a parallel route in ${segment}`);
}
if (isPage) {
const PageComponent = Component;
// Assign searchParams to props if this is a page
let pageElement;
if (isClientComponent) {
if (cacheComponents) {
// Params are omitted when Cache Components is enabled
pageElement = createElement(ClientPageRoot, {
Component: PageComponent,
serverProvidedParams: null
});
} else if (isStaticGeneration) {
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
const promiseOfSearchParams = createPrerenderSearchParamsForClientPage();
pageElement = createElement(ClientPageRoot, {
Component: PageComponent,
serverProvidedParams: {
searchParams: query,
params: currentParams,
promises: [
promiseOfSearchParams,
promiseOfParams
]
}
});
} else {
pageElement = createElement(ClientPageRoot, {
Component: PageComponent,
serverProvidedParams: {
searchParams: query,
params: currentParams,
promises: null
}
});
}
} else {
// If we are passing params to a server component Page we need to track
// their usage in case the current render mode tracks dynamic API usage.
const params = createServerParamsForServerSegment(currentParams, optionalCatchAllParamName, varyParamsAccumulator, isRuntimePrefetchable);
// If we are passing searchParams to a server component Page we need to
// track their usage in case the current render mode tracks dynamic API
// usage.
let searchParams = createServerSearchParamsForServerPage(query, varyParamsAccumulator, isRuntimePrefetchable);
if ((0, _clientandserverreferences.isUseCacheFunction)(PageComponent)) {
const UseCachePageComponent = PageComponent;
pageElement = createElement(UseCachePageComponent, {
params: params,
searchParams: searchParams,
$$isPage: true
});
} else {
pageElement = createElement(PageComponent, {
params: params,
searchParams: searchParams
});
}
}
const isDefaultSegment = segment === _segment.DEFAULT_SEGMENT_KEY;
const pageFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'page') ?? (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'defaultPage');
const segmentType = isDefaultSegment ? 'default' : 'page';
const wrappedPageElement = isSegmentViewEnabled && pageFilePath ? createElement(SegmentViewNode, {
key: cacheNodeKey + '-' + segmentType,
type: segmentType,
pagePath: pageFilePath
}, pageElement) : pageElement;
return createSeedData(ctx, createElement(Fragment, {
key: cacheNodeKey
}, wrappedPageElement, layerAssets, MetadataOutlet ? createElement(MetadataOutlet, null) : null), parallelRouteCacheNodeSeedData, loadingData, isPossiblyPartialResponse, isRuntimePrefetchable, varyParamsAccumulator);
} else {
const SegmentComponent = Component;
const isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot = rootLayoutAtThisLevel && 'children' in parallelRoutes && Object.keys(parallelRoutes).length > 1;
let segmentNode;
if (isClientComponent) {
let clientSegment;
if (cacheComponents) {
// Params are omitted when Cache Components is enabled
clientSegment = createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: parallelRouteProps,
serverProvidedParams: null
});
} else if (isStaticGeneration) {
const promiseOfParams = createPrerenderParamsForClientSegment(currentParams);
clientSegment = createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: parallelRouteProps,
serverProvidedParams: {
params: currentParams,
promises: [
promiseOfParams
]
}
});
} else {
clientSegment = createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: parallelRouteProps,
serverProvidedParams: {
params: currentParams,
promises: null
}
});
}
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
let notfoundClientSegment;
let forbiddenClientSegment;
let unauthorizedClientSegment;
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
// rely on the `NotFound` behavior.
notfoundClientSegment = createErrorBoundaryClientSegmentRoot({
ctx,
ErrorBoundaryComponent: NotFound,
errorElement: notFoundElement,
ClientSegmentRoot,
layerAssets,
SegmentComponent,
currentParams
});
forbiddenClientSegment = createErrorBoundaryClientSegmentRoot({
ctx,
ErrorBoundaryComponent: Forbidden,
errorElement: forbiddenElement,
ClientSegmentRoot,
layerAssets,
SegmentComponent,
currentParams
});
unauthorizedClientSegment = createErrorBoundaryClientSegmentRoot({
ctx,
ErrorBoundaryComponent: Unauthorized,
errorElement: unauthorizedElement,
ClientSegmentRoot,
layerAssets,
SegmentComponent,
currentParams
});
if (notfoundClientSegment || forbiddenClientSegment || unauthorizedClientSegment) {
segmentNode = createElement(HTTPAccessFallbackBoundary, {
key: cacheNodeKey,
notFound: notfoundClientSegment,
forbidden: forbiddenClientSegment,
unauthorized: unauthorizedClientSegment
}, layerAssets, clientSegment);
} else {
segmentNode = createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, clientSegment);
}
} else {
segmentNode = createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, clientSegment);
}
} else {
const params = createServerParamsForServerSegment(currentParams, optionalCatchAllParamName, varyParamsAccumulator, isRuntimePrefetchable);
let serverSegment;
if ((0, _clientandserverreferences.isUseCacheFunction)(SegmentComponent)) {
const UseCacheLayoutComponent = SegmentComponent;
serverSegment = createElement(UseCacheLayoutComponent, {
...parallelRouteProps,
params: params,
$$isLayout: true
}, // Force static children here so that they're validated.
// See https://github.com/facebook/react/pull/34846
parallelRouteProps.children);
} else {
serverSegment = createElement(SegmentComponent, {
...parallelRouteProps,
params: params
}, // Force static children here so that they're validated.
// See https://github.com/facebook/react/pull/34846
parallelRouteProps.children);
}
if (isRootLayoutWithChildrenSlotAndAtLeastOneMoreSlot) {
// TODO-APP: This is a hack to support unmatched parallel routes, which will throw `notFound()`.
// This ensures that a `HTTPAccessFallbackBoundary` is available for when that happens,
// but it's not ideal, as it needlessly invokes the `NotFound` component and renders the `RootLayout` twice.
// We should instead look into handling the fallback behavior differently in development mode so that it doesn't
// rely on the `NotFound` behavior.
segmentNode = createElement(HTTPAccessFallbackBoundary, {
key: cacheNodeKey,
notFound: notFoundElement ? createElement(Fragment, null, layerAssets, createElement(SegmentComponent, {
params: params
}, notFoundStyles, notFoundElement)) : undefined
}, layerAssets, serverSegment);
} else {
segmentNode = createElement(Fragment, {
key: cacheNodeKey
}, layerAssets, serverSegment);
}
}
const layoutFilePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, 'layout');
const wrappedSegmentNode = isSegmentViewEnabled && layoutFilePath ? createElement(SegmentViewNode, {
key: 'layout',
type: 'layout',
pagePath: layoutFilePath
}, segmentNode) : segmentNode;
// For layouts we just render the component
return createSeedData(ctx, wrappedSegmentNode, parallelRouteCacheNodeSeedData, loadingData, isPossiblyPartialResponse, isRuntimePrefetchable, varyParamsAccumulator);
}
}
function createErrorBoundaryClientSegmentRoot({ ctx, ErrorBoundaryComponent, errorElement, ClientSegmentRoot, layerAssets, SegmentComponent, currentParams }) {
const { componentMod: { createElement, Fragment } } = ctx;
if (ErrorBoundaryComponent) {
const notFoundParallelRouteProps = {
children: errorElement
};
return createElement(Fragment, null, layerAssets, createElement(ClientSegmentRoot, {
Component: SegmentComponent,
slots: notFoundParallelRouteProps,
params: currentParams
}));
}
return null;
}
/**
 * Collects the dynamic params for every segment at or above the root layout.
 * Kicks off the recursive walk with an empty accumulator.
 */ function getRootParams(loaderTree, getDynamicParamFromSegment) {
    const initialParams = {};
    return getRootParamsImpl(initialParams, loaderTree, getDynamicParamFromSegment);
}
/**
 * Recursive helper for `getRootParams`. Walks down the `children` parallel
 * route until the first layout is found, merging each segment's dynamic
 * param into the accumulator along the way.
 */ function getRootParamsImpl(parentParams, loaderTree, getDynamicParamFromSegment) {
    const { modules: { layout }, parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(loaderTree);
    const segmentParam = getDynamicParamFromSegment(loaderTree);
    // Fold this segment's dynamic param (if it has a value) into the accumulator.
    const currentParams = segmentParam && segmentParam.value !== null ? {
        ...parentParams,
        [segmentParam.param]: segmentParam.value
    } : parentParams;
    const isRootLayout = typeof layout !== 'undefined';
    if (isRootLayout) {
        // Reached the root layout — everything accumulated so far is a root param.
        return currentParams;
    }
    if (!parallelRoutes.children) {
        // This should really be an error but there are bugs in Turbopack that cause
        // the _not-found LoaderTree to not have any layouts. For rootParams sake
        // this is somewhat irrelevant when you are not customizing the 404 page.
        // If you are customizing 404
        // TODO update rootParams to make all params optional if `/app/not-found.tsx` is defined
        return currentParams;
    }
    // We stop looking for root params as soon as we hit the first layout
    // and it is not possible to use parallel route children above the root layout
    // so every parallelRoutes object that this function can visit will necessarily
    // have a single `children` prop and no others.
    return getRootParamsImpl(currentParams, parallelRoutes.children, getDynamicParamFromSegment);
}
/**
 * Builds the element for a boundary convention file (not-found, forbidden,
 * unauthorized) and resolves its source file path. Returns a
 * `[element, pagePath]` tuple; the element is `undefined` when the segment
 * does not define the convention component.
 */ async function createBoundaryConventionElement({ ctx, conventionName, Component, styles, tree }) {
    const { componentMod: { createElement, Fragment, SegmentViewNode } } = ctx;
    // The segment explorer view is only available on the dev server.
    const isSegmentViewEnabled = !!process.env.__NEXT_DEV_SERVER;
    const projectDir = process.env.NEXT_RUNTIME === 'edge' ? process.env.__NEXT_EDGE_PROJECT_DIR : ctx.renderOpts.dir;
    const dir = projectDir || '';
    const pagePath = (0, _segmentexplorerpath.getConventionPathByType)(tree, dir, conventionName);
    // No convention component defined for this segment.
    if (!Component) {
        return [
            undefined,
            pagePath
        ];
    }
    const element = createElement(Fragment, null, createElement(Component, null), styles);
    if (!isSegmentViewEnabled) {
        return [
            element,
            pagePath
        ];
    }
    // Wrap in a segment-view node so the dev-time segment explorer can show it.
    return [
        createElement(SegmentViewNode, {
            key: cacheNodeKey + '-' + conventionName,
            type: conventionName,
            // TODO: Discovered when moving to `createElement`.
            // `SegmentViewNode` doesn't support undefined `pagePath`
            pagePath: pagePath
        }, element),
        pagePath
    ];
}
/**
 * Assembles the seed-data tuple for a segment:
 * `[rsc, parallelRoutes, loading (always null here), isPossiblyPartialResponse,
 * varyParams thenable or null]`.
 */ function createSeedData(ctx, rsc, parallelRoutes, loading, isPossiblyPartialResponse, isRuntimePrefetchable, varyParamsAccumulator) {
    const { createElement } = ctx.componentMod;
    // When this segment is NOT runtime-prefetchable, delay it until the Static
    // stage by wrapping the node in a promise. This allows runtime-prefetchable
    // segments (the lower tree) to render first during EarlyStatic, so their
    // runtime data resolves in EarlyRuntime where sync IO can be checked.
    // React will suspend on the thenable and resume when the stage advances.
    if (!isRuntimePrefetchable) {
        const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
        // Only request-scoped and runtime-prerender work units carry staged
        // rendering; every other work unit type is left untouched.
        const isStagedWorkUnit = workUnitStore != null && (workUnitStore.type === 'request' || workUnitStore.type === 'prerender-runtime');
        if (isStagedWorkUnit && workUnitStore.stagedRendering) {
            const deferredRsc = rsc;
            rsc = workUnitStore.stagedRendering.waitForStage(_stagedrendering.RenderStage.Static).then(()=>deferredRsc);
        }
    }
    if (loading !== null) {
        // If a loading.tsx boundary is present, wrap the component data in an
        // additional context provider to pass the loading data to the next
        // set of children.
        // NOTE: The reason this is a separate wrapper from LayoutRouter is because
        // not all segments render a LayoutRouter component, e.g. the root segment.
        const { LoadingBoundaryProvider } = ctx.componentMod;
        rsc = createElement(LoadingBoundaryProvider, {
            loading: loading,
            children: rsc
        });
    }
    const varyParams = varyParamsAccumulator ? (0, _varyparams.getVaryParamsThenable)(varyParamsAccumulator) : null;
    return [
        rsc,
        parallelRoutes,
        null,
        isPossiblyPartialResponse,
        varyParams
    ];
}
//# sourceMappingURL=create-component-tree.js.map
File diff suppressed because one or more lines are too long
+20
View File
@@ -0,0 +1,20 @@
import type { ErrorInfo } from 'react';
declare global {
    // Optional global hook for forwarding errors to an external logger.
    // NOTE(review): only the shape is declared here — confirm where it is installed.
    var __next_log_error__: undefined | ((err: unknown) => void);
}
// Error handler for the React Server Components render: maps a thrown value
// to a digest string, or `undefined`.
type RSCErrorHandler = (err: unknown) => string | undefined;
// Error handler for the HTML (SSR) render: additionally receives React's
// ErrorInfo (component stack).
type SSRErrorHandler = (err: unknown, errorInfo?: ErrorInfo) => string | undefined;
// An Error that carries a stable digest, plus an optional environment label.
export type DigestedError = Error & {
    digest: string;
    environmentName?: string;
};
/**
 * Returns a digest for well-known Next.js errors, otherwise `undefined`. If a
 * digest is returned this also means that the error does not need to be
 * reported.
 */
export declare function getDigestForWellKnownError(error: unknown): string | undefined;
// Builds the RSC-side error handler; collects digested errors into
// `reactServerErrors` and reports them via `onReactServerRenderError`.
export declare function createReactServerErrorHandler(shouldFormatError: boolean, isBuildTimePrerendering: boolean, reactServerErrors: Map<string, DigestedError>, onReactServerRenderError: (err: DigestedError, silenceLog: boolean) => void, spanToRecordOn?: any): RSCErrorHandler;
// Builds the SSR-side error handler; pushes every thrown value into
// `allCapturedErrors` and reports genuine SSR errors via `onHTMLRenderSSRError`.
export declare function createHTMLErrorHandler(shouldFormatError: boolean, isBuildTimePrerendering: boolean, reactServerErrors: Map<string, DigestedError>, allCapturedErrors: Array<unknown>, onHTMLRenderSSRError: (err: DigestedError, errorInfo?: ErrorInfo) => void, spanToRecordOn?: any): SSRErrorHandler;
// True when the error did not originate from framework control flow.
export declare function isUserLandError(err: any): boolean;
export {};
+189
View File
@@ -0,0 +1,189 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
createHTMLErrorHandler: null,
createReactServerErrorHandler: null,
getDigestForWellKnownError: null,
isUserLandError: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
// Register the module's public API. Each export is exposed as a lazy getter
// via the `_export` helper defined above, deferring name resolution until
// first access.
_export(exports, {
    createHTMLErrorHandler: function() {
        return createHTMLErrorHandler;
    },
    createReactServerErrorHandler: function() {
        return createReactServerErrorHandler;
    },
    getDigestForWellKnownError: function() {
        return getDigestForWellKnownError;
    },
    isUserLandError: function() {
        return isUserLandError;
    }
});
const _stringhash = /*#__PURE__*/ _interop_require_default(require("next/dist/compiled/string-hash"));
const _formatservererror = require("../../lib/format-server-error");
const _tracer = require("../lib/trace/tracer");
const _pipereadable = require("../pipe-readable");
const _bailouttocsr = require("../../shared/lib/lazy-dynamic/bailout-to-csr");
const _hooksservercontext = require("../../client/components/hooks-server-context");
const _isnextroutererror = require("../../client/components/is-next-router-error");
const _dynamicrendering = require("./dynamic-rendering");
const _iserror = require("../../lib/is-error");
const _errortelemetryutils = require("../../lib/error-telemetry-utils");
const _reactlargeshellerror = require("./react-large-shell-error");
const _instantvalidationerror = require("./instant-validation/instant-validation-error");
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
/**
 * Returns a digest for well-known Next.js control-flow "errors" (bailout to
 * CSR, router navigation, dynamic-server usage, interrupted prerender,
 * instant validation), otherwise `undefined`. A returned digest also means
 * the error does not need to be reported.
 */ function getDigestForWellKnownError(error) {
    // Each predicate identifies a framework-internal error type whose own
    // digest can be reused directly and which should not be logged.
    const wellKnownChecks = [
        // If we're bailing out to CSR, we don't need to log the error.
        _bailouttocsr.isBailoutToCSRError,
        // If this is a navigation error, we don't need to log the error.
        _isnextroutererror.isNextRouterError,
        // If this error occurs, we know that we should be stopping the static
        // render. This is only thrown in static generation when PPR is not enabled,
        // which causes the whole page to be marked as dynamic. We don't need to
        // tell the user about this error, as it's not actionable.
        _hooksservercontext.isDynamicServerError,
        // If this is a prerender interrupted error, we don't need to log the error.
        _dynamicrendering.isPrerenderInterruptedError,
        _instantvalidationerror.isInstantValidationError
    ];
    for (const isWellKnown of wellKnownChecks){
        if (isWellKnown(error)) {
            return error.digest;
        }
    }
    return undefined;
}
/**
 * Creates the error handler for the React Server Components (flight) render.
 *
 * The returned handler maps a thrown value to a stable digest string,
 * collects digested user-land errors into `reactServerErrors`, records them
 * on a tracing span when one is available, and forwards them to
 * `onReactServerRenderError` for logging.
 *
 * @param shouldFormatError - when true, rewrites server errors with more
 *   helpful messages before reporting (development behavior per the inline
 *   comment below).
 * @param isBuildTimePrerendering - suppresses reporting of the production
 *   "specific message is omitted" placeholder errors during export.
 * @param reactServerErrors - digest -> error map; also consulted to recover
 *   obfuscated errors from other react-server environments.
 * @param onReactServerRenderError - callback invoked per handled error; the
 *   second argument requests that logging be skipped.
 * @param spanToRecordOn - optional tracing span; falls back to the active
 *   scope span.
 */ function createReactServerErrorHandler(shouldFormatError, isBuildTimePrerendering, reactServerErrors, onReactServerRenderError, spanToRecordOn) {
    return (thrownValue)=>{
        var _err_message;
        // Plain string throws are hashed directly into a digest.
        if (typeof thrownValue === 'string') {
            // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            return (0, _stringhash.default)(thrownValue).toString();
        }
        // If the response was closed, we don't need to log the error.
        if ((0, _pipereadable.isAbortError)(thrownValue)) return;
        // Framework control-flow errors reuse their own digest and are not logged.
        const digest = getDigestForWellKnownError(thrownValue);
        if (digest) {
            return digest;
        }
        if ((0, _reactlargeshellerror.isReactLargeShellError)(thrownValue)) {
            // TODO: Aggregate
            console.error(thrownValue);
            return undefined;
        }
        let err = (0, _iserror.getProperError)(thrownValue);
        let silenceLog = false;
        // If the error already has a digest, respect the original digest,
        // so it won't get re-generated into another new error.
        if (err.digest) {
            if (process.env.NODE_ENV === 'production' && reactServerErrors.has(err.digest)) {
                // This error is likely an obfuscated error from another react-server
                // environment (e.g. 'use cache'). We recover the original error here
                // for reporting purposes.
                err = reactServerErrors.get(err.digest);
                // We don't log it again though, as it was already logged in the
                // original environment.
                silenceLog = true;
            } else {
            // Either we're in development (where we want to keep the transported
            // error with environmentName), or the error is not in reactServerErrors
            // but has a digest from other means. Keep the error as-is.
            }
        } else {
            // Derive a digest from the message plus stack so distinct errors with
            // the same message still hash differently.
            err.digest = (0, _errortelemetryutils.createDigestWithErrorCode)(err, // TODO-APP: look at using webcrypto instead. Requires a promise to be awaited.
            (0, _stringhash.default)(err.message + (err.stack || '')).toString());
        }
        // @TODO by putting this here and not at the top it is possible that
        // we don't error the build in places we actually expect to
        if (!reactServerErrors.has(err.digest)) {
            reactServerErrors.set(err.digest, err);
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            (0, _formatservererror.formatServerError)(err);
        }
        // Don't log the suppressed error during export
        if (!(isBuildTimePrerendering && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
            // Record exception on the provided span if available, otherwise try active span.
            const span = spanToRecordOn ?? (0, _tracer.getTracer)().getActiveScopeSpan();
            if (span) {
                span.recordException(err);
                span.setAttribute('error.type', err.name);
                span.setStatus({
                    code: _tracer.SpanStatusCode.ERROR,
                    message: err.message
                });
            }
            onReactServerRenderError(err, silenceLog);
        }
        return err.digest;
    };
}
/**
 * Creates the error handler for the HTML (SSR) render.
 *
 * Mirrors `createReactServerErrorHandler`, with two differences visible
 * below: every thrown value is pushed into `allCapturedErrors`, and errors
 * whose digest already exists in `reactServerErrors` are treated as RSC
 * errors and are not re-reported here — only genuine SSR errors reach
 * `onHTMLRenderSSRError`.
 *
 * @param shouldFormatError - when true, rewrites server errors with more
 *   helpful messages before reporting.
 * @param isBuildTimePrerendering - suppresses reporting of the production
 *   "specific message is omitted" placeholder errors during export.
 * @param reactServerErrors - digest -> error map populated by the RSC handler.
 * @param allCapturedErrors - sink that receives every thrown value unchanged.
 * @param onHTMLRenderSSRError - callback invoked for SSR-originated errors.
 * @param spanToRecordOn - optional tracing span; falls back to the active
 *   scope span.
 */ function createHTMLErrorHandler(shouldFormatError, isBuildTimePrerendering, reactServerErrors, allCapturedErrors, onHTMLRenderSSRError, spanToRecordOn) {
    return (thrownValue, errorInfo)=>{
        var _err_message;
        if ((0, _reactlargeshellerror.isReactLargeShellError)(thrownValue)) {
            // TODO: Aggregate
            console.error(thrownValue);
            return undefined;
        }
        let isSSRError = true;
        // Capture every thrown value, even ones that are filtered out below.
        allCapturedErrors.push(thrownValue);
        // If the response was closed, we don't need to log the error.
        if ((0, _pipereadable.isAbortError)(thrownValue)) return;
        // Framework control-flow errors reuse their own digest and are not logged.
        const digest = getDigestForWellKnownError(thrownValue);
        if (digest) {
            return digest;
        }
        const err = (0, _iserror.getProperError)(thrownValue);
        // If the error already has a digest, respect the original digest,
        // so it won't get re-generated into another new error.
        if (err.digest) {
            if (reactServerErrors.has(err.digest)) {
                // This error is likely an obfuscated error from react-server.
                // We recover the original error here.
                thrownValue = reactServerErrors.get(err.digest);
                isSSRError = false;
            } else {
            // The error is not from react-server but has a digest
            // from other means so we don't need to produce a new one
            }
        } else {
            // Prefer the component stack (when available) over the JS stack as the
            // digest input, so position in the tree differentiates errors.
            err.digest = (0, _errortelemetryutils.createDigestWithErrorCode)(err, (0, _stringhash.default)(err.message + ((errorInfo == null ? void 0 : errorInfo.componentStack) || err.stack || '')).toString());
        }
        // Format server errors in development to add more helpful error messages
        if (shouldFormatError) {
            (0, _formatservererror.formatServerError)(err);
        }
        // Don't log the suppressed error during export
        if (!(isBuildTimePrerendering && (err == null ? void 0 : (_err_message = err.message) == null ? void 0 : _err_message.includes('The specific message is omitted in production builds to avoid leaking sensitive details.')))) {
            // HTML errors contain RSC errors as well, filter them out before reporting
            if (isSSRError) {
                // Record exception on the provided span if available, otherwise try active span.
                const span = spanToRecordOn ?? (0, _tracer.getTracer)().getActiveScopeSpan();
                if (span) {
                    span.recordException(err);
                    span.setAttribute('error.type', err.name);
                    span.setStatus({
                        code: _tracer.SpanStatusCode.ERROR,
                        message: err.message
                    });
                }
                onHTMLRenderSSRError(err, errorInfo);
            }
        }
        return err.digest;
    };
}
/**
 * True when the error originated in user code — i.e. it is not a closed
 * response (abort), a bailout-to-CSR signal, or a router navigation error.
 */ function isUserLandError(err) {
    const isFrameworkSignal = (0, _pipereadable.isAbortError)(err) || (0, _bailouttocsr.isBailoutToCSRError)(err) || (0, _isnextroutererror.isNextRouterError)(err);
    return !isFrameworkSignal;
}
//# sourceMappingURL=create-error-handler.js.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,5 @@
import type { LoaderTree } from '../lib/app-dir-module';
import { type FlightRouterState, type PrefetchHints } from '../../shared/lib/app-router-types';
import type { GetDynamicParamFromSegment } from './app-render';
export declare function createFlightRouterStateFromLoaderTree(loaderTree: LoaderTree, hintTree: PrefetchHints | null, getDynamicParamFromSegment: GetDynamicParamFromSegment, searchParams: any): Promise<FlightRouterState>;
export declare function createRouteTreePrefetch(loaderTree: LoaderTree, hintTree: PrefetchHints | null, getDynamicParamFromSegment: GetDynamicParamFromSegment): Promise<FlightRouterState>;
@@ -0,0 +1,102 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
createFlightRouterStateFromLoaderTree: null,
createRouteTreePrefetch: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
createFlightRouterStateFromLoaderTree: function() {
return createFlightRouterStateFromLoaderTree;
},
createRouteTreePrefetch: function() {
return createRouteTreePrefetch;
}
});
const _approutertypes = require("../../shared/lib/app-router-types");
const _segment = require("../../shared/lib/segment");
/**
 * Recursively converts a LoaderTree node into a FlightRouterState tuple,
 * computing a prefetch-hints bitmask for each segment along the way.
 *
 * @param loaderTree - server loader tree node for this segment
 * @param hintTree - matching node of the build-time prefetch-hint tree, or null
 * @param getDynamicParamFromSegment - resolves a dynamic param for this node
 * @param searchParams - search params folded into page segment cache keys
 * @param didFindRootLayout - true once an ancestor has claimed the root layout
 * @returns the FlightRouterState tuple for this subtree
 */
async function createFlightRouterStateFromLoaderTreeImpl(loaderTree, hintTree, getDynamicParamFromSegment, searchParams, didFindRootLayout) {
    const [segment, parallelRoutes, { layout, loading, page }] = loaderTree;
    // NOTE(review): the whole loader tree node is passed here, not just the
    // segment — confirm the helper expects the tree despite its name.
    const dynamicParam = getDynamicParamFromSegment(loaderTree);
    const treeSegment = dynamicParam ? dynamicParam.treeSegment : segment;
    // FlightRouterState tuple: [segment, children, ...]; index 1 (children)
    // and the optional index 4 (hints bitmask) are filled in at the bottom.
    const segmentTree = [
        (0, _segment.addSearchParamsIfPageSegment)(treeSegment, searchParams),
        {}
    ];
    // Load the layout or page module to check for unstable_instant config
    const mod = layout ? await layout[0]() : page ? await page[0]() : undefined;
    const instantConfig = mod ? mod.unstable_instant : undefined;
    let prefetchHints = 0;
    // Union in the precomputed build-time hints (e.g. segment inlining
    // decisions) if available. When hints are not available (e.g. dev mode or
    // if prefetch-hints.json was not generated), we fall through and still
    // compute the other hints below. In the future this should be a build
    // error, but for now we gracefully degrade.
    //
    // TODO: Move more of the hints computation (IsRootLayout, instant config,
    // loading boundary detection) into the build-time measurement step in
    // collectPrefetchHints, so this function only needs to union the
    // precomputed bitmask rather than re-derive hints on every render.
    if (hintTree !== null) {
        prefetchHints |= hintTree.hints;
    }
    // Mark the first segment that has a layout as the "root" layout
    if (!didFindRootLayout && typeof layout !== 'undefined') {
        didFindRootLayout = true;
        prefetchHints |= _approutertypes.PrefetchHint.IsRootLayout;
    }
    // unstable_instant may be a config object; 'runtime' prefetch mode gets
    // its own flag on top of the subtree marker.
    if (instantConfig && typeof instantConfig === 'object') {
        prefetchHints |= _approutertypes.PrefetchHint.SubtreeHasInstant;
        if (instantConfig.prefetch === 'runtime') {
            prefetchHints |= _approutertypes.PrefetchHint.HasRuntimePrefetch;
        }
    }
    // Check if this segment has a loading boundary
    if (loading) {
        prefetchHints |= _approutertypes.PrefetchHint.SegmentHasLoadingBoundary;
    }
    // Recurse into each parallel route; children are awaited sequentially.
    const children = {};
    for(const parallelRouteKey in parallelRoutes){
        var _hintTree_slots;
        // Look up the child hint node by parallel route key, traversing the
        // hint tree in parallel with the loader tree.
        const childHintNode = (hintTree == null ? void 0 : (_hintTree_slots = hintTree.slots) == null ? void 0 : _hintTree_slots[parallelRouteKey]) ?? null;
        const child = await createFlightRouterStateFromLoaderTreeImpl(parallelRoutes[parallelRouteKey], childHintNode, getDynamicParamFromSegment, searchParams, didFindRootLayout);
        // Propagate subtree flags from children
        if (child[4] !== undefined) {
            prefetchHints |= child[4] & (_approutertypes.PrefetchHint.SubtreeHasInstant | _approutertypes.PrefetchHint.SubtreeHasLoadingBoundary);
            // If a child has a loading boundary (either directly or in its subtree),
            // propagate that as SubtreeHasLoadingBoundary to this segment.
            if (child[4] & (_approutertypes.PrefetchHint.SegmentHasLoadingBoundary | _approutertypes.PrefetchHint.SubtreeHasLoadingBoundary)) {
                prefetchHints |= _approutertypes.PrefetchHint.SubtreeHasLoadingBoundary;
            }
        }
        children[parallelRouteKey] = child;
    }
    segmentTree[1] = children;
    // Only materialize slot 4 when at least one hint bit is set, keeping the
    // serialized tuple compact for the common no-hints case.
    if (prefetchHints !== 0) {
        segmentTree[4] = prefetchHints;
    }
    return segmentTree;
}
async function createFlightRouterStateFromLoaderTree(loaderTree, hintTree, getDynamicParamFromSegment, searchParams) {
    // Public entry point: begin the recursive walk with no root layout
    // discovered yet.
    return createFlightRouterStateFromLoaderTreeImpl(loaderTree, hintTree, getDynamicParamFromSegment, searchParams, false);
}
async function createRouteTreePrefetch(loaderTree, hintTree, getDynamicParamFromSegment) {
    // Search params should not be added to page segment's cache key during a
    // route tree prefetch request, because they do not affect the structure of
    // the route. The client cache has its own logic to handle search params.
    const emptySearchParams = {};
    return createFlightRouterStateFromLoaderTreeImpl(loaderTree, hintTree, getDynamicParamFromSegment, emptySearchParams, false);
}
//# sourceMappingURL=create-flight-router-state-from-loader-tree.js.map
File diff suppressed because one or more lines are too long
+1
View File
@@ -0,0 +1 @@
export declare const isCsrfOriginAllowed: (originDomain: string, allowedOrigins?: string[]) => boolean;
+86
View File
@@ -0,0 +1,86 @@
// micromatch is only available at node runtime, so it cannot be used here since the code path that calls this function
// can be run from edge. This is a simple implementation that safely achieves the required functionality.
// the goal is to match the functionality for remotePatterns as defined here -
// https://nextjs.org/docs/app/api-reference/components/image#remotepatterns
// TODO - retrofit micromatch to work in edge and use that instead
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "isCsrfOriginAllowed", {
enumerable: true,
get: function() {
return isCsrfOriginAllowed;
}
});
function matchWildcardDomain(domain, pattern) {
    // DNS names are case-insensitive per RFC 1035.
    // Lowercase ASCII letters only, to avoid unicode case-folding surprises.
    const lowerAscii = (value)=>value.replace(/[A-Z]/g, (ch)=>ch.toLowerCase());
    const domainSegments = lowerAscii(domain).split('.');
    const patternSegments = lowerAscii(pattern).split('.');
    if (patternSegments.length < 1) {
        // An empty pattern can never match anything.
        return false;
    }
    if (domainSegments.length < patternSegments.length) {
        // The domain is too short to satisfy every pattern segment.
        return false;
    }
    // A bare wildcard ('*' or '**') would match entire domains; wildcards may
    // only match subdomains, never the main domain, so reject it outright.
    if (patternSegments.length === 1 && (patternSegments[0] === '*' || patternSegments[0] === '**')) {
        return false;
    }
    // Compare segments right-to-left (TLD first).
    while (patternSegments.length) {
        const patternSegment = patternSegments.pop();
        const domainSegment = domainSegments.pop();
        if (patternSegment === '') {
            // Empty pattern segments (e.g. 'a..b') are invalid.
            return false;
        }
        if (patternSegment === '*') {
            // Single wildcard must consume exactly one non-empty segment.
            if (!domainSegment) {
                return false;
            }
            continue;
        }
        if (patternSegment === '**') {
            // The recursive wildcard is only valid as the left-most segment.
            if (patternSegments.length > 0) {
                return false;
            }
            // It swallows all remaining segments, provided at least one exists.
            return domainSegment !== undefined;
        }
        // Literal segment: must match exactly.
        if (domainSegment !== patternSegment) {
            return false;
        }
    }
    // Pattern exhausted: match only when the domain is fully consumed too.
    return domainSegments.length === 0;
}
const isCsrfOriginAllowed = (originDomain, allowedOrigins = [])=>{
    // DNS names are case-insensitive per RFC 1035
    // Use ASCII-only toLowerCase to avoid unicode issues
    const normalizedOrigin = originDomain.replace(/[A-Z]/g, (c)=>c.toLowerCase());
    return allowedOrigins.some((allowedOrigin)=>{
        // Skip empty/undefined entries in the allow-list.
        if (!allowedOrigin) return false;
        const normalizedAllowed = allowedOrigin.replace(/[A-Z]/g, (c)=>c.toLowerCase());
        // Pass the already-normalized values to matchWildcardDomain so it does
        // not re-lowercase both strings for every candidate (it still
        // normalizes defensively, so behavior is unchanged).
        return normalizedAllowed === normalizedOrigin || matchWildcardDomain(normalizedOrigin, normalizedAllowed);
    });
};
//# sourceMappingURL=csrf-protection.js.map
File diff suppressed because one or more lines are too long
+8
View File
@@ -0,0 +1,8 @@
/**
* Compile-time switcher for debug channel operations.
*
* Simple re-export from the web implementation.
* A future change will add a conditional branch for node streams.
*/
export type { DebugChannelPair, DebugChannelServer, } from './debug-channel-server.web';
export { createDebugChannel, toNodeDebugChannel, } from './debug-channel-server.web';
+30
View File
@@ -0,0 +1,30 @@
/**
* Compile-time switcher for debug channel operations.
*
* Simple re-export from the web implementation.
* A future change will add a conditional branch for node streams.
*/ "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
createDebugChannel: null,
toNodeDebugChannel: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
createDebugChannel: function() {
return _debugchannelserverweb.createDebugChannel;
},
toNodeDebugChannel: function() {
return _debugchannelserverweb.toNodeDebugChannel;
}
});
const _debugchannelserverweb = require("./debug-channel-server.web");
//# sourceMappingURL=debug-channel-server.js.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/debug-channel-server.ts"],"sourcesContent":["/**\n * Compile-time switcher for debug channel operations.\n *\n * Simple re-export from the web implementation.\n * A future change will add a conditional branch for node streams.\n */\nexport type {\n DebugChannelPair,\n DebugChannelServer,\n} from './debug-channel-server.web'\n\nexport {\n createDebugChannel,\n toNodeDebugChannel,\n} from './debug-channel-server.web'\n"],"names":["createDebugChannel","toNodeDebugChannel"],"mappings":"AAAA;;;;;CAKC;;;;;;;;;;;;;;;IAOCA,kBAAkB;eAAlBA,yCAAkB;;IAClBC,kBAAkB;eAAlBA,yCAAkB;;;uCACb","ignoreList":[0]}
+24
View File
@@ -0,0 +1,24 @@
/**
* Web debug channel implementation.
* Loaded by debug-channel-server.ts.
*/
export type DebugChannelPair = {
serverSide: DebugChannelServer;
clientSide: DebugChannelClient;
};
export type DebugChannelServer = {
readable?: ReadableStream<Uint8Array>;
writable: WritableStream<Uint8Array>;
};
type DebugChannelClient = {
readable: ReadableStream<Uint8Array>;
writable?: WritableStream<Uint8Array>;
};
export declare function createDebugChannel(): DebugChannelPair | undefined;
export declare function createWebDebugChannel(): DebugChannelPair;
/**
* toNodeDebugChannel is a no-op stub on the web path.
* It should never be called in edge/web builds.
*/
export declare function toNodeDebugChannel(_webDebugChannel: DebugChannelServer): never;
export {};
+71
View File
@@ -0,0 +1,71 @@
/**
* Web debug channel implementation.
* Loaded by debug-channel-server.ts.
*/ // Types defined inline for now; will move to debug-channel-server.node.ts later.
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
createDebugChannel: null,
createWebDebugChannel: null,
toNodeDebugChannel: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
createDebugChannel: function() {
return createDebugChannel;
},
createWebDebugChannel: function() {
return createWebDebugChannel;
},
toNodeDebugChannel: function() {
return toNodeDebugChannel;
}
});
function createDebugChannel() {
    // Debug channels are a development-only facility; production builds get
    // no channel at all.
    return process.env.NODE_ENV === 'production' ? undefined : createWebDebugChannel();
}
function createWebDebugChannel() {
let readableController;
const clientSideReadable = new ReadableStream({
start (controller) {
readableController = controller;
}
});
return {
serverSide: {
writable: new WritableStream({
write (chunk) {
readableController == null ? void 0 : readableController.enqueue(chunk);
},
close () {
readableController == null ? void 0 : readableController.close();
},
abort (err) {
readableController == null ? void 0 : readableController.error(err);
}
})
},
clientSide: {
readable: clientSideReadable
}
};
}
function toNodeDebugChannel(_webDebugChannel) {
throw Object.defineProperty(new Error('toNodeDebugChannel cannot be used in edge/web runtime, this is a bug in the Next.js codebase'), "__NEXT_ERROR_CODE", {
value: "E1071",
enumerable: false,
configurable: true
});
}
//# sourceMappingURL=debug-channel-server.web.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/debug-channel-server.web.ts"],"sourcesContent":["/**\n * Web debug channel implementation.\n * Loaded by debug-channel-server.ts.\n */\n\n// Types defined inline for now; will move to debug-channel-server.node.ts later.\nexport type DebugChannelPair = {\n serverSide: DebugChannelServer\n clientSide: DebugChannelClient\n}\n\nexport type DebugChannelServer = {\n readable?: ReadableStream<Uint8Array>\n writable: WritableStream<Uint8Array>\n}\n\ntype DebugChannelClient = {\n readable: ReadableStream<Uint8Array>\n writable?: WritableStream<Uint8Array>\n}\n\nexport function createDebugChannel(): DebugChannelPair | undefined {\n if (process.env.NODE_ENV === 'production') {\n return undefined\n }\n return createWebDebugChannel()\n}\n\nexport function createWebDebugChannel(): DebugChannelPair {\n let readableController: ReadableStreamDefaultController | undefined\n\n const clientSideReadable = new ReadableStream<Uint8Array>({\n start(controller) {\n readableController = controller\n },\n })\n\n return {\n serverSide: {\n writable: new WritableStream<Uint8Array>({\n write(chunk) {\n readableController?.enqueue(chunk)\n },\n close() {\n readableController?.close()\n },\n abort(err) {\n readableController?.error(err)\n },\n }),\n },\n clientSide: { readable: clientSideReadable },\n }\n}\n\n/**\n * toNodeDebugChannel is a no-op stub on the web path.\n * It should never be called in edge/web builds.\n */\nexport function toNodeDebugChannel(\n _webDebugChannel: DebugChannelServer\n): never {\n throw new Error(\n 'toNodeDebugChannel cannot be used in edge/web runtime, this is a bug in the Next.js codebase'\n 
)\n}\n"],"names":["createDebugChannel","createWebDebugChannel","toNodeDebugChannel","process","env","NODE_ENV","undefined","readableController","clientSideReadable","ReadableStream","start","controller","serverSide","writable","WritableStream","write","chunk","enqueue","close","abort","err","error","clientSide","readable","_webDebugChannel","Error"],"mappings":"AAAA;;;CAGC,GAED,iFAAiF;;;;;;;;;;;;;;;;;IAgBjEA,kBAAkB;eAAlBA;;IAOAC,qBAAqB;eAArBA;;IA+BAC,kBAAkB;eAAlBA;;;AAtCT,SAASF;IACd,IAAIG,QAAQC,GAAG,CAACC,QAAQ,KAAK,cAAc;QACzC,OAAOC;IACT;IACA,OAAOL;AACT;AAEO,SAASA;IACd,IAAIM;IAEJ,MAAMC,qBAAqB,IAAIC,eAA2B;QACxDC,OAAMC,UAAU;YACdJ,qBAAqBI;QACvB;IACF;IAEA,OAAO;QACLC,YAAY;YACVC,UAAU,IAAIC,eAA2B;gBACvCC,OAAMC,KAAK;oBACTT,sCAAAA,mBAAoBU,OAAO,CAACD;gBAC9B;gBACAE;oBACEX,sCAAAA,mBAAoBW,KAAK;gBAC3B;gBACAC,OAAMC,GAAG;oBACPb,sCAAAA,mBAAoBc,KAAK,CAACD;gBAC5B;YACF;QACF;QACAE,YAAY;YAAEC,UAAUf;QAAmB;IAC7C;AACF;AAMO,SAASN,mBACdsB,gBAAoC;IAEpC,MAAM,qBAEL,CAFK,IAAIC,MACR,iGADI,qBAAA;eAAA;oBAAA;sBAAA;IAEN;AACF","ignoreList":[0]}
@@ -0,0 +1,2 @@
import type { DynamicAccessStorage } from './dynamic-access-async-storage.external';
export declare const dynamicAccessAsyncStorageInstance: DynamicAccessStorage;
@@ -0,0 +1,14 @@
"use strict";
// Compiled CommonJS wrapper for dynamic-access-async-storage-instance.ts.
// Exposes one shared AsyncLocalStorage instance via a lazy getter so every
// consumer observes the same storage.
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "dynamicAccessAsyncStorageInstance", {
    enumerable: true,
    get: function() {
        return dynamicAccessAsyncStorageInstance;
    }
});
const _asynclocalstorage = require("./async-local-storage");
// Module-level singleton: created once at require time.
const dynamicAccessAsyncStorageInstance = (0, _asynclocalstorage.createAsyncLocalStorage)();
//# sourceMappingURL=dynamic-access-async-storage-instance.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/dynamic-access-async-storage-instance.ts"],"sourcesContent":["import { createAsyncLocalStorage } from './async-local-storage'\nimport type { DynamicAccessStorage } from './dynamic-access-async-storage.external'\n\nexport const dynamicAccessAsyncStorageInstance: DynamicAccessStorage =\n createAsyncLocalStorage()\n"],"names":["dynamicAccessAsyncStorageInstance","createAsyncLocalStorage"],"mappings":";;;;+BAGaA;;;eAAAA;;;mCAH2B;AAGjC,MAAMA,oCACXC,IAAAA,0CAAuB","ignoreList":[0]}
@@ -0,0 +1,7 @@
import type { AsyncLocalStorage } from 'async_hooks';
import { dynamicAccessAsyncStorageInstance } from './dynamic-access-async-storage-instance';
export interface DynamicAccessAsyncStore {
readonly abortController: AbortController;
}
export type DynamicAccessStorage = AsyncLocalStorage<DynamicAccessAsyncStore>;
export { dynamicAccessAsyncStorageInstance as dynamicAccessAsyncStorage };
@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "dynamicAccessAsyncStorage", {
enumerable: true,
get: function() {
return _dynamicaccessasyncstorageinstance.dynamicAccessAsyncStorageInstance;
}
});
const _dynamicaccessasyncstorageinstance = require("./dynamic-access-async-storage-instance");
//# sourceMappingURL=dynamic-access-async-storage.external.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/dynamic-access-async-storage.external.ts"],"sourcesContent":["import type { AsyncLocalStorage } from 'async_hooks'\n\n// Share the instance module in the next-shared layer\nimport { dynamicAccessAsyncStorageInstance } from './dynamic-access-async-storage-instance' with { 'turbopack-transition': 'next-shared' }\n\nexport interface DynamicAccessAsyncStore {\n readonly abortController: AbortController\n}\n\nexport type DynamicAccessStorage = AsyncLocalStorage<DynamicAccessAsyncStore>\nexport { dynamicAccessAsyncStorageInstance as dynamicAccessAsyncStorage }\n"],"names":["dynamicAccessAsyncStorage","dynamicAccessAsyncStorageInstance"],"mappings":";;;;+BAU8CA;;;eAArCC,oEAAiC;;;mDAPQ","ignoreList":[0]}
+133
View File
@@ -0,0 +1,133 @@
/**
* The functions provided by this module are used to communicate certain properties
* about the currently running code so that Next.js can make decisions on how to handle
* the current execution in different rendering modes such as pre-rendering, resuming, and SSR.
*
* Today Next.js treats all code as potentially static. Certain APIs may only make sense when dynamically rendering.
* Traditionally this meant deopting the entire render to dynamic however with PPR we can now deopt parts
* of a React tree as dynamic while still keeping other parts static. There are really two different kinds of
* Dynamic indications.
*
* The first is simply an intention to be dynamic. unstable_noStore is an example of this where
* the currently executing code simply declares that the current scope is dynamic but if you use it
* inside unstable_cache it can still be cached. This type of indication can be removed if we ever
* make the default dynamic to begin with because the only way you would ever be static is inside
* a cache scope which this indication does not affect.
*
* The second is an indication that a dynamic data source was read. This is a stronger form of dynamic
* because it means that it is inappropriate to cache this at all. using a dynamic data source inside
* unstable_cache should error. If you want to use some dynamic data inside unstable_cache you should
* read that data outside the cache and pass it in as an argument to the cached function.
*/
import type { WorkStore } from '../app-render/work-async-storage.external';
import type { WorkUnitStore, PrerenderStoreModern, ValidationStoreClient } from '../app-render/work-unit-async-storage.external';
import type { ValidationBoundaryTracking } from './instant-validation/boundary-tracking';
import type { InstantValidationSampleTracking } from './instant-validation/instant-samples';
export type DynamicAccess = {
/**
* If debugging, this will contain the stack trace of where the dynamic access
* occurred. This is used to provide more information to the user about why
* their page is being rendered dynamically.
*/
stack?: string;
/**
* The expression that was accessed dynamically.
*/
expression: string;
};
export type DynamicTrackingState = {
/**
* When true, stack information will also be tracked during dynamic access.
*/
readonly isDebugDynamicAccesses: boolean | undefined;
/**
* The dynamic accesses that occurred during the render.
*/
readonly dynamicAccesses: Array<DynamicAccess>;
syncDynamicErrorWithStack: null | Error;
};
export type DynamicValidationState = {
hasSuspenseAboveBody: boolean;
hasDynamicMetadata: boolean;
dynamicMetadata: null | Error;
hasDynamicViewport: boolean;
hasAllowedDynamic: boolean;
dynamicErrors: Array<Error>;
};
export declare function createDynamicTrackingState(isDebugDynamicAccesses: boolean | undefined): DynamicTrackingState;
export declare function createDynamicValidationState(): DynamicValidationState;
export declare function getFirstDynamicReason(trackingState: DynamicTrackingState): undefined | string;
/**
* This function communicates that the current scope should be treated as dynamic.
*
* In most cases this function is a no-op but if called during
* a PPR prerender it will postpone the current sub-tree and calling
* it during a normal prerender will cause the entire prerender to abort
*/
export declare function markCurrentScopeAsDynamic(store: WorkStore, workUnitStore: undefined | Exclude<WorkUnitStore, PrerenderStoreModern>, expression: string): void;
export declare function abortOnSynchronousPlatformIOAccess(route: string, expression: string, errorWithStack: Error, prerenderStore: PrerenderStoreModern): void;
/**
* This component will call `React.postpone` that throws the postponed error.
*/
type PostponeProps = {
reason: string;
route: string;
};
export declare function Postpone({ reason, route }: PostponeProps): never;
export declare function postponeWithTracking(route: string, expression: string, dynamicTracking: null | DynamicTrackingState): never;
export declare function isDynamicPostpone(err: unknown): boolean;
type DigestError = Error & {
digest: string;
};
export declare function isPrerenderInterruptedError(error: unknown): error is DigestError;
export declare function accessedDynamicData(dynamicAccesses: Array<DynamicAccess>): boolean;
export declare function consumeDynamicAccess(serverDynamic: DynamicTrackingState, clientDynamic: DynamicTrackingState): DynamicTrackingState['dynamicAccesses'];
export declare function formatDynamicAPIAccesses(dynamicAccesses: Array<DynamicAccess>): string[];
/**
* This is a bit of a hack to allow us to abort a render using a Postpone instance instead of an Error which changes React's
* abort semantics slightly.
*/
export declare function createRenderInBrowserAbortSignal(): AbortSignal;
/**
* In a prerender, we may end up with hanging Promises as inputs due them
* stalling on connection() or because they're loading dynamic data. In that
* case we need to abort the encoding of arguments since they'll never complete.
*/
export declare function createHangingInputAbortSignal(workUnitStore: WorkUnitStore): AbortSignal | undefined;
export declare function annotateDynamicAccess(expression: string, prerenderStore: PrerenderStoreModern | ValidationStoreClient): void;
export declare function useDynamicRouteParams(expression: string): undefined;
export declare function useDynamicSearchParams(expression: string): void;
export declare function trackAllowedDynamicAccess(workStore: WorkStore, componentStack: string, dynamicValidation: DynamicValidationState, clientDynamic: DynamicTrackingState): void;
export declare enum DynamicHoleKind {
/** We know that this hole is caused by runtime data. */
Runtime = 1,
/** We know that this hole is caused by dynamic data. */
Dynamic = 2
}
/** Stores dynamic reasons used during an SSR render in instant validation. */
export type InstantValidationState = {
hasDynamicMetadata: boolean;
hasAllowedClientDynamicAboveBoundary: boolean;
dynamicMetadata: null | Error;
hasDynamicViewport: boolean;
hasAllowedDynamic: boolean;
dynamicErrors: Array<Error>;
validationPreventingErrors: Array<Error>;
thrownErrorsOutsideBoundary: Array<unknown>;
createInstantStack: (() => Error) | null;
};
export declare function createInstantValidationState(createInstantStack: (() => Error) | null): InstantValidationState;
export declare function trackDynamicHoleInNavigation(workStore: WorkStore, componentStack: string, dynamicValidation: InstantValidationState, clientDynamic: DynamicTrackingState, kind: DynamicHoleKind, boundaryState: ValidationBoundaryTracking): void;
export declare function trackThrownErrorInNavigation(workStore: WorkStore, dynamicValidation: InstantValidationState, thrownValue: unknown, componentStack: string): void;
export declare function trackDynamicHoleInRuntimeShell(workStore: WorkStore, componentStack: string, dynamicValidation: DynamicValidationState, clientDynamic: DynamicTrackingState): void;
export declare function trackDynamicHoleInStaticShell(workStore: WorkStore, componentStack: string, dynamicValidation: DynamicValidationState, clientDynamic: DynamicTrackingState): void;
export declare enum PreludeState {
Full = 0,
Empty = 1,
Errored = 2
}
export declare function logDisallowedDynamicError(workStore: WorkStore, error: Error): void;
export declare function throwIfDisallowedDynamic(workStore: WorkStore, prelude: PreludeState, dynamicValidation: DynamicValidationState, serverDynamic: DynamicTrackingState): void;
export declare function getStaticShellDisallowedDynamicReasons(workStore: WorkStore, prelude: PreludeState, dynamicValidation: DynamicValidationState, configAllowsBlocking: boolean): Array<Error>;
export declare function getNavigationDisallowedDynamicReasons(workStore: WorkStore, prelude: PreludeState, dynamicValidation: InstantValidationState, validationSampleTracking: InstantValidationSampleTracking | null, boundaryState: ValidationBoundaryTracking): Array<Error>;
export {};
File diff suppressed because it is too large Load Diff
File diff suppressed because one or more lines are too long
@@ -0,0 +1,4 @@
export declare function generateEncryptionKeyBase64({ isBuild, distDir, }: {
isBuild: boolean;
distDir: string;
}): Promise<string>;
+114
View File
@@ -0,0 +1,114 @@
// This file should never be bundled into application's runtime code and should
// stay in the Next.js server.
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "generateEncryptionKeyBase64", {
enumerable: true,
get: function() {
return generateEncryptionKeyBase64;
}
});
const _path = /*#__PURE__*/ _interop_require_default(require("path"));
const _fs = /*#__PURE__*/ _interop_require_default(require("fs"));
const _cachedir = require("../cache-dir");
const _encryptionutils = require("./encryption-utils");
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
// Keep the key in memory as it should never change during the lifetime of the server in
// both development and production.
let __next_encryption_key_generation_promise = null;
const CONFIG_FILE = '.rscinfo';
const ENCRYPTION_KEY = 'encryption.key';
const ENCRYPTION_EXPIRE_AT = 'encryption.expire_at';
const EXPIRATION = 1000 * 60 * 60 * 24 * 14 // 14 days
;
async function writeCache(distDir, configValue) {
    // Persist the encryption key, stamped with an expiry, into the storage
    // directory that lives under distDir.
    const storageDir = (0, _cachedir.getStorageDirectory)(distDir);
    // No persistent storage available (e.g. ephemeral environments): skip.
    if (!storageDir) return;
    if (!_fs.default.existsSync(storageDir)) {
        await _fs.default.promises.mkdir(storageDir, {
            recursive: true
        });
    }
    const payload = JSON.stringify({
        [ENCRYPTION_KEY]: configValue,
        [ENCRYPTION_EXPIRE_AT]: Date.now() + EXPIRATION
    });
    await _fs.default.promises.writeFile(_path.default.join(storageDir, CONFIG_FILE), payload);
}
// This utility is used to get a key for the cache directory. If the
// key is not present, it will generate a new one and store it in the
// cache directory inside dist.
// The key will also expire after a certain amount of time. Once it
// expires, a new one will be generated.
// During the lifetime of the server, it will be reused and never refreshed.
/**
 * Loads the Server Actions encryption key from the persistent cache under
 * `distDir`, or generates (and caches) a new one via `generateKey`.
 *
 * - Without persistent storage (including development), a fresh key is
 *   produced on every call.
 * - At build time an expired cached key is rotated; at runtime (next start)
 *   the cached key is kept so it matches the key used at build time.
 * - A key supplied via NEXT_SERVER_ACTIONS_ENCRYPTION_KEY invalidates a
 *   mismatching cached key.
 */
async function loadOrGenerateKey(distDir, isBuild, generateKey) {
    const cacheBaseDir = (0, _cachedir.getStorageDirectory)(distDir);
    if (!cacheBaseDir) {
        // There's no persistent storage available. We generate a new key.
        // This also covers development time.
        return await generateKey();
    }
    const configPath = _path.default.join(cacheBaseDir, CONFIG_FILE);
    // Returns the cached key string when it is present and valid,
    // otherwise false (signalling that a new key must be generated).
    async function hasCachedKey() {
        if (!_fs.default.existsSync(configPath)) return false;
        try {
            const config = JSON.parse(await _fs.default.promises.readFile(configPath, 'utf8'));
            if (!config) return false;
            // Reject structurally invalid config entries.
            if (typeof config[ENCRYPTION_KEY] !== 'string' || typeof config[ENCRYPTION_EXPIRE_AT] !== 'number') {
                return false;
            }
            // For build time, we need to rotate the key if it's expired. Otherwise
            // (next start) we have to keep the key as it is so the runtime key matches
            // the build time key.
            if (isBuild && config[ENCRYPTION_EXPIRE_AT] < Date.now()) {
                return false;
            }
            const cachedKey = config[ENCRYPTION_KEY];
            // If encryption key is provided via env, and it's not same as valid cache,
            // we should not use the cached key and respect the env key.
            if (cachedKey && process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY && cachedKey !== process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY) {
                return false;
            }
            return cachedKey;
        } catch {
            // Broken config file. We should generate a new key and overwrite it.
            return false;
        }
    }
    const maybeValidKey = await hasCachedKey();
    if (typeof maybeValidKey === 'string') {
        return maybeValidKey;
    }
    // No valid cached key: generate a fresh one and write it back (with a
    // new expiry) so subsequent runs can reuse it.
    const key = await generateKey();
    await writeCache(distDir, key);
    return key;
}
/**
 * Returns the base64-encoded AES-GCM key used for Server Actions encryption.
 * Precedence: NEXT_SERVER_ACTIONS_ENCRYPTION_KEY env var, then the on-disk
 * cache under distDir (see loadOrGenerateKey), then a freshly generated
 * 256-bit key.
 */
async function generateEncryptionKeyBase64({ isBuild, distDir }) {
    // This avoids it being generated multiple times in parallel.
    // The promise itself is memoized for the process lifetime, so every
    // caller awaits the same key.
    if (!__next_encryption_key_generation_promise) {
        __next_encryption_key_generation_promise = loadOrGenerateKey(distDir, isBuild, async ()=>{
            // An explicitly provided env key always wins over generation.
            const providedKey = process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY;
            if (providedKey) {
                return providedKey;
            }
            // Generate an extractable 256-bit AES-GCM key, export the raw
            // bytes, and base64-encode them for storage/transport.
            const key = await crypto.subtle.generateKey({
                name: 'AES-GCM',
                length: 256
            }, true, [
                'encrypt',
                'decrypt'
            ]);
            const exported = await crypto.subtle.exportKey('raw', key);
            return btoa((0, _encryptionutils.arrayBufferToString)(exported));
        });
    }
    return __next_encryption_key_generation_promise;
}
//# sourceMappingURL=encryption-utils-server.js.map
File diff suppressed because one or more lines are too long
+5
View File
@@ -0,0 +1,5 @@
export declare function arrayBufferToString(buffer: ArrayBuffer | Uint8Array<ArrayBufferLike>): string;
export declare function stringToUint8Array(binary: string): Uint8Array<ArrayBuffer>;
export declare function encrypt(key: CryptoKey, iv: Uint8Array<ArrayBuffer>, data: Uint8Array<ArrayBuffer>): Promise<ArrayBuffer>;
export declare function decrypt(key: CryptoKey, iv: Uint8Array<ArrayBuffer>, data: Uint8Array<ArrayBuffer>): Promise<ArrayBuffer>;
export declare function getActionEncryptionKey(): Promise<CryptoKey>;
+93
View File
@@ -0,0 +1,93 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
0 && (module.exports = {
arrayBufferToString: null,
decrypt: null,
encrypt: null,
getActionEncryptionKey: null,
stringToUint8Array: null
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
arrayBufferToString: function() {
return arrayBufferToString;
},
decrypt: function() {
return decrypt;
},
encrypt: function() {
return encrypt;
},
getActionEncryptionKey: function() {
return getActionEncryptionKey;
},
stringToUint8Array: function() {
return stringToUint8Array;
}
});
const _invarianterror = require("../../shared/lib/invariant-error");
const _manifestssingleton = require("./manifests-singleton");
let __next_loaded_action_key;
function arrayBufferToString(buffer) {
    // Convert a binary buffer into a "binary string" (one character per byte,
    // latin1-style) — the representation btoa() expects.
    const view = new Uint8Array(buffer);
    const total = view.byteLength;
    // @anonrig: V8 has a limit of 65535 arguments in a function.
    // For total < 65535, the single-call fast path is safe and faster.
    // https://github.com/vercel/next.js/pull/56377#pullrequestreview-1656181623
    if (total < 65535) {
        return String.fromCharCode(...view);
    }
    // Large buffers: build the string byte by byte to stay under the arg limit.
    const pieces = [];
    for (const byte of view) {
        pieces.push(String.fromCharCode(byte));
    }
    return pieces.join('');
}
function stringToUint8Array(binary) {
    // Inverse of arrayBufferToString: each UTF-16 code unit of the binary
    // string becomes one byte (values > 255 are truncated by the typed array).
    const size = binary.length;
    const out = new Uint8Array(size);
    let idx = 0;
    while (idx < size) {
        out[idx] = binary.charCodeAt(idx);
        idx += 1;
    }
    return out;
}
function encrypt(key, iv, data) {
return crypto.subtle.encrypt({
name: 'AES-GCM',
iv
}, key, data);
}
function decrypt(key, iv, data) {
return crypto.subtle.decrypt({
name: 'AES-GCM',
iv
}, key, data);
}
/**
 * Imports (once) and returns the AES-GCM CryptoKey used to encrypt/decrypt
 * Server Action bound args.
 *
 * The raw key is taken from `NEXT_SERVER_ACTIONS_ENCRYPTION_KEY` when set,
 * otherwise from the server-actions manifest's `encryptionKey`, decoded from
 * base64 and imported as an extractable AES-GCM key. The result is cached in
 * the module-level `__next_loaded_action_key`.
 *
 * @throws InvariantError (code E571) when neither source provides a key.
 */ async function getActionEncryptionKey() {
    if (__next_loaded_action_key) {
        return __next_loaded_action_key;
    }
    const serverActionsManifest = (0, _manifestssingleton.getServerActionsManifest)();
    // Env var takes precedence over the build-time manifest key.
    const rawKey = process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY || serverActionsManifest.encryptionKey;
    if (rawKey === undefined) {
        // `__NEXT_ERROR_CODE` is attached non-enumerably so Next.js error
        // tooling can identify the error without it leaking into serialization.
        throw Object.defineProperty(new _invarianterror.InvariantError('Missing encryption key for Server Actions'), "__NEXT_ERROR_CODE", {
            value: "E571",
            enumerable: false,
            configurable: true
        });
    }
    __next_loaded_action_key = await crypto.subtle.importKey('raw', stringToUint8Array(atob(rawKey)), 'AES-GCM', true, [
        'encrypt',
        'decrypt'
    ]);
    return __next_loaded_action_key;
}
//# sourceMappingURL=encryption-utils.js.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/encryption-utils.ts"],"sourcesContent":["import { InvariantError } from '../../shared/lib/invariant-error'\nimport { getServerActionsManifest } from './manifests-singleton'\n\nlet __next_loaded_action_key: CryptoKey\n\nexport function arrayBufferToString(\n buffer: ArrayBuffer | Uint8Array<ArrayBufferLike>\n) {\n const bytes = new Uint8Array(buffer)\n const len = bytes.byteLength\n\n // @anonrig: V8 has a limit of 65535 arguments in a function.\n // For len < 65535, this is faster.\n // https://github.com/vercel/next.js/pull/56377#pullrequestreview-1656181623\n if (len < 65535) {\n return String.fromCharCode.apply(null, bytes as unknown as number[])\n }\n\n let binary = ''\n for (let i = 0; i < len; i++) {\n binary += String.fromCharCode(bytes[i])\n }\n return binary\n}\n\nexport function stringToUint8Array(binary: string) {\n const len = binary.length\n const arr = new Uint8Array(len)\n\n for (let i = 0; i < len; i++) {\n arr[i] = binary.charCodeAt(i)\n }\n\n return arr\n}\n\nexport function encrypt(\n key: CryptoKey,\n iv: Uint8Array<ArrayBuffer>,\n data: Uint8Array<ArrayBuffer>\n) {\n return crypto.subtle.encrypt(\n {\n name: 'AES-GCM',\n iv,\n },\n key,\n data\n )\n}\n\nexport function decrypt(\n key: CryptoKey,\n iv: Uint8Array<ArrayBuffer>,\n data: Uint8Array<ArrayBuffer>\n) {\n return crypto.subtle.decrypt(\n {\n name: 'AES-GCM',\n iv,\n },\n key,\n data\n )\n}\n\nexport async function getActionEncryptionKey() {\n if (__next_loaded_action_key) {\n return __next_loaded_action_key\n }\n\n const serverActionsManifest = getServerActionsManifest()\n\n const rawKey =\n process.env.NEXT_SERVER_ACTIONS_ENCRYPTION_KEY ||\n serverActionsManifest.encryptionKey\n\n if (rawKey === undefined) {\n throw new InvariantError('Missing encryption key for Server Actions')\n }\n\n __next_loaded_action_key = await crypto.subtle.importKey(\n 'raw',\n stringToUint8Array(atob(rawKey)),\n 'AES-GCM',\n true,\n ['encrypt', 
'decrypt']\n )\n\n return __next_loaded_action_key\n}\n"],"names":["arrayBufferToString","decrypt","encrypt","getActionEncryptionKey","stringToUint8Array","__next_loaded_action_key","buffer","bytes","Uint8Array","len","byteLength","String","fromCharCode","apply","binary","i","length","arr","charCodeAt","key","iv","data","crypto","subtle","name","serverActionsManifest","getServerActionsManifest","rawKey","process","env","NEXT_SERVER_ACTIONS_ENCRYPTION_KEY","encryptionKey","undefined","InvariantError","importKey","atob"],"mappings":";;;;;;;;;;;;;;;;;;IAKgBA,mBAAmB;eAAnBA;;IA8CAC,OAAO;eAAPA;;IAfAC,OAAO;eAAPA;;IA8BMC,sBAAsB;eAAtBA;;IAzCNC,kBAAkB;eAAlBA;;;gCAzBe;oCACU;AAEzC,IAAIC;AAEG,SAASL,oBACdM,MAAiD;IAEjD,MAAMC,QAAQ,IAAIC,WAAWF;IAC7B,MAAMG,MAAMF,MAAMG,UAAU;IAE5B,6DAA6D;IAC7D,mCAAmC;IACnC,4EAA4E;IAC5E,IAAID,MAAM,OAAO;QACf,OAAOE,OAAOC,YAAY,CAACC,KAAK,CAAC,MAAMN;IACzC;IAEA,IAAIO,SAAS;IACb,IAAK,IAAIC,IAAI,GAAGA,IAAIN,KAAKM,IAAK;QAC5BD,UAAUH,OAAOC,YAAY,CAACL,KAAK,CAACQ,EAAE;IACxC;IACA,OAAOD;AACT;AAEO,SAASV,mBAAmBU,MAAc;IAC/C,MAAML,MAAMK,OAAOE,MAAM;IACzB,MAAMC,MAAM,IAAIT,WAAWC;IAE3B,IAAK,IAAIM,IAAI,GAAGA,IAAIN,KAAKM,IAAK;QAC5BE,GAAG,CAACF,EAAE,GAAGD,OAAOI,UAAU,CAACH;IAC7B;IAEA,OAAOE;AACT;AAEO,SAASf,QACdiB,GAAc,EACdC,EAA2B,EAC3BC,IAA6B;IAE7B,OAAOC,OAAOC,MAAM,CAACrB,OAAO,CAC1B;QACEsB,MAAM;QACNJ;IACF,GACAD,KACAE;AAEJ;AAEO,SAASpB,QACdkB,GAAc,EACdC,EAA2B,EAC3BC,IAA6B;IAE7B,OAAOC,OAAOC,MAAM,CAACtB,OAAO,CAC1B;QACEuB,MAAM;QACNJ;IACF,GACAD,KACAE;AAEJ;AAEO,eAAelB;IACpB,IAAIE,0BAA0B;QAC5B,OAAOA;IACT;IAEA,MAAMoB,wBAAwBC,IAAAA,4CAAwB;IAEtD,MAAMC,SACJC,QAAQC,GAAG,CAACC,kCAAkC,IAC9CL,sBAAsBM,aAAa;IAErC,IAAIJ,WAAWK,WAAW;QACxB,MAAM,qBAA+D,CAA/D,IAAIC,8BAAc,CAAC,8CAAnB,qBAAA;mBAAA;wBAAA;0BAAA;QAA8D;IACtE;IAEA5B,2BAA2B,MAAMiB,OAAOC,MAAM,CAACW,SAAS,CACtD,OACA9B,mBAAmB+B,KAAKR,UACxB,WACA,MACA;QAAC;QAAW;KAAU;IAGxB,OAAOtB;AACT","ignoreList":[0]}
+3
View File
@@ -0,0 +1,3 @@
// Server-only module: encrypts/decrypts the closed-over ("bound") arguments of
// Server Actions so they can safely round-trip through the client.
import 'server-only';
// Serializes `args` with Flight and returns the encrypted payload, keyed by `actionId`.
export declare const encryptActionBoundArgs: (actionId: string, ...args: any[]) => Promise<string>;
// Decrypts a payload produced by `encryptActionBoundArgs` and deserializes it back into values.
export declare function decryptActionBoundArgs(actionId: string, encryptedPromise: Promise<string>): Promise<unknown>;
+259
View File
@@ -0,0 +1,259 @@
// Compiled (SWC) CommonJS module glue for the Server Action bound-args
// encryption module: export wiring, dependency requires, and shared
// module-level state (encoders, dev-only sourcemap helpers).
/* eslint-disable import/no-extraneous-dependencies */ "use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Dead-code export map kept for static named-export detection by cjs-module-lexer.
0 && (module.exports = {
    decryptActionBoundArgs: null,
    encryptActionBoundArgs: null
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    decryptActionBoundArgs: function() {
        return decryptActionBoundArgs;
    },
    encryptActionBoundArgs: function() {
        return encryptActionBoundArgs;
    }
});
// Poison marker: importing this module from client code is a build error.
require("server-only");
const _server = require("react-server-dom-webpack/server");
const _client = require("react-server-dom-webpack/client");
const _nodewebstreamshelper = require("../stream-utils/node-web-streams-helper");
const _encryptionutils = require("./encryption-utils");
const _manifestssingleton = require("./manifests-singleton");
const _workunitasyncstorageexternal = require("./work-unit-async-storage.external");
const _dynamicrendering = require("./dynamic-rendering");
const _react = /*#__PURE__*/ _interop_require_default(require("react"));
function _interop_require_default(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge';
// Shared encoder/decoder instances reused across every encrypt/decrypt call.
const textEncoder = new TextEncoder();
const textDecoder = new TextDecoder();
// Dev-only stack-frame filtering / sourcemap lookup; stripped in production.
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').filterStackFrameDEV : undefined;
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../lib/source-maps').findSourceMapURLDEV : undefined;
/**
 * Decrypt the serialized string with the action id as the salt.
 *
 * The base64 payload layout is: 16-byte IV (as a binary string) followed by
 * the AES-GCM ciphertext. After decryption the plaintext must start with
 * `actionId` — that prefix acts as a checksum proving the payload was
 * encrypted for this specific action (see encodeActionBoundArg).
 *
 * @param actionId - id of the Server Action the payload belongs to
 * @param arg - base64-encoded IV + ciphertext
 * @returns the decrypted Flight-serialized bound args (actionId prefix stripped)
 * @throws E65 when no encryption key is available, E191 when decryption or the
 *         actionId prefix check fails
 */ async function decodeActionBoundArg(actionId, arg) {
    const key = await (0, _encryptionutils.getActionEncryptionKey)();
    if (typeof key === 'undefined') {
        throw Object.defineProperty(new Error(`Missing encryption key for Server Action. This is a bug in Next.js`), "__NEXT_ERROR_CODE", {
            value: "E65",
            enumerable: false,
            configurable: true
        });
    }
    // Get the iv (16 bytes) and the payload from the arg.
    const originalPayload = atob(arg);
    const ivValue = originalPayload.slice(0, 16);
    const payload = originalPayload.slice(16);
    const decrypted = textDecoder.decode(await (0, _encryptionutils.decrypt)(key, (0, _encryptionutils.stringToUint8Array)(ivValue), (0, _encryptionutils.stringToUint8Array)(payload)));
    if (!decrypted.startsWith(actionId)) {
        throw Object.defineProperty(new Error('Invalid Server Action payload: failed to decrypt.'), "__NEXT_ERROR_CODE", {
            value: "E191",
            enumerable: false,
            configurable: true
        });
    }
    return decrypted.slice(actionId.length);
}
/**
 * Encrypt the serialized string with the action id as the salt. Add a prefix to
 * later ensure that the payload is correctly decrypted, similar to a checksum.
 *
 * Output layout (base64-encoded): 16-byte random IV followed by the AES-GCM
 * ciphertext of `actionId + arg`. Mirrors decodeActionBoundArg above.
 *
 * @param actionId - id of the Server Action; prepended to the plaintext
 * @param arg - Flight-serialized bound args
 * @returns base64 string of IV + ciphertext
 * @throws E65 when no encryption key is available
 */ async function encodeActionBoundArg(actionId, arg) {
    const key = await (0, _encryptionutils.getActionEncryptionKey)();
    if (key === undefined) {
        throw Object.defineProperty(new Error(`Missing encryption key for Server Action. This is a bug in Next.js`), "__NEXT_ERROR_CODE", {
            value: "E65",
            enumerable: false,
            configurable: true
        });
    }
    // Get 16 random bytes as iv.
    const randomBytes = new Uint8Array(16);
    // Run getRandomValues outside the work-unit ALS scope — presumably so the
    // random read is not attributed to the current render; TODO confirm intent.
    _workunitasyncstorageexternal.workUnitAsyncStorage.exit(()=>crypto.getRandomValues(randomBytes));
    const ivValue = (0, _encryptionutils.arrayBufferToString)(randomBytes.buffer);
    const encrypted = await (0, _encryptionutils.encrypt)(key, randomBytes, textEncoder.encode(actionId + arg));
    return btoa(ivValue + (0, _encryptionutils.arrayBufferToString)(encrypted));
}
// Tri-state read-tracking enum (compiled TS numeric enum shape): forward
// (name -> number) and reverse (number -> name) mappings on one object.
var ReadStatus = /*#__PURE__*/ (function(map) {
    const names = ['Ready', 'Pending', 'Complete'];
    for (let value = 0; value < names.length; value++) {
        map[map[names[value]] = value] = names[value];
    }
    return map;
})(ReadStatus || {});
/**
 * Serializes the bound args of a Server Action with Flight, then encrypts the
 * result keyed by `actionId`. Wrapped in `React.cache` so repeated calls with
 * the same inputs during one render reuse the same promise.
 *
 * When a work-unit store is active, the encrypted result is additionally
 * memoized in the prerender/render resume-data caches (keyed by
 * actionId + serialized args), and cache-signal reads are tracked so
 * prerendering knows a cache fill is in flight.
 */ const encryptActionBoundArgs = _react.default.cache(async function encryptActionBoundArgs(actionId, ...args) {
    const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
    const cacheSignal = workUnitStore ? (0, _workunitasyncstorageexternal.getCacheSignal)(workUnitStore) : undefined;
    const { clientModules } = (0, _manifestssingleton.getClientReferenceManifest)();
    // Create an error before any asynchronous calls, to capture the original
    // call stack in case we need it when the serialization errors.
    const error = new Error();
    Error.captureStackTrace(error, encryptActionBoundArgs);
    let didCatchError = false;
    const hangingInputAbortSignal = workUnitStore ? (0, _dynamicrendering.createHangingInputAbortSignal)(workUnitStore) : undefined;
    // readStatus uses the ReadStatus enum values: 0=Ready, 1=Pending, 2=Complete.
    let readStatus = 0;
    // Idempotently begin a cache-signal read (at most once).
    function startReadOnce() {
        if (readStatus === 0) {
            readStatus = 1;
            cacheSignal == null ? void 0 : cacheSignal.beginRead();
        }
    }
    // End the read only if one was started; always mark the cycle complete.
    function endReadIfStarted() {
        if (readStatus === 1) {
            cacheSignal == null ? void 0 : cacheSignal.endRead();
        }
        readStatus = 2;
    }
    // streamToString might take longer than a microtask to resolve and then other things
    // waiting on the cache signal might not realize there is another cache to fill so if
    // we are no longer waiting on the bound args serialization via the hangingInputAbortSignal
    // we should eagerly start the cache read to prevent other readers of the cache signal from
    // missing this cache fill. We use a idempotent function to only start reading once because
    // it's also possible that streamToString finishes before the hangingInputAbortSignal aborts.
    if (hangingInputAbortSignal && cacheSignal) {
        hangingInputAbortSignal.addEventListener('abort', startReadOnce, {
            once: true
        });
    }
    const prerenderResumeDataCache = workUnitStore ? (0, _workunitasyncstorageexternal.getPrerenderResumeDataCache)(workUnitStore) : null;
    const renderResumeDataCache = workUnitStore ? (0, _workunitasyncstorageexternal.getRenderResumeDataCache)(workUnitStore) : null;
    // Using Flight to serialize the args into a string.
    const serialized = await (0, _nodewebstreamshelper.streamToString)((0, _server.renderToReadableStream)(args, clientModules, {
        filterStackFrame,
        signal: hangingInputAbortSignal,
        debugChannel: // In Cache Components, we want to cache the encrypted result,
        // and we use the unencrypted bound args as a cache key.
        // In order to do that we need to strip debug info, because it
        // contains timing information and thus changes each time we serialize the args.
        // We can do this by piping debug info into a debug channel that throws it away.
        //
        // Note that this can result in dangling debug info references when we decode the bound args,
        // but React ignores those as long as no debug channel is passed on the decode side, so it's fine:
        // https://github.com/facebook/react/blob/bb8a76c6cc77ea2976d690ea09f5a1b3d9b1792a/packages/react-client/src/ReactFlightClient.js#L1711-L1729
        // https://github.com/facebook/react/blob/bb8a76c6cc77ea2976d690ea09f5a1b3d9b1792a/packages/react-client/src/ReactFlightClient.js#L4005-L4025
        process.env.NODE_ENV === 'development' && (prerenderResumeDataCache || renderResumeDataCache) ? {
            writable: new WritableStream()
        } : undefined,
        onError (err) {
            if (hangingInputAbortSignal == null ? void 0 : hangingInputAbortSignal.aborted) {
                return;
            }
            // We're only reporting one error at a time, starting with the first.
            if (didCatchError) {
                return;
            }
            didCatchError = true;
            // Use the original error message together with the previously created
            // stack, because err.stack is a useless Flight Server call stack.
            error.message = err instanceof Error ? err.message : String(err);
        }
    }), // We pass the abort signal to `streamToString` so that no chunks are
    // included that are emitted after the signal was already aborted. This
    // ensures that we can encode hanging promises.
    hangingInputAbortSignal);
    if (didCatchError) {
        if (process.env.NODE_ENV === 'development') {
            // Logging the error is needed for server functions that are passed to the
            // client where the decryption is not done during rendering. Console
            // replaying allows us to still show the error dev overlay in this case.
            console.error(error);
        }
        endReadIfStarted();
        throw error;
    }
    if (!workUnitStore) {
        // We don't need to call cacheSignal.endRead here because we can't have a cacheSignal
        // if we do not have a workUnitStore.
        return encodeActionBoundArg(actionId, serialized);
    }
    startReadOnce();
    const cacheKey = actionId + serialized;
    // Prefer an already-cached encryption from either resume-data cache.
    const cachedEncrypted = (prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.encryptedBoundArgs.get(cacheKey)) ?? (renderResumeDataCache == null ? void 0 : renderResumeDataCache.encryptedBoundArgs.get(cacheKey));
    if (cachedEncrypted) {
        return cachedEncrypted;
    }
    const encrypted = await encodeActionBoundArg(actionId, serialized);
    endReadIfStarted();
    // NOTE(review): only the prerender cache is written to here; the render
    // cache is read-only at this point — confirm that is intentional.
    prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.encryptedBoundArgs.set(cacheKey, encrypted);
    return encrypted;
});
/**
 * Decrypts a payload produced by `encryptActionBoundArgs` and deserializes it
 * back into the original bound-arg values using the Flight client.
 *
 * When a work-unit store is active, decrypted strings are memoized in the
 * prerender/render resume-data caches and the decryption is tracked via the
 * cache signal. During prerenders the deserialization stream is deliberately
 * kept open until the render signal aborts so hanging promises are not rejected.
 */ async function decryptActionBoundArgs(actionId, encryptedPromise) {
    const encrypted = await encryptedPromise;
    const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
    let decrypted;
    if (workUnitStore) {
        const cacheSignal = (0, _workunitasyncstorageexternal.getCacheSignal)(workUnitStore);
        const prerenderResumeDataCache = (0, _workunitasyncstorageexternal.getPrerenderResumeDataCache)(workUnitStore);
        const renderResumeDataCache = (0, _workunitasyncstorageexternal.getRenderResumeDataCache)(workUnitStore);
        // Reuse a previously decrypted payload from either resume-data cache.
        decrypted = (prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.decryptedBoundArgs.get(encrypted)) ?? (renderResumeDataCache == null ? void 0 : renderResumeDataCache.decryptedBoundArgs.get(encrypted));
        if (!decrypted) {
            cacheSignal == null ? void 0 : cacheSignal.beginRead();
            decrypted = await decodeActionBoundArg(actionId, encrypted);
            cacheSignal == null ? void 0 : cacheSignal.endRead();
            prerenderResumeDataCache == null ? void 0 : prerenderResumeDataCache.decryptedBoundArgs.set(encrypted, decrypted);
        }
    } else {
        decrypted = await decodeActionBoundArg(actionId, encrypted);
    }
    const { edgeRscModuleMapping, rscModuleMapping } = (0, _manifestssingleton.getClientReferenceManifest)();
    // Using Flight to deserialize the args from the string.
    const deserialized = await (0, _client.createFromReadableStream)(new ReadableStream({
        start (controller) {
            controller.enqueue(textEncoder.encode(decrypted));
            switch(workUnitStore == null ? void 0 : workUnitStore.type){
                case 'prerender':
                case 'prerender-runtime':
                    // Explicitly don't close the stream here (until prerendering is
                    // complete) so that hanging promises are not rejected.
                    if (workUnitStore.renderSignal.aborted) {
                        controller.close();
                    } else {
                        workUnitStore.renderSignal.addEventListener('abort', ()=>controller.close(), {
                            once: true
                        });
                    }
                    break;
                case 'prerender-client':
                case 'validation-client':
                case 'prerender-ppr':
                case 'prerender-legacy':
                case 'request':
                case 'cache':
                case 'private-cache':
                case 'unstable-cache':
                case 'generate-static-params':
                case undefined:
                    return controller.close();
                default:
                    // Exhaustiveness check (compiled from a TS `never` assertion).
                    workUnitStore;
            }
        }
    }), {
        findSourceMapURL,
        // NOTE: When we serialized the bound args, we may have used a dummy debug channel to strip debug info.
        // In that case, it's important that we also *don't* pass a debug channel here, because that will make
        // the Flight Client ignore the dangling references:
        // https://github.com/facebook/react/blob/bb8a76c6cc77ea2976d690ea09f5a1b3d9b1792a/packages/react-client/src/ReactFlightClient.js#L1711-L1729
        // https://github.com/facebook/react/blob/bb8a76c6cc77ea2976d690ea09f5a1b3d9b1792a/packages/react-client/src/ReactFlightClient.js#L4005-L4025
        debugChannel: undefined,
        serverConsumerManifest: {
            // moduleLoading must be null because we don't want to trigger preloads of ClientReferences
            // to be added to the current execution. Instead, we'll wait for any ClientReference
            // to be emitted which themselves will handle the preloading.
            moduleLoading: null,
            moduleMap: isEdgeRuntime ? edgeRscModuleMapping : rscModuleMapping,
            serverModuleMap: (0, _manifestssingleton.getServerModuleMap)()
        }
    });
    return deserialized;
}
//# sourceMappingURL=encryption.js.map
File diff suppressed because one or more lines are too long
+31
View File
@@ -0,0 +1,31 @@
// Declarations for the App Router server entry base: re-exports everything the
// generated route entry modules need (Flight server APIs, React, client
// components, async-storage singletons, metadata/segment helpers).
export { createTemporaryReferenceSet, renderToReadableStream, decodeReply, decodeAction, decodeFormState, } from 'react-server-dom-webpack/server';
export { prerender } from 'react-server-dom-webpack/static';
export { captureOwnerStack, createElement, Fragment } from 'react';
export { default as LayoutRouter, LoadingBoundaryProvider, } from '../../client/components/layout-router';
export { default as RenderFromTemplateContext } from '../../client/components/render-from-template-context';
export { workAsyncStorage } from '../app-render/work-async-storage.external';
export { workUnitAsyncStorage } from './work-unit-async-storage.external';
export { actionAsyncStorage } from '../app-render/action-async-storage.external';
export { ClientPageRoot } from '../../client/components/client-page';
export { ClientSegmentRoot } from '../../client/components/client-segment';
export { createServerSearchParamsForServerPage, createPrerenderSearchParamsForClientPage, } from '../request/search-params';
export { createServerParamsForServerSegment, createPrerenderParamsForClientSegment, } from '../request/params';
export * as serverHooks from '../../client/components/hooks-server-context';
export { HTTPAccessFallbackBoundary } from '../../client/components/http-access-fallback/error-boundary';
export { createMetadataComponents } from '../../lib/metadata/metadata';
export { RootLayoutBoundary } from '../../lib/framework/boundary-components';
export { preloadStyle, preloadFont, preconnect } from './rsc/preloads';
export { Postpone } from './rsc/postpone';
export { taintObjectReference } from './rsc/taint';
export { collectSegmentData, collectPrefetchHints, } from './collect-segment-data';
// Lazily resolves the instant-validation module (Node.js + Cache Components only).
export declare const InstantValidation: () => typeof import("./instant-validation/instant-validation") | undefined;
import type { NodeJsPartialHmrUpdate } from '../../build/swc/types';
// Dev-only segment-explorer components; no-op stubs outside development.
declare let SegmentViewNode: typeof import('../../next-devtools/userspace/app/segment-explorer-node').SegmentViewNode;
declare let SegmentViewStateNode: typeof import('../../next-devtools/userspace/app/segment-explorer-node').SegmentViewStateNode;
declare global {
    var __next__clear_chunk_cache__: (() => void) | null | undefined;
    var __turbopack_clear_chunk_cache__: () => void | null | undefined;
    var __turbopack_server_hmr_apply__: ((update: NodeJsPartialHmrUpdate) => boolean) | undefined;
}
export declare function patchFetch(): void;
export { SegmentViewNode, SegmentViewStateNode };
+256
View File
@@ -0,0 +1,256 @@
// Compiled (SWC) CommonJS module glue for the App Router server entry base:
// re-exports Flight server APIs, React helpers, client components, and the
// async-storage singletons as lazy getters.
// eslint-disable-next-line import/no-extraneous-dependencies
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Dead-code export map kept for static named-export detection by cjs-module-lexer.
0 && (module.exports = {
    ClientPageRoot: null,
    ClientSegmentRoot: null,
    Fragment: null,
    HTTPAccessFallbackBoundary: null,
    InstantValidation: null,
    LayoutRouter: null,
    LoadingBoundaryProvider: null,
    Postpone: null,
    RenderFromTemplateContext: null,
    RootLayoutBoundary: null,
    SegmentViewNode: null,
    SegmentViewStateNode: null,
    actionAsyncStorage: null,
    captureOwnerStack: null,
    collectPrefetchHints: null,
    collectSegmentData: null,
    createElement: null,
    createMetadataComponents: null,
    createPrerenderParamsForClientSegment: null,
    createPrerenderSearchParamsForClientPage: null,
    createServerParamsForServerSegment: null,
    createServerSearchParamsForServerPage: null,
    createTemporaryReferenceSet: null,
    decodeAction: null,
    decodeFormState: null,
    decodeReply: null,
    patchFetch: null,
    preconnect: null,
    preloadFont: null,
    preloadStyle: null,
    prerender: null,
    renderToReadableStream: null,
    serverHooks: null,
    taintObjectReference: null,
    workAsyncStorage: null,
    workUnitAsyncStorage: null
});
// Defines each export as an enumerable getter so hoisted declarations and
// late-initialized bindings (e.g. SegmentViewNode) resolve at access time.
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    ClientPageRoot: function() {
        return _clientpage.ClientPageRoot;
    },
    ClientSegmentRoot: function() {
        return _clientsegment.ClientSegmentRoot;
    },
    Fragment: function() {
        return _react.Fragment;
    },
    HTTPAccessFallbackBoundary: function() {
        return _errorboundary.HTTPAccessFallbackBoundary;
    },
    InstantValidation: function() {
        return InstantValidation;
    },
    LayoutRouter: function() {
        return _layoutrouter.default;
    },
    LoadingBoundaryProvider: function() {
        return _layoutrouter.LoadingBoundaryProvider;
    },
    Postpone: function() {
        return _postpone.Postpone;
    },
    RenderFromTemplateContext: function() {
        return _renderfromtemplatecontext.default;
    },
    RootLayoutBoundary: function() {
        return _boundarycomponents.RootLayoutBoundary;
    },
    SegmentViewNode: function() {
        return SegmentViewNode;
    },
    SegmentViewStateNode: function() {
        return SegmentViewStateNode;
    },
    actionAsyncStorage: function() {
        return _actionasyncstorageexternal.actionAsyncStorage;
    },
    captureOwnerStack: function() {
        return _react.captureOwnerStack;
    },
    collectPrefetchHints: function() {
        return _collectsegmentdata.collectPrefetchHints;
    },
    collectSegmentData: function() {
        return _collectsegmentdata.collectSegmentData;
    },
    createElement: function() {
        return _react.createElement;
    },
    createMetadataComponents: function() {
        return _metadata.createMetadataComponents;
    },
    createPrerenderParamsForClientSegment: function() {
        return _params.createPrerenderParamsForClientSegment;
    },
    createPrerenderSearchParamsForClientPage: function() {
        return _searchparams.createPrerenderSearchParamsForClientPage;
    },
    createServerParamsForServerSegment: function() {
        return _params.createServerParamsForServerSegment;
    },
    createServerSearchParamsForServerPage: function() {
        return _searchparams.createServerSearchParamsForServerPage;
    },
    createTemporaryReferenceSet: function() {
        return _server.createTemporaryReferenceSet;
    },
    decodeAction: function() {
        return _server.decodeAction;
    },
    decodeFormState: function() {
        return _server.decodeFormState;
    },
    decodeReply: function() {
        return _server.decodeReply;
    },
    patchFetch: function() {
        return patchFetch;
    },
    preconnect: function() {
        return _preloads.preconnect;
    },
    preloadFont: function() {
        return _preloads.preloadFont;
    },
    preloadStyle: function() {
        return _preloads.preloadStyle;
    },
    prerender: function() {
        return _static.prerender;
    },
    renderToReadableStream: function() {
        return _server.renderToReadableStream;
    },
    serverHooks: function() {
        return _hooksservercontext;
    },
    taintObjectReference: function() {
        return _taint.taintObjectReference;
    },
    workAsyncStorage: function() {
        return _workasyncstorageexternal.workAsyncStorage;
    },
    workUnitAsyncStorage: function() {
        return _workunitasyncstorageexternal.workUnitAsyncStorage;
    }
});
const _server = require("react-server-dom-webpack/server");
const _static = require("react-server-dom-webpack/static");
const _react = require("react");
const _layoutrouter = /*#__PURE__*/ _interop_require_wildcard(require("../../client/components/layout-router"));
const _renderfromtemplatecontext = /*#__PURE__*/ _interop_require_default(require("../../client/components/render-from-template-context"));
const _workasyncstorageexternal = require("../app-render/work-async-storage.external");
const _workunitasyncstorageexternal = require("./work-unit-async-storage.external");
const _actionasyncstorageexternal = require("../app-render/action-async-storage.external");
const _clientpage = require("../../client/components/client-page");
const _clientsegment = require("../../client/components/client-segment");
const _searchparams = require("../request/search-params");
const _params = require("../request/params");
const _hooksservercontext = /*#__PURE__*/ _interop_require_wildcard(require("../../client/components/hooks-server-context"));
const _errorboundary = require("../../client/components/http-access-fallback/error-boundary");
const _metadata = require("../../lib/metadata/metadata");
const _boundarycomponents = require("../../lib/framework/boundary-components");
const _preloads = require("./rsc/preloads");
const _postpone = require("./rsc/postpone");
const _taint = require("./rsc/taint");
const _collectsegmentdata = require("./collect-segment-data");
const _patchfetch = require("../lib/patch-fetch");
function _interop_require_default(obj) {
    // CommonJS/ESM interop: real ES modules pass through untouched, plain
    // CommonJS exports get wrapped so `.default` points at the exports object.
    if (obj && obj.__esModule) {
        return obj;
    }
    return { default: obj };
}
function _getRequireWildcardCache(nodeInterop) {
    // Lazily create the two interop caches on first call, then replace this
    // function with a cheap lookup closure over them. Returns null when
    // WeakMap is unavailable (no caching possible).
    if (typeof WeakMap !== "function") {
        return null;
    }
    var babelCache = new WeakMap();
    var nodeCache = new WeakMap();
    _getRequireWildcardCache = function(flag) {
        return flag ? nodeCache : babelCache;
    };
    return _getRequireWildcardCache(nodeInterop);
}
// Namespace-import interop (`import * as ns`): copies all own properties of a
// CommonJS exports object onto a fresh null-prototype namespace object with
// `.default` pointing at the original, memoized per module via WeakMap.
function _interop_require_wildcard(obj, nodeInterop) {
    // Real ES modules need no wrapping.
    if (!nodeInterop && obj && obj.__esModule) {
        return obj;
    }
    // Primitives and null can't carry properties; just expose them as default.
    if (obj === null || typeof obj !== "object" && typeof obj !== "function") {
        return {
            default: obj
        };
    }
    var cache = _getRequireWildcardCache(nodeInterop);
    if (cache && cache.has(obj)) {
        return cache.get(obj);
    }
    var newObj = {
        __proto__: null
    };
    var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor;
    for(var key in obj){
        if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) {
            var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null;
            // Preserve accessors (live bindings) instead of snapshotting values.
            if (desc && (desc.get || desc.set)) {
                Object.defineProperty(newObj, key, desc);
            } else {
                newObj[key] = obj[key];
            }
        }
    }
    newObj.default = obj;
    if (cache) {
        cache.set(obj, newObj);
    }
    return newObj;
}
// Lazily requires the instant-validation module. Only available on the
// Node.js runtime with Cache Components enabled; returns undefined elsewhere
// (the edge bundle must not pull this dependency in).
const InstantValidation = ()=>{
    if (process.env.NEXT_RUNTIME !== 'edge' && process.env.__NEXT_CACHE_COMPONENTS) {
        return require('./instant-validation/instant-validation');
    } else {
        return undefined;
    }
};
// Segment-explorer devtools components: render-nothing stubs by default,
// swapped for the real implementations in development builds only.
let SegmentViewNode = ()=>null;
let SegmentViewStateNode = ()=>null;
if (process.env.NODE_ENV === 'development') {
    const mod = require('../../next-devtools/userspace/app/segment-explorer-node');
    SegmentViewNode = mod.SegmentViewNode;
    SegmentViewStateNode = mod.SegmentViewStateNode;
}
// hot-reloader modules are not bundled so we need to inject `__next__clear_chunk_cache__`
// into globalThis from this file which is bundled.
if (process.env.TURBOPACK) {
    // `__turbopack_clear_chunk_cache__` is provided by the Turbopack runtime
    // (declared in entry-base.d.ts as a global).
    globalThis.__next__clear_chunk_cache__ = __turbopack_clear_chunk_cache__;
} else {
    // Webpack does not have chunks on the server
    globalThis.__next__clear_chunk_cache__ = null;
}
// Patches the global fetch with Next.js caching/revalidation behavior, binding
// it to this entry's work and work-unit async-storage singletons so patched
// fetches can see the current request/render context.
function patchFetch() {
    return (0, _patchfetch.patchFetch)({
        workAsyncStorage: _workasyncstorageexternal.workAsyncStorage,
        workUnitAsyncStorage: _workunitasyncstorageexternal.workUnitAsyncStorage
    });
}
//# sourceMappingURL=entry-base.js.map
File diff suppressed because one or more lines are too long
+7
View File
@@ -0,0 +1,7 @@
import RenderResult, { type RenderResultMetadata } from '../render-result';
/**
 * Flight Response is always set to RSC_CONTENT_TYPE_HEADER to ensure it does not get interpreted as HTML.
 *
 * @param response  the RSC payload, either fully materialized or streaming
 * @param metadata  optional render metadata forwarded to RenderResult (defaults to {})
 * @param waitUntil optional promise the response lifetime should be extended by
 */
export declare class FlightRenderResult extends RenderResult {
    constructor(response: string | ReadableStream<Uint8Array>, metadata?: RenderResultMetadata, waitUntil?: Promise<unknown>);
}
+28
View File
@@ -0,0 +1,28 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "FlightRenderResult", {
enumerable: true,
get: function() {
return FlightRenderResult;
}
});
const _approuterheaders = require("../../client/components/app-router-headers");
const _renderresult = /*#__PURE__*/ _interop_require_default(require("../render-result"));
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
// A RenderResult specialization for Flight (RSC) payloads: always forces the
// RSC content type so the response is never interpreted as HTML.
class FlightRenderResult extends _renderresult.default {
    /**
     * @param response  RSC payload, either a string or a streaming body
     * @param metadata  optional render metadata (defaults to {})
     * @param waitUntil optional promise extending the response lifetime
     */ constructor(response, metadata = {}, waitUntil){
        super(response, {
            contentType: _approuterheaders.RSC_CONTENT_TYPE_HEADER,
            metadata,
            waitUntil
        });
    }
}
//# sourceMappingURL=flight-render-result.js.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/flight-render-result.ts"],"sourcesContent":["import { RSC_CONTENT_TYPE_HEADER } from '../../client/components/app-router-headers'\nimport RenderResult, { type RenderResultMetadata } from '../render-result'\n\n/**\n * Flight Response is always set to RSC_CONTENT_TYPE_HEADER to ensure it does not get interpreted as HTML.\n */\nexport class FlightRenderResult extends RenderResult {\n constructor(\n response: string | ReadableStream<Uint8Array>,\n metadata: RenderResultMetadata = {},\n waitUntil?: Promise<unknown>\n ) {\n super(response, {\n contentType: RSC_CONTENT_TYPE_HEADER,\n metadata,\n waitUntil,\n })\n }\n}\n"],"names":["FlightRenderResult","RenderResult","constructor","response","metadata","waitUntil","contentType","RSC_CONTENT_TYPE_HEADER"],"mappings":";;;;+BAMaA;;;eAAAA;;;kCAN2B;qEACgB;;;;;;AAKjD,MAAMA,2BAA2BC,qBAAY;IAClDC,YACEC,QAA6C,EAC7CC,WAAiC,CAAC,CAAC,EACnCC,SAA4B,CAC5B;QACA,KAAK,CAACF,UAAU;YACdG,aAAaC,yCAAuB;YACpCH;YACAC;QACF;IACF;AACF","ignoreList":[0]}
+2
View File
@@ -0,0 +1,2 @@
import type { AppRenderContext } from './app-render';
// Builds the query string appended to asset URLs: a dev-only `?v=<timestamp>`
// cache-buster (webpack only) and/or a `dpl=<token>` deployment pin.
export declare function getAssetQueryString(ctx: AppRenderContext, addTimestamp: boolean): string;
+29
View File
@@ -0,0 +1,29 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "getAssetQueryString", {
enumerable: true,
get: function() {
return getAssetQueryString;
}
});
// Captured once at module load: dev-only cache-busting applies to webpack only.
const isDev = process.env.NODE_ENV === 'development';
const isTurbopack = !!process.env.TURBOPACK;
function getAssetQueryString(ctx, addTimestamp) {
    // In development a `v=<timestamp>` param lets React re-fetch assets when a
    // new RSC response arrives. Turbopack handles asset HMR itself, so the
    // cache-busting param is only added for webpack dev builds.
    const versioned = isDev && !isTurbopack && addTimestamp;
    const params = [];
    if (versioned) {
        params.push(`v=${ctx.requestTimestamp}`);
    }
    // A deployment token pins assets to a specific deployment.
    if (ctx.sharedContext.clientAssetToken) {
        params.push(`dpl=${ctx.sharedContext.clientAssetToken}`);
    }
    return params.length ? `?${params.join('&')}` : '';
}
//# sourceMappingURL=get-asset-query-string.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/get-asset-query-string.ts"],"sourcesContent":["import type { AppRenderContext } from './app-render'\n\nconst isDev = process.env.NODE_ENV === 'development'\nconst isTurbopack = !!process.env.TURBOPACK\n\nexport function getAssetQueryString(\n ctx: AppRenderContext,\n addTimestamp: boolean\n) {\n let qs = ''\n\n // In development we add the request timestamp to allow react to\n // reload assets when a new RSC response is received.\n // Turbopack handles HMR of assets itself and react doesn't need to reload them\n // so this approach is not needed for Turbopack.\n const shouldAddVersion = isDev && !isTurbopack && addTimestamp\n if (shouldAddVersion) {\n qs += `?v=${ctx.requestTimestamp}`\n }\n\n if (ctx.sharedContext.clientAssetToken) {\n qs += `${shouldAddVersion ? '&' : '?'}dpl=${ctx.sharedContext.clientAssetToken}`\n }\n return qs\n}\n"],"names":["getAssetQueryString","isDev","process","env","NODE_ENV","isTurbopack","TURBOPACK","ctx","addTimestamp","qs","shouldAddVersion","requestTimestamp","sharedContext","clientAssetToken"],"mappings":";;;;+BAKgBA;;;eAAAA;;;AAHhB,MAAMC,QAAQC,QAAQC,GAAG,CAACC,QAAQ,KAAK;AACvC,MAAMC,cAAc,CAAC,CAACH,QAAQC,GAAG,CAACG,SAAS;AAEpC,SAASN,oBACdO,GAAqB,EACrBC,YAAqB;IAErB,IAAIC,KAAK;IAET,gEAAgE;IAChE,qDAAqD;IACrD,+EAA+E;IAC/E,gDAAgD;IAChD,MAAMC,mBAAmBT,SAAS,CAACI,eAAeG;IAClD,IAAIE,kBAAkB;QACpBD,MAAM,CAAC,GAAG,EAAEF,IAAII,gBAAgB,EAAE;IACpC;IAEA,IAAIJ,IAAIK,aAAa,CAACC,gBAAgB,EAAE;QACtCJ,MAAM,GAAGC,mBAAmB,MAAM,IAAI,IAAI,EAAEH,IAAIK,aAAa,CAACC,gBAAgB,EAAE;IAClF;IACA,OAAOJ;AACT","ignoreList":[0]}
@@ -0,0 +1,8 @@
import type { CssResource } from '../../build/webpack/plugins/flight-manifest-plugin';
/**
* Get external stylesheet link hrefs based on server CSS manifest.
*/
export declare function getLinkAndScriptTags(filePath: string, injectedCSS: Set<string>, injectedScripts: Set<string>, collectNewImports?: boolean): {
styles: CssResource[];
scripts: string[];
};
+49
View File
@@ -0,0 +1,49 @@
"use strict";
// Compiled CommonJS output: mark the module as an ES-module interop namespace.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Expose getLinkAndScriptTags through a getter (live-binding emulation).
Object.defineProperty(exports, "getLinkAndScriptTags", {
    enumerable: true,
    get: function() {
        return getLinkAndScriptTags;
    }
});
// Provides getClientReferenceManifest(), used below to look up entry assets.
const _manifestssingleton = require("./manifests-singleton");
/**
 * Resolve the CSS and JS chunks that a layout/page file contributes, based on
 * the client reference manifest.
 *
 * @param filePath segment file path; the manifest is keyed by this path minus
 *   its extension.
 * @param injectedCSS CSS paths already emitted higher in the tree (skipped).
 * @param injectedScripts script files already emitted (skipped).
 * @param collectNewImports when true, newly returned assets are also recorded
 *   into the injected sets so deeper layers skip them.
 */ function getLinkAndScriptTags(filePath, injectedCSS, injectedScripts, collectNewImports) {
    // Manifest entries are keyed without the file extension.
    const manifestKey = filePath.replace(/\.[^.]+$/, '');
    const { entryCSSFiles, entryJSFiles } = (0, _manifestssingleton.getClientReferenceManifest)();
    const cssEntries = entryCSSFiles[manifestKey];
    const jsEntries = entryJSFiles != null ? entryJSFiles[manifestKey] : void 0;
    // Sets preserve insertion order while guarding against duplicate entries.
    const newStyles = new Set();
    const newScripts = new Set();
    if (cssEntries) {
        for (const cssResource of cssEntries){
            if (injectedCSS.has(cssResource.path)) continue;
            if (collectNewImports) {
                injectedCSS.add(cssResource.path);
            }
            newStyles.add(cssResource);
        }
    }
    if (jsEntries) {
        for (const scriptFile of jsEntries){
            if (injectedScripts.has(scriptFile)) continue;
            if (collectNewImports) {
                injectedScripts.add(scriptFile);
            }
            newScripts.add(scriptFile);
        }
    }
    return {
        styles: [
            ...newStyles
        ],
        scripts: [
            ...newScripts
        ]
    };
}
//# sourceMappingURL=get-css-inlined-link-tags.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/get-css-inlined-link-tags.tsx"],"sourcesContent":["import type { CssResource } from '../../build/webpack/plugins/flight-manifest-plugin'\nimport { getClientReferenceManifest } from './manifests-singleton'\n\n/**\n * Get external stylesheet link hrefs based on server CSS manifest.\n */\nexport function getLinkAndScriptTags(\n filePath: string,\n injectedCSS: Set<string>,\n injectedScripts: Set<string>,\n collectNewImports?: boolean\n): { styles: CssResource[]; scripts: string[] } {\n const filePathWithoutExt = filePath.replace(/\\.[^.]+$/, '')\n const cssChunks = new Set<CssResource>()\n const jsChunks = new Set<string>()\n const { entryCSSFiles, entryJSFiles } = getClientReferenceManifest()\n const cssFiles = entryCSSFiles[filePathWithoutExt]\n const jsFiles = entryJSFiles?.[filePathWithoutExt]\n\n if (cssFiles) {\n for (const css of cssFiles) {\n if (!injectedCSS.has(css.path)) {\n if (collectNewImports) {\n injectedCSS.add(css.path)\n }\n cssChunks.add(css)\n }\n }\n }\n\n if (jsFiles) {\n for (const file of jsFiles) {\n if (!injectedScripts.has(file)) {\n if (collectNewImports) {\n injectedScripts.add(file)\n }\n jsChunks.add(file)\n }\n }\n }\n\n return { styles: [...cssChunks], scripts: [...jsChunks] 
}\n}\n"],"names":["getLinkAndScriptTags","filePath","injectedCSS","injectedScripts","collectNewImports","filePathWithoutExt","replace","cssChunks","Set","jsChunks","entryCSSFiles","entryJSFiles","getClientReferenceManifest","cssFiles","jsFiles","css","has","path","add","file","styles","scripts"],"mappings":";;;;+BAMgBA;;;eAAAA;;;oCAL2B;AAKpC,SAASA,qBACdC,QAAgB,EAChBC,WAAwB,EACxBC,eAA4B,EAC5BC,iBAA2B;IAE3B,MAAMC,qBAAqBJ,SAASK,OAAO,CAAC,YAAY;IACxD,MAAMC,YAAY,IAAIC;IACtB,MAAMC,WAAW,IAAID;IACrB,MAAM,EAAEE,aAAa,EAAEC,YAAY,EAAE,GAAGC,IAAAA,8CAA0B;IAClE,MAAMC,WAAWH,aAAa,CAACL,mBAAmB;IAClD,MAAMS,UAAUH,gCAAAA,YAAc,CAACN,mBAAmB;IAElD,IAAIQ,UAAU;QACZ,KAAK,MAAME,OAAOF,SAAU;YAC1B,IAAI,CAACX,YAAYc,GAAG,CAACD,IAAIE,IAAI,GAAG;gBAC9B,IAAIb,mBAAmB;oBACrBF,YAAYgB,GAAG,CAACH,IAAIE,IAAI;gBAC1B;gBACAV,UAAUW,GAAG,CAACH;YAChB;QACF;IACF;IAEA,IAAID,SAAS;QACX,KAAK,MAAMK,QAAQL,QAAS;YAC1B,IAAI,CAACX,gBAAgBa,GAAG,CAACG,OAAO;gBAC9B,IAAIf,mBAAmB;oBACrBD,gBAAgBe,GAAG,CAACC;gBACtB;gBACAV,SAASS,GAAG,CAACC;YACf;QACF;IACF;IAEA,OAAO;QAAEC,QAAQ;eAAIb;SAAU;QAAEc,SAAS;eAAIZ;SAAS;IAAC;AAC1D","ignoreList":[0]}
+10
View File
@@ -0,0 +1,10 @@
import type { AppRenderContext } from './app-render';
import type { PreloadCallbacks } from './types';
/**
 * Collects the style/script nodes for one layout or page segment and queues
 * font-preload / preconnect callbacks onto `preloadCallbacks`. Returns null
 * when the segment contributes no styles and no scripts.
 */
export declare function getLayerAssets({ ctx, layoutOrPagePath, injectedCSS: injectedCSSWithCurrentLayout, injectedJS: injectedJSWithCurrentLayout, injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout, preloadCallbacks, }: {
    layoutOrPagePath: string | undefined;
    injectedCSS: Set<string>;
    injectedJS: Set<string>;
    injectedFontPreloadTags: Set<string>;
    ctx: AppRenderContext;
    preloadCallbacks: PreloadCallbacks;
}): React.ReactNode;
+65
View File
@@ -0,0 +1,65 @@
"use strict";
// Compiled CommonJS output: mark the module as an ES-module interop namespace.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Expose getLayerAssets through a getter (live-binding emulation).
Object.defineProperty(exports, "getLayerAssets", {
    enumerable: true,
    get: function() {
        return getLayerAssets;
    }
});
// CSS/JS chunk lookup for a segment (getLinkAndScriptTags).
const _getcssinlinedlinktags = require("./get-css-inlined-link-tags");
// Font files needing preload tags for a segment (getPreloadableFonts).
const _getpreloadablefonts = require("./get-preloadable-fonts");
// Asset URL query-string builder (getAssetQueryString).
const _getassetquerystring = require("./get-asset-query-string");
// Percent-encoding helper for asset paths (encodeURIPath).
const _encodeuripath = require("../../shared/lib/encode-uri-path");
// Turns CssResource entries into renderable style elements (renderCssResource).
const _rendercssresource = require("./render-css-resource");
/**
 * Collect the style/script nodes for one layout or page segment and register
 * preload callbacks on `preloadCallbacks`.
 *
 * Returns the combined list of style and script nodes, or null when the
 * segment contributes neither.
 */ function getLayerAssets({ ctx, layoutOrPagePath, injectedCSS: injectedCSSWithCurrentLayout, injectedJS: injectedJSWithCurrentLayout, injectedFontPreloadTags: injectedFontPreloadTagsWithCurrentLayout, preloadCallbacks }) {
    const { componentMod: { createElement } } = ctx;
    // Without a segment path there are no segment-specific assets.
    const tags = layoutOrPagePath ? (0, _getcssinlinedlinktags.getLinkAndScriptTags)(layoutOrPagePath, injectedCSSWithCurrentLayout, injectedJSWithCurrentLayout, true) : {
        styles: [],
        scripts: []
    };
    const fontsToPreload = layoutOrPagePath ? (0, _getpreloadablefonts.getPreloadableFonts)(ctx.renderOpts.nextFontManifest, layoutOrPagePath, injectedFontPreloadTagsWithCurrentLayout) : null;
    if (fontsToPreload) {
        if (fontsToPreload.length === 0) {
            // Fonts exist for this segment but none are newly preloadable here:
            // preconnect to the asset origin so the eventual font fetch starts
            // its connection early. A non-URL assetPrefix (e.g. a bare path)
            // makes the URL constructor throw; fall back to same-origin.
            let assetOrigin = null;
            try {
                assetOrigin = new URL(ctx.assetPrefix).origin;
            } catch (error) {
                assetOrigin = null;
            }
            const preconnectTarget = assetOrigin !== null ? assetOrigin : '/';
            preloadCallbacks.push(()=>{
                ctx.componentMod.preconnect(preconnectTarget, 'anonymous', ctx.nonce);
            });
        } else {
            // Queue one preload callback per newly discovered font file.
            const fontExtensionPattern = /\.(woff|woff2|eot|ttf|otf)$/;
            for (const fontFilename of fontsToPreload){
                // NOTE(review): assumes every manifest font file ends in a known
                // font extension — exec() returning null would throw here
                // (a `!` assertion in the original TS source).
                const extension = fontExtensionPattern.exec(fontFilename)[1];
                const mimeType = `font/${extension}`;
                const fontHref = `${ctx.assetPrefix}/_next/${(0, _encodeuripath.encodeURIPath)(fontFilename)}${(0, _getassetquerystring.getAssetQueryString)(ctx, true)}`;
                preloadCallbacks.push(()=>{
                    ctx.componentMod.preloadFont(fontHref, mimeType, ctx.renderOpts.crossOrigin, ctx.nonce);
                });
            }
        }
    }
    const styles = (0, _rendercssresource.renderCssResource)(tags.styles, ctx, preloadCallbacks);
    const scripts = tags.scripts ? tags.scripts.map((scriptHref, index)=>{
        const fullSrc = `${ctx.assetPrefix}/_next/${(0, _encodeuripath.encodeURIPath)(scriptHref)}${(0, _getassetquerystring.getAssetQueryString)(ctx, true)}`;
        return createElement('script', {
            src: fullSrc,
            async: true,
            key: `script-${index}`,
            nonce: ctx.nonce
        });
    }) : [];
    // Render nothing at all when the segment contributed no assets.
    if (styles.length === 0 && scripts.length === 0) {
        return null;
    }
    return [
        ...styles,
        ...scripts
    ];
}
//# sourceMappingURL=get-layer-assets.js.map
File diff suppressed because one or more lines are too long
+10
View File
@@ -0,0 +1,10 @@
import type { NextFontManifest } from '../../build/webpack/plugins/next-font-manifest-plugin';
import type { DeepReadonly } from '../../shared/lib/deep-readonly';
/**
* Get hrefs for fonts to preload
* Returns null if there are no fonts at all.
* Returns string[] if there are fonts to preload (font paths)
* Returns empty string[] if there are fonts but none to preload and no other fonts have been preloaded
* Returns null if there are fonts but none to preload and at least some were previously preloaded
*/
export declare function getPreloadableFonts(nextFontManifest: DeepReadonly<NextFontManifest> | undefined, filePath: string | undefined, injectedFontPreloadTags: Set<string>): string[] | null;
+39
View File
@@ -0,0 +1,39 @@
"use strict";
// Compiled CommonJS output: mark the module as an ES-module interop namespace.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Expose getPreloadableFonts through a getter (live-binding emulation).
Object.defineProperty(exports, "getPreloadableFonts", {
    enumerable: true,
    get: function() {
        return getPreloadableFonts;
    }
});
/**
 * Compute which font files should get preload tags for a given segment file.
 *
 * Every newly returned font is also recorded into `injectedFontPreloadTags`
 * (mutated) so repeated calls for deeper layers skip it.
 *
 * Return contract (unchanged from the declaration file):
 *  - null when there is no manifest, no path, or no font usage for the key;
 *  - a sorted string[] of newly preloadable font paths;
 *  - [] when the segment uses fonts, none are new, and nothing was preloaded
 *    before;
 *  - null when none are new but some were previously preloaded.
 */ function getPreloadableFonts(nextFontManifest, filePath, injectedFontPreloadTags) {
    // Nothing to do without a manifest or a concrete segment path.
    if (!nextFontManifest || !filePath) {
        return null;
    }
    // Manifest entries are keyed by the file path minus its extension.
    const manifestKey = filePath.replace(/\.[^.]+$/, '');
    const manifestFonts = nextFontManifest.app[manifestKey];
    const newFonts = [];
    if (manifestFonts) {
        for (const fontFile of manifestFonts){
            if (injectedFontPreloadTags.has(fontFile)) continue;
            newFonts.push(fontFile);
            injectedFontPreloadTags.add(fontFile);
        }
    }
    if (newFonts.length > 0) {
        return newFonts.sort();
    }
    // Fonts are used here, none are new, and nothing was injected anywhere:
    // signal "uses fonts, nothing to preload" with an empty array.
    if (manifestFonts && injectedFontPreloadTags.size === 0) {
        return [];
    }
    return null;
}
//# sourceMappingURL=get-preloadable-fonts.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/server/app-render/get-preloadable-fonts.tsx"],"sourcesContent":["import type { NextFontManifest } from '../../build/webpack/plugins/next-font-manifest-plugin'\nimport type { DeepReadonly } from '../../shared/lib/deep-readonly'\n\n/**\n * Get hrefs for fonts to preload\n * Returns null if there are no fonts at all.\n * Returns string[] if there are fonts to preload (font paths)\n * Returns empty string[] if there are fonts but none to preload and no other fonts have been preloaded\n * Returns null if there are fonts but none to preload and at least some were previously preloaded\n */\nexport function getPreloadableFonts(\n nextFontManifest: DeepReadonly<NextFontManifest> | undefined,\n filePath: string | undefined,\n injectedFontPreloadTags: Set<string>\n): string[] | null {\n if (!nextFontManifest || !filePath) {\n return null\n }\n const filepathWithoutExtension = filePath.replace(/\\.[^.]+$/, '')\n const fontFiles = new Set<string>()\n let foundFontUsage = false\n\n const preloadedFontFiles = nextFontManifest.app[filepathWithoutExtension]\n if (preloadedFontFiles) {\n foundFontUsage = true\n for (const fontFile of preloadedFontFiles) {\n if (!injectedFontPreloadTags.has(fontFile)) {\n fontFiles.add(fontFile)\n injectedFontPreloadTags.add(fontFile)\n }\n }\n }\n\n if (fontFiles.size) {\n return [...fontFiles].sort()\n } else if (foundFontUsage && injectedFontPreloadTags.size === 0) {\n return []\n } else {\n return null\n 
}\n}\n"],"names":["getPreloadableFonts","nextFontManifest","filePath","injectedFontPreloadTags","filepathWithoutExtension","replace","fontFiles","Set","foundFontUsage","preloadedFontFiles","app","fontFile","has","add","size","sort"],"mappings":";;;;+BAUgBA;;;eAAAA;;;AAAT,SAASA,oBACdC,gBAA4D,EAC5DC,QAA4B,EAC5BC,uBAAoC;IAEpC,IAAI,CAACF,oBAAoB,CAACC,UAAU;QAClC,OAAO;IACT;IACA,MAAME,2BAA2BF,SAASG,OAAO,CAAC,YAAY;IAC9D,MAAMC,YAAY,IAAIC;IACtB,IAAIC,iBAAiB;IAErB,MAAMC,qBAAqBR,iBAAiBS,GAAG,CAACN,yBAAyB;IACzE,IAAIK,oBAAoB;QACtBD,iBAAiB;QACjB,KAAK,MAAMG,YAAYF,mBAAoB;YACzC,IAAI,CAACN,wBAAwBS,GAAG,CAACD,WAAW;gBAC1CL,UAAUO,GAAG,CAACF;gBACdR,wBAAwBU,GAAG,CAACF;YAC9B;QACF;IACF;IAEA,IAAIL,UAAUQ,IAAI,EAAE;QAClB,OAAO;eAAIR;SAAU,CAACS,IAAI;IAC5B,OAAO,IAAIP,kBAAkBL,wBAAwBW,IAAI,KAAK,GAAG;QAC/D,OAAO,EAAE;IACX,OAAO;QACL,OAAO;IACT;AACF","ignoreList":[0]}
@@ -0,0 +1 @@
export declare function getScriptNonceFromHeader(cspHeaderValue: string): string | undefined;

Some files were not shown because too many files have changed in this diff Show More