.
This commit is contained in:
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
// Name of the instant-validation boundary component; used as a namespace key
// in the implementation so the function name survives bundling/minification.
export declare const INSTANT_VALIDATION_BOUNDARY_NAME = "__next_instant_validation_boundary__";
|
||||
Generated
Vendored
+13
@@ -0,0 +1,13 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "INSTANT_VALIDATION_BOUNDARY_NAME", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return INSTANT_VALIDATION_BOUNDARY_NAME;
|
||||
}
|
||||
});
|
||||
const INSTANT_VALIDATION_BOUNDARY_NAME = '__next_instant_validation_boundary__';
|
||||
|
||||
//# sourceMappingURL=boundary-constants.js.map
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/app-render/instant-validation/boundary-constants.ts"],"sourcesContent":["export const INSTANT_VALIDATION_BOUNDARY_NAME =\n '__next_instant_validation_boundary__'\n"],"names":["INSTANT_VALIDATION_BOUNDARY_NAME"],"mappings":";;;;+BAAaA;;;eAAAA;;;AAAN,MAAMA,mCACX","ignoreList":[0]}
|
||||
+12
@@ -0,0 +1,12 @@
|
||||
import { type ReactNode } from 'react';
|
||||
type BoundaryPlacement = null | string;
|
||||
export declare const InstantValidationBoundaryContext: import("react").Context<BoundaryPlacement>;
|
||||
export declare function PlaceValidationBoundaryBelowThisLevel({ id, children, }: {
|
||||
id: string;
|
||||
children: ReactNode;
|
||||
}): import("react/jsx-runtime").JSX.Element;
|
||||
export declare function RenderValidationBoundaryAtThisLevel({ id, children, }: {
|
||||
id: string;
|
||||
children: ReactNode;
|
||||
}): import("react/jsx-runtime").JSX.Element;
|
||||
export {};
|
||||
+103
@@ -0,0 +1,103 @@
|
||||
/* eslint-disable @next/internal/no-ambiguous-jsx -- React Client */ 
// Do not put a "use client" directive here. Import this module via the shim in
// `packages/next/src/client/components/instant-validation/boundary.tsx` instead.
// 'use client'
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Dead branch kept so static analyzers can discover the named exports.
0 && (module.exports = {
    InstantValidationBoundaryContext: null,
    PlaceValidationBoundaryBelowThisLevel: null,
    RenderValidationBoundaryAtThisLevel: null
});
// Re-export helper: defines live, enumerable getters on the CJS exports object.
function _export(target, all) {
    for (const [name, getter] of Object.entries(all)) {
        Object.defineProperty(target, name, {
            enumerable: true,
            get: getter
        });
    }
}
_export(exports, {
    InstantValidationBoundaryContext: ()=>InstantValidationBoundaryContext,
    PlaceValidationBoundaryBelowThisLevel: ()=>PlaceValidationBoundaryBelowThisLevel,
    RenderValidationBoundaryAtThisLevel: ()=>RenderValidationBoundaryAtThisLevel
});
const _jsxruntime = require("react/jsx-runtime");
const _react = require("react");
const _boundaryconstants = require("./boundary-constants");
const _invarianterror = require("../../../shared/lib/invariant-error");
const _workunitasyncstorageexternal = require("../work-unit-async-storage.external");
// Server-only module: fail fast if it ever ends up in a browser bundle.
if (typeof window !== 'undefined') {
    const browserBundleError = new _invarianterror.InvariantError('Instant validation boundaries should never appear in browser bundles.');
    Object.defineProperty(browserBundleError, "__NEXT_ERROR_CODE", {
        value: "E1117",
        enumerable: false,
        configurable: true
    });
    throw browserBundleError;
}
|
||||
/**
 * Returns the boundary-tracking state of the current work unit, or `null`
 * when there is no work unit or it is not a `validation-client` render.
 * (All other work-unit types — prerender*, request, cache, etc. — carry no
 * boundary tracking state.)
 */ function getValidationBoundaryTracking() {
    const workUnit = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
    if (workUnit && workUnit.type === 'validation-client') {
        return workUnit.boundaryState;
    }
    return null;
}
|
||||
// We use a namespace object to allow us to recover the name of the function
// at runtime even when production bundling/minification is used.
const NameSpace = {
    [_boundaryconstants.INSTANT_VALIDATION_BOUNDARY_NAME]: function({ id, children }) {
        // Track which boundaries we actually managed to render.
        const tracking = getValidationBoundaryTracking();
        if (tracking === null) {
            const missingStateError = new _invarianterror.InvariantError('Missing boundary tracking state');
            Object.defineProperty(missingStateError, "__NEXT_ERROR_CODE", {
                value: "E1060",
                enumerable: false,
                configurable: true
            });
            throw missingStateError;
        }
        tracking.renderedIds.add(id);
        return children;
    }
};
|
||||
// Carries the pending boundary id (or null) down the tree.
const InstantValidationBoundaryContext = /*#__PURE__*/ (0, _react.createContext)(null);
function PlaceValidationBoundaryBelowThisLevel({ id, children }) {
    // OuterLayoutRouter will see this and render a `RenderValidationBoundaryAtThisLevel`.
    return /*#__PURE__*/ (0, _jsxruntime.jsx)(InstantValidationBoundaryContext, {
        value: id,
        children
    });
}
|
||||
function RenderValidationBoundaryAtThisLevel({ id, children }) {
    // We got a boundaryId from the context. Clear the context so that the
    // children don't render another boundary.
    const childrenWithClearedContext = /*#__PURE__*/ (0, _jsxruntime.jsx)(InstantValidationBoundaryContext, {
        value: null,
        children
    });
    return /*#__PURE__*/ (0, _jsxruntime.jsx)(InstantValidationBoundary, {
        id,
        children: childrenWithClearedContext
    });
}
|
||||
// We use slice(0) to trick the bundler into not inlining/minifying the
// function so it retains the name inferred from the namespace object.
const InstantValidationBoundary = NameSpace[_boundaryconstants.INSTANT_VALIDATION_BOUNDARY_NAME.slice(0)];

//# sourceMappingURL=boundary-impl.js.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/app-render/instant-validation/boundary-impl.tsx"],"sourcesContent":["/* eslint-disable @next/internal/no-ambiguous-jsx -- React Client */\n\n// Do not put a \"use client\" directive here. Import this module via the shim in\n// `packages/next/src/client/components/instant-validation/boundary.tsx` instead.\n// 'use client'\n\nimport { createContext, type ReactNode } from 'react'\nimport { INSTANT_VALIDATION_BOUNDARY_NAME } from './boundary-constants'\nimport { InvariantError } from '../../../shared/lib/invariant-error'\nimport type { ValidationBoundaryTracking } from './boundary-tracking'\nimport { workUnitAsyncStorage } from '../work-unit-async-storage.external'\n\nif (typeof window !== 'undefined') {\n throw new InvariantError(\n 'Instant validation boundaries should never appear in browser bundles.'\n )\n}\n\nfunction getValidationBoundaryTracking(): ValidationBoundaryTracking | null {\n const store = workUnitAsyncStorage.getStore()\n if (!store) return null\n switch (store.type) {\n case 'validation-client':\n return store.boundaryState\n case 'prerender':\n case 'prerender-client':\n case 'prerender-ppr':\n case 'prerender-legacy':\n case 'prerender-runtime':\n case 'request':\n case 'cache':\n case 'private-cache':\n case 'unstable-cache':\n case 'generate-static-params':\n break\n default:\n store satisfies never\n }\n return null\n}\n\n// We use a namespace object to allow us to recover the name of the function\n// at runtime even when production bundling/minification is used.\nconst NameSpace = {\n [INSTANT_VALIDATION_BOUNDARY_NAME]: function ({\n id,\n children,\n }: {\n id: string\n children: ReactNode\n }) {\n // Track which boundaries we actually managed to render.\n const state = getValidationBoundaryTracking()\n if (state === null) {\n throw new InvariantError('Missing boundary tracking state')\n }\n state.renderedIds.add(id)\n\n return children\n },\n}\n\ntype BoundaryPlacement =\n | null // do not place 
here\n | string // boundaryId -- place here\n\nexport const InstantValidationBoundaryContext =\n createContext<BoundaryPlacement>(null)\n\nexport function PlaceValidationBoundaryBelowThisLevel({\n id,\n children,\n}: {\n id: string\n children: ReactNode\n}) {\n return (\n // OuterLayoutRouter will see this and render a `RenderValidationBoundaryAtThisLevel`.\n <InstantValidationBoundaryContext value={id}>\n {children}\n </InstantValidationBoundaryContext>\n )\n}\n\nexport function RenderValidationBoundaryAtThisLevel({\n id,\n children,\n}: {\n id: string\n children: ReactNode\n}) {\n // We got a boundaryId from the context. Clear the context so that the children don't render another boundary.\n return (\n <InstantValidationBoundary id={id}>\n <InstantValidationBoundaryContext value={null}>\n {children}\n </InstantValidationBoundaryContext>\n </InstantValidationBoundary>\n )\n}\n\nconst InstantValidationBoundary =\n // We use slice(0) to trick the bundler into not inlining/minifying the function\n // so it retains the name inferred from the namespace object\n NameSpace[\n INSTANT_VALIDATION_BOUNDARY_NAME.slice(\n 0\n ) as typeof INSTANT_VALIDATION_BOUNDARY_NAME\n 
]\n"],"names":["InstantValidationBoundaryContext","PlaceValidationBoundaryBelowThisLevel","RenderValidationBoundaryAtThisLevel","window","InvariantError","getValidationBoundaryTracking","store","workUnitAsyncStorage","getStore","type","boundaryState","NameSpace","INSTANT_VALIDATION_BOUNDARY_NAME","id","children","state","renderedIds","add","createContext","value","InstantValidationBoundary","slice"],"mappings":"AAAA,kEAAkE,GAElE,+EAA+E;AAC/E,iFAAiF;AACjF,eAAe;;;;;;;;;;;;;;;;;IA8DFA,gCAAgC;eAAhCA;;IAGGC,qCAAqC;eAArCA;;IAeAC,mCAAmC;eAAnCA;;;;uBA9E8B;mCACG;gCAClB;8CAEM;AAErC,IAAI,OAAOC,WAAW,aAAa;IACjC,MAAM,qBAEL,CAFK,IAAIC,8BAAc,CACtB,0EADI,qBAAA;eAAA;oBAAA;sBAAA;IAEN;AACF;AAEA,SAASC;IACP,MAAMC,QAAQC,kDAAoB,CAACC,QAAQ;IAC3C,IAAI,CAACF,OAAO,OAAO;IACnB,OAAQA,MAAMG,IAAI;QAChB,KAAK;YACH,OAAOH,MAAMI,aAAa;QAC5B,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;YACH;QACF;YACEJ;IACJ;IACA,OAAO;AACT;AAEA,4EAA4E;AAC5E,iEAAiE;AACjE,MAAMK,YAAY;IAChB,CAACC,mDAAgC,CAAC,EAAE,SAAU,EAC5CC,EAAE,EACFC,QAAQ,EAIT;QACC,wDAAwD;QACxD,MAAMC,QAAQV;QACd,IAAIU,UAAU,MAAM;YAClB,MAAM,qBAAqD,CAArD,IAAIX,8BAAc,CAAC,oCAAnB,qBAAA;uBAAA;4BAAA;8BAAA;YAAoD;QAC5D;QACAW,MAAMC,WAAW,CAACC,GAAG,CAACJ;QAEtB,OAAOC;IACT;AACF;AAMO,MAAMd,iDACXkB,IAAAA,oBAAa,EAAoB;AAE5B,SAASjB,sCAAsC,EACpDY,EAAE,EACFC,QAAQ,EAIT;IACC,OACE,sFAAsF;kBACtF,qBAACd;QAAiCmB,OAAON;kBACtCC;;AAGP;AAEO,SAASZ,oCAAoC,EAClDW,EAAE,EACFC,QAAQ,EAIT;IACC,8GAA8G;IAC9G,qBACE,qBAACM;QAA0BP,IAAIA;kBAC7B,cAAA,qBAACb;YAAiCmB,OAAO;sBACtCL;;;AAIT;AAEA,MAAMM,4BACJ,gFAAgF;AAChF,4DAA4D;AAC5DT,SAAS,CACPC,mDAAgC,CAACS,KAAK,CACpC,GAEH","ignoreList":[0]}
|
||||
Generated
Vendored
+5
@@ -0,0 +1,5 @@
|
||||
export type ValidationBoundaryTracking = {
|
||||
expectedIds: Set<string>;
|
||||
renderedIds: Set<string>;
|
||||
};
|
||||
export declare function createValidationBoundaryTracking(): ValidationBoundaryTracking;
|
||||
+18
@@ -0,0 +1,18 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "createValidationBoundaryTracking", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return createValidationBoundaryTracking;
|
||||
}
|
||||
});
|
||||
function createValidationBoundaryTracking() {
|
||||
return {
|
||||
expectedIds: new Set(),
|
||||
renderedIds: new Set()
|
||||
};
|
||||
}
|
||||
|
||||
//# sourceMappingURL=boundary-tracking.js.map
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/app-render/instant-validation/boundary-tracking.tsx"],"sourcesContent":["export type ValidationBoundaryTracking = {\n expectedIds: Set<string>\n renderedIds: Set<string>\n}\n\nexport function createValidationBoundaryTracking(): ValidationBoundaryTracking {\n return {\n expectedIds: new Set(),\n renderedIds: new Set(),\n }\n}\n"],"names":["createValidationBoundaryTracking","expectedIds","Set","renderedIds"],"mappings":";;;;+BAKgBA;;;eAAAA;;;AAAT,SAASA;IACd,OAAO;QACLC,aAAa,IAAIC;QACjBC,aAAa,IAAID;IACnB;AACF","ignoreList":[0]}
|
||||
+13
@@ -0,0 +1,13 @@
|
||||
import type { LoaderTree } from '../../lib/app-dir-module';
|
||||
import type { AppSegmentConfig, InstantSample } from '../../../build/segment-config/app/app-segment-config';
|
||||
export declare function anySegmentHasRuntimePrefetchEnabled(tree: LoaderTree): Promise<boolean>;
|
||||
export declare function isPageAllowedToBlock(tree: LoaderTree): Promise<boolean>;
|
||||
type FoundSegmentWithConfig = {
|
||||
path: string[];
|
||||
config: NonNullable<AppSegmentConfig['unstable_instant']>;
|
||||
};
|
||||
export declare const anySegmentNeedsInstantValidationInDev: (arg: LoaderTree) => Promise<boolean>;
|
||||
export declare const anySegmentNeedsInstantValidationInBuild: (arg: LoaderTree) => Promise<boolean>;
|
||||
export declare const findSegmentsWithInstantConfig: (arg: LoaderTree) => Promise<FoundSegmentWithConfig[]>;
|
||||
export declare const resolveInstantConfigSamplesForPage: (tree: LoaderTree) => Promise<InstantSample[] | null>;
|
||||
export {};
|
||||
+181
@@ -0,0 +1,181 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
anySegmentHasRuntimePrefetchEnabled: null,
|
||||
anySegmentNeedsInstantValidationInBuild: null,
|
||||
anySegmentNeedsInstantValidationInDev: null,
|
||||
findSegmentsWithInstantConfig: null,
|
||||
isPageAllowedToBlock: null,
|
||||
resolveInstantConfigSamplesForPage: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
anySegmentHasRuntimePrefetchEnabled: function() {
|
||||
return anySegmentHasRuntimePrefetchEnabled;
|
||||
},
|
||||
anySegmentNeedsInstantValidationInBuild: function() {
|
||||
return anySegmentNeedsInstantValidationInBuild;
|
||||
},
|
||||
anySegmentNeedsInstantValidationInDev: function() {
|
||||
return anySegmentNeedsInstantValidationInDev;
|
||||
},
|
||||
findSegmentsWithInstantConfig: function() {
|
||||
return findSegmentsWithInstantConfig;
|
||||
},
|
||||
isPageAllowedToBlock: function() {
|
||||
return isPageAllowedToBlock;
|
||||
},
|
||||
resolveInstantConfigSamplesForPage: function() {
|
||||
return resolveInstantConfigSamplesForPage;
|
||||
}
|
||||
});
|
||||
const _appdirmodule = require("../../lib/app-dir-module");
|
||||
const _parseloadertree = require("../../../shared/lib/router/utils/parse-loader-tree");
|
||||
const _workasyncstorageexternal = require("../work-async-storage.external");
|
||||
/**
 * True when this segment — or any segment in a parallel-route subtree —
 * declares `unstable_instant` with `prefetch: 'runtime'`.
 */ async function anySegmentHasRuntimePrefetchEnabled(tree) {
    const { mod: layoutOrPageMod } = await (0, _appdirmodule.getLayoutOrPageModule)(tree);
    // TODO(restart-on-cache-miss): Does this work correctly for client page/layout modules?
    const instantConfig = layoutOrPageMod ? layoutOrPageMod.unstable_instant : undefined;
    if (instantConfig && typeof instantConfig === 'object' && instantConfig.prefetch === 'runtime') {
        return true;
    }
    // Recurse into every parallel route; stop at the first match.
    const { parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(tree);
    for (const parallelRouteKey in parallelRoutes) {
        if (await anySegmentHasRuntimePrefetchEnabled(parallelRoutes[parallelRouteKey])) {
            return true;
        }
    }
    return false;
}
|
||||
/**
 * Whether this subtree permits blocking. An `unstable_instant` object config
 * expresses a requirement for instant UI (even if it would use runtime
 * prefetching for client navs), so a static shell must exist -> not allowed
 * to block. An explicit `false` config allows blocking.
 */ async function isPageAllowedToBlock(tree) {
    const { mod: layoutOrPageMod } = await (0, _appdirmodule.getLayoutOrPageModule)(tree);
    // TODO(restart-on-cache-miss): Does this work correctly for client page/layout modules?
    const instantConfig = layoutOrPageMod ? layoutOrPageMod.unstable_instant : undefined;
    // If we encounter a non-false instant config before an instant=false,
    // the page isn't allowed to block.
    if (instantConfig !== undefined) {
        if (typeof instantConfig === 'object') {
            return false;
        }
        if (instantConfig === false) {
            return true;
        }
    }
    const { parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(tree);
    for (const parallelRouteKey in parallelRoutes) {
        if (await isPageAllowedToBlock(parallelRoutes[parallelRouteKey])) {
            return true;
        }
    }
    return false;
}
|
||||
/**
 * Checks if any segments in the loader tree have `instant` configs that need validating.
 * NOTE: Client navigations call this multiple times, so the exported wrappers cache it.
 * */ // Shared helper (not exported, not cached — called by the cached wrappers)
async function anySegmentNeedsInstantValidation(rootTree, mode) {
    const segments = await findSegmentsWithInstantConfig(rootTree);
    // An object config means validation is wanted; a bare `false` does not.
    // If any segment opts out (globally or for the current mode), validation
    // is skipped for the whole tree.
    let needsValidation = false;
    for (const { config } of segments) {
        if (typeof config !== 'object') {
            continue;
        }
        const validationDisabled = config.unstable_disableValidation === true || mode === 'dev' && config.unstable_disableDevValidation === true || mode === 'build' && config.unstable_disableBuildValidation === true;
        if (validationDisabled) {
            return false;
        }
        // do not short-circuit, some other segment might still have `unstable_disableValidation`
        needsValidation = true;
    }
    return needsValidation;
}
|
||||
// Per-request cached variants of `anySegmentNeedsInstantValidation`.
const anySegmentNeedsInstantValidationInDev = cacheScopedToWorkStore(async (tree)=>anySegmentNeedsInstantValidation(tree, 'dev'));
const anySegmentNeedsInstantValidationInBuild = cacheScopedToWorkStore(async (tree)=>anySegmentNeedsInstantValidation(tree, 'build'));
|
||||
// Collects every segment whose module defines `unstable_instant`, addressed
// by its parallel-route key path. Cached per request.
const findSegmentsWithInstantConfig = cacheScopedToWorkStore(async (rootTree)=>{
    const found = [];
    // Depth-first walk over the loader tree.
    async function walk(tree, path) {
        const { mod: layoutOrPageMod } = await (0, _appdirmodule.getLayoutOrPageModule)(tree);
        // TODO(restart-on-cache-miss): Does this work correctly for client page/layout modules?
        const instantConfig = layoutOrPageMod ? layoutOrPageMod.unstable_instant : undefined;
        if (instantConfig !== undefined) {
            found.push({
                path,
                config: instantConfig
            });
        }
        const { parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(tree);
        for (const parallelRouteKey in parallelRoutes) {
            await walk(parallelRoutes[parallelRouteKey], [
                ...path,
                parallelRouteKey
            ]);
        }
    }
    await walk(rootTree, []);
    return found;
});
|
||||
// Resolves the effective `samples` for a page by walking the `children`
// parallel route only.
const resolveInstantConfigSamplesForPage = async (tree)=>{
    const { mod: layoutOrPageMod } = await (0, _appdirmodule.getLayoutOrPageModule)(tree);
    const instantConfig = layoutOrPageMod ? layoutOrPageMod.unstable_instant : undefined;
    let samples = null;
    if (instantConfig !== undefined && typeof instantConfig === 'object' && instantConfig.samples) {
        samples = instantConfig.samples;
    }
    // The samples from inner segments override samples from outer segments,
    // i.e. a page overrides the samples from a layout.
    // We do not perform any merging logic.
    const { parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(tree);
    for (const parallelRouteKey in parallelRoutes) {
        if (parallelRouteKey !== 'children') {
            continue;
        }
        const childSamples = await resolveInstantConfigSamplesForPage(parallelRoutes[parallelRouteKey]);
        if (childSamples !== null) {
            samples = childSamples;
        }
    }
    return samples;
};
|
||||
/**
 * A simple cache wrapper for 1-argument functions.
 * The cache will live as long as the current WorkStore,
 * i.e. it's scoped to a single request.
 */ function cacheScopedToWorkStore(func) {
    // WorkStore -> (arg -> result). Both levels are WeakMaps so entries are
    // released with the request / the argument object.
    const cachesByWorkStore = new WeakMap();
    return (arg)=>{
        const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
        if (!workStore) {
            // Outside of a request scope there's nothing to key the cache on.
            return func(arg);
        }
        let cache = cachesByWorkStore.get(workStore);
        if (cache && cache.has(arg)) {
            return cache.get(arg);
        }
        const result = func(arg);
        if (!cache) {
            cache = new WeakMap();
            cachesByWorkStore.set(workStore, cache);
        }
        cache.set(arg, result);
        return result;
    };
}

//# sourceMappingURL=instant-config.js.map
|
||||
Generated
Vendored
+1
File diff suppressed because one or more lines are too long
Generated
Vendored
+5
@@ -0,0 +1,5 @@
|
||||
import type { Params } from '../../request/params';
|
||||
import type { ReadonlyURLSearchParams } from '../../../client/components/readonly-url-search-params';
|
||||
export declare function instrumentParamsForClientValidation<TPArams extends Params>(underlyingParams: TPArams): TPArams;
|
||||
export declare function expectCompleteParamsInClientValidation(expression: string): void;
|
||||
export declare function instrumentSearchParamsForClientValidation(underlyingSearchParams: ReadonlyURLSearchParams): ReadonlyURLSearchParams;
|
||||
Generated
Vendored
+128
@@ -0,0 +1,128 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
expectCompleteParamsInClientValidation: null,
|
||||
instrumentParamsForClientValidation: null,
|
||||
instrumentSearchParamsForClientValidation: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
expectCompleteParamsInClientValidation: function() {
|
||||
return expectCompleteParamsInClientValidation;
|
||||
},
|
||||
instrumentParamsForClientValidation: function() {
|
||||
return instrumentParamsForClientValidation;
|
||||
},
|
||||
instrumentSearchParamsForClientValidation: function() {
|
||||
return instrumentSearchParamsForClientValidation;
|
||||
}
|
||||
});
|
||||
const _workunitasyncstorageexternal = require("../work-unit-async-storage.external");
|
||||
const _workasyncstorageexternal = require("../work-async-storage.external");
|
||||
const _instantsamples = require("./instant-samples");
|
||||
const _instantvalidationerror = require("./instant-validation-error");
|
||||
/**
 * During a `validation-client` render with declared samples, wraps `params`
 * in a proxy that throws when an undeclared param is read. In every other
 * work-unit type (or with no stores at all) the underlying object is
 * returned unchanged.
 */ function instrumentParamsForClientValidation(underlyingParams) {
    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
    const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
    if (workStore && workUnitStore && workUnitStore.type === 'validation-client' && workUnitStore.validationSamples) {
        const declaredKeys = new Set(Object.keys(workUnitStore.validationSamples.params ?? {}));
        return (0, _instantsamples.createExhaustiveParamsProxy)(underlyingParams, declaredKeys, workStore.route);
    }
    return underlyingParams;
}
|
||||
/**
 * During a `validation-client` render with declared samples: records and
 * throws an InstantValidationError when `expression` was used while some
 * route params are still fallback params (i.e. missing from the `samples`
 * of `unstable_instant`). A no-op in every other work-unit type.
 */ function expectCompleteParamsInClientValidation(expression) {
    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
    const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
    if (!workStore || !workUnitStore || workUnitStore.type !== 'validation-client' || !workUnitStore.validationSamples) {
        return;
    }
    const fallbackParams = workUnitStore.fallbackRouteParams;
    if (!fallbackParams || fallbackParams.size === 0) {
        return;
    }
    const missingParams = Array.from(fallbackParams.keys());
    const missingSampleError = new _instantvalidationerror.InstantValidationError(`Route "${workStore.route}" called ${expression} but param${missingParams.length > 1 ? 's' : ''} ${missingParams.map((p)=>`"${p}"`).join(', ')} ${missingParams.length > 1 ? 'are' : 'is'} not defined in the \`samples\` of \`unstable_instant\`. ` + `${expression} requires all route params to be provided.`);
    Object.defineProperty(missingSampleError, "__NEXT_ERROR_CODE", {
        value: "E1109",
        enumerable: false,
        configurable: true
    });
    (0, _instantsamples.trackMissingSampleErrorAndThrow)(missingSampleError);
}
|
||||
/**
 * During a `validation-client` render with declared samples, wraps
 * `searchParams` in a proxy that throws when an undeclared search param is
 * read. In every other work-unit type the underlying object is returned
 * unchanged.
 */ function instrumentSearchParamsForClientValidation(underlyingSearchParams) {
    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
    const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
    if (workStore && workUnitStore && workUnitStore.type === 'validation-client' && workUnitStore.validationSamples) {
        const declaredKeys = new Set(Object.keys(workUnitStore.validationSamples.searchParams ?? {}));
        return (0, _instantsamples.createExhaustiveURLSearchParamsProxy)(underlyingSearchParams, declaredKeys, workStore.route);
    }
    return underlyingSearchParams;
}

//# sourceMappingURL=instant-samples-client.js.map
|
||||
Generated
Vendored
+1
File diff suppressed because one or more lines are too long
+49
@@ -0,0 +1,49 @@
|
||||
import type { InstantSample } from '../../../build/segment-config/app/app-segment-config';
|
||||
import type { ReadonlyRequestCookies } from '../../web/spec-extension/adapters/request-cookies';
|
||||
import type { ReadonlyHeaders } from '../../web/spec-extension/adapters/headers';
|
||||
import type { DraftModeProvider } from '../../async-storage/draft-mode-provider';
|
||||
import type { Params } from '../../request/params';
|
||||
import type { SearchParams } from '../../request/search-params';
|
||||
import { InstantValidationError } from './instant-validation-error';
|
||||
import type { WorkStore } from '../work-async-storage.external';
|
||||
export type InstantValidationSampleTracking = {
|
||||
missingSampleErrors: InstantValidationError[];
|
||||
};
|
||||
export declare function createValidationSampleTracking(): InstantValidationSampleTracking;
|
||||
export declare function trackMissingSampleError(error: InstantValidationError): void;
|
||||
export declare function trackMissingSampleErrorAndThrow(error: InstantValidationError): never;
|
||||
/**
|
||||
* Creates ReadonlyRequestCookies from sample cookie data.
|
||||
* Accessing a cookie not declared in the sample will throw an error.
|
||||
* Cookies with `value: null` are declared (allowed to access) but return no value.
|
||||
*/
|
||||
export declare function createCookiesFromSample(sampleCookies: InstantSample['cookies'], route: string): ReadonlyRequestCookies;
|
||||
/**
|
||||
* Creates ReadonlyHeaders from sample header data.
|
||||
* Accessing a header not declared in the sample will throw an error.
|
||||
* Headers with `value: null` are declared (allowed to access) but return null.
|
||||
*/
|
||||
export declare function createHeadersFromSample(rawSampleHeaders: InstantSample['headers'], sampleCookies: InstantSample['cookies'], route: string): ReadonlyHeaders;
|
||||
/**
|
||||
* Creates a DraftModeProvider that always returns isEnabled: false.
|
||||
*/
|
||||
export declare function createDraftModeForValidation(): DraftModeProvider;
|
||||
/**
|
||||
* Creates params wrapped with an exhaustive proxy.
|
||||
* Accessing a param not declared in the sample will throw an error.
|
||||
*/
|
||||
export declare function createExhaustiveParamsProxy<TParams extends Params>(underlyingParams: TParams, declaredParamNames: Set<string>, route: string): TParams;
|
||||
/**
|
||||
* Creates searchParams wrapped with an exhaustive proxy.
|
||||
* Accessing a searchParam not declared in the sample will throw an error.
|
||||
* A searchParam with `value: undefined` means "declared but absent" (allowed to access, returns undefined).
|
||||
*/
|
||||
export declare function createExhaustiveSearchParamsProxy(searchParams: SearchParams, declaredSearchParamNames: Set<string>, route: string): SearchParams;
|
||||
/**
|
||||
* Wraps a URLSearchParams (or subclass like ReadonlyURLSearchParams) with an
|
||||
* exhaustive proxy. Accessing a search param not declared in the sample via
|
||||
* get/getAll/has will throw an error.
|
||||
*/
|
||||
export declare function createExhaustiveURLSearchParamsProxy<T extends URLSearchParams>(searchParams: T, declaredSearchParamNames: Set<string>, route: string): T;
|
||||
export declare function createRelativeURLFromSamples(route: string, sampleParams: InstantSample['params'], sampleSearchParams: InstantSample['searchParams']): import("../../../shared/lib/router/utils/parse-relative-url").ParsedRelativeUrl;
|
||||
export declare function assertRootParamInSamples(workStore: WorkStore, sampleParams: Params | undefined, paramName: string): void;
|
||||
+432
@@ -0,0 +1,432 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
assertRootParamInSamples: null,
|
||||
createCookiesFromSample: null,
|
||||
createDraftModeForValidation: null,
|
||||
createExhaustiveParamsProxy: null,
|
||||
createExhaustiveSearchParamsProxy: null,
|
||||
createExhaustiveURLSearchParamsProxy: null,
|
||||
createHeadersFromSample: null,
|
||||
createRelativeURLFromSamples: null,
|
||||
createValidationSampleTracking: null,
|
||||
trackMissingSampleError: null,
|
||||
trackMissingSampleErrorAndThrow: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
assertRootParamInSamples: function() {
|
||||
return assertRootParamInSamples;
|
||||
},
|
||||
createCookiesFromSample: function() {
|
||||
return createCookiesFromSample;
|
||||
},
|
||||
createDraftModeForValidation: function() {
|
||||
return createDraftModeForValidation;
|
||||
},
|
||||
createExhaustiveParamsProxy: function() {
|
||||
return createExhaustiveParamsProxy;
|
||||
},
|
||||
createExhaustiveSearchParamsProxy: function() {
|
||||
return createExhaustiveSearchParamsProxy;
|
||||
},
|
||||
createExhaustiveURLSearchParamsProxy: function() {
|
||||
return createExhaustiveURLSearchParamsProxy;
|
||||
},
|
||||
createHeadersFromSample: function() {
|
||||
return createHeadersFromSample;
|
||||
},
|
||||
createRelativeURLFromSamples: function() {
|
||||
return createRelativeURLFromSamples;
|
||||
},
|
||||
createValidationSampleTracking: function() {
|
||||
return createValidationSampleTracking;
|
||||
},
|
||||
trackMissingSampleError: function() {
|
||||
return trackMissingSampleError;
|
||||
},
|
||||
trackMissingSampleErrorAndThrow: function() {
|
||||
return trackMissingSampleErrorAndThrow;
|
||||
}
|
||||
});
|
||||
const _cookies = require("../../web/spec-extension/cookies");
|
||||
const _requestcookies = require("../../web/spec-extension/adapters/request-cookies");
|
||||
const _headers = require("../../web/spec-extension/adapters/headers");
|
||||
const _getsegmentparam = require("../../../shared/lib/router/utils/get-segment-param");
|
||||
const _parserelativeurl = require("../../../shared/lib/router/utils/parse-relative-url");
|
||||
const _invarianterror = require("../../../shared/lib/invariant-error");
|
||||
const _instantvalidationerror = require("./instant-validation-error");
|
||||
const _workunitasyncstorageexternal = require("../work-unit-async-storage.external");
|
||||
const _reflectutils = require("../../../shared/lib/utils/reflect-utils");
|
||||
function createValidationSampleTracking() {
|
||||
return {
|
||||
missingSampleErrors: []
|
||||
};
|
||||
}
|
||||
function getExpectedSampleTracking() {
|
||||
let validationSampleTracking = null;
|
||||
const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
|
||||
if (workUnitStore) {
|
||||
switch(workUnitStore.type){
|
||||
case 'request':
|
||||
case 'validation-client':
|
||||
// TODO(instant-validation-build): do we need any special handling for caches?
|
||||
validationSampleTracking = workUnitStore.validationSampleTracking ?? null;
|
||||
break;
|
||||
case 'cache':
|
||||
case 'private-cache':
|
||||
case 'unstable-cache':
|
||||
case 'prerender-legacy':
|
||||
case 'prerender-ppr':
|
||||
case 'prerender-client':
|
||||
case 'prerender':
|
||||
case 'prerender-runtime':
|
||||
case 'generate-static-params':
|
||||
break;
|
||||
default:
|
||||
workUnitStore;
|
||||
}
|
||||
}
|
||||
if (!validationSampleTracking) {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('Expected to have a workUnitStore that provides validationSampleTracking'), "__NEXT_ERROR_CODE", {
|
||||
value: "E1110",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return validationSampleTracking;
|
||||
}
|
||||
function trackMissingSampleError(error) {
|
||||
const validationSampleTracking = getExpectedSampleTracking();
|
||||
validationSampleTracking.missingSampleErrors.push(error);
|
||||
}
|
||||
function trackMissingSampleErrorAndThrow(error) {
|
||||
// TODO(instant-validation-build): this should abort the render
|
||||
trackMissingSampleError(error);
|
||||
throw error;
|
||||
}
|
||||
function createCookiesFromSample(sampleCookies, route) {
|
||||
const declaredNames = new Set();
|
||||
const cookies = new _cookies.RequestCookies(new Headers());
|
||||
if (sampleCookies) {
|
||||
for (const cookie of sampleCookies){
|
||||
declaredNames.add(cookie.name);
|
||||
if (cookie.value !== null) {
|
||||
cookies.set(cookie.name, cookie.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
const sealed = _requestcookies.RequestCookiesAdapter.seal(cookies);
|
||||
return new Proxy(sealed, {
|
||||
get (target, prop, receiver) {
|
||||
if (prop === 'has') {
|
||||
const originalMethod = Reflect.get(target, prop, receiver);
|
||||
const wrappedMethod = function(name) {
|
||||
if (!declaredNames.has(name)) {
|
||||
trackMissingSampleErrorAndThrow(createMissingCookieSampleError(route, name));
|
||||
}
|
||||
return originalMethod.call(target, name);
|
||||
};
|
||||
return wrappedMethod;
|
||||
}
|
||||
if (prop === 'get') {
|
||||
const originalMethod = Reflect.get(target, prop, receiver);
|
||||
const wrappedMethod = function(nameOrCookie) {
|
||||
let name;
|
||||
if (typeof nameOrCookie === 'string') {
|
||||
name = nameOrCookie;
|
||||
} else if (nameOrCookie && typeof nameOrCookie === 'object' && typeof nameOrCookie.name === 'string') {
|
||||
name = nameOrCookie.name;
|
||||
} else {
|
||||
// This is an invalid input. Pass it through to the original method so it can error.
|
||||
return originalMethod.call(target, nameOrCookie);
|
||||
}
|
||||
if (!declaredNames.has(name)) {
|
||||
trackMissingSampleErrorAndThrow(createMissingCookieSampleError(route, name));
|
||||
}
|
||||
return originalMethod.call(target, name);
|
||||
};
|
||||
return wrappedMethod;
|
||||
}
|
||||
// TODO(instant-validation-build): what should getAll do?
|
||||
// Maybe we should only allow it if there's an array (possibly empty?)
|
||||
return Reflect.get(target, prop, receiver);
|
||||
}
|
||||
});
|
||||
}
|
||||
function createMissingCookieSampleError(route, name) {
|
||||
return Object.defineProperty(new _instantvalidationerror.InstantValidationError(`Route "${route}" accessed cookie "${name}" which is not defined in the \`samples\` ` + `of \`unstable_instant\`. Add it to the sample's \`cookies\` array, ` + `or \`{ name: "${name}", value: null }\` if it should be absent.`), "__NEXT_ERROR_CODE", {
|
||||
value: "E1115",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
function createHeadersFromSample(rawSampleHeaders, sampleCookies, route) {
|
||||
// If we have cookie samples, add a `cookie` header to match.
|
||||
// Accessing it will be implicitly allowed by the proxy --
|
||||
// if the user defined some cookies, accessing the "cookie" header is also fine.
|
||||
const sampleHeaders = rawSampleHeaders ? [
|
||||
...rawSampleHeaders
|
||||
] : [];
|
||||
if (sampleHeaders.find(([name])=>name.toLowerCase() === 'cookie')) {
|
||||
throw Object.defineProperty(new _instantvalidationerror.InstantValidationError('Invalid sample: Defining cookies via a "cookie" header is not supported. Use `cookies: [{ name: ..., value: ... }]` instead.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E1111",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
if (sampleCookies) {
|
||||
const cookieHeaderValue = sampleCookies.toString();
|
||||
sampleHeaders.push([
|
||||
'cookie',
|
||||
// if the `cookies` samples were empty, or they were all `null`, then we have no cookies,
|
||||
// and the header isn't present, but should remains readable, so we set it to null.
|
||||
cookieHeaderValue !== '' ? cookieHeaderValue : null
|
||||
]);
|
||||
}
|
||||
const declaredNames = new Set();
|
||||
const headersInit = {};
|
||||
for (const [name, value] of sampleHeaders){
|
||||
declaredNames.add(name.toLowerCase());
|
||||
if (value !== null) {
|
||||
headersInit[name.toLowerCase()] = value;
|
||||
}
|
||||
}
|
||||
const sealed = _headers.HeadersAdapter.seal(_headers.HeadersAdapter.from(headersInit));
|
||||
return new Proxy(sealed, {
|
||||
get (target, prop, receiver) {
|
||||
if (prop === 'get' || prop === 'has') {
|
||||
const originalMethod = Reflect.get(target, prop, receiver);
|
||||
const patchedMethod = function(rawName) {
|
||||
const name = rawName.toLowerCase();
|
||||
if (!declaredNames.has(name)) {
|
||||
trackMissingSampleErrorAndThrow(Object.defineProperty(new _instantvalidationerror.InstantValidationError(`Route "${route}" accessed header "${name}" which is not defined in the \`samples\` ` + `of \`unstable_instant\`. Add it to the sample's \`headers\` array, ` + `or \`["${name}", null]\` if it should be absent.`), "__NEXT_ERROR_CODE", {
|
||||
value: "E1116",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
}));
|
||||
}
|
||||
// typescript can't reconcile a union of functions with a union of return types,
|
||||
// so we have to cast the original return type away
|
||||
return originalMethod.call(target, name);
|
||||
};
|
||||
return patchedMethod;
|
||||
}
|
||||
return Reflect.get(target, prop, receiver);
|
||||
}
|
||||
});
|
||||
}
|
||||
function createDraftModeForValidation() {
|
||||
// Create a minimal DraftModeProvider-compatible object
|
||||
// that always reports draft mode as disabled.
|
||||
//
|
||||
// private properties that can't be set from outside the class.
|
||||
return {
|
||||
get isEnabled () {
|
||||
return false;
|
||||
},
|
||||
enable () {
|
||||
throw Object.defineProperty(new Error('Draft mode cannot be enabled during build-time instant validation.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E1092",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
},
|
||||
disable () {
|
||||
throw Object.defineProperty(new Error('Draft mode cannot be disabled during build-time instant validation.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E1094",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
function createExhaustiveParamsProxy(underlyingParams, declaredParamNames, route) {
|
||||
return new Proxy(underlyingParams, {
|
||||
get (target, prop, receiver) {
|
||||
if (typeof prop === 'string' && !_reflectutils.wellKnownProperties.has(prop) && // Only error when accessing a param that is part of the route but wasn't provided.
|
||||
// accessing properties that aren't expected to be a valid param value is fine.
|
||||
prop in underlyingParams && !declaredParamNames.has(prop)) {
|
||||
trackMissingSampleErrorAndThrow(Object.defineProperty(new _instantvalidationerror.InstantValidationError(`Route "${route}" accessed param "${prop}" which is not defined in the \`samples\` ` + `of \`unstable_instant\`. Add it to the sample's \`params\` object.`), "__NEXT_ERROR_CODE", {
|
||||
value: "E1095",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
}));
|
||||
}
|
||||
return Reflect.get(target, prop, receiver);
|
||||
}
|
||||
});
|
||||
}
|
||||
function createExhaustiveSearchParamsProxy(searchParams, declaredSearchParamNames, route) {
|
||||
return new Proxy(searchParams, {
|
||||
get (target, prop, receiver) {
|
||||
if (typeof prop === 'string' && !_reflectutils.wellKnownProperties.has(prop) && !declaredSearchParamNames.has(prop)) {
|
||||
trackMissingSampleErrorAndThrow(createMissingSearchParamSampleError(route, prop));
|
||||
}
|
||||
return Reflect.get(target, prop, receiver);
|
||||
},
|
||||
has (target, prop) {
|
||||
if (typeof prop === 'string' && !_reflectutils.wellKnownProperties.has(prop) && !declaredSearchParamNames.has(prop)) {
|
||||
trackMissingSampleErrorAndThrow(createMissingSearchParamSampleError(route, prop));
|
||||
}
|
||||
return Reflect.has(target, prop);
|
||||
}
|
||||
});
|
||||
}
|
||||
function createExhaustiveURLSearchParamsProxy(searchParams, declaredSearchParamNames, route) {
|
||||
return new Proxy(searchParams, {
|
||||
get (target, prop, receiver) {
|
||||
// Intercept method calls that access specific param names
|
||||
if (prop === 'get' || prop === 'getAll' || prop === 'has') {
|
||||
const originalMathod = Reflect.get(target, prop, receiver);
|
||||
return (name)=>{
|
||||
if (typeof name === 'string' && !declaredSearchParamNames.has(name)) {
|
||||
trackMissingSampleErrorAndThrow(createMissingSearchParamSampleError(route, name));
|
||||
}
|
||||
return originalMathod.call(target, name);
|
||||
};
|
||||
}
|
||||
const value = Reflect.get(target, prop, receiver);
|
||||
// Prevent `TypeError: Value of "this" must be of type URLSearchParams` for methods
|
||||
if (typeof value === 'function' && !Object.hasOwn(target, prop)) {
|
||||
return value.bind(target);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
});
|
||||
}
|
||||
function createMissingSearchParamSampleError(route, name) {
|
||||
return Object.defineProperty(new _instantvalidationerror.InstantValidationError(`Route "${route}" accessed searchParam "${name}" which is not defined in the \`samples\` ` + `of \`unstable_instant\`. Add it to the sample's \`searchParams\` object, ` + `or \`{ "${name}": null }\` if it should be absent.`), "__NEXT_ERROR_CODE", {
|
||||
value: "E1098",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
function createRelativeURLFromSamples(route, sampleParams, sampleSearchParams) {
|
||||
// Build searchParams query object and URL search string from sample
|
||||
const pathname = createPathnameFromRouteAndSampleParams(route, sampleParams ?? {});
|
||||
let search = '';
|
||||
if (sampleSearchParams) {
|
||||
const qs = createURLSearchParamsFromSample(sampleSearchParams).toString();
|
||||
if (qs) {
|
||||
search = '?' + qs;
|
||||
}
|
||||
}
|
||||
return (0, _parserelativeurl.parseRelativeUrl)(pathname + search, undefined, true);
|
||||
}
|
||||
function createURLSearchParamsFromSample(sampleSearchParams) {
|
||||
const result = new URLSearchParams();
|
||||
if (sampleSearchParams) {
|
||||
for (const [key, value] of Object.entries(sampleSearchParams)){
|
||||
if (value === null || value === undefined) continue;
|
||||
if (Array.isArray(value)) {
|
||||
for (const v of value){
|
||||
result.append(key, v);
|
||||
}
|
||||
} else {
|
||||
result.set(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* Substitute sample params into `workStore.route` to create a plausible pathname.
|
||||
* TODO(instant-validation-build): this logic is somewhat hacky and likely incomplete,
|
||||
* but it should be good enough for some initial testing.
|
||||
*/ function createPathnameFromRouteAndSampleParams(route, params) {
|
||||
let interpolatedSegments = [];
|
||||
const rawSegments = route.split('/');
|
||||
for (const rawSegment of rawSegments){
|
||||
const param = (0, _getsegmentparam.getSegmentParam)(rawSegment);
|
||||
if (param) {
|
||||
switch(param.paramType){
|
||||
case 'catchall':
|
||||
case 'optional-catchall':
|
||||
{
|
||||
let paramValue = params[param.paramName];
|
||||
if (paramValue === undefined) {
|
||||
// The value for the param was not provided. `usePathname` will detect this and throw
|
||||
// before this can surface to userspace. Use `[...NAME]` as a placeholder for the param value
|
||||
// in case it pops up somewhere unexpectedly.
|
||||
paramValue = [
|
||||
rawSegment
|
||||
];
|
||||
} else if (!Array.isArray(paramValue)) {
|
||||
// NOTE: this happens outside of render, so we don't need `trackMissingSampleErrorAndThrow`
|
||||
throw Object.defineProperty(new _instantvalidationerror.InstantValidationError(`Expected sample param value for segment '${rawSegment}' to be an array of strings, got ${typeof paramValue}`), "__NEXT_ERROR_CODE", {
|
||||
value: "E1104",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
interpolatedSegments.push(...paramValue.map((v)=>encodeURIComponent(v)));
|
||||
break;
|
||||
}
|
||||
case 'dynamic':
|
||||
{
|
||||
let paramValue = params[param.paramName];
|
||||
if (paramValue === undefined) {
|
||||
// The value for the param was not provided. `usePathname` will detect this and throw
|
||||
// before this can surface to userspace. Use `[NAME]` as a placeholder for the param value
|
||||
// in case it pops up somewhere unexpectedly.
|
||||
paramValue = rawSegment;
|
||||
} else if (typeof paramValue !== 'string') {
|
||||
// NOTE: this happens outside of render, so we don't need `trackMissingSampleErrorAndThrow`
|
||||
throw Object.defineProperty(new _instantvalidationerror.InstantValidationError(`Expected sample param value for segment '${rawSegment}' to be a string, got ${typeof paramValue}`), "__NEXT_ERROR_CODE", {
|
||||
value: "E1108",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
interpolatedSegments.push(encodeURIComponent(paramValue));
|
||||
break;
|
||||
}
|
||||
case 'catchall-intercepted-(..)(..)':
|
||||
case 'catchall-intercepted-(.)':
|
||||
case 'catchall-intercepted-(..)':
|
||||
case 'catchall-intercepted-(...)':
|
||||
case 'dynamic-intercepted-(..)(..)':
|
||||
case 'dynamic-intercepted-(.)':
|
||||
case 'dynamic-intercepted-(..)':
|
||||
case 'dynamic-intercepted-(...)':
|
||||
{
|
||||
// TODO(instant-validation-build): i don't know how these are supposed to work, or if we can even get them here
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('Not implemented: Validation of interception routes'), "__NEXT_ERROR_CODE", {
|
||||
value: "E1106",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
default:
|
||||
{
|
||||
param.paramType;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
interpolatedSegments.push(rawSegment);
|
||||
}
|
||||
}
|
||||
return interpolatedSegments.join('/');
|
||||
}
|
||||
function assertRootParamInSamples(workStore, sampleParams, paramName) {
|
||||
if (sampleParams && paramName in sampleParams) {
|
||||
// The param is defined in the samples.
|
||||
} else {
|
||||
const route = workStore.route;
|
||||
trackMissingSampleErrorAndThrow(Object.defineProperty(new _instantvalidationerror.InstantValidationError(`Route "${route}" accessed root param "${paramName}" which is not defined in the \`samples\` ` + `of \`unstable_instant\`. Add it to the sample's \`params\` object.`), "__NEXT_ERROR_CODE", {
|
||||
value: "E1114",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=instant-samples.js.map
|
||||
Generated
Vendored
+1
File diff suppressed because one or more lines are too long
Generated
Vendored
+5
@@ -0,0 +1,5 @@
|
||||
/** Check if an error is an exhaustive samples validation error (by digest). */
|
||||
export declare function isInstantValidationError(err: unknown): err is InstantValidationError;
|
||||
export declare class InstantValidationError extends Error {
|
||||
digest: string;
|
||||
}
|
||||
Generated
Vendored
+33
@@ -0,0 +1,33 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
InstantValidationError: null,
|
||||
isInstantValidationError: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
InstantValidationError: function() {
|
||||
return InstantValidationError;
|
||||
},
|
||||
isInstantValidationError: function() {
|
||||
return isInstantValidationError;
|
||||
}
|
||||
});
|
||||
const INSTANT_VALIDATION_ERROR_DIGEST = 'INSTANT_VALIDATION_ERROR';
|
||||
function isInstantValidationError(err) {
|
||||
return !!(err && typeof err === 'object' && err instanceof Error && err.digest === INSTANT_VALIDATION_ERROR_DIGEST);
|
||||
}
|
||||
class InstantValidationError extends Error {
|
||||
constructor(...args){
|
||||
super(...args), this.digest = INSTANT_VALIDATION_ERROR_DIGEST;
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=instant-validation-error.js.map
|
||||
Generated
Vendored
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/app-render/instant-validation/instant-validation-error.ts"],"sourcesContent":["const INSTANT_VALIDATION_ERROR_DIGEST = 'INSTANT_VALIDATION_ERROR'\n\n/** Check if an error is an exhaustive samples validation error (by digest). */\nexport function isInstantValidationError(\n err: unknown\n): err is InstantValidationError {\n return !!(\n err &&\n typeof err === 'object' &&\n err instanceof Error &&\n (err as any).digest === INSTANT_VALIDATION_ERROR_DIGEST\n )\n}\n\nexport class InstantValidationError extends Error {\n digest = INSTANT_VALIDATION_ERROR_DIGEST\n}\n"],"names":["InstantValidationError","isInstantValidationError","INSTANT_VALIDATION_ERROR_DIGEST","err","Error","digest"],"mappings":";;;;;;;;;;;;;;;IAcaA,sBAAsB;eAAtBA;;IAXGC,wBAAwB;eAAxBA;;;AAHhB,MAAMC,kCAAkC;AAGjC,SAASD,yBACdE,GAAY;IAEZ,OAAO,CAAC,CACNA,CAAAA,OACA,OAAOA,QAAQ,YACfA,eAAeC,SACf,AAACD,IAAYE,MAAM,KAAKH,+BAA8B;AAE1D;AAEO,MAAMF,+BAA+BI;;QAArC,qBACLC,SAASH;;AACX","ignoreList":[0]}
|
||||
Generated
Vendored
+119
@@ -0,0 +1,119 @@
|
||||
import type { InitialRSCPayload, Segment } from '../../../shared/lib/app-router-types';
|
||||
import { RenderStage } from '../staged-rendering';
|
||||
import type { ValidationBoundaryTracking } from './boundary-tracking';
|
||||
import { type LoaderTree } from '../../lib/app-dir-module';
|
||||
import type { GetDynamicParamFromSegment } from '../app-render';
|
||||
import type { Instant } from '../../../build/segment-config/app/app-segment-config';
|
||||
import { Readable } from 'node:stream';
|
||||
import type { NextParsedUrlQuery } from '../../request-meta';
|
||||
type ClientReferenceManifest = Record<string, any>;
|
||||
/** Used to identify a segment. Conceptually similar to request keys in the Client Segment Cache. */
|
||||
export type SegmentPath = string & {
|
||||
_tag: 'SegmentPath';
|
||||
};
|
||||
/**
|
||||
* Isomorphic to a FlightRouterState, but with extra data attached.
|
||||
* Carries the segment path for each segment so we can easily get it from the cache.
|
||||
* */
|
||||
export type RouteTree = {
|
||||
path: SegmentPath;
|
||||
segment: Segment;
|
||||
module: null | {
|
||||
type: 'layout' | 'page';
|
||||
instantConfig: Instant | null;
|
||||
conventionPath: string;
|
||||
createInstantStack: (() => Error) | null;
|
||||
};
|
||||
slots: {
|
||||
[parallelRouteKey: string]: RouteTree;
|
||||
} | null;
|
||||
};
|
||||
export type SegmentStage = RenderStage.Static | RenderStage.Runtime | RenderStage.Dynamic;
|
||||
export type StageChunks = Record<SegmentStage, Uint8Array[]>;
|
||||
export type StageEndTimes = {
|
||||
[RenderStage.Static]: number;
|
||||
[RenderStage.Runtime]: number;
|
||||
};
|
||||
/**
|
||||
* Splits an existing staged stream (represented as arrays of chunks)
|
||||
* into separate staged streams (also in arrays-of-chunks form), one for each segment.
|
||||
* */
|
||||
export declare function collectStagedSegmentData(fullPageChunks: StageChunks, fullPageDebugChunks: Uint8Array[] | null, startTime: number, hasRuntimePrefetch: boolean, clientReferenceManifest: ClientReferenceManifest): Promise<{
|
||||
cache: SegmentCache;
|
||||
payload: InitialRSCPayload;
|
||||
stageEndTimes: StageEndTimes;
|
||||
}>;
|
||||
/**
|
||||
* Creates a late-release stream for a given payload.
|
||||
* When `renderSignal` is triggered, the stream will release late chunks
|
||||
* to provide extra debug info.
|
||||
* */
|
||||
export declare function createCombinedPayloadStream(payload: InitialRSCPayload, extraChunksAbortController: AbortController, renderSignal: AbortSignal, clientReferenceManifest: ClientReferenceManifest, startTime: number, isDebugChannelEnabled: boolean): Promise<{
|
||||
stream: Readable;
|
||||
debugStream: Readable | null;
|
||||
}>;
|
||||
export type SegmentCache = {
|
||||
head: SegmentCacheItem | null;
|
||||
segments: Map<SegmentPath, SegmentCacheItem>;
|
||||
};
|
||||
type SegmentCacheItem = {
|
||||
chunks: StageChunks;
|
||||
debugChunks: Uint8Array[] | null;
|
||||
};
|
||||
/**
|
||||
* Walks the LoaderTree to discover validation depth bounds.
|
||||
*
|
||||
* Each route group between URL segments represents a potential
|
||||
* shared/new boundary in a client navigation. When a user navigates
|
||||
* between sibling routes that share a route group layout, that
|
||||
* layout is already mounted — its Suspense boundaries are revealed
|
||||
* and don't cover new content below. By tracking the max group
|
||||
* depth at each URL depth, we can iterate all possible group
|
||||
* boundaries and validate that blocking code is always covered by
|
||||
* Suspense in the new tree. This is conservative: some boundaries
|
||||
* may not correspond to real navigations (e.g. a route group with
|
||||
* no siblings), but it ensures we don't miss real violations.
|
||||
*
|
||||
* The max is taken across all parallel slots. When slots have
|
||||
* different numbers of groups, the deepest slot determines the
|
||||
* iteration range. Shallower slots simply stay entirely shared
|
||||
* at group depths beyond their own group count — they run out
|
||||
* of groups before reaching the boundary, so their content
|
||||
* remains in the Dynamic stage.
|
||||
*
|
||||
* Returns an array where:
|
||||
* - length = max URL depth (number of URL-consuming segments)
|
||||
* - array[i] = max group depth at URL depth i (number of route group
|
||||
* segments between this URL depth and the next)
|
||||
*
|
||||
* For example, a tree like:
|
||||
* '' / (outer) / (inner) / dashboard / page
|
||||
* returns [2, 0] — URL depth 0 (root) has 2 group layers before
|
||||
* the next URL segment (dashboard), and URL depth 1 (dashboard) has
|
||||
* 0 group layers before the leaf.
|
||||
*/
|
||||
export declare function discoverValidationDepths(loaderTree: LoaderTree): number[];
|
||||
/**
|
||||
* Builds a combined RSC payload for validation at a given URL depth.
|
||||
*
|
||||
* Walks the LoaderTree directly, loading modules and counting
|
||||
* URL-contributing layouts. When `depth` URL segments have been
|
||||
* consumed, the boundary flips from shared (dynamic stage) to new
|
||||
* (static/runtime stage). As the new subtree is built, we check for
|
||||
* instant configs. If none are found, returns null — no validation
|
||||
* needed at this depth or deeper.
|
||||
*
|
||||
* This combines module loading, tree walking, config discovery, and
|
||||
* payload construction into a single pass.
|
||||
*/
|
||||
export type ValidationPayloadResult = {
|
||||
payload: InitialRSCPayload;
|
||||
/** Whether errors from this payload could be ambiguous between runtime
|
||||
* API access (cookies, headers) and uncached IO (connection, fetch).
|
||||
* True when some segments used Static stage. False when all segments
|
||||
* used Runtime stage and errors are definitively from uncached IO. */
|
||||
hasAmbiguousErrors: boolean;
|
||||
createInstantStack: (() => Error) | null;
|
||||
};
|
||||
export declare function createCombinedPayloadAtDepth(initialRSCPayload: InitialRSCPayload, cache: SegmentCache, initialLoaderTree: LoaderTree, getDynamicParamFromSegment: GetDynamicParamFromSegment, query: NextParsedUrlQuery | null, depth: number, groupDepth: number, releaseSignal: AbortSignal, boundaryState: ValidationBoundaryTracking, clientReferenceManifest: ClientReferenceManifest, stageEndTimes: StageEndTimes, useRuntimeStageForPartialSegments: boolean): Promise<ValidationPayloadResult | null>;
|
||||
export {};
|
||||
Generated
Vendored
+713
@@ -0,0 +1,713 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
0 && (module.exports = {
|
||||
collectStagedSegmentData: null,
|
||||
createCombinedPayloadAtDepth: null,
|
||||
createCombinedPayloadStream: null,
|
||||
discoverValidationDepths: null
|
||||
});
|
||||
function _export(target, all) {
|
||||
for(var name in all)Object.defineProperty(target, name, {
|
||||
enumerable: true,
|
||||
get: all[name]
|
||||
});
|
||||
}
|
||||
_export(exports, {
|
||||
collectStagedSegmentData: function() {
|
||||
return collectStagedSegmentData;
|
||||
},
|
||||
createCombinedPayloadAtDepth: function() {
|
||||
return createCombinedPayloadAtDepth;
|
||||
},
|
||||
createCombinedPayloadStream: function() {
|
||||
return createCombinedPayloadStream;
|
||||
},
|
||||
discoverValidationDepths: function() {
|
||||
return discoverValidationDepths;
|
||||
}
|
||||
});
|
||||
const _jsxruntime = require("react/jsx-runtime");
|
||||
const _invarianterror = require("../../../shared/lib/invariant-error");
|
||||
const _stagedrendering = require("../staged-rendering");
|
||||
const _manifestssingleton = require("../manifests-singleton");
|
||||
const _apprenderrenderutils = require("../app-render-render-utils");
|
||||
const _workasyncstorageexternal = require("../work-async-storage.external");
|
||||
const _prospectiverenderutils = require("../prospective-render-utils");
|
||||
const _createerrorhandler = require("../create-error-handler");
|
||||
const _boundary = require("../../../client/components/instant-validation/boundary");
|
||||
const _appdirmodule = require("../../lib/app-dir-module");
|
||||
const _parseloadertree = require("../../../shared/lib/router/utils/parse-loader-tree");
|
||||
const _nodestream = require("node:stream");
|
||||
const _streamutils = require("./stream-utils");
|
||||
const _debugchannelserver = require("../debug-channel-server");
|
||||
const _client = require("react-server-dom-webpack/client");
|
||||
const _server = require("react-server-dom-webpack/server");
|
||||
const _segment = require("../../../shared/lib/segment");
|
||||
const filterStackFrame = process.env.NODE_ENV !== 'production' ? require('../../lib/source-maps').filterStackFrameDEV : undefined;
|
||||
const findSourceMapURL = process.env.NODE_ENV !== 'production' ? require('../../lib/source-maps').findSourceMapURLDEV : undefined;
|
||||
const debug = process.env.NEXT_PRIVATE_DEBUG_VALIDATION === '1' ? console.log : undefined;
|
||||
function traverseRootSeedDataSegments(initialRSCPayload, processSegment) {
|
||||
const { flightRouterState, seedData } = getRootDataFromPayload(initialRSCPayload);
|
||||
const [rootSegment] = flightRouterState;
|
||||
const rootPath = stringifySegment(rootSegment);
|
||||
return traverseCacheNodeSegments(rootPath, flightRouterState, seedData, processSegment);
|
||||
}
|
||||
function traverseCacheNodeSegments(path, route, seedData, processSegment) {
|
||||
processSegment(path, seedData);
|
||||
const [_segment, childRoutes] = route;
|
||||
const [_node, parallelRoutesData, _loading, _isPartial] = seedData;
|
||||
for(const parallelRouteKey in childRoutes){
|
||||
const childSeedData = parallelRoutesData[parallelRouteKey];
|
||||
if (!childSeedData) {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError(`Got unexpected empty seed data during instant validation`), "__NEXT_ERROR_CODE", {
|
||||
value: "E992",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const childRoute = childRoutes[parallelRouteKey];
|
||||
// NOTE: if this is a __PAGE__ segment, it might have search params appended.
|
||||
// Whoever reads from the cache needs to append them as well.
|
||||
const [childSegment] = childRoute;
|
||||
const childPath = createChildSegmentPath(path, parallelRouteKey, childSegment);
|
||||
traverseCacheNodeSegments(childPath, childRoute, childSeedData, processSegment);
|
||||
}
|
||||
}
|
||||
function createChildSegmentPath(parentPath, parallelRouteKey, segment) {
|
||||
const parallelRoutePrefix = parallelRouteKey === 'children' ? '' : `@${encodeURIComponent(parallelRouteKey)}/`;
|
||||
return `${parentPath}/${parallelRoutePrefix}${stringifySegment(segment)}`;
|
||||
}
|
||||
function stringifySegment(segment) {
|
||||
return typeof segment === 'string' ? encodeURIComponent(segment) : encodeURIComponent(segment[0]) + '|' + segment[1] + '|' + segment[2];
|
||||
}
|
||||
/**
 * Re-renders the accumulated full-page Flight chunks into a per-segment
 * cache, capturing for every segment (and the <head>) the chunks produced
 * at each render stage (Static / Runtime / Dynamic).
 *
 * The full-page stream is replayed through a staged Readable that starts in
 * the Static stage and is advanced between sequential tasks; the wall-clock
 * end time of each completed stage is recorded so later deserialization can
 * attribute debug timings.
 *
 * @param fullPageChunks per-stage chunk arrays of the full-page render
 * @param fullPageDebugChunks debug-channel chunks, or null when disabled
 * @param startTime render start time forwarded to Flight
 * @param hasRuntimePrefetch affects the environment label ('Prefetch' vs 'Prefetchable')
 * @param clientReferenceManifest manifest used to (de)serialize references
 * @returns { cache, payload, stageEndTimes }
 */
async function collectStagedSegmentData(fullPageChunks, fullPageDebugChunks, startTime, hasRuntimePrefetch, clientReferenceManifest) {
    const debugChannelAbortController = new AbortController();
    const debugStream = fullPageDebugChunks ? (0, _streamutils.createNodeStreamFromChunks)(fullPageDebugChunks, debugChannelAbortController.signal) : null;
    const { stream, controller } = createStagedStreamFromChunks(fullPageChunks);
    stream.on('end', ()=>{
        // When the stream finishes, we have to close the debug stream too,
        // but delay it to avoid "Connection closed." errors.
        setImmediate(()=>debugChannelAbortController.abort());
    });
    // Technically we're just re-encoding, so nothing new should be emitted,
    // but we add an environment name just in case.
    const environmentName = ()=>{
        const currentStage = controller.currentStage;
        switch(currentStage){
            case _stagedrendering.RenderStage.Static:
                return 'Prerender';
            case _stagedrendering.RenderStage.Runtime:
                return hasRuntimePrefetch ? 'Prefetch' : 'Prefetchable';
            case _stagedrendering.RenderStage.Dynamic:
                return 'Server';
            default:
                // Exhaustiveness marker (was a `never` check in the TS source).
                currentStage;
                throw Object.defineProperty(new _invarianterror.InvariantError(`Invalid render stage: ${currentStage}`), "__NEXT_ERROR_CODE", {
                    value: "E881",
                    enumerable: false,
                    configurable: true
                });
        }
    };
    // Deserialize the payload.
    // NOTE: the stream will initially be in the static stage, so that's as far as we get here.
    // We still expect the outer structure of the payload to be readable in this state.
    const serverConsumerManifest = {
        moduleLoading: null,
        moduleMap: clientReferenceManifest.rscModuleMapping,
        serverModuleMap: (0, _manifestssingleton.getServerModuleMap)()
    };
    const payload = await (0, _client.createFromNodeStream)(stream, serverConsumerManifest, {
        findSourceMapURL,
        debugChannel: debugStream ?? undefined,
        // Do not pass start/end timings - we do not want to omit any debug info.
        startTime: undefined,
        endTime: undefined
    });
    // Deconstruct the payload into separate streams per segment.
    // We have to preserve the stage information for each of them,
    // so that we can later render each segment in any stage we need.
    const { head } = getRootDataFromPayload(payload);
    const segments = new Map();
    traverseRootSeedDataSegments(payload, (segmentPath, seedData)=>{
        segments.set(segmentPath, createSegmentData(seedData));
    });
    const cache = createSegmentCache();
    const pendingTasks = [];
    /** Track when we advance stages so we can pass them as `endTime` later. */ const stageEndTimes = {
        [_stagedrendering.RenderStage.Static]: -1,
        [_stagedrendering.RenderStage.Runtime]: -1
    };
    // Re-serializes one segment's data, splitting its Flight output into the
    // per-stage chunk arrays of `cacheEntry` (and its debug chunks, if any).
    const renderIntoCacheItem = async (data, cacheEntry)=>{
        const segmentDebugChannel = cacheEntry.debugChunks ? (0, _debugchannelserver.createDebugChannel)() : undefined;
        const itemStream = (0, _server.renderToReadableStream)(data, clientReferenceManifest.clientModules, {
            filterStackFrame,
            debugChannel: segmentDebugChannel == null ? void 0 : segmentDebugChannel.serverSide,
            environmentName,
            startTime,
            onError (error) {
                const digest = (0, _createerrorhandler.getDigestForWellKnownError)(error);
                if (digest) {
                    return digest;
                }
                // Forward existing digests
                if (error && typeof error === 'object' && 'digest' in error && typeof error.digest === 'string') {
                    return error.digest;
                }
                // We don't need to log the errors because we would have already done that
                // when generating the original Flight stream for the whole page.
                if (process.env.NEXT_DEBUG_BUILD || process.env.__NEXT_VERBOSE_LOGGING) {
                    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
                    (0, _prospectiverenderutils.printDebugThrownValueForProspectiveRender)(error, (workStore == null ? void 0 : workStore.route) ?? 'unknown route', _prospectiverenderutils.Phase.InstantValidation);
                }
            }
        });
        await Promise.all([
            // accumulate Flight chunks
            (async ()=>{
                for await (const chunk of itemStream.values()){
                    // Record under the stage the staged stream is currently in;
                    // writeChunk mirrors the chunk into all later stages too.
                    writeChunk(cacheEntry.chunks, controller.currentStage, chunk);
                }
            })(),
            // accumulate Debug chunks
            segmentDebugChannel && (async ()=>{
                for await (const chunk of segmentDebugChannel.clientSide.readable.values()){
                    cacheEntry.debugChunks.push(chunk);
                }
            })()
        ]);
    };
    // Kick off one re-render per segment (plus <head>) in the first task,
    // then advance the staged stream Static -> Runtime -> Dynamic between
    // task boundaries, stamping each completed stage's end time.
    await (0, _apprenderrenderutils.runInSequentialTasks)(()=>{
        {
            const headCacheItem = createSegmentCacheItem(!!fullPageDebugChunks);
            cache.head = headCacheItem;
            pendingTasks.push(renderIntoCacheItem(head, headCacheItem));
        }
        for (const [segmentPath, segmentData] of segments){
            const segmentCacheItem = createSegmentCacheItem(!!fullPageDebugChunks);
            cache.segments.set(segmentPath, segmentCacheItem);
            pendingTasks.push(renderIntoCacheItem(segmentData, segmentCacheItem));
        }
    }, ()=>{
        stageEndTimes[_stagedrendering.RenderStage.Static] = performance.now() + performance.timeOrigin;
        controller.advanceStage(_stagedrendering.RenderStage.Runtime);
    }, ()=>{
        stageEndTimes[_stagedrendering.RenderStage.Runtime] = performance.now() + performance.timeOrigin;
        controller.advanceStage(_stagedrendering.RenderStage.Dynamic);
    });
    await Promise.all(pendingTasks);
    return {
        cache,
        payload,
        stageEndTimes
    };
}
|
||||
/**
 * Turns accumulated stage chunks into a stream.
 * The stream starts out in Static stage, and can be advanced further
 * using the returned controller object.
 * Conceptually, this is similar to how we unblock more content
 * by advancing stages in a regular staged render.
 */
function createStagedStreamFromChunks(stageChunks) {
    // The successive stages are supersets of one another,
    // so we can index into the dynamic chunks everywhere
    // and just look at the lengths of the Static/Runtime arrays
    const allChunks = stageChunks[_stagedrendering.RenderStage.Dynamic];
    const numStaticChunks = stageChunks[_stagedrendering.RenderStage.Static].length;
    const numRuntimeChunks = stageChunks[_stagedrendering.RenderStage.Runtime].length;
    const numDynamicChunks = stageChunks[_stagedrendering.RenderStage.Dynamic].length;
    // Cursor into allChunks; shared by read() and advanceStage().
    let chunkIx = 0;
    let currentStage = _stagedrendering.RenderStage.Static;
    let closed = false;
    function push(chunk) {
        stream.push(chunk);
    }
    // Finishes the stream; subsequent advanceStage() calls become no-ops.
    function close() {
        closed = true;
        stream.push(null);
    }
    const stream = new _nodestream.Readable({
        read () {
            // Emit static chunks
            for(; chunkIx < numStaticChunks; chunkIx++){
                push(allChunks[chunkIx]);
            }
            // If there's no more chunks after this stage, finish the stream.
            if (chunkIx >= allChunks.length) {
                close();
                return;
            }
        }
    });
    // Advances to `stage`, flushing every chunk belonging to it. Returns
    // true when the stream is already closed or becomes fully drained.
    function advanceStage(stage) {
        if (closed) return true;
        switch(stage){
            case _stagedrendering.RenderStage.Runtime:
                {
                    currentStage = _stagedrendering.RenderStage.Runtime;
                    for(; chunkIx < numRuntimeChunks; chunkIx++){
                        push(allChunks[chunkIx]);
                    }
                    break;
                }
            case _stagedrendering.RenderStage.Dynamic:
                {
                    currentStage = _stagedrendering.RenderStage.Dynamic;
                    for(; chunkIx < numDynamicChunks; chunkIx++){
                        push(allChunks[chunkIx]);
                    }
                    break;
                }
            default:
                {
                    // Exhaustiveness marker (was a `never` check in the TS
                    // source); unknown stages emit nothing.
                    stage;
                }
        }
        // If there's no more chunks after this stage, finish the stream.
        if (chunkIx >= allChunks.length) {
            close();
            return true;
        } else {
            return false;
        }
    }
    return {
        stream,
        controller: {
            get currentStage () {
                return currentStage;
            },
            advanceStage
        }
    };
}
|
||||
function writeChunk(stageChunks, stage, chunk) {
|
||||
switch(stage){
|
||||
case _stagedrendering.RenderStage.Static:
|
||||
{
|
||||
stageChunks[_stagedrendering.RenderStage.Static].push(chunk);
|
||||
// fallthrough
|
||||
}
|
||||
case _stagedrendering.RenderStage.Runtime:
|
||||
{
|
||||
stageChunks[_stagedrendering.RenderStage.Runtime].push(chunk);
|
||||
// fallthrough
|
||||
}
|
||||
case _stagedrendering.RenderStage.Dynamic:
|
||||
{
|
||||
stageChunks[_stagedrendering.RenderStage.Dynamic].push(chunk);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
{
|
||||
stage;
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
 * Renders `payload` back into a Flight stream, buffering every chunk.
 * Chunks produced before the second sequential task runs count as
 * renderable; anything arriving after the abort is buffered only so it can
 * later unblock debug info. Returns a Node stream that emits the renderable
 * chunks immediately and releases the rest when `renderSignal` aborts,
 * plus an optional debug stream.
 */
async function createCombinedPayloadStream(payload, extraChunksAbortController, renderSignal, clientReferenceManifest, startTime, isDebugChannelEnabled) {
    // Collect all the chunks so that we're not dependent on timing of the render.
    let isRenderable = true;
    const renderableChunks = [];
    const allChunks = [];
    const debugChunks = isDebugChannelEnabled ? [] : null;
    const debugChannel = isDebugChannelEnabled ? (0, _debugchannelserver.createDebugChannel)() : null;
    let streamFinished;
    await (0, _apprenderrenderutils.runInSequentialTasks)(()=>{
        const stream = (0, _server.renderToReadableStream)(payload, clientReferenceManifest.clientModules, {
            filterStackFrame,
            debugChannel: debugChannel == null ? void 0 : debugChannel.serverSide,
            startTime,
            onError (error) {
                const digest = (0, _createerrorhandler.getDigestForWellKnownError)(error);
                if (digest) {
                    return digest;
                }
                // Forward existing digests
                if (error && typeof error === 'object' && 'digest' in error && typeof error.digest === 'string') {
                    return error.digest;
                }
                // We don't need to log the errors because we would have already done that
                // when generating the original Flight stream for the whole page.
                if (process.env.NEXT_DEBUG_BUILD || process.env.__NEXT_VERBOSE_LOGGING) {
                    const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
                    (0, _prospectiverenderutils.printDebugThrownValueForProspectiveRender)(error, (workStore == null ? void 0 : workStore.route) ?? 'unknown route', _prospectiverenderutils.Phase.InstantValidation);
                }
            }
        });
        streamFinished = Promise.all([
            // Accumulate Flight chunks
            (async ()=>{
                for await (const chunk of stream.values()){
                    allChunks.push(chunk);
                    if (isRenderable) {
                        renderableChunks.push(chunk);
                    }
                }
            })(),
            // Accumulate debug chunks
            debugChannel && (async ()=>{
                for await (const chunk of debugChannel.clientSide.readable.values()){
                    debugChunks.push(chunk);
                }
            })()
        ]);
    }, ()=>{
        // Second task: later chunks are no longer renderable; abort so the
        // underlying render stops scheduling new work.
        isRenderable = false;
        extraChunksAbortController.abort();
    });
    await streamFinished;
    return {
        // Renderable chunks flow immediately; the remainder is released
        // (debug info only) once `renderSignal` aborts.
        stream: (0, _streamutils.createNodeStreamWithLateRelease)(renderableChunks, allChunks, renderSignal),
        debugStream: debugChunks ? (0, _streamutils.createNodeStreamFromChunks)(debugChunks, renderSignal) : null
    };
}
|
||||
function getRootDataFromPayload(initialRSCPayload) {
|
||||
// FlightDataPath is an unsound type, hence the additional checks.
|
||||
const flightDataPaths = initialRSCPayload.f;
|
||||
if (flightDataPaths.length !== 1 && flightDataPaths[0].length !== 3) {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError('InitialRSCPayload does not match the expected shape during instant validation.'), "__NEXT_ERROR_CODE", {
|
||||
value: "E994",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
const flightRouterState = flightDataPaths[0][0];
|
||||
const seedData = flightDataPaths[0][1];
|
||||
// TODO: handle head
|
||||
const head = flightDataPaths[0][2];
|
||||
return {
|
||||
flightRouterState,
|
||||
seedData,
|
||||
head
|
||||
};
|
||||
}
|
||||
async function createValidationHead(cache, releaseSignal, clientReferenceManifest, stageEndTimes, stage) {
|
||||
const segmentCacheItem = cache.head;
|
||||
if (!segmentCacheItem) {
|
||||
throw Object.defineProperty(new _invarianterror.InvariantError(`Missing segment data: <head>`), "__NEXT_ERROR_CODE", {
|
||||
value: "E1072",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return await deserializeFromChunks(segmentCacheItem.chunks[stage], segmentCacheItem.chunks[_stagedrendering.RenderStage.Dynamic], segmentCacheItem.debugChunks, releaseSignal, clientReferenceManifest, {
|
||||
startTime: undefined,
|
||||
endTime: stageEndTimes[stage]
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Deserializes a (partial possibly partial) RSC stream, given as a chunk-array.
|
||||
* If the stream is partial, we'll wait for `releaseSignal` to fire
|
||||
* and then complete the deserialization using `allChunks`.
|
||||
*
|
||||
* This is used to obtain a partially-complete model (that might contain unresolved holes)
|
||||
* and then release any late debug info from chunks that came later before we abort the render.
|
||||
* */ function deserializeFromChunks(partialChunks, allChunks, debugChunks, releaseSignal, clientReferenceManifest, timings) {
|
||||
const debugChannelAbortController = new AbortController();
|
||||
const debugStream = debugChunks ? (0, _streamutils.createNodeStreamFromChunks)(debugChunks, debugChannelAbortController.signal) : null;
|
||||
const serverConsumerManifest = {
|
||||
moduleLoading: null,
|
||||
moduleMap: clientReferenceManifest.rscModuleMapping,
|
||||
serverModuleMap: (0, _manifestssingleton.getServerModuleMap)()
|
||||
};
|
||||
const segmentStream = partialChunks.length < allChunks.length ? (0, _streamutils.createNodeStreamWithLateRelease)(partialChunks, allChunks, releaseSignal) : (0, _streamutils.createNodeStreamFromChunks)(partialChunks);
|
||||
segmentStream.on('end', ()=>{
|
||||
// When the stream finishes, we have to close the debug stream too,
|
||||
// but delay it to avoid "Connection closed." errors.
|
||||
setImmediate(()=>debugChannelAbortController.abort());
|
||||
});
|
||||
return (0, _client.createFromNodeStream)(segmentStream, serverConsumerManifest, {
|
||||
findSourceMapURL,
|
||||
debugChannel: debugStream ?? undefined,
|
||||
startTime: timings == null ? void 0 : timings.startTime,
|
||||
endTime: timings == null ? void 0 : timings.endTime
|
||||
});
|
||||
}
|
||||
/**
 * Projects a CacheNodeSeedData tuple down to the fields instant validation
 * caches per segment: the React node (index 0), the partial flag (index 3),
 * and the vary params (index 4). The parallel-routes map (index 1) and the
 * unused third slot are dropped.
 */
function createSegmentData(seedData) {
    return {
        node: seedData[0],
        isPartial: seedData[3],
        varyParams: seedData[4]
    };
}
|
||||
/**
 * Rebuilds a CacheNodeSeedData tuple from stored segment data plus freshly
 * built child slots. The third slot (which previously held `loading`) is
 * always null.
 */
function getCacheNodeSeedDataFromSegment(data, slots) {
    const { node, isPartial, varyParams } = data;
    return [node, slots, null, isPartial, varyParams];
}
|
||||
function createSegmentCache() {
|
||||
return {
|
||||
head: null,
|
||||
segments: new Map()
|
||||
};
|
||||
}
|
||||
function createSegmentCacheItem(withDebugChunks) {
|
||||
return {
|
||||
chunks: {
|
||||
[_stagedrendering.RenderStage.Static]: [],
|
||||
[_stagedrendering.RenderStage.Runtime]: [],
|
||||
[_stagedrendering.RenderStage.Dynamic]: []
|
||||
},
|
||||
debugChunks: withDebugChunks ? [] : null
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Whether this segment consumes a URL depth level. Each URL depth
|
||||
* represents a potential navigation boundary.
|
||||
*
|
||||
* The root segment ('') consumes depth 0. Regular segments like
|
||||
* 'dashboard' consume the next depth — whether or not they have a
|
||||
* layout. Route groups, __PAGE__, __DEFAULT__, and /_not-found don't
|
||||
* consume a depth — they share the boundary of their parent.
|
||||
*/ function segmentConsumesURLDepth(segment) {
|
||||
// Dynamic segments (tuples) always consume a URL depth.
|
||||
if (typeof segment !== 'string') return true;
|
||||
// Route groups, pages, defaults, and not-found don't consume a depth.
|
||||
if (segment.startsWith(_segment.PAGE_SEGMENT_KEY) || (0, _segment.isGroupSegment)(segment) || segment === _segment.DEFAULT_SEGMENT_KEY || segment === _segment.NOT_FOUND_SEGMENT_KEY) {
|
||||
return false;
|
||||
}
|
||||
// Everything else consumes a depth, including the root segment ''.
|
||||
return true;
|
||||
}
|
||||
/**
 * Walks the loader tree and records, for each URL depth, the maximum number
 * of nested route groups found at that depth anywhere in the tree. Entry `d`
 * of the result says how many group levels exist below URL depth `d`; these
 * index the candidate validation-boundary positions.
 *
 * NOTE: buildSharedTreeSeedData must count url/group depth the same way this
 * function does — keep the two in sync.
 */
function discoverValidationDepths(loaderTree) {
    const groupDepthsByUrlDepth = [];
    // Records `groupDepth` at `urlDepth`, keeping the per-depth maximum and
    // growing the array (zero-filled) as needed.
    function recordGroupDepth(urlDepth, groupDepth) {
        while(groupDepthsByUrlDepth.length <= urlDepth){
            groupDepthsByUrlDepth.push(0);
        }
        if (groupDepth > groupDepthsByUrlDepth[urlDepth]) {
            groupDepthsByUrlDepth[urlDepth] = groupDepth;
        }
    }
    // urlDepth tracks the index of the current URL-consuming segment.
    // Groups accumulate at the same index. When the next URL segment
    // is reached, it increments the index and resets the group counter.
    // We start at -1 so the root segment '' increments to 0.
    function walk(tree, urlDepth, groupDepth) {
        const segment = tree[0];
        const { parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(tree);
        const consumesDepth = segmentConsumesURLDepth(segment);
        let nextUrlDepth = urlDepth;
        let nextGroupDepth = groupDepth;
        if (consumesDepth) {
            nextUrlDepth = urlDepth + 1;
            nextGroupDepth = 0;
            recordGroupDepth(nextUrlDepth, 0);
        } else if (typeof segment === 'string' && (0, _segment.isGroupSegment)(segment) && segment !== '(__SLOT__)') {
            // Count real route groups but not the synthetic '(__SLOT__)' segment
            // that Next.js inserts for parallel slots. The synthetic group
            // can't be a real navigation boundary.
            nextGroupDepth++;
            recordGroupDepth(urlDepth, nextGroupDepth);
        }
        for(const key in parallelRoutes){
            walk(parallelRoutes[key], nextUrlDepth, nextGroupDepth);
        }
    }
    walk(loaderTree, -1, 0);
    return groupDepthsByUrlDepth;
}
|
||||
/**
 * Builds a validation payload whose tree is split at the chosen boundary
 * (`depth` URL levels + `groupDepth` route-group levels): segments above the
 * boundary ("shared") are deserialized from their fully Dynamic chunks,
 * while segments at/below it ("new") are deserialized at the stage a
 * prefetch would observe (Static, or Runtime for runtime-prefetched or
 * deliberately partial segments). The boundary segment's node is wrapped in
 * PlaceValidationBoundaryBelowThisLevel and its path is recorded in
 * `boundaryState.expectedIds`.
 *
 * Returns null when no segment at/below the boundary opts into instant UI.
 * Otherwise returns the rebuilt payload, whether any Static-stage segments
 * make errors ambiguous, and the debug stack factory from the first
 * `unstable_instant` config encountered (if any).
 */
async function createCombinedPayloadAtDepth(initialRSCPayload, cache, initialLoaderTree, getDynamicParamFromSegment, query, depth, groupDepth, releaseSignal, boundaryState, clientReferenceManifest, stageEndTimes, useRuntimeStageForPartialSegments) {
    let hasStaticSegments = false;
    let hasRuntimeSegments = false;
    // Resolves the concrete segment for a loader-tree node, substituting the
    // dynamic param (if any) and appending search params to page segments.
    function getSegment(loaderTree) {
        const dynamicParam = getDynamicParamFromSegment(loaderTree);
        if (dynamicParam) {
            return dynamicParam.treeSegment;
        }
        const segment = loaderTree[0];
        return query ? (0, _segment.addSearchParamsIfPageSegment)(segment, query) : segment;
    }
    // Walks the part of the tree above (and including) the boundary. Shared
    // segments are always rebuilt from their Dynamic-stage chunks.
    async function buildSharedTreeSeedData(loaderTree, parentPath, key, urlDepthConsumed, groupDepthConsumed) {
        const { parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(loaderTree);
        const segment = getSegment(loaderTree);
        const path = parentPath === null ? stringifySegment(segment) : createChildSegmentPath(parentPath, key, segment);
        debug == null ? void 0 : debug(` ${path || '/'} - Dynamic`);
        const segmentCacheItem = cache.segments.get(path);
        if (!segmentCacheItem) {
            throw Object.defineProperty(new _invarianterror.InvariantError(`Missing segment data: ${path}`), "__NEXT_ERROR_CODE", {
                value: "E995",
                enumerable: false,
                configurable: true
            });
        }
        const segmentData = await deserializeFromChunks(segmentCacheItem.chunks[_stagedrendering.RenderStage.Dynamic], segmentCacheItem.chunks[_stagedrendering.RenderStage.Dynamic], segmentCacheItem.debugChunks, releaseSignal, clientReferenceManifest, null);
        const consumesUrlDepth = segmentConsumesURLDepth(segment);
        const isGroup = typeof segment === 'string' && (0, _segment.isGroupSegment)(segment) && segment !== '(__SLOT__)';
        // Advance counters for this segment before the boundary check,
        // mirroring how discoverValidationDepths counts. URL segments
        // increment urlDepthConsumed, groups increment groupDepthConsumed.
        // The synthetic '(__SLOT__)' segment is excluded — it can't be a
        // real navigation boundary.
        let nextUrlDepth = urlDepthConsumed;
        let currentGroupDepth = groupDepthConsumed;
        if (consumesUrlDepth) {
            nextUrlDepth++;
            currentGroupDepth = 0;
        } else if (isGroup) {
            currentGroupDepth++;
        }
        const pastUrlBoundary = nextUrlDepth > depth;
        const isBoundary = pastUrlBoundary && currentGroupDepth >= groupDepth;
        if (isBoundary) {
            debug == null ? void 0 : debug(` ['${path}' is the boundary (url=${nextUrlDepth}, group=${currentGroupDepth})]`);
            boundaryState.expectedIds.add(path);
            // Wrap the boundary node so the client boundary component renders
            // below this level; everything beneath it is the "new" tree.
            const finalSegmentData = {
                ...segmentData,
                node: // eslint-disable-next-line @next/internal/no-ambiguous-jsx -- bundled in the server layer
                /*#__PURE__*/ (0, _jsxruntime.jsx)(_boundary.PlaceValidationBoundaryBelowThisLevel, {
                    id: path,
                    children: segmentData.node
                }, "c")
            };
            const slots = {};
            let requiresInstantUI = false;
            let createInstantStack = null;
            for(const parallelRouteKey in parallelRoutes){
                const result = await buildNewTreeSeedData(parallelRoutes[parallelRouteKey], path, parallelRouteKey, false);
                slots[parallelRouteKey] = result.seedData;
                if (result.requiresInstantUI) {
                    requiresInstantUI = true;
                    // Keep the first stack factory we encounter.
                    if (createInstantStack === null) {
                        createInstantStack = result.createInstantStack;
                    }
                }
            }
            return {
                seedData: getCacheNodeSeedDataFromSegment(finalSegmentData, slots),
                requiresInstantUI,
                createInstantStack
            };
        }
        // Not at the boundary yet — keep walking as shared.
        const slots = {};
        let requiresInstantUI = false;
        let createInstantStack = null;
        for(const parallelRouteKey in parallelRoutes){
            const result = await buildSharedTreeSeedData(parallelRoutes[parallelRouteKey], path, parallelRouteKey, nextUrlDepth, currentGroupDepth);
            slots[parallelRouteKey] = result.seedData;
            if (result.requiresInstantUI) {
                requiresInstantUI = true;
                if (createInstantStack === null) {
                    createInstantStack = result.createInstantStack;
                }
            }
        }
        return {
            seedData: getCacheNodeSeedDataFromSegment(segmentData, slots),
            requiresInstantUI,
            createInstantStack
        };
    }
    // Walks the tree below the boundary, choosing a render stage per segment
    // and collecting `unstable_instant` configuration from its module.
    async function buildNewTreeSeedData(lt, parentPath, key, isInsideRuntimePrefetch) {
        const { parallelRoutes } = (0, _parseloadertree.parseLoaderTree)(lt);
        const { mod: layoutOrPageMod } = await (0, _appdirmodule.getLayoutOrPageModule)(lt);
        const segment = getSegment(lt);
        const path = parentPath === null ? stringifySegment(segment) : createChildSegmentPath(parentPath, key, segment);
        // Read the segment module's `unstable_instant` export and (for object
        // configs) its optional debug stack factory.
        let instantConfig = null;
        let localCreateInstantStack = null;
        if (layoutOrPageMod !== undefined) {
            instantConfig = layoutOrPageMod.unstable_instant ?? null;
            if (instantConfig && typeof instantConfig === 'object') {
                const rawFactory = layoutOrPageMod.__debugCreateInstantConfigStack;
                localCreateInstantStack = typeof rawFactory === 'function' ? rawFactory : null;
            }
        }
        // Pick the stage: inside a runtime prefetch everything is Runtime;
        // `prefetch: 'runtime'` starts one; otherwise Static — upgraded to
        // Runtime when partial segments were requested at runtime stage.
        let childIsInsideRuntimePrefetch = isInsideRuntimePrefetch;
        let stage;
        if (!isInsideRuntimePrefetch) {
            if (instantConfig && typeof instantConfig === 'object' && instantConfig.prefetch === 'runtime') {
                stage = _stagedrendering.RenderStage.Runtime;
                childIsInsideRuntimePrefetch = true;
                hasRuntimeSegments = true;
            } else {
                if (useRuntimeStageForPartialSegments) {
                    stage = _stagedrendering.RenderStage.Runtime;
                    hasRuntimeSegments = true;
                } else {
                    stage = _stagedrendering.RenderStage.Static;
                    hasStaticSegments = true;
                }
            }
        } else {
            stage = _stagedrendering.RenderStage.Runtime;
            hasRuntimeSegments = true;
        }
        debug == null ? void 0 : debug(` ${path || '/'} - ${_stagedrendering.RenderStage[stage]}`);
        const segmentCacheItem = cache.segments.get(path);
        if (!segmentCacheItem) {
            throw Object.defineProperty(new _invarianterror.InvariantError(`Missing segment data: ${path}`), "__NEXT_ERROR_CODE", {
                value: "E995",
                enumerable: false,
                configurable: true
            });
        }
        const segmentData = await deserializeFromChunks(segmentCacheItem.chunks[stage], segmentCacheItem.chunks[_stagedrendering.RenderStage.Dynamic], segmentCacheItem.debugChunks, releaseSignal, clientReferenceManifest, {
            startTime: undefined,
            endTime: stageEndTimes[stage]
        });
        // Build children first, then determine requiresInstantUI.
        const slots = {};
        let childrenRequireInstantUI = false;
        let childCreateInstantStack = null;
        for(const parallelRouteKey in parallelRoutes){
            const result = await buildNewTreeSeedData(parallelRoutes[parallelRouteKey], path, parallelRouteKey, childIsInsideRuntimePrefetch);
            slots[parallelRouteKey] = result.seedData;
            if (result.requiresInstantUI) {
                childrenRequireInstantUI = true;
                if (childCreateInstantStack === null) {
                    childCreateInstantStack = result.createInstantStack;
                }
            }
        }
        // Local config takes precedence over children.
        let requiresInstantUI;
        let createInstantStack;
        if (instantConfig === false) {
            requiresInstantUI = false;
            createInstantStack = null;
        } else if (instantConfig && typeof instantConfig === 'object') {
            requiresInstantUI = true;
            createInstantStack = localCreateInstantStack;
        } else {
            requiresInstantUI = childrenRequireInstantUI;
            createInstantStack = childCreateInstantStack;
        }
        return {
            seedData: getCacheNodeSeedDataFromSegment(segmentData, slots),
            requiresInstantUI,
            createInstantStack
        };
    }
    const { seedData, requiresInstantUI, createInstantStack } = await buildSharedTreeSeedData(initialLoaderTree, null, null, 0 /* urlDepthConsumed */ , 0 /* groupDepthConsumed */ );
    if (!requiresInstantUI) {
        // No instant UI anywhere below this boundary — nothing to validate.
        return null;
    }
    const { flightRouterState } = getRootDataFromPayload(initialRSCPayload);
    // The head must be readable at the stage the deepest new segments use.
    const headStage = hasRuntimeSegments ? _stagedrendering.RenderStage.Runtime : _stagedrendering.RenderStage.Static;
    const head = await createValidationHead(cache, releaseSignal, clientReferenceManifest, stageEndTimes, headStage);
    const payload = {
        ...initialRSCPayload,
        f: [
            [
                flightRouterState,
                seedData,
                head
            ]
        ]
    };
    return {
        payload,
        hasAmbiguousErrors: hasStaticSegments,
        createInstantStack
    };
}
|
||||
|
||||
//# sourceMappingURL=instant-validation.js.map
|
||||
Generated
Vendored
+1
File diff suppressed because one or more lines are too long
+7
@@ -0,0 +1,7 @@
|
||||
import type { Readable } from 'node:stream';
|
||||
/**
|
||||
* When we abort a staged render, we can still provide react with more chunks from later phases
|
||||
* to use for their debug info. This will not cause more contents to be rendered.
|
||||
*/
|
||||
export declare function createNodeStreamWithLateRelease(partialChunks: Array<Uint8Array>, allChunks: Array<Uint8Array>, releaseSignal: AbortSignal): Readable;
|
||||
export declare function createNodeStreamFromChunks(chunks: Array<Uint8Array>, signal?: AbortSignal): Readable;
|
||||
+96
@@ -0,0 +1,96 @@
|
||||
// SWC-generated CommonJS interop scaffolding for the stream-utils module.
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Dead-code export stanza: `0 && ...` never executes, but static analyzers
// (e.g. Node's CJS named-exports detection) can read the export names.
0 && (module.exports = {
    createNodeStreamFromChunks: null,
    createNodeStreamWithLateRelease: null
});
// Installs live getter bindings on `exports` for each exported name.
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    createNodeStreamFromChunks: function() {
        return createNodeStreamFromChunks;
    },
    createNodeStreamWithLateRelease: function() {
        return createNodeStreamWithLateRelease;
    }
});
const _invarianterror = require("../../../shared/lib/invariant-error");
|
||||
/**
 * When we abort a staged render, we can still provide React with more chunks
 * from later phases to use for their debug info. This will not cause more
 * contents to be rendered.
 *
 * Emits `partialChunks` on demand; once `releaseSignal` aborts, the
 * remainder of `allChunks` (a superset of `partialChunks`) is flushed and
 * the stream is ended on the next macrotask tick.
 *
 * @throws InvariantError (E993) in the edge runtime (requires node:stream).
 */
function createNodeStreamWithLateRelease(partialChunks, allChunks, releaseSignal) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new _invarianterror.InvariantError('createNodeStreamWithLateRelease cannot be used in the edge runtime'), "__NEXT_ERROR_CODE", {
            value: "E993",
            enumerable: false,
            configurable: true
        });
    } else {
        const { Readable } = require('node:stream');
        // Cursor into the chunk arrays, shared by read() and the abort listener.
        let nextIndex = 0;
        const readable = new Readable({
            read () {
                while(nextIndex < partialChunks.length){
                    this.push(partialChunks[nextIndex]);
                    nextIndex++;
                }
            }
        });
        releaseSignal.addEventListener('abort', ()=>{
            // Flush any remaining chunks from the original set
            while(nextIndex < partialChunks.length){
                readable.push(partialChunks[nextIndex]);
                nextIndex++;
            }
            // Flush all chunks since we're now aborted and can't schedule
            // any new work but these chunks might unblock debugInfo
            while(nextIndex < allChunks.length){
                readable.push(allChunks[nextIndex]);
                nextIndex++;
            }
            // EOF is delayed a tick — presumably so the flushed chunks are
            // consumed before end, matching the delayed closes elsewhere in
            // this patch (TODO confirm).
            setImmediate(()=>{
                readable.push(null);
            });
        }, {
            once: true
        });
        return readable;
    }
}
|
||||
/**
 * Creates a Node Readable over a fixed chunk array. Without a signal, the
 * stream ends as soon as the chunks are drained; with a signal, it stays
 * open after draining and only emits EOF when the signal aborts.
 *
 * @throws InvariantError (E945) in the edge runtime (requires node:stream).
 */
function createNodeStreamFromChunks(chunks, signal) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        throw Object.defineProperty(new _invarianterror.InvariantError('createNodeStreamFromChunks cannot be used in the edge runtime'), "__NEXT_ERROR_CODE", {
            value: "E945",
            enumerable: false,
            configurable: true
        });
    } else {
        const { Readable } = require('node:stream');
        // If there's a signal, delay closing until it fires
        if (signal) {
            signal.addEventListener('abort', ()=>{
                // NOTE(review): `readable` is declared below; safe because this
                // callback only runs after the const is initialized.
                readable.push(null);
            }, {
                once: true
            });
        }
        let nextIndex = 0;
        const readable = new Readable({
            read () {
                while(nextIndex < chunks.length){
                    this.push(chunks[nextIndex]);
                    nextIndex++;
                }
                if (!signal) {
                    this.push(null);
                }
            }
        });
        return readable;
    }
}
|
||||
|
||||
//# sourceMappingURL=stream-utils.js.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../src/server/app-render/instant-validation/stream-utils.ts"],"sourcesContent":["import type { Readable } from 'node:stream'\nimport { InvariantError } from '../../../shared/lib/invariant-error'\n\n/**\n * When we abort a staged render, we can still provide react with more chunks from later phases\n * to use for their debug info. This will not cause more contents to be rendered.\n */\nexport function createNodeStreamWithLateRelease(\n partialChunks: Array<Uint8Array>,\n allChunks: Array<Uint8Array>,\n releaseSignal: AbortSignal\n): Readable {\n if (process.env.NEXT_RUNTIME === 'edge') {\n throw new InvariantError(\n 'createNodeStreamWithLateRelease cannot be used in the edge runtime'\n )\n } else {\n const { Readable } = require('node:stream') as typeof import('node:stream')\n\n let nextIndex = 0\n\n const readable = new Readable({\n read() {\n while (nextIndex < partialChunks.length) {\n this.push(partialChunks[nextIndex])\n nextIndex++\n }\n },\n })\n\n releaseSignal.addEventListener(\n 'abort',\n () => {\n // Flush any remaining chunks from the original set\n while (nextIndex < partialChunks.length) {\n readable.push(partialChunks[nextIndex])\n nextIndex++\n }\n // Flush all chunks since we're now aborted and can't schedule\n // any new work but these chunks might unblock debugInfo\n while (nextIndex < allChunks.length) {\n readable.push(allChunks[nextIndex])\n nextIndex++\n }\n\n setImmediate(() => {\n readable.push(null)\n })\n },\n { once: true }\n )\n\n return readable\n }\n}\n\nexport function createNodeStreamFromChunks(\n chunks: Array<Uint8Array>,\n signal?: AbortSignal\n): Readable {\n if (process.env.NEXT_RUNTIME === 'edge') {\n throw new InvariantError(\n 'createNodeStreamFromChunks cannot be used in the edge runtime'\n )\n } else {\n const { Readable } = require('node:stream') as typeof import('node:stream')\n\n // If there's a signal, delay closing until it fires\n if (signal) {\n signal.addEventListener(\n 
'abort',\n () => {\n readable.push(null)\n },\n { once: true }\n )\n }\n\n let nextIndex = 0\n const readable = new Readable({\n read() {\n while (nextIndex < chunks.length) {\n this.push(chunks[nextIndex])\n nextIndex++\n }\n if (!signal) {\n this.push(null)\n }\n },\n })\n return readable\n }\n}\n"],"names":["createNodeStreamFromChunks","createNodeStreamWithLateRelease","partialChunks","allChunks","releaseSignal","process","env","NEXT_RUNTIME","InvariantError","Readable","require","nextIndex","readable","read","length","push","addEventListener","setImmediate","once","chunks","signal"],"mappings":";;;;;;;;;;;;;;;IAwDgBA,0BAA0B;eAA1BA;;IAjDAC,+BAA+B;eAA/BA;;;gCANe;AAMxB,SAASA,gCACdC,aAAgC,EAChCC,SAA4B,EAC5BC,aAA0B;IAE1B,IAAIC,QAAQC,GAAG,CAACC,YAAY,KAAK,QAAQ;QACvC,MAAM,qBAEL,CAFK,IAAIC,8BAAc,CACtB,uEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF,OAAO;QACL,MAAM,EAAEC,QAAQ,EAAE,GAAGC,QAAQ;QAE7B,IAAIC,YAAY;QAEhB,MAAMC,WAAW,IAAIH,SAAS;YAC5BI;gBACE,MAAOF,YAAYT,cAAcY,MAAM,CAAE;oBACvC,IAAI,CAACC,IAAI,CAACb,aAAa,CAACS,UAAU;oBAClCA;gBACF;YACF;QACF;QAEAP,cAAcY,gBAAgB,CAC5B,SACA;YACE,mDAAmD;YACnD,MAAOL,YAAYT,cAAcY,MAAM,CAAE;gBACvCF,SAASG,IAAI,CAACb,aAAa,CAACS,UAAU;gBACtCA;YACF;YACA,8DAA8D;YAC9D,wDAAwD;YACxD,MAAOA,YAAYR,UAAUW,MAAM,CAAE;gBACnCF,SAASG,IAAI,CAACZ,SAAS,CAACQ,UAAU;gBAClCA;YACF;YAEAM,aAAa;gBACXL,SAASG,IAAI,CAAC;YAChB;QACF,GACA;YAAEG,MAAM;QAAK;QAGf,OAAON;IACT;AACF;AAEO,SAASZ,2BACdmB,MAAyB,EACzBC,MAAoB;IAEpB,IAAIf,QAAQC,GAAG,CAACC,YAAY,KAAK,QAAQ;QACvC,MAAM,qBAEL,CAFK,IAAIC,8BAAc,CACtB,kEADI,qBAAA;mBAAA;wBAAA;0BAAA;QAEN;IACF,OAAO;QACL,MAAM,EAAEC,QAAQ,EAAE,GAAGC,QAAQ;QAE7B,oDAAoD;QACpD,IAAIU,QAAQ;YACVA,OAAOJ,gBAAgB,CACrB,SACA;gBACEJ,SAASG,IAAI,CAAC;YAChB,GACA;gBAAEG,MAAM;YAAK;QAEjB;QAEA,IAAIP,YAAY;QAChB,MAAMC,WAAW,IAAIH,SAAS;YAC5BI;gBACE,MAAOF,YAAYQ,OAAOL,MAAM,CAAE;oBAChC,IAAI,CAACC,IAAI,CAACI,MAAM,CAACR,UAAU;oBAC3BA;gBACF;gBACA,IAAI,CAACS,QAAQ;oBACX,IAAI,CAACL,IAAI,CAAC;gBACZ;YACF;QACF;QACA,OAAOH;IACT;AACF","ignoreList":[0]}
|
||||
Reference in New Issue
Block a user