+37
@@ -0,0 +1,37 @@
import { DEFAULT_SEGMENT_KEY } from '../../shared/lib/segment';
export async function getLayoutOrPageModule(loaderTree) {
    const { layout, page, defaultPage } = loaderTree[2];
    const isLayout = typeof layout !== 'undefined';
    const isPage = typeof page !== 'undefined';
    const isDefaultPage = typeof defaultPage !== 'undefined' && loaderTree[0] === DEFAULT_SEGMENT_KEY;
    let mod = undefined;
    let modType = undefined;
    let filePath = undefined;
    if (isLayout) {
        mod = await layout[0]();
        modType = 'layout';
        filePath = layout[1];
    } else if (isPage) {
        mod = await page[0]();
        modType = 'page';
        filePath = page[1];
    } else if (isDefaultPage) {
        mod = await defaultPage[0]();
        modType = 'page';
        filePath = defaultPage[1];
    }
    return {
        mod,
        modType,
        filePath
    };
}
export async function getComponentTypeModule(loaderTree, moduleType) {
    const { [moduleType]: module } = loaderTree[2];
    if (typeof module !== 'undefined') {
        return await module[0]();
    }
    return undefined;
}

//# sourceMappingURL=app-dir-module.js.map
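For orientation, a minimal sketch of how getLayoutOrPageModule consumes a loader tree. The tuple shape ([segment, parallelRoutes, modules, staticSiblings]) and the [loader, filePath] module entries follow the LoaderTree type embedded in the source map below; the RootLayout component and file path are hypothetical.

// Hypothetical loader tree: module entries are [loader, filePath] tuples.
const loaderTree = [
    '', // segment
    {}, // parallelRoutes
    {
        layout: [
            async ()=>({ default: function RootLayout() { return null; } }),
            'app/layout.js'
        ]
    },
    null // staticSiblings (unknown, e.g. webpack dev mode)
];
const { mod, modType, filePath } = await getLayoutOrPageModule(loaderTree);
// modType === 'layout', filePath === 'app/layout.js', mod.default === RootLayout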
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/app-dir-module.ts"],"sourcesContent":["import type { AppDirModules } from '../../build/webpack/loaders/next-app-loader'\nimport { DEFAULT_SEGMENT_KEY } from '../../shared/lib/segment'\n\n/**\n * LoaderTree is generated in next-app-loader.\n */\nexport type LoaderTree = [\n segment: string,\n parallelRoutes: { [parallelRouterKey: string]: LoaderTree },\n modules: AppDirModules,\n /**\n * At build time, for each dynamic segment, we compute the list of static\n * sibling segments that exist at the same URL path level. This is used by\n * the client router to determine if a prefetch can be reused.\n *\n * For example, given the following file structure:\n * /app/(group1)/products/sale/page.tsx -> /products/sale\n * /app/(group2)/products/[id]/page.tsx -> /products/[id]\n *\n * The [id] segment would have staticSiblings: ['sale']\n *\n * This accounts for route groups, which may place sibling routes in\n * different parts of the file system tree but at the same URL level.\n *\n * A value of `null` means the static siblings are unknown (e.g., in webpack\n * dev mode where routes are compiled on-demand).\n */\n staticSiblings: readonly string[] | null,\n]\n\nexport async function getLayoutOrPageModule(loaderTree: LoaderTree) {\n const { layout, page, defaultPage } = loaderTree[2]\n const isLayout = typeof layout !== 'undefined'\n const isPage = typeof page !== 'undefined'\n const isDefaultPage =\n typeof defaultPage !== 'undefined' && loaderTree[0] === DEFAULT_SEGMENT_KEY\n\n let mod = undefined\n let modType: 'layout' | 'page' | undefined = undefined\n let filePath = undefined\n\n if (isLayout) {\n mod = await layout[0]()\n modType = 'layout'\n filePath = layout[1]\n } else if (isPage) {\n mod = await page[0]()\n modType = 'page'\n filePath = page[1]\n } else if (isDefaultPage) {\n mod = await defaultPage[0]()\n modType = 'page'\n filePath = defaultPage[1]\n }\n\n return { mod, modType, filePath }\n}\n\nexport async function getComponentTypeModule(\n loaderTree: LoaderTree,\n moduleType: 'layout' | 'not-found' | 'forbidden' | 'unauthorized'\n) {\n const { [moduleType]: module } = loaderTree[2]\n if (typeof module !== 'undefined') {\n return await module[0]()\n }\n return undefined\n}\n"],"names":["DEFAULT_SEGMENT_KEY","getLayoutOrPageModule","loaderTree","layout","page","defaultPage","isLayout","isPage","isDefaultPage","mod","undefined","modType","filePath","getComponentTypeModule","moduleType","module"],"mappings":"AACA,SAASA,mBAAmB,QAAQ,2BAA0B;AA6B9D,OAAO,eAAeC,sBAAsBC,UAAsB;IAChE,MAAM,EAAEC,MAAM,EAAEC,IAAI,EAAEC,WAAW,EAAE,GAAGH,UAAU,CAAC,EAAE;IACnD,MAAMI,WAAW,OAAOH,WAAW;IACnC,MAAMI,SAAS,OAAOH,SAAS;IAC/B,MAAMI,gBACJ,OAAOH,gBAAgB,eAAeH,UAAU,CAAC,EAAE,KAAKF;IAE1D,IAAIS,MAAMC;IACV,IAAIC,UAAyCD;IAC7C,IAAIE,WAAWF;IAEf,IAAIJ,UAAU;QACZG,MAAM,MAAMN,MAAM,CAAC,EAAE;QACrBQ,UAAU;QACVC,WAAWT,MAAM,CAAC,EAAE;IACtB,OAAO,IAAII,QAAQ;QACjBE,MAAM,MAAML,IAAI,CAAC,EAAE;QACnBO,UAAU;QACVC,WAAWR,IAAI,CAAC,EAAE;IACpB,OAAO,IAAII,eAAe;QACxBC,MAAM,MAAMJ,WAAW,CAAC,EAAE;QAC1BM,UAAU;QACVC,WAAWP,WAAW,CAAC,EAAE;IAC3B;IAEA,OAAO;QAAEI;QAAKE;QAASC;IAAS;AAClC;AAEA,OAAO,eAAeC,uBACpBX,UAAsB,EACtBY,UAAiE;IAEjE,MAAM,EAAE,CAACA,WAAW,EAAEC,MAAM,EAAE,GAAGb,UAAU,CAAC,EAAE;IAC9C,IAAI,OAAOa,WAAW,aAAa;QACjC,OAAO,MAAMA,MAAM,CAAC,EAAE;IACxB;IACA,OAAOL;AACT","ignoreList":[0]}
+73
@@ -0,0 +1,73 @@
import { loadEnvConfig } from '@next/env';
import * as inspector from 'inspector';
import * as Log from '../../build/output/log';
import { bold, purple, strikethrough } from '../../lib/picocolors';
import { experimentalSchema } from '../config-schema';
/**
 * Logs basic startup info that doesn't require config.
 * Called before "Ready in X" to show immediate feedback.
 */ export function logStartInfo({ networkUrl, appUrl, envInfo, logBundler }) {
    let versionSuffix = '';
    const parts = [];
    if (logBundler) {
        if (process.env.TURBOPACK) {
            parts.push('Turbopack');
        } else if (process.env.NEXT_RSPACK) {
            parts.push('Rspack');
        } else {
            parts.push('webpack');
        }
    }
    if (parts.length > 0) {
        versionSuffix = ` (${parts.join(', ')})`;
    }
    Log.bootstrap(`${bold(purple(`${Log.prefixes.ready} Next.js ${"16.2.0"}`))}${versionSuffix}`);
    if (appUrl) {
        Log.bootstrap(`- Local: ${appUrl}`);
    }
    if (networkUrl) {
        Log.bootstrap(`- Network: ${networkUrl}`);
    }
    const inspectorUrl = inspector.url();
    if (inspectorUrl) {
        // Could also parse this port from the inspector URL.
        // process.debugPort will always be defined even if the process is not being inspected.
        // The full URL seems noisy as far as I can tell.
        // Node.js will print the full URL anyway.
        const debugPort = process.debugPort;
        Log.bootstrap(`- Debugger port: ${debugPort}`);
    }
    if (envInfo == null ? void 0 : envInfo.length) Log.bootstrap(`- Environments: ${envInfo.join(', ')}`);
}
/**
 * Logs experimental features and config-dependent info.
 * Called after getRequestHandlers completes.
 */ export function logExperimentalInfo({ experimentalFeatures, cacheComponents }) {
    if (cacheComponents) {
        Log.bootstrap(`- Cache Components enabled`);
    }
    if (experimentalFeatures == null ? void 0 : experimentalFeatures.length) {
        Log.bootstrap(`- Experiments (use with caution):`);
        for (const exp of experimentalFeatures){
            const isValid = Object.prototype.hasOwnProperty.call(experimentalSchema, exp.key);
            if (isValid) {
                const symbol = typeof exp.value === 'boolean' ? exp.value === true ? bold('✓') : bold('⨯') : '·';
                const suffix = typeof exp.value === 'number' || typeof exp.value === 'string' ? `: ${JSON.stringify(exp.value)}` : '';
                const reason = exp.reason ? ` (${exp.reason})` : '';
                Log.bootstrap(`  ${symbol} ${exp.key}${suffix}${reason}`);
            } else {
                Log.bootstrap(`  ? ${strikethrough(exp.key)} (invalid experimental key)`);
            }
        }
    }
    // New line after the bootstrap info
    Log.info('');
}
/**
 * Gets environment info for logging. Fast operation that doesn't require config.
 */ export function getEnvInfo(dir) {
    const { loadedEnvFiles } = loadEnvConfig(dir, true, console, false);
    return loadedEnvFiles.map((f)=>f.path);
}

//# sourceMappingURL=app-info-log.js.map
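A rough sketch of a call and the bootstrap lines it produces; the URLs and env file name are invented, and the exact ready prefix comes from Log.prefixes, which is not shown in this diff.

logStartInfo({
    networkUrl: 'http://192.168.0.10:3000', // hypothetical
    appUrl: 'http://localhost:3000', // hypothetical
    envInfo: ['.env.local'], // hypothetical
    logBundler: true
});
// With process.env.TURBOPACK set, this logs something like:
//   <ready prefix> Next.js 16.2.0 (Turbopack)
//   - Local: http://localhost:3000
//   - Network: http://192.168.0.10:3000
//   - Environments: .env.local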
+1
File diff suppressed because one or more lines are too long
+19
@@ -0,0 +1,19 @@
export class AsyncCallbackSet {
    add(callback) {
        this.callbacks.push(callback);
    }
    async runAll() {
        if (!this.callbacks.length) {
            return;
        }
        const callbacks = this.callbacks;
        this.callbacks = [];
        await Promise.allSettled(callbacks.map(
        // NOTE: wrapped in an async function to protect against synchronous exceptions
        async (f)=>f()));
    }
    constructor(){
        this.callbacks = [];
    }
}

//# sourceMappingURL=async-callback-set.js.map
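A small usage sketch. Because runAll uses Promise.allSettled, one rejecting callback does not prevent the others from running, and the list is drained up front; closeDatabase here is an invented stand-in.

const cleanups = new AsyncCallbackSet();
cleanups.add(async ()=>closeDatabase()); // hypothetical cleanup
cleanups.add(async ()=>{
    throw new Error('flush failed');
});
await cleanups.runAll(); // resolves; the rejection is absorbed by allSettled
await cleanups.runAll(); // no-op: the first call already emptied the list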
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/async-callback-set.ts"],"sourcesContent":["export class AsyncCallbackSet {\n private callbacks: (() => Promise<void>)[] = []\n\n public add(callback: () => Promise<void>) {\n this.callbacks.push(callback)\n }\n\n public async runAll(): Promise<void> {\n if (!this.callbacks.length) {\n return\n }\n const callbacks = this.callbacks\n this.callbacks = []\n await Promise.allSettled(\n callbacks.map(\n // NOTE: wrapped in an async function to protect against synchronous exceptions\n async (f) => f()\n )\n )\n }\n}\n"],"names":["AsyncCallbackSet","add","callback","callbacks","push","runAll","length","Promise","allSettled","map","f"],"mappings":"AAAA,OAAO,MAAMA;IAGJC,IAAIC,QAA6B,EAAE;QACxC,IAAI,CAACC,SAAS,CAACC,IAAI,CAACF;IACtB;IAEA,MAAaG,SAAwB;QACnC,IAAI,CAAC,IAAI,CAACF,SAAS,CAACG,MAAM,EAAE;YAC1B;QACF;QACA,MAAMH,YAAY,IAAI,CAACA,SAAS;QAChC,IAAI,CAACA,SAAS,GAAG,EAAE;QACnB,MAAMI,QAAQC,UAAU,CACtBL,UAAUM,GAAG,CACX,+EAA+E;QAC/E,OAAOC,IAAMA;IAGnB;;aAlBQP,YAAqC,EAAE;;AAmBjD","ignoreList":[0]}
+12
@@ -0,0 +1,12 @@
import { CACHE_ONE_YEAR_SECONDS } from '../../lib/constants';
export function getCacheControlHeader({ revalidate, expire }) {
    const swrHeader = typeof revalidate === 'number' && expire !== undefined && revalidate < expire ? `, stale-while-revalidate=${expire - revalidate}` : '';
    if (revalidate === 0) {
        return 'private, no-cache, no-store, max-age=0, must-revalidate';
    } else if (typeof revalidate === 'number') {
        return `s-maxage=${revalidate}${swrHeader}`;
    }
    return `s-maxage=${CACHE_ONE_YEAR_SECONDS}${swrHeader}`;
}

//# sourceMappingURL=cache-control.js.map
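Worked examples, following directly from the branches above:

getCacheControlHeader({ revalidate: 0, expire: undefined });
// => 'private, no-cache, no-store, max-age=0, must-revalidate'
getCacheControlHeader({ revalidate: 10, expire: 60 });
// => 's-maxage=10, stale-while-revalidate=50'
getCacheControlHeader({ revalidate: false, expire: undefined });
// => `s-maxage=${CACHE_ONE_YEAR_SECONDS}` (no SWR window without a numeric revalidate)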
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/cache-control.ts"],"sourcesContent":["import { CACHE_ONE_YEAR_SECONDS } from '../../lib/constants'\n\n/**\n * The revalidate option used internally for pages. A value of `false` means\n * that the page should not be revalidated. A number means that the page\n * should be revalidated after the given number of seconds (this also includes\n * `1` which means to revalidate after 1 second). A value of `0` is not a valid\n * value for this option.\n */\nexport type Revalidate = number | false\n\nexport interface CacheControl {\n revalidate: Revalidate\n expire: number | undefined\n}\n\nexport function getCacheControlHeader({\n revalidate,\n expire,\n}: CacheControl): string {\n const swrHeader =\n typeof revalidate === 'number' &&\n expire !== undefined &&\n revalidate < expire\n ? `, stale-while-revalidate=${expire - revalidate}`\n : ''\n\n if (revalidate === 0) {\n return 'private, no-cache, no-store, max-age=0, must-revalidate'\n } else if (typeof revalidate === 'number') {\n return `s-maxage=${revalidate}${swrHeader}`\n }\n\n return `s-maxage=${CACHE_ONE_YEAR_SECONDS}${swrHeader}`\n}\n"],"names":["CACHE_ONE_YEAR_SECONDS","getCacheControlHeader","revalidate","expire","swrHeader","undefined"],"mappings":"AAAA,SAASA,sBAAsB,QAAQ,sBAAqB;AAgB5D,OAAO,SAASC,sBAAsB,EACpCC,UAAU,EACVC,MAAM,EACO;IACb,MAAMC,YACJ,OAAOF,eAAe,YACtBC,WAAWE,aACXH,aAAaC,SACT,CAAC,yBAAyB,EAAEA,SAASD,YAAY,GACjD;IAEN,IAAIA,eAAe,GAAG;QACpB,OAAO;IACT,OAAO,IAAI,OAAOA,eAAe,UAAU;QACzC,OAAO,CAAC,SAAS,EAAEA,aAAaE,WAAW;IAC7C;IAEA,OAAO,CAAC,SAAS,EAAEJ,yBAAyBI,WAAW;AACzD","ignoreList":[0]}
+8
@@ -0,0 +1,8 @@
import { createDefaultCacheHandler } from './default';
/**
 * Used for edge runtime compatibility.
 *
 * @deprecated Use createDefaultCacheHandler instead.
 */ export default createDefaultCacheHandler(50 * 1024 * 1024);

//# sourceMappingURL=default.external.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/cache-handlers/default.external.ts"],"sourcesContent":["import { createDefaultCacheHandler } from './default'\n\n/**\n * Used for edge runtime compatibility.\n *\n * @deprecated Use createDefaultCacheHandler instead.\n */\nexport default createDefaultCacheHandler(50 * 1024 * 1024)\n"],"names":["createDefaultCacheHandler"],"mappings":"AAAA,SAASA,yBAAyB,QAAQ,YAAW;AAErD;;;;CAIC,GACD,eAAeA,0BAA0B,KAAK,OAAO,MAAK","ignoreList":[0]}
+150
@@ -0,0 +1,150 @@
/**
 * This is the default "use cache" handler; it defaults to an in-memory store.
 * In-memory caches are fragile and should not use stale-while-revalidate
 * semantics on the caches because it's not worth warming up an entry that's
 * likely going to get evicted before we get to use it anyway. However, we also
 * don't want to reuse a stale entry for too long, so stale entries should be
 * considered expired/missing in such cache handlers.
 */ import { LRUCache } from '../lru-cache';
import { areTagsExpired, areTagsStale, tagsManifest } from '../incremental-cache/tags-manifest.external';
export function createDefaultCacheHandler(maxSize) {
    // If the max size is 0, return a cache handler that doesn't cache anything.
    // This avoids an unnecessary LRUCache instance and potential memory
    // allocation.
    if (maxSize === 0) {
        return {
            get: ()=>Promise.resolve(undefined),
            set: ()=>Promise.resolve(),
            refreshTags: ()=>Promise.resolve(),
            getExpiration: ()=>Promise.resolve(0),
            updateTags: ()=>Promise.resolve()
        };
    }
    const memoryCache = new LRUCache(maxSize, (entry)=>entry.size);
    const pendingSets = new Map();
    const debug = process.env.NEXT_PRIVATE_DEBUG_CACHE ? console.debug.bind(console, 'DefaultCacheHandler:') : undefined;
    return {
        async get (cacheKey) {
            const pendingPromise = pendingSets.get(cacheKey);
            if (pendingPromise) {
                debug == null ? void 0 : debug('get', cacheKey, 'pending');
                await pendingPromise;
            }
            const privateEntry = memoryCache.get(cacheKey);
            if (!privateEntry) {
                debug == null ? void 0 : debug('get', cacheKey, 'not found');
                return undefined;
            }
            const entry = privateEntry.entry;
            if (performance.timeOrigin + performance.now() > entry.timestamp + entry.revalidate * 1000) {
                // In-memory caches should expire after revalidate time because it is
                // unlikely that a new entry will be able to be used before it is dropped
                // from the cache.
                debug == null ? void 0 : debug('get', cacheKey, 'expired');
                return undefined;
            }
            let revalidate = entry.revalidate;
            if (areTagsExpired(entry.tags, entry.timestamp)) {
                debug == null ? void 0 : debug('get', cacheKey, 'had expired tag');
                return undefined;
            }
            if (areTagsStale(entry.tags, entry.timestamp)) {
                debug == null ? void 0 : debug('get', cacheKey, 'had stale tag');
                revalidate = -1;
            }
            const [returnStream, newSaved] = entry.value.tee();
            entry.value = newSaved;
            debug == null ? void 0 : debug('get', cacheKey, 'found', {
                tags: entry.tags,
                timestamp: entry.timestamp,
                expire: entry.expire,
                revalidate
            });
            return {
                ...entry,
                revalidate,
                value: returnStream
            };
        },
        async set (cacheKey, pendingEntry) {
            debug == null ? void 0 : debug('set', cacheKey, 'start');
            let resolvePending = ()=>{};
            const pendingPromise = new Promise((resolve)=>{
                resolvePending = resolve;
            });
            pendingSets.set(cacheKey, pendingPromise);
            const entry = await pendingEntry;
            let size = 0;
            try {
                const [value, clonedValue] = entry.value.tee();
                entry.value = value;
                const reader = clonedValue.getReader();
                for(let chunk; !(chunk = await reader.read()).done;){
                    size += Buffer.from(chunk.value).byteLength;
                }
                memoryCache.set(cacheKey, {
                    entry,
                    isErrored: false,
                    errorRetryCount: 0,
                    size
                });
                debug == null ? void 0 : debug('set', cacheKey, 'done');
            } catch (err) {
                // TODO: store partial buffer with error after we retry 3 times
                debug == null ? void 0 : debug('set', cacheKey, 'failed', err);
            } finally{
                resolvePending();
                pendingSets.delete(cacheKey);
            }
        },
        async refreshTags () {
            // Nothing to do for an in-memory cache handler.
        },
        async getExpiration (tags) {
            const expirations = tags.map((tag)=>{
                const entry = tagsManifest.get(tag);
                if (!entry) return 0;
                // Return the most recent timestamp (either expired or stale)
                return entry.expired || 0;
            });
            const expiration = Math.max(...expirations, 0);
            debug == null ? void 0 : debug('getExpiration', {
                tags,
                expiration
            });
            return expiration;
        },
        async updateTags (tags, durations) {
            const now = Math.round(performance.timeOrigin + performance.now());
            debug == null ? void 0 : debug('updateTags', {
                tags,
                timestamp: now
            });
            for (const tag of tags){
                // TODO: update file-system-cache?
                const existingEntry = tagsManifest.get(tag) || {};
                if (durations) {
                    // Use provided durations directly
                    const updates = {
                        ...existingEntry
                    };
                    // mark as stale immediately
                    updates.stale = now;
                    if (durations.expire !== undefined) {
                        updates.expired = now + durations.expire * 1000; // Convert seconds to ms
                    }
                    tagsManifest.set(tag, updates);
                } else {
                    // Update expired field for immediate expiration (default behavior when no durations provided)
                    tagsManifest.set(tag, {
                        ...existingEntry,
                        expired: now
                    });
                }
            }
        }
    };
}

//# sourceMappingURL=default.js.map
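A minimal round trip with the in-memory handler, assuming a Node runtime with web streams. The entry fields follow the CacheEntry shape from cache-handlers/types below; the key and payload are invented.

const handler = createDefaultCacheHandler(50 * 1024 * 1024);
const entry = {
    value: new Response('cached payload').body, // ReadableStream<Uint8Array>
    tags: [],
    stale: 30, // seconds (client-facing)
    timestamp: Date.now(), // ms since epoch
    expire: 300, // seconds
    revalidate: 60 // seconds
};
await handler.set('demo-key', Promise.resolve(entry));
const hit = await handler.get('demo-key');
if (hit) {
    console.log(await new Response(hit.value).text()); // 'cached payload'
}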
+1
File diff suppressed because one or more lines are too long
+5
@@ -0,0 +1,5 @@
/**
 * A timestamp in milliseconds elapsed since the epoch
 */ export { };

//# sourceMappingURL=types.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/cache-handlers/types.ts"],"sourcesContent":["/**\n * A timestamp in milliseconds elapsed since the epoch\n */\nexport type Timestamp = number\n\nexport interface CacheEntry {\n /**\n * The ReadableStream can error and only have partial data so any cache\n * handlers need to handle this case and decide to keep the partial cache\n * around or not.\n */\n value: ReadableStream<Uint8Array>\n\n /**\n * The tags configured for the entry excluding soft tags\n */\n tags: string[]\n\n /**\n * This is for the client, not used to calculate cache entry expiration\n * [duration in seconds]\n */\n stale: number\n\n /**\n * When the cache entry was created [timestamp in milliseconds]\n */\n timestamp: Timestamp\n\n /**\n * How long the entry is allowed to be used (should be longer than revalidate)\n * [duration in seconds]\n */\n expire: number\n\n /**\n * How long until the entry should be revalidated [duration in seconds]\n */\n revalidate: number\n}\n\nexport interface CacheHandler {\n /**\n * Retrieve a cache entry for the given cache key, if available. Will return\n * undefined if there's no valid entry, or if the given soft tags are stale.\n */\n get(cacheKey: string, softTags: string[]): Promise<undefined | CacheEntry>\n\n /**\n * Store a cache entry for the given cache key. When this is called, the entry\n * may still be pending, i.e. its value stream may still be written to. So it\n * needs to be awaited first. If a `get` for the same cache key is called,\n * before the pending entry is complete, the cache handler must wait for the\n * `set` operation to finish, before returning the entry, instead of returning\n * undefined.\n */\n set(cacheKey: string, pendingEntry: Promise<CacheEntry>): Promise<void>\n\n /**\n * This function may be called periodically, but always before starting a new\n * request. If applicable, it should communicate with the tags service to\n * refresh the local tags manifest accordingly.\n */\n refreshTags(): Promise<void>\n\n /**\n * This function is called for each set of soft tags that are relevant at the\n * start of a request. The result is the maximum timestamp of a revalidate\n * event for the tags. Returns `0` if none of the tags were ever revalidated.\n * Returns `Infinity` if the soft tags are supposed to be passed into the\n * `get` method instead to be checked for expiration.\n */\n getExpiration(tags: string[]): Promise<Timestamp>\n\n /**\n * This function is called when tags are revalidated/expired. If applicable,\n * it should update the tags manifest accordingly.\n */\n updateTags(tags: string[], durations?: { expire?: number }): Promise<void>\n}\n"],"names":[],"mappings":"AAAA;;CAEC,GAuCD,WAsCC","ignoreList":[0]}
+53
@@ -0,0 +1,53 @@
import { randomUUID } from 'crypto';
import * as fs from 'fs';
import * as path from 'path';
import { getStorageDirectory } from '../cache-dir';
// Keep the uuid in memory as it should never change during the lifetime of the server.
let workspaceUUID = null;
export function isChromeDevtoolsWorkspaceUrl(pathname) {
    return pathname === '/.well-known/appspecific/com.chrome.devtools.json';
}
export async function handleChromeDevtoolsWorkspaceRequest(response, opts, config) {
    response.setHeader('Content-Type', 'application/json');
    response.end(JSON.stringify(await getChromeDevtoolsWorkspace(opts.dir, config.distDir), null, 2));
}
/**
 * For https://developer.chrome.com/docs/devtools/workspaces
 */ async function getChromeDevtoolsWorkspace(root, configDistDir) {
    if (workspaceUUID === null) {
        const distDir = path.join(root, configDistDir);
        const cacheBaseDir = getStorageDirectory(distDir);
        if (cacheBaseDir === undefined) {
            workspaceUUID = randomUUID();
        } else {
            const cachedUUIDPath = path.join(cacheBaseDir, 'chrome-devtools-workspace-uuid');
            try {
                workspaceUUID = await fs.promises.readFile(cachedUUIDPath, 'utf8');
            } catch {
                // TODO: Why does this need to be v4 and not v5?
                // With v5 we could base it off of the `distDir` and `root` which would
                // allow us to persist the workspace across .next wipes.
                workspaceUUID = randomUUID();
                try {
                    await fs.promises.writeFile(cachedUUIDPath, workspaceUUID, 'utf8');
                } catch (cause) {
                    console.warn(Object.defineProperty(new Error('Failed to persist Chrome DevTools workspace UUID. The Chrome DevTools Workspace needs to be reconnected after the next page reload.', {
                        cause
                    }), "__NEXT_ERROR_CODE", {
                        value: "E708",
                        enumerable: false,
                        configurable: true
                    }));
                }
            }
        }
    }
    return {
        workspace: {
            uuid: workspaceUUID,
            root
        }
    };
}

//# sourceMappingURL=chrome-devtools-workspace.js.map
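The JSON served at /.well-known/appspecific/com.chrome.devtools.json ends up shaped like this; the uuid and root values are illustrative.

{
  "workspace": {
    "uuid": "3f6b2a9e-1c4d-4f0a-9b7e-2d5c8a1e6f30",
    "root": "/home/user/my-app"
  }
}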
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/chrome-devtools-workspace.ts"],"sourcesContent":["import type { ServerResponse } from 'http'\nimport type { NextConfigRuntime } from '../config-shared'\n\nimport { randomUUID } from 'crypto'\nimport * as fs from 'fs'\nimport * as path from 'path'\nimport { getStorageDirectory } from '../cache-dir'\n\n// Keep the uuid in memory as it should never change during the lifetime of the server.\nlet workspaceUUID: string | null = null\n\nexport function isChromeDevtoolsWorkspaceUrl(\n pathname: string | undefined\n): boolean {\n return pathname === '/.well-known/appspecific/com.chrome.devtools.json'\n}\n\nexport async function handleChromeDevtoolsWorkspaceRequest(\n response: ServerResponse,\n opts: { dir: string },\n config: NextConfigRuntime\n): Promise<void> {\n response.setHeader('Content-Type', 'application/json')\n response.end(\n JSON.stringify(\n await getChromeDevtoolsWorkspace(opts.dir, config.distDir),\n null,\n 2\n )\n )\n}\n\n/**\n * https://developer.chrome.com/docs/devtools/workspaces#generate-json\n */\ninterface ChromeDevtoolsWorkspace {\n workspace: {\n uuid: string\n root: string\n }\n}\n\n/**\n * For https://developer.chrome.com/docs/devtools/workspaces\n */\nasync function getChromeDevtoolsWorkspace(\n root: string,\n configDistDir: string\n): Promise<ChromeDevtoolsWorkspace> {\n if (workspaceUUID === null) {\n const distDir = path.join(root, configDistDir)\n const cacheBaseDir = getStorageDirectory(distDir)\n\n if (cacheBaseDir === undefined) {\n workspaceUUID = randomUUID()\n } else {\n const cachedUUIDPath = path.join(\n cacheBaseDir,\n 'chrome-devtools-workspace-uuid'\n )\n try {\n workspaceUUID = await fs.promises.readFile(cachedUUIDPath, 'utf8')\n } catch {\n // TODO: Why does this need to be v4 and not v5?\n // With v5 we could base it off of the `distDir` and `root` which would\n // allow us to persist the workspace across .next wipes.\n workspaceUUID = randomUUID()\n\n try {\n await fs.promises.writeFile(cachedUUIDPath, workspaceUUID, 'utf8')\n } catch (cause) {\n console.warn(\n new Error(\n 'Failed to persist Chrome DevTools workspace UUID. 
The Chrome DevTools Workspace needs to be reconnected after the next page reload.',\n { cause }\n )\n )\n }\n }\n }\n }\n\n return {\n workspace: {\n uuid: workspaceUUID,\n root,\n },\n }\n}\n"],"names":["randomUUID","fs","path","getStorageDirectory","workspaceUUID","isChromeDevtoolsWorkspaceUrl","pathname","handleChromeDevtoolsWorkspaceRequest","response","opts","config","setHeader","end","JSON","stringify","getChromeDevtoolsWorkspace","dir","distDir","root","configDistDir","join","cacheBaseDir","undefined","cachedUUIDPath","promises","readFile","writeFile","cause","console","warn","Error","workspace","uuid"],"mappings":"AAGA,SAASA,UAAU,QAAQ,SAAQ;AACnC,YAAYC,QAAQ,KAAI;AACxB,YAAYC,UAAU,OAAM;AAC5B,SAASC,mBAAmB,QAAQ,eAAc;AAElD,uFAAuF;AACvF,IAAIC,gBAA+B;AAEnC,OAAO,SAASC,6BACdC,QAA4B;IAE5B,OAAOA,aAAa;AACtB;AAEA,OAAO,eAAeC,qCACpBC,QAAwB,EACxBC,IAAqB,EACrBC,MAAyB;IAEzBF,SAASG,SAAS,CAAC,gBAAgB;IACnCH,SAASI,GAAG,CACVC,KAAKC,SAAS,CACZ,MAAMC,2BAA2BN,KAAKO,GAAG,EAAEN,OAAOO,OAAO,GACzD,MACA;AAGN;AAYA;;CAEC,GACD,eAAeF,2BACbG,IAAY,EACZC,aAAqB;IAErB,IAAIf,kBAAkB,MAAM;QAC1B,MAAMa,UAAUf,KAAKkB,IAAI,CAACF,MAAMC;QAChC,MAAME,eAAelB,oBAAoBc;QAEzC,IAAII,iBAAiBC,WAAW;YAC9BlB,gBAAgBJ;QAClB,OAAO;YACL,MAAMuB,iBAAiBrB,KAAKkB,IAAI,CAC9BC,cACA;YAEF,IAAI;gBACFjB,gBAAgB,MAAMH,GAAGuB,QAAQ,CAACC,QAAQ,CAACF,gBAAgB;YAC7D,EAAE,OAAM;gBACN,gDAAgD;gBAChD,uEAAuE;gBACvE,wDAAwD;gBACxDnB,gBAAgBJ;gBAEhB,IAAI;oBACF,MAAMC,GAAGuB,QAAQ,CAACE,SAAS,CAACH,gBAAgBnB,eAAe;gBAC7D,EAAE,OAAOuB,OAAO;oBACdC,QAAQC,IAAI,CACV,qBAGC,CAHD,IAAIC,MACF,uIACA;wBAAEH;oBAAM,IAFV,qBAAA;+BAAA;oCAAA;sCAAA;oBAGA;gBAEJ;YACF;QACF;IACF;IAEA,OAAO;QACLI,WAAW;YACTC,MAAM5B;YACNc;QACF;IACF;AACF","ignoreList":[0]}
+78
@@ -0,0 +1,78 @@
const noop = ()=>{};
let registry;
if (globalThis.FinalizationRegistry) {
    registry = new FinalizationRegistry((weakRef)=>{
        const stream = weakRef.deref();
        if (stream && !stream.locked) {
            stream.cancel('Response object has been garbage collected').then(noop);
        }
    });
}
/**
 * Clones a response by teeing the body so we can return two independent
 * ReadableStreams from it. This avoids the bug in the undici library around
 * response cloning.
 *
 * After cloning, the original response's body will be consumed and closed.
 *
 * @see https://github.com/vercel/next.js/pull/73274
 *
 * @param original - The original response to clone.
 * @returns A tuple containing two independent clones of the original response.
 */ export function cloneResponse(original) {
    // If the response has no body, then we can just return the original response
    // twice because it's immutable.
    if (!original.body) {
        return [
            original,
            original
        ];
    }
    const [body1, body2] = original.body.tee();
    const cloned1 = new Response(body1, {
        status: original.status,
        statusText: original.statusText,
        headers: original.headers
    });
    Object.defineProperty(cloned1, 'url', {
        value: original.url,
        // How the original response.url behaves
        configurable: true,
        enumerable: true,
        writable: false
    });
    const cloned2 = new Response(body2, {
        status: original.status,
        statusText: original.statusText,
        headers: original.headers
    });
    Object.defineProperty(cloned2, 'url', {
        value: original.url,
        // How the original response.url behaves
        configurable: true,
        enumerable: true,
        writable: false
    });
    // The Fetch Standard allows users to skip consuming the response body by
    // relying on garbage collection to release connection resources.
    // https://github.com/nodejs/undici?tab=readme-ov-file#garbage-collection
    //
    // To cancel the stream you then need to cancel both resulting branches.
    // Teeing a stream will generally lock it for the duration, preventing other
    // readers from locking it.
    // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/tee
    if (registry) {
        if (cloned1.body) {
            registry.register(cloned1, new WeakRef(cloned1.body));
        }
        if (cloned2.body) {
            registry.register(cloned2, new WeakRef(cloned2.body));
        }
    }
    return [
        cloned1,
        cloned2
    ];
}

//# sourceMappingURL=clone-response.js.map
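A usage sketch: after cloning, read each branch independently and leave the original alone, since the tee consumes and locks its body. The URL is a placeholder.

const original = await fetch('https://example.com/'); // any Response
const [a, b] = cloneResponse(original);
const [textA, textB] = await Promise.all([a.text(), b.text()]);
// textA === textB; original.body is locked by the tee and must not be read again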
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/clone-response.ts"],"sourcesContent":["const noop = () => {}\n\nlet registry: FinalizationRegistry<WeakRef<ReadableStream>> | undefined\n\nif (globalThis.FinalizationRegistry) {\n registry = new FinalizationRegistry((weakRef: WeakRef<ReadableStream>) => {\n const stream = weakRef.deref()\n if (stream && !stream.locked) {\n stream.cancel('Response object has been garbage collected').then(noop)\n }\n })\n}\n\n/**\n * Clones a response by teeing the body so we can return two independent\n * ReadableStreams from it. This avoids the bug in the undici library around\n * response cloning.\n *\n * After cloning, the original response's body will be consumed and closed.\n *\n * @see https://github.com/vercel/next.js/pull/73274\n *\n * @param original - The original response to clone.\n * @returns A tuple containing two independent clones of the original response.\n */\nexport function cloneResponse(original: Response): [Response, Response] {\n // If the response has no body, then we can just return the original response\n // twice because it's immutable.\n if (!original.body) {\n return [original, original]\n }\n\n const [body1, body2] = original.body.tee()\n\n const cloned1 = new Response(body1, {\n status: original.status,\n statusText: original.statusText,\n headers: original.headers,\n })\n\n Object.defineProperty(cloned1, 'url', {\n value: original.url,\n // How the original response.url behaves\n configurable: true,\n enumerable: true,\n writable: false,\n })\n\n const cloned2 = new Response(body2, {\n status: original.status,\n statusText: original.statusText,\n headers: original.headers,\n })\n\n Object.defineProperty(cloned2, 'url', {\n value: original.url,\n // How the original response.url behaves\n configurable: true,\n enumerable: true,\n writable: false,\n })\n\n // The Fetch Standard allows users to skip consuming the response body by\n // relying on garbage collection to release connection resources.\n // https://github.com/nodejs/undici?tab=readme-ov-file#garbage-collection\n //\n // To cancel the stream you then need to cancel both resulting branches.\n // Teeing a stream will generally lock it for the duration, preventing other\n // readers from locking it.\n // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/tee\n if (registry) {\n if (cloned1.body) {\n registry.register(cloned1, new WeakRef(cloned1.body))\n }\n\n if (cloned2.body) {\n registry.register(cloned2, new WeakRef(cloned2.body))\n }\n }\n\n return [cloned1, 
cloned2]\n}\n"],"names":["noop","registry","globalThis","FinalizationRegistry","weakRef","stream","deref","locked","cancel","then","cloneResponse","original","body","body1","body2","tee","cloned1","Response","status","statusText","headers","Object","defineProperty","value","url","configurable","enumerable","writable","cloned2","register","WeakRef"],"mappings":"AAAA,MAAMA,OAAO,KAAO;AAEpB,IAAIC;AAEJ,IAAIC,WAAWC,oBAAoB,EAAE;IACnCF,WAAW,IAAIE,qBAAqB,CAACC;QACnC,MAAMC,SAASD,QAAQE,KAAK;QAC5B,IAAID,UAAU,CAACA,OAAOE,MAAM,EAAE;YAC5BF,OAAOG,MAAM,CAAC,8CAA8CC,IAAI,CAACT;QACnE;IACF;AACF;AAEA;;;;;;;;;;;CAWC,GACD,OAAO,SAASU,cAAcC,QAAkB;IAC9C,6EAA6E;IAC7E,gCAAgC;IAChC,IAAI,CAACA,SAASC,IAAI,EAAE;QAClB,OAAO;YAACD;YAAUA;SAAS;IAC7B;IAEA,MAAM,CAACE,OAAOC,MAAM,GAAGH,SAASC,IAAI,CAACG,GAAG;IAExC,MAAMC,UAAU,IAAIC,SAASJ,OAAO;QAClCK,QAAQP,SAASO,MAAM;QACvBC,YAAYR,SAASQ,UAAU;QAC/BC,SAAST,SAASS,OAAO;IAC3B;IAEAC,OAAOC,cAAc,CAACN,SAAS,OAAO;QACpCO,OAAOZ,SAASa,GAAG;QACnB,wCAAwC;QACxCC,cAAc;QACdC,YAAY;QACZC,UAAU;IACZ;IAEA,MAAMC,UAAU,IAAIX,SAASH,OAAO;QAClCI,QAAQP,SAASO,MAAM;QACvBC,YAAYR,SAASQ,UAAU;QAC/BC,SAAST,SAASS,OAAO;IAC3B;IAEAC,OAAOC,cAAc,CAACM,SAAS,OAAO;QACpCL,OAAOZ,SAASa,GAAG;QACnB,wCAAwC;QACxCC,cAAc;QACdC,YAAY;QACZC,UAAU;IACZ;IAEA,yEAAyE;IACzE,iEAAiE;IACjE,yEAAyE;IACzE,EAAE;IACF,wEAAwE;IACxE,4EAA4E;IAC5E,2BAA2B;IAC3B,sEAAsE;IACtE,IAAI1B,UAAU;QACZ,IAAIe,QAAQJ,IAAI,EAAE;YAChBX,SAAS4B,QAAQ,CAACb,SAAS,IAAIc,QAAQd,QAAQJ,IAAI;QACrD;QAEA,IAAIgB,QAAQhB,IAAI,EAAE;YAChBX,SAAS4B,QAAQ,CAACD,SAAS,IAAIE,QAAQF,QAAQhB,IAAI;QACrD;IACF;IAEA,OAAO;QAACI;QAASY;KAAQ;AAC3B","ignoreList":[0]}
+55
@@ -0,0 +1,55 @@
const privateCpuProfileName = process.env.__NEXT_PRIVATE_CPU_PROFILE;
const isCpuProfileEnabled = process.env.NEXT_CPU_PROF || privateCpuProfileName;
const cpuProfileDir = process.env.NEXT_CPU_PROF_DIR;
let session = null;
let profileSaved = false;
if (isCpuProfileEnabled) {
    const { Session } = require('inspector');
    session = new Session();
    session.connect();
    session.post('Profiler.enable');
    session.post('Profiler.start');
    process.on('exit', ()=>{
        saveCpuProfile();
    });
}
/**
 * Save the CPU profile to disk.
 *
 * This is synchronous despite the callback-based API because inspector's
 * session.post() executes its callback synchronously when connected to
 * the same process (via session.connect()).
 */ export function saveCpuProfile() {
    if (!session || profileSaved || !isCpuProfileEnabled) {
        return;
    }
    profileSaved = true;
    const fs = require('fs');
    const path = require('path');
    session.post('Profiler.stop', (error, param)=>{
        if (error) {
            console.error('Cannot generate CPU profiling:', error);
            return;
        }
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
        const baseName = privateCpuProfileName || 'cpu-profile';
        const filename = `${baseName}-${timestamp}.cpuprofile`;
        let outputPath;
        if (cpuProfileDir) {
            if (!fs.existsSync(cpuProfileDir)) {
                fs.mkdirSync(cpuProfileDir, {
                    recursive: true
                });
            }
            outputPath = path.join(cpuProfileDir, filename);
        } else {
            outputPath = `./${filename}`;
        }
        fs.writeFileSync(outputPath, JSON.stringify(param.profile));
        const { green } = require('../../lib/picocolors');
        console.log(`\n${green('CPU profile saved:')} ${outputPath}`);
        console.log('Open in Chrome DevTools → Performance tab → Load profile');
    });
}

//# sourceMappingURL=cpu-profile.js.map
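How this module is driven in practice, as far as the code shows. The env vars and the filename format come from the module itself; the exact command line is an assumption.

// Assumed invocation:
//   NEXT_CPU_PROF=1 NEXT_CPU_PROF_DIR=./profiles node server.js
// On process exit, saveCpuProfile() writes e.g.:
//   ./profiles/cpu-profile-2026-02-03T09-15-42.cpuprofile
// (new Date().toISOString() with ':' and '.' replaced by '-', truncated to 19 chars)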
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/cpu-profile.ts"],"sourcesContent":["const privateCpuProfileName = process.env.__NEXT_PRIVATE_CPU_PROFILE\nconst isCpuProfileEnabled = process.env.NEXT_CPU_PROF || privateCpuProfileName\nconst cpuProfileDir = process.env.NEXT_CPU_PROF_DIR\n\nlet session: import('inspector').Session | null = null\nlet profileSaved = false\n\nif (isCpuProfileEnabled) {\n const { Session } = require('inspector') as typeof import('inspector')\n\n session = new Session()\n session.connect()\n\n session.post('Profiler.enable')\n session.post('Profiler.start')\n\n process.on('exit', () => {\n saveCpuProfile()\n })\n}\n\n/**\n * Save the CPU profile to disk.\n *\n * This is synchronous despite the callback-based API because inspector's\n * session.post() executes its callback synchronously when connected to\n * the same process (via session.connect()).\n */\nexport function saveCpuProfile(): void {\n if (!session || profileSaved || !isCpuProfileEnabled) {\n return\n }\n profileSaved = true\n\n const fs = require('fs') as typeof import('fs')\n const path = require('path') as typeof import('path')\n\n session!.post('Profiler.stop', (error, param) => {\n if (error) {\n console.error('Cannot generate CPU profiling:', error)\n return\n }\n\n const timestamp = new Date()\n .toISOString()\n .replace(/[:.]/g, '-')\n .slice(0, 19)\n const baseName = privateCpuProfileName || 'cpu-profile'\n const filename = `${baseName}-${timestamp}.cpuprofile`\n\n let outputPath: string\n if (cpuProfileDir) {\n if (!fs.existsSync(cpuProfileDir)) {\n fs.mkdirSync(cpuProfileDir, { recursive: true })\n }\n outputPath = path.join(cpuProfileDir, filename)\n } else {\n outputPath = `./${filename}`\n }\n\n fs.writeFileSync(outputPath, JSON.stringify(param.profile))\n const { green } =\n require('../../lib/picocolors') as typeof import('../../lib/picocolors')\n console.log(`\\n${green('CPU profile saved:')} ${outputPath}`)\n console.log('Open in Chrome DevTools → Performance tab → Load profile')\n 
})\n}\n"],"names":["privateCpuProfileName","process","env","__NEXT_PRIVATE_CPU_PROFILE","isCpuProfileEnabled","NEXT_CPU_PROF","cpuProfileDir","NEXT_CPU_PROF_DIR","session","profileSaved","Session","require","connect","post","on","saveCpuProfile","fs","path","error","param","console","timestamp","Date","toISOString","replace","slice","baseName","filename","outputPath","existsSync","mkdirSync","recursive","join","writeFileSync","JSON","stringify","profile","green","log"],"mappings":"AAAA,MAAMA,wBAAwBC,QAAQC,GAAG,CAACC,0BAA0B;AACpE,MAAMC,sBAAsBH,QAAQC,GAAG,CAACG,aAAa,IAAIL;AACzD,MAAMM,gBAAgBL,QAAQC,GAAG,CAACK,iBAAiB;AAEnD,IAAIC,UAA8C;AAClD,IAAIC,eAAe;AAEnB,IAAIL,qBAAqB;IACvB,MAAM,EAAEM,OAAO,EAAE,GAAGC,QAAQ;IAE5BH,UAAU,IAAIE;IACdF,QAAQI,OAAO;IAEfJ,QAAQK,IAAI,CAAC;IACbL,QAAQK,IAAI,CAAC;IAEbZ,QAAQa,EAAE,CAAC,QAAQ;QACjBC;IACF;AACF;AAEA;;;;;;CAMC,GACD,OAAO,SAASA;IACd,IAAI,CAACP,WAAWC,gBAAgB,CAACL,qBAAqB;QACpD;IACF;IACAK,eAAe;IAEf,MAAMO,KAAKL,QAAQ;IACnB,MAAMM,OAAON,QAAQ;IAErBH,QAASK,IAAI,CAAC,iBAAiB,CAACK,OAAOC;QACrC,IAAID,OAAO;YACTE,QAAQF,KAAK,CAAC,kCAAkCA;YAChD;QACF;QAEA,MAAMG,YAAY,IAAIC,OACnBC,WAAW,GACXC,OAAO,CAAC,SAAS,KACjBC,KAAK,CAAC,GAAG;QACZ,MAAMC,WAAW1B,yBAAyB;QAC1C,MAAM2B,WAAW,GAAGD,SAAS,CAAC,EAAEL,UAAU,WAAW,CAAC;QAEtD,IAAIO;QACJ,IAAItB,eAAe;YACjB,IAAI,CAACU,GAAGa,UAAU,CAACvB,gBAAgB;gBACjCU,GAAGc,SAAS,CAACxB,eAAe;oBAAEyB,WAAW;gBAAK;YAChD;YACAH,aAAaX,KAAKe,IAAI,CAAC1B,eAAeqB;QACxC,OAAO;YACLC,aAAa,CAAC,EAAE,EAAED,UAAU;QAC9B;QAEAX,GAAGiB,aAAa,CAACL,YAAYM,KAAKC,SAAS,CAAChB,MAAMiB,OAAO;QACzD,MAAM,EAAEC,KAAK,EAAE,GACb1B,QAAQ;QACVS,QAAQkB,GAAG,CAAC,CAAC,EAAE,EAAED,MAAM,sBAAsB,CAAC,EAAET,YAAY;QAC5DR,QAAQkB,GAAG,CAAC;IACd;AACF","ignoreList":[0]}
+16
@@ -0,0 +1,16 @@
/**
 * Decodes a query path parameter.
 *
 * @param value - The value to decode.
 * @returns The decoded value.
 */ export function decodeQueryPathParameter(value) {
    // When deployed to Vercel, the value may be encoded, so this attempts to
    // decode it and returns the original value if it fails.
    try {
        return decodeURIComponent(value);
    } catch {
        return value;
    }
}

//# sourceMappingURL=decode-query-path-parameter.js.map
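Two quick examples of the behavior:

decodeQueryPathParameter('%5Bid%5D'); // => '[id]'
decodeQueryPathParameter('100%'); // => '100%' (decodeURIComponent throws, so the original is returned)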
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/decode-query-path-parameter.ts"],"sourcesContent":["/**\n * Decodes a query path parameter.\n *\n * @param value - The value to decode.\n * @returns The decoded value.\n */\nexport function decodeQueryPathParameter(value: string) {\n // When deployed to Vercel, the value may be encoded, so this attempts to\n // decode it and returns the original value if it fails.\n try {\n return decodeURIComponent(value)\n } catch {\n return value\n }\n}\n"],"names":["decodeQueryPathParameter","value","decodeURIComponent"],"mappings":"AAAA;;;;;CAKC,GACD,OAAO,SAASA,yBAAyBC,KAAa;IACpD,yEAAyE;IACzE,wDAAwD;IACxD,IAAI;QACF,OAAOC,mBAAmBD;IAC5B,EAAE,OAAM;QACN,OAAOA;IACT;AACF","ignoreList":[0]}
+112
@@ -0,0 +1,112 @@
/**
 * Based on https://github.com/facebook/react/blob/d4e78c42a94be027b4dc7ed2659a5fddfbf9bd4e/packages/react/src/ReactFetch.js
 */ import * as React from 'react';
import { cloneResponse } from './clone-response';
import { InvariantError } from '../../shared/lib/invariant-error';
const simpleCacheKey = '["GET",[],null,"follow",null,null,null,null]'; // generateCacheKey(new Request('https://blank'));
// Headers that should not affect deduplication:
// traceparent and tracestate are used for distributed tracing and should not affect cache keys
const headersToExcludeInCacheKey = new Set([
    'traceparent',
    'tracestate'
]);
function generateCacheKey(request) {
    // We pick the fields that go into the key used to dedupe requests.
    // We don't include the `cache` field, because we end up using whatever
    // caching resulted from the first request.
    // Notably we currently don't consider non-standard (or future) options.
    // This might not be safe. TODO: warn for non-standard extensions differing.
    // IF YOU CHANGE THIS UPDATE THE simpleCacheKey ABOVE.
    const filteredHeaders = Array.from(request.headers.entries()).filter(([key])=>!headersToExcludeInCacheKey.has(key.toLowerCase()));
    return JSON.stringify([
        request.method,
        filteredHeaders,
        request.mode,
        request.redirect,
        request.credentials,
        request.referrer,
        request.referrerPolicy,
        request.integrity
    ]);
}
export function createDedupeFetch(originalFetch) {
    const getCacheEntries = React.cache(// eslint-disable-next-line @typescript-eslint/no-unused-vars -- url is the cache key
    (url)=>[]);
    return function dedupeFetch(resource, options) {
        if (options && options.signal) {
            // If we're passed a signal, then we assume that
            // someone else controls the lifetime of this object and opts out of
            // caching. It's effectively the opt-out mechanism.
            // Ideally we should be able to check this on the Request but
            // it always gets initialized with its own signal so we don't
            // know if it's supposed to override - unless we also override the
            // Request constructor.
            return originalFetch(resource, options);
        }
        // Normalize the Request
        let url;
        let cacheKey;
        if (typeof resource === 'string' && !options) {
            // Fast path.
            cacheKey = simpleCacheKey;
            url = resource;
        } else {
            // Normalize the request.
            // If resource is not a string or a URL (it's an instance of Request),
            // then do not instantiate a new Request but instead
            // reuse the request as to not disturb the body in the event it's a ReadableStream.
            const request = typeof resource === 'string' || resource instanceof URL ? new Request(resource, options) : resource;
            if (request.method !== 'GET' && request.method !== 'HEAD' || request.keepalive) {
                // We currently don't dedupe requests that might have side-effects. Those
                // have to be explicitly cached. We assume that the request doesn't have a
                // body if it's GET or HEAD.
                // keepalive gets treated the same as if you passed a custom cache signal.
                return originalFetch(resource, options);
            }
            cacheKey = generateCacheKey(request);
            url = request.url;
        }
        const cacheEntries = getCacheEntries(url);
        for(let i = 0, j = cacheEntries.length; i < j; i += 1){
            const [key, promise] = cacheEntries[i];
            if (key === cacheKey) {
                return promise.then(()=>{
                    const response = cacheEntries[i][2];
                    if (!response) throw Object.defineProperty(new InvariantError('No cached response'), "__NEXT_ERROR_CODE", {
                        value: "E579",
                        enumerable: false,
                        configurable: true
                    });
                    // We're cloning the response using this utility because there exists
                    // a bug in the undici library around response cloning. See the
                    // following pull request for more details:
                    // https://github.com/vercel/next.js/pull/73274
                    const [cloned1, cloned2] = cloneResponse(response);
                    cacheEntries[i][2] = cloned2;
                    return cloned1;
                });
            }
        }
        // We pass the original arguments here in case normalizing the Request
        // doesn't include all the options in this environment.
        const promise = originalFetch(resource, options);
        const entry = [
            cacheKey,
            promise,
            null
        ];
        cacheEntries.push(entry);
        return promise.then((response)=>{
            // We're cloning the response using this utility because there exists
            // a bug in the undici library around response cloning. See the
            // following pull request for more details:
            // https://github.com/vercel/next.js/pull/73274
            const [cloned1, cloned2] = cloneResponse(response);
            entry[2] = cloned2;
            return cloned1;
        });
    };
}

//# sourceMappingURL=dedupe-fetch.js.map
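A sketch of the intended use, assuming it runs inside a React request scope (React.cache means the per-URL entry list only lives for one server render). The API URL is a placeholder.

const dedupedFetch = createDedupeFetch(fetch);
// Within one request scope these share a single network request; each caller
// receives an independent clone of the response.
const [resA, resB] = await Promise.all([
    dedupedFetch('https://api.example.com/data'),
    dedupedFetch('https://api.example.com/data')
]);
// Opting out: an explicit signal (likewise POST or keepalive) bypasses the dedupe.
const controller = new AbortController();
await dedupedFetch('https://api.example.com/data', { signal: controller.signal });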
+1
File diff suppressed because one or more lines are too long
+88
@@ -0,0 +1,88 @@
import { LRUCache } from './lru-cache';
import { createRequestResponseMocks } from './mock-request';
import { HMR_MESSAGE_SENT_TO_BROWSER } from '../dev/hot-reloader-types';
/**
 * The DevBundlerService provides an interface to perform tasks with the
 * bundler while in development.
 */ export class DevBundlerService {
    constructor(bundler, handler){
        this.bundler = bundler;
        this.handler = handler;
        this.ensurePage = async (definition)=>{
            // TODO: remove after ensure is pulled out of server
            return await this.bundler.hotReloader.ensurePage(definition);
        };
        this.logErrorWithOriginalStack = this.bundler.logErrorWithOriginalStack.bind(this.bundler);
        this.appIsrManifestInner = new LRUCache(8000, function length() {
            return 16;
        });
        const { hotReloader } = bundler;
        this.close = hotReloader.close.bind(hotReloader);
        this.setCacheStatus = hotReloader.setCacheStatus.bind(hotReloader);
        this.setReactDebugChannel = hotReloader.setReactDebugChannel.bind(hotReloader);
        this.sendErrorsToBrowser = hotReloader.sendErrorsToBrowser.bind(hotReloader);
    }
    async getFallbackErrorComponents(url) {
        await this.bundler.hotReloader.buildFallbackError();
        // Build the error page to ensure the fallback is built too.
        // TODO: See if this can be moved into hotReloader or removed.
        await this.bundler.hotReloader.ensurePage({
            page: '/_error',
            clientOnly: false,
            definition: undefined,
            url
        });
    }
    async getCompilationError(page) {
        const errors = await this.bundler.hotReloader.getCompilationErrors(page);
        if (!errors) return;
        // Return the very first error we found.
        return errors[0];
    }
    async revalidate({ urlPath, headers, opts: revalidateOpts }) {
        const mocked = createRequestResponseMocks({
            url: urlPath,
            headers
        });
        await this.handler(mocked.req, mocked.res);
        await mocked.res.hasStreamed;
        if (mocked.res.getHeader('x-nextjs-cache') !== 'REVALIDATED' && mocked.res.statusCode !== 200 && !(mocked.res.statusCode === 404 && revalidateOpts.unstable_onlyGenerated)) {
            throw Object.defineProperty(new Error(`Invalid response ${mocked.res.statusCode}`), "__NEXT_ERROR_CODE", {
                value: "E175",
                enumerable: false,
                configurable: true
            });
        }
        return {};
    }
    get appIsrManifest() {
        const serializableManifest = {};
        for (const [key, value] of this.appIsrManifestInner){
            serializableManifest[key] = value;
        }
        return serializableManifest;
    }
    setIsrStatus(key, value) {
        var // Only send the ISR manifest to legacy clients, i.e. Pages Router clients,
        // or App Router clients that have Cache Components disabled. The ISR
        // manifest is only used to inform the static indicator, which currently
        // does not provide useful information if Cache Components is enabled due to
        // its binary nature (i.e. it does not support showing info for partially
        // static pages).
        _this_bundler_hotReloader, _this_bundler;
        if (value === undefined) {
            this.appIsrManifestInner.remove(key);
        } else {
            this.appIsrManifestInner.set(key, value);
        }
        (_this_bundler = this.bundler) == null ? void 0 : (_this_bundler_hotReloader = _this_bundler.hotReloader) == null ? void 0 : _this_bundler_hotReloader.sendToLegacyClients({
            type: HMR_MESSAGE_SENT_TO_BROWSER.ISR_MANIFEST,
            data: this.appIsrManifest
        });
    }
    sendHmrMessage(message) {
        this.bundler.hotReloader.send(message);
    }
}

//# sourceMappingURL=dev-bundler-service.js.map
+1
File diff suppressed because one or more lines are too long
+44
@@ -0,0 +1,44 @@
import { promises } from 'fs';
import { LRUCache } from './lru-cache';
/**
 * Module-level LRU singleton for disk cache eviction.
 * Initialized once on first `set()`, shared across all consumers.
 * Once resolved, the promise stays resolved — subsequent calls just await the cached result.
 */ let _diskLRUPromise = null;
/**
 * Initialize or return the module-level LRU for disk cache eviction.
 * Concurrent calls are deduplicated via the shared promise.
 *
 * @param cacheDir - The directory where cached files are stored
 * @param maxDiskSize - Maximum disk cache size in bytes
 * @param readEntries - Callback to scan existing cache entries (format-agnostic)
 */ export async function getOrInitDiskLRU(cacheDir, maxDiskSize, readEntries, evictEntry) {
    if (!_diskLRUPromise) {
        _diskLRUPromise = (async ()=>{
            let maxSize = maxDiskSize;
            if (typeof maxSize === 'undefined') {
                // Ensure cacheDir exists before checking disk space
                await promises.mkdir(cacheDir, {
                    recursive: true
                });
                // Since config was not provided, default to 50% of available disk space
                const { bavail, bsize } = await promises.statfs(cacheDir);
                maxSize = Math.floor(bavail * bsize / 2);
            }
            const lru = new LRUCache(maxSize, (size)=>size, (cacheKey)=>evictEntry(cacheDir, cacheKey));
            const entries = await readEntries(cacheDir);
            for (const entry of entries){
                lru.set(entry.key, entry.size);
            }
            return lru;
        })();
    }
    return _diskLRUPromise;
}
/**
 * Reset the module-level LRU singleton. Exported for testing only.
 */ export function resetDiskLRU() {
    _diskLRUPromise = null;
}

//# sourceMappingURL=disk-lru-cache.external.js.map
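A usage sketch with stub callbacks; readEntries and evictEntry are invented stand-ins for whatever format-specific scanning and deletion the caller supplies.

const lru = await getOrInitDiskLRU(
    '/tmp/next-cache', // hypothetical cacheDir
    undefined, // no configured max, so it defaults to 50% of available disk space
    async (cacheDir)=>[], // stub: scan entries -> [{ key, size, expireAt }]
    async (cacheDir, cacheKey)=>{
        // stub: delete the evicted file for cacheKey
    }
);
lru.set('entry-a', 4096); // track a 4 KiB file; overflow triggers evictEntry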
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/disk-lru-cache.external.ts"],"sourcesContent":["import { promises } from 'fs'\nimport { LRUCache } from './lru-cache'\n\n/**\n * Module-level LRU singleton for disk cache eviction.\n * Initialized once on first `set()`, shared across all consumers.\n * Once resolved, the promise stays resolved — subsequent calls just await the cached result.\n */\nlet _diskLRUPromise: Promise<LRUCache<number>> | null = null\n\n/**\n * Initialize or return the module-level LRU for disk cache eviction.\n * Concurrent calls are deduplicated via the shared promise.\n *\n * @param cacheDir - The directory where cached files are stored\n * @param maxDiskSize - Maximum disk cache size in bytes\n * @param readEntries - Callback to scan existing cache entries (format-agnostic)\n */\nexport async function getOrInitDiskLRU(\n cacheDir: string,\n maxDiskSize: number | undefined,\n readEntries: (\n cacheDir: string\n ) => Promise<Array<{ key: string; size: number; expireAt: number }>>,\n evictEntry: (cacheDir: string, cacheKey: string) => Promise<void>\n): Promise<LRUCache<number>> {\n if (!_diskLRUPromise) {\n _diskLRUPromise = (async () => {\n let maxSize = maxDiskSize\n if (typeof maxSize === 'undefined') {\n // Ensure cacheDir exists before checking disk space\n await promises.mkdir(cacheDir, { recursive: true })\n // Since config was not provided, default to 50% of available disk space\n const { bavail, bsize } = await promises.statfs(cacheDir)\n maxSize = Math.floor((bavail * bsize) / 2)\n }\n\n const lru = new LRUCache<number>(\n maxSize,\n (size) => size,\n (cacheKey) => evictEntry(cacheDir, cacheKey)\n )\n\n const entries = await readEntries(cacheDir)\n for (const entry of entries) {\n lru.set(entry.key, entry.size)\n }\n\n return lru\n })()\n }\n return _diskLRUPromise\n}\n\n/**\n * Reset the module-level LRU singleton. Exported for testing only.\n */\nexport function resetDiskLRU(): void {\n _diskLRUPromise = null\n}\n"],"names":["promises","LRUCache","_diskLRUPromise","getOrInitDiskLRU","cacheDir","maxDiskSize","readEntries","evictEntry","maxSize","mkdir","recursive","bavail","bsize","statfs","Math","floor","lru","size","cacheKey","entries","entry","set","key","resetDiskLRU"],"mappings":"AAAA,SAASA,QAAQ,QAAQ,KAAI;AAC7B,SAASC,QAAQ,QAAQ,cAAa;AAEtC;;;;CAIC,GACD,IAAIC,kBAAoD;AAExD;;;;;;;CAOC,GACD,OAAO,eAAeC,iBACpBC,QAAgB,EAChBC,WAA+B,EAC/BC,WAEoE,EACpEC,UAAiE;IAEjE,IAAI,CAACL,iBAAiB;QACpBA,kBAAkB,AAAC,CAAA;YACjB,IAAIM,UAAUH;YACd,IAAI,OAAOG,YAAY,aAAa;gBAClC,oDAAoD;gBACpD,MAAMR,SAASS,KAAK,CAACL,UAAU;oBAAEM,WAAW;gBAAK;gBACjD,wEAAwE;gBACxE,MAAM,EAAEC,MAAM,EAAEC,KAAK,EAAE,GAAG,MAAMZ,SAASa,MAAM,CAACT;gBAChDI,UAAUM,KAAKC,KAAK,CAAC,AAACJ,SAASC,QAAS;YAC1C;YAEA,MAAMI,MAAM,IAAIf,SACdO,SACA,CAACS,OAASA,MACV,CAACC,WAAaX,WAAWH,UAAUc;YAGrC,MAAMC,UAAU,MAAMb,YAAYF;YAClC,KAAK,MAAMgB,SAASD,QAAS;gBAC3BH,IAAIK,GAAG,CAACD,MAAME,GAAG,EAAEF,MAAMH,IAAI;YAC/B;YAEA,OAAOD;QACT,CAAA;IACF;IACA,OAAOd;AACT;AAEA;;CAEC,GACD,OAAO,SAASqB;IACdrB,kBAAkB;AACpB","ignoreList":[0]}
+34
@@ -0,0 +1,34 @@
|
||||
/**
 * FNV-1a Hash implementation
 * @author Travis Webb (tjwebb) <me@traviswebb.com>
 *
 * Ported from https://github.com/tjwebb/fnv-plus/blob/master/index.js
 *
 * Simplified, optimized and modified for 52 bit, which provides a larger hash space
 * while still making use of Javascript's 53-bit integer space.
 */ export const fnv1a52 = (str)=>{
    const len = str.length;
    let i = 0, t0 = 0, v0 = 0x2325, t1 = 0, v1 = 0x8422, t2 = 0, v2 = 0x9ce4, t3 = 0, v3 = 0xcbf2;
    while(i < len){
        v0 ^= str.charCodeAt(i++);
        t0 = v0 * 435;
        t1 = v1 * 435;
        t2 = v2 * 435;
        t3 = v3 * 435;
        t2 += v0 << 8;
        t3 += v1 << 8;
        t1 += t0 >>> 16;
        v0 = t0 & 65535;
        t2 += t1 >>> 16;
        v1 = t1 & 65535;
        v3 = t3 + (t2 >>> 16) & 65535;
        v2 = t2 & 65535;
    }
    return (v3 & 15) * 281474976710656 + v2 * 4294967296 + v1 * 65536 + (v0 ^ v3 >> 4);
};
export const generateETag = (payload, weak = false)=>{
    const prefix = weak ? 'W/"' : '"';
    return prefix + fnv1a52(payload).toString(36) + payload.length.toString(36) + '"';
};

//# sourceMappingURL=etag.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/etag.ts"],"sourcesContent":["/**\n * FNV-1a Hash implementation\n * @author Travis Webb (tjwebb) <me@traviswebb.com>\n *\n * Ported from https://github.com/tjwebb/fnv-plus/blob/master/index.js\n *\n * Simplified, optimized and add modified for 52 bit, which provides a larger hash space\n * and still making use of Javascript's 53-bit integer space.\n */\nexport const fnv1a52 = (str: string) => {\n const len = str.length\n let i = 0,\n t0 = 0,\n v0 = 0x2325,\n t1 = 0,\n v1 = 0x8422,\n t2 = 0,\n v2 = 0x9ce4,\n t3 = 0,\n v3 = 0xcbf2\n\n while (i < len) {\n v0 ^= str.charCodeAt(i++)\n t0 = v0 * 435\n t1 = v1 * 435\n t2 = v2 * 435\n t3 = v3 * 435\n t2 += v0 << 8\n t3 += v1 << 8\n t1 += t0 >>> 16\n v0 = t0 & 65535\n t2 += t1 >>> 16\n v1 = t1 & 65535\n v3 = (t3 + (t2 >>> 16)) & 65535\n v2 = t2 & 65535\n }\n\n return (\n (v3 & 15) * 281474976710656 +\n v2 * 4294967296 +\n v1 * 65536 +\n (v0 ^ (v3 >> 4))\n )\n}\n\nexport const generateETag = (payload: string, weak = false) => {\n const prefix = weak ? 'W/\"' : '\"'\n return (\n prefix + fnv1a52(payload).toString(36) + payload.length.toString(36) + '\"'\n )\n}\n"],"names":["fnv1a52","str","len","length","i","t0","v0","t1","v1","t2","v2","t3","v3","charCodeAt","generateETag","payload","weak","prefix","toString"],"mappings":"AAAA;;;;;;;;CAQC,GACD,OAAO,MAAMA,UAAU,CAACC;IACtB,MAAMC,MAAMD,IAAIE,MAAM;IACtB,IAAIC,IAAI,GACNC,KAAK,GACLC,KAAK,QACLC,KAAK,GACLC,KAAK,QACLC,KAAK,GACLC,KAAK,QACLC,KAAK,GACLC,KAAK;IAEP,MAAOR,IAAIF,IAAK;QACdI,MAAML,IAAIY,UAAU,CAACT;QACrBC,KAAKC,KAAK;QACVC,KAAKC,KAAK;QACVC,KAAKC,KAAK;QACVC,KAAKC,KAAK;QACVH,MAAMH,MAAM;QACZK,MAAMH,MAAM;QACZD,MAAMF,OAAO;QACbC,KAAKD,KAAK;QACVI,MAAMF,OAAO;QACbC,KAAKD,KAAK;QACVK,KAAK,AAACD,KAAMF,CAAAA,OAAO,EAAC,IAAM;QAC1BC,KAAKD,KAAK;IACZ;IAEA,OACE,AAACG,CAAAA,KAAK,EAAC,IAAK,kBACZF,KAAK,aACLF,KAAK,QACJF,CAAAA,KAAMM,MAAM,CAAC;AAElB,EAAC;AAED,OAAO,MAAME,eAAe,CAACC,SAAiBC,OAAO,KAAK;IACxD,MAAMC,SAASD,OAAO,QAAQ;IAC9B,OACEC,SAASjB,QAAQe,SAASG,QAAQ,CAAC,MAAMH,QAAQZ,MAAM,CAACe,QAAQ,CAAC,MAAM;AAE3E,EAAC","ignoreList":[0]}
+38
@@ -0,0 +1,38 @@
import { join } from 'node:path';
import { writeFile } from 'node:fs/promises';
export async function createEnvDefinitions({ distDir, loadedEnvFiles }) {
    const envLines = [];
    const seenKeys = new Set();
    // env files are in order of priority
    for (const { path, env } of loadedEnvFiles){
        for(const key in env){
            if (!seenKeys.has(key)) {
                envLines.push(`      /** Loaded from \`${path}\` */`);
                envLines.push(`      ${key}?: string`);
                seenKeys.add(key);
            }
        }
    }
    const envStr = envLines.join('\n');
    const definitionStr = `// Type definitions for Next.js environment variables
declare global {
  namespace NodeJS {
    interface ProcessEnv {
${envStr}
    }
  }
}
export {}`;
    if (process.env.NODE_ENV === 'test') {
        return definitionStr;
    }
    try {
        // we expect the types directory to already exist
        const envDtsPath = join(distDir, 'types', 'env.d.ts');
        await writeFile(envDtsPath, definitionStr, 'utf-8');
    } catch (e) {
        console.error('Failed to write env.d.ts:', e);
    }
}

//# sourceMappingURL=create-env-definitions.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/experimental/create-env-definitions.ts"],"sourcesContent":["import type { LoadedEnvFiles } from '@next/env'\nimport { join } from 'node:path'\nimport { writeFile } from 'node:fs/promises'\n\nexport async function createEnvDefinitions({\n distDir,\n loadedEnvFiles,\n}: {\n distDir: string\n loadedEnvFiles: LoadedEnvFiles\n}) {\n const envLines = []\n const seenKeys = new Set()\n // env files are in order of priority\n for (const { path, env } of loadedEnvFiles) {\n for (const key in env) {\n if (!seenKeys.has(key)) {\n envLines.push(` /** Loaded from \\`${path}\\` */`)\n envLines.push(` ${key}?: string`)\n seenKeys.add(key)\n }\n }\n }\n const envStr = envLines.join('\\n')\n\n const definitionStr = `// Type definitions for Next.js environment variables\ndeclare global {\n namespace NodeJS {\n interface ProcessEnv {\n${envStr}\n }\n }\n}\nexport {}`\n\n if (process.env.NODE_ENV === 'test') {\n return definitionStr\n }\n\n try {\n // we expect the types directory to already exist\n const envDtsPath = join(distDir, 'types', 'env.d.ts')\n await writeFile(envDtsPath, definitionStr, 'utf-8')\n } catch (e) {\n console.error('Failed to write env.d.ts:', e)\n }\n}\n"],"names":["join","writeFile","createEnvDefinitions","distDir","loadedEnvFiles","envLines","seenKeys","Set","path","env","key","has","push","add","envStr","definitionStr","process","NODE_ENV","envDtsPath","e","console","error"],"mappings":"AACA,SAASA,IAAI,QAAQ,YAAW;AAChC,SAASC,SAAS,QAAQ,mBAAkB;AAE5C,OAAO,eAAeC,qBAAqB,EACzCC,OAAO,EACPC,cAAc,EAIf;IACC,MAAMC,WAAW,EAAE;IACnB,MAAMC,WAAW,IAAIC;IACrB,qCAAqC;IACrC,KAAK,MAAM,EAAEC,IAAI,EAAEC,GAAG,EAAE,IAAIL,eAAgB;QAC1C,IAAK,MAAMM,OAAOD,IAAK;YACrB,IAAI,CAACH,SAASK,GAAG,CAACD,MAAM;gBACtBL,SAASO,IAAI,CAAC,CAAC,wBAAwB,EAAEJ,KAAK,KAAK,CAAC;gBACpDH,SAASO,IAAI,CAAC,CAAC,MAAM,EAAEF,IAAI,SAAS,CAAC;gBACrCJ,SAASO,GAAG,CAACH;YACf;QACF;IACF;IACA,MAAMI,SAAST,SAASL,IAAI,CAAC;IAE7B,MAAMe,gBAAgB,CAAC;;;;AAIzB,EAAED,OAAO;;;;SAIA,CAAC;IAER,IAAIE,QAAQP,GAAG,CAACQ,QAAQ,KAAK,QAAQ;QACnC,OAAOF;IACT;IAEA,IAAI;QACF,iDAAiD;QACjD,MAAMG,aAAalB,KAAKG,SAAS,SAAS;QAC1C,MAAMF,UAAUiB,YAAYH,eAAe;IAC7C,EAAE,OAAOI,GAAG;QACVC,QAAQC,KAAK,CAAC,6BAA6BF;IAC7C;AACF","ignoreList":[0]}
+38
@@ -0,0 +1,38 @@
/**
 * If set to `incremental`, only those leaf pages that export
 * `experimental_ppr = true` will have partial prerendering enabled. Any
 * page that exports this value as `false` or does not export it at all
 * will not have partial prerendering enabled. If set to a boolean, the
 * options for `experimental_ppr` will be ignored.
 */ /**
 * Returns true if partial prerendering is enabled for the application. It does
 * not tell you if a given route has PPR enabled, as that requires analysis of
 * the route's configuration.
 *
 * @see {@link checkIsRoutePPREnabled} - for checking if a specific route has PPR enabled.
 */ export function checkIsAppPPREnabled(config) {
    // If the config is undefined, partial prerendering is disabled.
    if (typeof config === 'undefined') return false;
    // If the config is a boolean, use it directly.
    if (typeof config === 'boolean') return config;
    // If the config is a string, it must be 'incremental' to enable partial
    // prerendering.
    if (config === 'incremental') return true;
    return false;
}
/**
 * Returns true if partial prerendering is supported for the current page with
 * the provided app configuration. If the application doesn't have partial
 * prerendering enabled, this function will always return false. If you want to
 * check if the application has partial prerendering enabled, use
 * `checkIsAppPPREnabled` instead.
 *
 * @see {@link checkIsAppPPREnabled} for checking if the application has PPR enabled.
 */ export function checkIsRoutePPREnabled(config) {
    // If the config is undefined, partial prerendering is disabled.
    if (typeof config === 'undefined') return false;
    // If the config is a boolean, use it directly.
    if (typeof config === 'boolean') return config;
    return false;
}

//# sourceMappingURL=ppr.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/experimental/ppr.ts"],"sourcesContent":["/**\n * If set to `incremental`, only those leaf pages that export\n * `experimental_ppr = true` will have partial prerendering enabled. If any\n * page exports this value as `false` or does not export it at all will not\n * have partial prerendering enabled. If set to a boolean, the options for\n * `experimental_ppr` will be ignored.\n */\n\nexport type ExperimentalPPRConfig = boolean | 'incremental'\n\n/**\n * Returns true if partial prerendering is enabled for the application. It does\n * not tell you if a given route has PPR enabled, as that requires analysis of\n * the route's configuration.\n *\n * @see {@link checkIsRoutePPREnabled} - for checking if a specific route has PPR enabled.\n */\nexport function checkIsAppPPREnabled(\n config: ExperimentalPPRConfig | undefined\n): boolean {\n // If the config is undefined, partial prerendering is disabled.\n if (typeof config === 'undefined') return false\n\n // If the config is a boolean, use it directly.\n if (typeof config === 'boolean') return config\n\n // If the config is a string, it must be 'incremental' to enable partial\n // prerendering.\n if (config === 'incremental') return true\n\n return false\n}\n\n/**\n * Returns true if partial prerendering is supported for the current page with\n * the provided app configuration. If the application doesn't have partial\n * prerendering enabled, this function will always return false. If you want to\n * check if the application has partial prerendering enabled\n *\n * @see {@link checkIsAppPPREnabled} for checking if the application has PPR enabled.\n */\nexport function checkIsRoutePPREnabled(\n config: ExperimentalPPRConfig | undefined\n): boolean {\n // If the config is undefined, partial prerendering is disabled.\n if (typeof config === 'undefined') return false\n\n // If the config is a boolean, use it directly.\n if (typeof config === 'boolean') return config\n\n return false\n}\n"],"names":["checkIsAppPPREnabled","config","checkIsRoutePPREnabled"],"mappings":"AAAA;;;;;;CAMC,GAID;;;;;;CAMC,GACD,OAAO,SAASA,qBACdC,MAAyC;IAEzC,gEAAgE;IAChE,IAAI,OAAOA,WAAW,aAAa,OAAO;IAE1C,+CAA+C;IAC/C,IAAI,OAAOA,WAAW,WAAW,OAAOA;IAExC,wEAAwE;IACxE,gBAAgB;IAChB,IAAIA,WAAW,eAAe,OAAO;IAErC,OAAO;AACT;AAEA;;;;;;;CAOC,GACD,OAAO,SAASC,uBACdD,MAAyC;IAEzC,gEAAgE;IAChE,IAAI,OAAOA,WAAW,aAAa,OAAO;IAE1C,+CAA+C;IAC/C,IAAI,OAAOA,WAAW,WAAW,OAAOA;IAExC,OAAO;AACT","ignoreList":[0]}
+139
@@ -0,0 +1,139 @@
import { fileExists } from '../../lib/file-exists';
import { getPagePaths } from '../../shared/lib/page-path/get-page-paths';
import { nonNullable } from '../../lib/non-nullable';
import { join, sep, normalize } from 'path';
import { promises as fsPromises } from 'fs';
import { warn } from '../../build/output/log';
import { cyan } from '../../lib/picocolors';
import { isMetadataRouteFile } from '../../lib/metadata/is-metadata-route';
import { escapeStringRegexp } from '../../shared/lib/escape-regexp';
async function isTrueCasePagePath(pagePath, pagesDir) {
    const pageSegments = normalize(pagePath).split(sep).filter(Boolean);
    const segmentExistsPromises = pageSegments.map(async (segment, i)=>{
        const segmentParentDir = join(pagesDir, ...pageSegments.slice(0, i));
        const parentDirEntries = await fsPromises.readdir(segmentParentDir);
        return parentDirEntries.includes(segment);
    });
    return (await Promise.all(segmentExistsPromises)).every(Boolean);
}
/**
 * Finds a page file with the given parameters. If the page is duplicated with
 * multiple extensions it will warn and use the first match; otherwise it will
 * return the *relative* path to the page file, or null if it is not found.
 *
 * @param pagesDir Absolute path to the pages folder with trailing `/pages`.
 * @param normalizedPagePath The page normalized (it will be denormalized).
 * @param pageExtensions Array of page extensions.
 */ export async function findPageFile(pagesDir, normalizedPagePath, pageExtensions, isAppDir) {
    const pagePaths = getPagePaths(normalizedPagePath, pageExtensions, isAppDir);
    const [existingPath, ...others] = (await Promise.all(pagePaths.map(async (path)=>{
        const filePath = join(pagesDir, path);
        try {
            return await fileExists(filePath) ? path : null;
        } catch (err) {
            var _err_code;
            if (!(err == null ? void 0 : (_err_code = err.code) == null ? void 0 : _err_code.includes('ENOTDIR'))) throw err;
        }
        return null;
    }))).filter(nonNullable);
    if (!existingPath) {
        return null;
    }
    if (!await isTrueCasePagePath(existingPath, pagesDir)) {
        return null;
    }
    if (others.length > 0) {
        warn(`Duplicate page detected. ${cyan(join('pages', existingPath))} and ${cyan(join('pages', others[0]))} both resolve to ${cyan(normalizedPagePath)}.`);
    }
    return existingPath;
}
/**
 * createValidFileMatcher receives the configured page extensions and returns helpers to determine:
 * `isLayoutsLeafPage`: if a file is a valid page file or route file under the app directory
 * `isTrackedFiles`: if it's a tracked file for our file watcher
 */ export function createValidFileMatcher(pageExtensions, appDirPath) {
    // Helper to create extension regex pattern
    const extPattern = `(?:${pageExtensions.map((extension)=>escapeStringRegexp(extension)).join('|')})`;
    // Pattern factory for "leaf" files that can appear at start of path or after separator
    // e.g., 'page.tsx', '/path/page.tsx', '\\path\\route.js'
    const createLeafPattern = (fileNames)=>{
        const names = fileNames.length === 1 ? fileNames[0] : `(${fileNames.join('|')})`;
        return new RegExp(`(^${names}|[\\\\/]${names})\\.${extPattern}$`);
    };
    // Pattern factory for root-only files (no path separator allowed)
    const createRootOnlyPattern = (fileName)=>new RegExp(`^${fileName}\\.${extPattern}$`);
    // All file matching patterns
    const validExtensionFileRegex = new RegExp(`\\.${extPattern}$`);
    const leafOnlyPageFileRegex = createLeafPattern([
        'page',
        'route'
    ]);
    const leafOnlyRouteFileRegex = createLeafPattern([
        'route'
    ]);
    const leafOnlyLayoutFileRegex = createLeafPattern([
        'layout'
    ]);
    const leafOnlyDefaultFileRegex = createLeafPattern([
        'default'
    ]);
    const rootNotFoundFileRegex = createRootOnlyPattern('not-found');
    /** TODO-METADATA: support other metadata routes
     * regex for:
     *
     * /robots.txt|<ext>
     * /sitemap.xml|<ext>
     * /favicon.ico
     * /manifest.json|<ext>
     * <route>/icon.png|jpg|<ext>
     * <route>/apple-touch-icon.png|jpg|<ext>
     */ /**
     * Match the file if it's a metadata route file; static: if the file is a static metadata file.
     * It needs to be a file which doesn't match the custom metadata routes e.g. `app/robots.txt/route.js`
     */ function isMetadataFile(filePath) {
        const appDirRelativePath = appDirPath ? filePath.replace(appDirPath, '') : filePath;
        return isMetadataRouteFile(appDirRelativePath, pageExtensions, true);
    }
    // Determine if the file is a leaf node page file or route file under layouts,
    // 'page.<extension>' | 'route.<extension>'
    function isAppRouterPage(filePath) {
        return leafOnlyPageFileRegex.test(filePath) || isMetadataFile(filePath);
    }
    // Determine if the file is a leaf node route file under the app directory
    function isAppRouterRoute(filePath) {
        return leafOnlyRouteFileRegex.test(filePath);
    }
    function isAppLayoutPage(filePath) {
        return leafOnlyLayoutFileRegex.test(filePath);
    }
    function isAppDefaultPage(filePath) {
        return leafOnlyDefaultFileRegex.test(filePath);
    }
    function isPageFile(filePath) {
        return validExtensionFileRegex.test(filePath) || isMetadataFile(filePath);
    }
    function isRootNotFound(filePath) {
        if (!appDirPath) {
            return false;
        }
        if (!filePath.startsWith(appDirPath + sep)) {
            return false;
        }
        const rest = filePath.slice(appDirPath.length + 1);
        return rootNotFoundFileRegex.test(rest);
    }
    return {
        isPageFile,
        isAppRouterPage,
        isAppRouterRoute,
        isAppLayoutPage,
        isAppDefaultPage,
        isMetadataFile,
        isRootNotFound
    };
}

//# sourceMappingURL=find-page-file.js.map
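Usage sketch (illustrative, not part of the commit; paths and extensions are made up):

// Hypothetical caller of findPageFile and createValidFileMatcher.
import { findPageFile, createValidFileMatcher } from './find-page-file';

const pageExtensions = ['tsx', 'ts', 'jsx', 'js'];

// Resolve '/about' under a pages directory; yields e.g. 'about.tsx' or null.
const relativePath = await findPageFile('/project/pages', '/about', pageExtensions, false);

// Matcher helpers scoped to an app directory.
const matcher = createValidFileMatcher(pageExtensions, '/project/app');
console.log(matcher.isAppRouterPage('/project/app/blog/page.tsx')); // true
console.log(matcher.isAppRouterRoute('/project/app/api/route.ts')); // true
console.log(matcher.isRootNotFound('/project/app/not-found.tsx'));  // true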
+1
File diff suppressed because one or more lines are too long
+15
@@ -0,0 +1,15 @@
// The x-matched-path header can be decoded incorrectly, and it should only
// contain utf8 characters, so this fixes incorrectly encoded values
export function fixMojibake(input) {
    // Convert each character's char code to a byte
    const bytes = new Uint8Array(input.length);
    for(let i = 0; i < input.length; i++){
        bytes[i] = input.charCodeAt(i);
    }
    // Decode the bytes as proper UTF-8
    const decoder = new TextDecoder('utf-8');
    return decoder.decode(bytes);
}

//# sourceMappingURL=fix-mojibake.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/fix-mojibake.ts"],"sourcesContent":["// x-matched-path header can be decoded incorrectly\n// and should only be utf8 characters so this fixes\n// incorrectly encoded values\nexport function fixMojibake(input: string): string {\n // Convert each character's char code to a byte\n const bytes = new Uint8Array(input.length)\n for (let i = 0; i < input.length; i++) {\n bytes[i] = input.charCodeAt(i)\n }\n\n // Decode the bytes as proper UTF-8\n const decoder = new TextDecoder('utf-8')\n return decoder.decode(bytes)\n}\n"],"names":["fixMojibake","input","bytes","Uint8Array","length","i","charCodeAt","decoder","TextDecoder","decode"],"mappings":"AAAA,mDAAmD;AACnD,mDAAmD;AACnD,6BAA6B;AAC7B,OAAO,SAASA,YAAYC,KAAa;IACvC,+CAA+C;IAC/C,MAAMC,QAAQ,IAAIC,WAAWF,MAAMG,MAAM;IACzC,IAAK,IAAIC,IAAI,GAAGA,IAAIJ,MAAMG,MAAM,EAAEC,IAAK;QACrCH,KAAK,CAACG,EAAE,GAAGJ,MAAMK,UAAU,CAACD;IAC9B;IAEA,mCAAmC;IACnC,MAAME,UAAU,IAAIC,YAAY;IAChC,OAAOD,QAAQE,MAAM,CAACP;AACxB","ignoreList":[0]}
+11
@@ -0,0 +1,11 @@
import { isIPv6 } from './is-ipv6';
/**
 * Formats a hostname so that it is a valid host that can be fetched by wrapping
 * IPv6 hosts with brackets.
 * @param hostname
 * @returns
 */ export function formatHostname(hostname) {
    return isIPv6(hostname) ? `[${hostname}]` : hostname;
}

//# sourceMappingURL=format-hostname.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/format-hostname.ts"],"sourcesContent":["import { isIPv6 } from './is-ipv6'\n\n/**\n * Formats a hostname so that it is a valid host that can be fetched by wrapping\n * IPv6 hosts with brackets.\n * @param hostname\n * @returns\n */\nexport function formatHostname(hostname: string): string {\n return isIPv6(hostname) ? `[${hostname}]` : hostname\n}\n"],"names":["isIPv6","formatHostname","hostname"],"mappings":"AAAA,SAASA,MAAM,QAAQ,YAAW;AAElC;;;;;CAKC,GACD,OAAO,SAASC,eAAeC,QAAgB;IAC7C,OAAOF,OAAOE,YAAY,CAAC,CAAC,EAAEA,SAAS,CAAC,CAAC,GAAGA;AAC9C","ignoreList":[0]}
+123
@@ -0,0 +1,123 @@
import { getRequestMeta } from '../request-meta';
/**
 * The I18NProvider is used to match locale aware routes, detect the locale from
 * the pathname and hostname and normalize the pathname by removing the locale
 * prefix.
 */ export class I18NProvider {
    constructor(config){
        var _config_domains;
        this.config = config;
        if (!config.locales.length) {
            throw Object.defineProperty(new Error('Invariant: No locales provided'), "__NEXT_ERROR_CODE", {
                value: "E510",
                enumerable: false,
                configurable: true
            });
        }
        this.lowerCaseLocales = config.locales.map((locale)=>locale.toLowerCase());
        this.lowerCaseDomains = (_config_domains = config.domains) == null ? void 0 : _config_domains.map((domainLocale)=>{
            var _domainLocale_locales;
            const domain = domainLocale.domain.toLowerCase();
            return {
                defaultLocale: domainLocale.defaultLocale.toLowerCase(),
                hostname: domain.split(':', 1)[0],
                domain,
                locales: (_domainLocale_locales = domainLocale.locales) == null ? void 0 : _domainLocale_locales.map((locale)=>locale.toLowerCase()),
                http: domainLocale.http
            };
        });
    }
    /**
     * Detects the domain locale from the hostname and the detected locale if
     * provided.
     *
     * @param hostname The hostname to detect the domain locale from; this must be lowercased.
     * @param detectedLocale The detected locale to use if the hostname does not match.
     * @returns The domain locale if found, `undefined` otherwise.
     */ detectDomainLocale(hostname, detectedLocale) {
        if (!hostname || !this.lowerCaseDomains || !this.config.domains) return;
        if (detectedLocale) detectedLocale = detectedLocale.toLowerCase();
        for(let i = 0; i < this.lowerCaseDomains.length; i++){
            var // Configuration validation ensures that the locale is not repeated in
            // other domains locales.
            _domainLocale_locales;
            const domainLocale = this.lowerCaseDomains[i];
            if (// We assume that the hostname is already lowercased.
            domainLocale.hostname === hostname || ((_domainLocale_locales = domainLocale.locales) == null ? void 0 : _domainLocale_locales.some((locale)=>locale === detectedLocale))) {
                return this.config.domains[i];
            }
        }
        return;
    }
    /**
     * Pulls the pre-computed locale and inference results from the query
     * object.
     *
     * @param req the request object
     * @param pathname the pathname that could contain a locale prefix
     * @returns the locale analysis result
     */ fromRequest(req, pathname) {
        const detectedLocale = getRequestMeta(req, 'locale');
        // If a locale was detected on the query, analyze the pathname to ensure
        // that the locale matches.
        if (detectedLocale) {
            const analysis = this.analyze(pathname);
            // If the analysis contained a locale we should validate it against the
            // query and strip it from the pathname.
            if (analysis.detectedLocale) {
                if (analysis.detectedLocale !== detectedLocale) {
                    console.warn(`The detected locale does not match the locale in the query. Expected to find '${detectedLocale}' in '${pathname}' but found '${analysis.detectedLocale}'`);
                }
                pathname = analysis.pathname;
            }
        }
        return {
            pathname,
            detectedLocale,
            inferredFromDefault: getRequestMeta(req, 'localeInferredFromDefault') ?? false
        };
    }
    /**
     * Analyzes the pathname for a locale and returns the pathname without it.
     *
     * @param pathname The pathname that could contain a locale prefix.
     * @param options The options to use when matching the locale.
     * @returns The matched locale and the pathname without the locale prefix
     * (if any).
     */ analyze(pathname, options = {}) {
        let detectedLocale = options.defaultLocale;
        // By default, we assume that the default locale was inferred if there was
        // no detected locale.
        let inferredFromDefault = typeof detectedLocale === 'string';
        // The first segment will be empty, because it has a leading `/`. If
        // there is no further segment, there is no locale (or it's the default).
        const segments = pathname.split('/', 2);
        if (!segments[1]) return {
            detectedLocale,
            pathname,
            inferredFromDefault
        };
        // The second segment will contain the locale part if any.
        const segment = segments[1].toLowerCase();
        // See if the segment matches one of the locales. If it doesn't, there is
        // no locale (or it's the default).
        const index = this.lowerCaseLocales.indexOf(segment);
        if (index < 0) return {
            detectedLocale,
            pathname,
            inferredFromDefault
        };
        // Return the case-sensitive locale.
        detectedLocale = this.config.locales[index];
        inferredFromDefault = false;
        // Remove the `/${locale}` part of the pathname.
        pathname = pathname.slice(detectedLocale.length + 1) || '/';
        return {
            detectedLocale,
            pathname,
            inferredFromDefault
        };
    }
}

//# sourceMappingURL=i18n-provider.js.map
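Usage sketch (illustrative, not part of the commit; the config object follows the i18n config shape used above: locales, defaultLocale, domains):

// Hypothetical caller of I18NProvider.
import { I18NProvider } from './i18n-provider';

const provider = new I18NProvider({
    locales: ['en-US', 'fr'],
    defaultLocale: 'en-US',
    domains: [{ domain: 'example.fr', defaultLocale: 'fr' }]
});

provider.analyze('/fr/products');
// { detectedLocale: 'fr', pathname: '/products', inferredFromDefault: false }
provider.analyze('/products', { defaultLocale: 'en-US' });
// { detectedLocale: 'en-US', pathname: '/products', inferredFromDefault: true }
provider.detectDomainLocale('example.fr'); // the 'fr' domain entry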
+1
File diff suppressed because one or more lines are too long
+67
@@ -0,0 +1,67 @@
import { NEXT_CACHE_IMPLICIT_TAG_ID } from '../../lib/constants';
import { getCacheHandlerEntries } from '../use-cache/handlers';
import { createLazyResult } from './lazy-result';
const getDerivedTags = (pathname)=>{
    const derivedTags = [
        `/layout`
    ];
    // we automatically add the current path segments as tags
    // for revalidatePath handling
    if (pathname.startsWith('/')) {
        const pathnameParts = pathname.split('/');
        for(let i = 1; i < pathnameParts.length + 1; i++){
            let curPathname = pathnameParts.slice(0, i).join('/');
            if (curPathname) {
                // all derived tags other than the page are layout tags
                if (!curPathname.endsWith('/page') && !curPathname.endsWith('/route')) {
                    curPathname = `${curPathname}${!curPathname.endsWith('/') ? '/' : ''}layout`;
                }
                derivedTags.push(curPathname);
            }
        }
    }
    return derivedTags;
};
/**
 * Creates a map with lazy results that fetch the expiration value for the given
 * tags and respective cache kind when they're awaited for the first time.
 */ function createTagsExpirationsByCacheKind(tags) {
    const expirationsByCacheKind = new Map();
    const cacheHandlers = getCacheHandlerEntries();
    if (cacheHandlers) {
        for (const [kind, cacheHandler] of cacheHandlers){
            if ('getExpiration' in cacheHandler) {
                expirationsByCacheKind.set(kind, createLazyResult(async ()=>cacheHandler.getExpiration(tags)));
            }
        }
    }
    return expirationsByCacheKind;
}
export async function getImplicitTags(page, pathname, fallbackRouteParams) {
    const tags = new Set();
    // Add the derived tags from the page.
    const derivedTags = getDerivedTags(page);
    for (let tag of derivedTags){
        tag = `${NEXT_CACHE_IMPLICIT_TAG_ID}${tag}`;
        tags.add(tag);
    }
    // Add the tags from the pathname. If the route has unknown params, we don't
    // want to add the pathname as a tag, as it will be invalid.
    if (pathname && (!fallbackRouteParams || fallbackRouteParams.size === 0)) {
        const tag = `${NEXT_CACHE_IMPLICIT_TAG_ID}${pathname}`;
        tags.add(tag);
    }
    if (tags.has(`${NEXT_CACHE_IMPLICIT_TAG_ID}/`)) {
        tags.add(`${NEXT_CACHE_IMPLICIT_TAG_ID}/index`);
    }
    if (tags.has(`${NEXT_CACHE_IMPLICIT_TAG_ID}/index`)) {
        tags.add(`${NEXT_CACHE_IMPLICIT_TAG_ID}/`);
    }
    const tagsArray = Array.from(tags);
    return {
        tags: tagsArray,
        expirationsByCacheKind: createTagsExpirationsByCacheKind(tagsArray)
    };
}

//# sourceMappingURL=implicit-tags.js.map
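Usage sketch (illustrative, not part of the commit): the implicit tags derived for an app page, which is what lets revalidatePath expire every layout above it. Each tag is prefixed with NEXT_CACHE_IMPLICIT_TAG_ID; the comment below lists them unprefixed.

// Hypothetical caller of getImplicitTags.
import { getImplicitTags } from './implicit-tags';

const { tags } = await getImplicitTags('/blog/[slug]/page', '/blog/hello', null);
// Derived (unprefixed): /layout, /blog/layout, /blog/[slug]/layout,
// /blog/[slug]/page, plus the concrete pathname /blog/hello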
+1
File diff suppressed because one or more lines are too long
+320
@@ -0,0 +1,320 @@
import { CachedRouteKind, IncrementalCacheKind } from '../../response-cache';
import path from '../../../shared/lib/isomorphic/path';
import { NEXT_CACHE_TAGS_HEADER, NEXT_DATA_SUFFIX, NEXT_META_SUFFIX, RSC_SEGMENT_SUFFIX, RSC_SEGMENTS_DIR_SUFFIX, RSC_SUFFIX } from '../../../lib/constants';
import { areTagsExpired, tagsManifest } from './tags-manifest.external';
import { MultiFileWriter } from '../../../lib/multi-file-writer';
import { getMemoryCache } from './memory-cache.external';
export default class FileSystemCache {
    static #_ = this.debug = !!process.env.NEXT_PRIVATE_DEBUG_CACHE;
    constructor(ctx){
        this.fs = ctx.fs;
        this.flushToDisk = ctx.flushToDisk;
        this.serverDistDir = ctx.serverDistDir;
        this.revalidatedTags = ctx.revalidatedTags;
        if (ctx.maxMemoryCacheSize) {
            if (!FileSystemCache.memoryCache) {
                if (FileSystemCache.debug) {
                    console.log('FileSystemCache: using memory store for fetch cache');
                }
                FileSystemCache.memoryCache = getMemoryCache(ctx.maxMemoryCacheSize);
            } else if (FileSystemCache.debug) {
                console.log('FileSystemCache: memory store already initialized');
            }
        } else if (FileSystemCache.debug) {
            console.log('FileSystemCache: not using memory store for fetch cache');
        }
    }
    resetRequestCache() {}
    async revalidateTag(tags, durations) {
        tags = typeof tags === 'string' ? [
            tags
        ] : tags;
        if (FileSystemCache.debug) {
            console.log('FileSystemCache: revalidateTag', tags, durations);
        }
        if (tags.length === 0) {
            return;
        }
        const now = Date.now();
        for (const tag of tags){
            const existingEntry = tagsManifest.get(tag) || {};
            if (durations) {
                // Use provided durations directly
                const updates = {
                    ...existingEntry
                };
                // mark as stale immediately
                updates.stale = now;
                if (durations.expire !== undefined) {
                    updates.expired = now + durations.expire * 1000; // Convert seconds to ms
                }
                tagsManifest.set(tag, updates);
            } else {
                // Update expired field for immediate expiration (default behavior when no durations provided)
                tagsManifest.set(tag, {
                    ...existingEntry,
                    expired: now
                });
            }
        }
    }
    async get(...args) {
        var _FileSystemCache_memoryCache, _data_value, _data_value1, _data_value2, _data_value3;
        const [key, ctx] = args;
        const { kind } = ctx;
        let data = (_FileSystemCache_memoryCache = FileSystemCache.memoryCache) == null ? void 0 : _FileSystemCache_memoryCache.get(key);
        if (FileSystemCache.debug) {
            if (kind === IncrementalCacheKind.FETCH) {
                console.log('FileSystemCache: get', key, ctx.tags, kind, !!data);
            } else {
                console.log('FileSystemCache: get', key, kind, !!data);
            }
        }
        // let's check the disk for seed data
        if (!data && process.env.NEXT_RUNTIME !== 'edge') {
            try {
                if (kind === IncrementalCacheKind.APP_ROUTE) {
                    const filePath = this.getFilePath(`${key}.body`, IncrementalCacheKind.APP_ROUTE);
                    const fileData = await this.fs.readFile(filePath);
                    const { mtime } = await this.fs.stat(filePath);
                    const meta = JSON.parse(await this.fs.readFile(filePath.replace(/\.body$/, NEXT_META_SUFFIX), 'utf8'));
                    data = {
                        lastModified: mtime.getTime(),
                        value: {
                            kind: CachedRouteKind.APP_ROUTE,
                            body: fileData,
                            headers: meta.headers,
                            status: meta.status
                        }
                    };
                } else {
                    const filePath = this.getFilePath(kind === IncrementalCacheKind.FETCH ? key : `${key}.html`, kind);
                    const fileData = await this.fs.readFile(filePath, 'utf8');
                    const { mtime } = await this.fs.stat(filePath);
                    if (kind === IncrementalCacheKind.FETCH) {
                        var _data_value4;
                        const { tags, fetchIdx, fetchUrl } = ctx;
                        if (!this.flushToDisk) return null;
                        const lastModified = mtime.getTime();
                        const parsedData = JSON.parse(fileData);
                        data = {
                            lastModified,
                            value: parsedData
                        };
                        if (((_data_value4 = data.value) == null ? void 0 : _data_value4.kind) === CachedRouteKind.FETCH) {
                            var _data_value5;
                            const storedTags = (_data_value5 = data.value) == null ? void 0 : _data_value5.tags;
                            // update stored tags if a new one is being added
                            // TODO: remove this when we can send the tags
                            // via header on GET same as SET
                            if (!(tags == null ? void 0 : tags.every((tag)=>storedTags == null ? void 0 : storedTags.includes(tag)))) {
                                if (FileSystemCache.debug) {
                                    console.log('FileSystemCache: tags vs storedTags mismatch', tags, storedTags);
                                }
                                await this.set(key, data.value, {
                                    fetchCache: true,
                                    tags,
                                    fetchIdx,
                                    fetchUrl
                                });
                            }
                        }
                    } else if (kind === IncrementalCacheKind.APP_PAGE) {
                        // We try to load the metadata file, but if it fails, we don't
                        // error. We also don't load it if this is a fallback.
                        let meta;
                        try {
                            meta = JSON.parse(await this.fs.readFile(filePath.replace(/\.html$/, NEXT_META_SUFFIX), 'utf8'));
                        } catch {}
                        let maybeSegmentData;
                        if (meta == null ? void 0 : meta.segmentPaths) {
                            // Collect all the segment data for this page.
                            // TODO: To optimize file system reads, we should consider creating
                            // separate cache entries for each segment, rather than storing them
                            // all on the page's entry. Though the behavior is
                            // identical regardless.
                            const segmentData = new Map();
                            maybeSegmentData = segmentData;
                            const segmentsDir = key + RSC_SEGMENTS_DIR_SUFFIX;
                            await Promise.all(meta.segmentPaths.map(async (segmentPath)=>{
                                const segmentDataFilePath = this.getFilePath(segmentsDir + segmentPath + RSC_SEGMENT_SUFFIX, IncrementalCacheKind.APP_PAGE);
                                try {
                                    segmentData.set(segmentPath, await this.fs.readFile(segmentDataFilePath));
                                } catch {
                                // This shouldn't happen, but if for some reason we fail to
                                // load a segment from the filesystem, treat it the same as if
                                // the segment is dynamic and does not have a prefetch.
                                }
                            }));
                        }
                        let rscData;
                        if (!ctx.isFallback && (!ctx.isRoutePPREnabled || (meta == null ? void 0 : meta.postponed) == null)) {
                            rscData = await this.fs.readFile(this.getFilePath(`${key}${RSC_SUFFIX}`, IncrementalCacheKind.APP_PAGE));
                        }
                        data = {
                            lastModified: mtime.getTime(),
                            value: {
                                kind: CachedRouteKind.APP_PAGE,
                                html: fileData,
                                rscData,
                                postponed: meta == null ? void 0 : meta.postponed,
                                headers: meta == null ? void 0 : meta.headers,
                                status: meta == null ? void 0 : meta.status,
                                segmentData: maybeSegmentData
                            }
                        };
                    } else if (kind === IncrementalCacheKind.PAGES) {
                        let meta;
                        let pageData = {};
                        if (!ctx.isFallback) {
                            pageData = JSON.parse(await this.fs.readFile(this.getFilePath(`${key}${NEXT_DATA_SUFFIX}`, IncrementalCacheKind.PAGES), 'utf8'));
                        }
                        data = {
                            lastModified: mtime.getTime(),
                            value: {
                                kind: CachedRouteKind.PAGES,
                                html: fileData,
                                pageData,
                                headers: meta == null ? void 0 : meta.headers,
                                status: meta == null ? void 0 : meta.status
                            }
                        };
                    } else {
                        throw Object.defineProperty(new Error(`Invariant: Unexpected route kind ${kind} in file system cache.`), "__NEXT_ERROR_CODE", {
                            value: "E445",
                            enumerable: false,
                            configurable: true
                        });
                    }
                }
                if (data) {
                    var _FileSystemCache_memoryCache1;
                    (_FileSystemCache_memoryCache1 = FileSystemCache.memoryCache) == null ? void 0 : _FileSystemCache_memoryCache1.set(key, data);
                }
            } catch {
                return null;
            }
        }
        if ((data == null ? void 0 : (_data_value = data.value) == null ? void 0 : _data_value.kind) === CachedRouteKind.APP_PAGE || (data == null ? void 0 : (_data_value1 = data.value) == null ? void 0 : _data_value1.kind) === CachedRouteKind.APP_ROUTE || (data == null ? void 0 : (_data_value2 = data.value) == null ? void 0 : _data_value2.kind) === CachedRouteKind.PAGES) {
            var _data_value_headers;
            const tagsHeader = (_data_value_headers = data.value.headers) == null ? void 0 : _data_value_headers[NEXT_CACHE_TAGS_HEADER];
            if (typeof tagsHeader === 'string') {
                const cacheTags = tagsHeader.split(',');
                // we trigger a blocking validation if an ISR page
                // had a tag revalidated, if we want to be a background
                // revalidation instead we return data.lastModified = -1
                if (cacheTags.length > 0 && areTagsExpired(cacheTags, data.lastModified)) {
                    if (FileSystemCache.debug) {
                        console.log('FileSystemCache: expired tags', cacheTags);
                    }
                    return null;
                }
            }
        } else if ((data == null ? void 0 : (_data_value3 = data.value) == null ? void 0 : _data_value3.kind) === CachedRouteKind.FETCH) {
            const combinedTags = ctx.kind === IncrementalCacheKind.FETCH ? [
                ...ctx.tags || [],
                ...ctx.softTags || []
            ] : [];
            // When revalidate tag is called we don't return stale data so it's
            // updated right away.
            if (combinedTags.some((tag)=>this.revalidatedTags.includes(tag))) {
                if (FileSystemCache.debug) {
                    console.log('FileSystemCache: was revalidated', combinedTags);
                }
                return null;
            }
            if (areTagsExpired(combinedTags, data.lastModified)) {
                if (FileSystemCache.debug) {
                    console.log('FileSystemCache: expired tags', combinedTags);
                }
                return null;
            }
        }
        return data ?? null;
    }
    async set(key, data, ctx) {
        var _FileSystemCache_memoryCache;
        (_FileSystemCache_memoryCache = FileSystemCache.memoryCache) == null ? void 0 : _FileSystemCache_memoryCache.set(key, {
            value: data,
            lastModified: Date.now()
        });
        if (FileSystemCache.debug) {
            console.log('FileSystemCache: set', key);
        }
        if (!this.flushToDisk || !data) return;
        // Create a new writer that will prepare to write all the files to disk
        // after their containing directory is created.
        const writer = new MultiFileWriter(this.fs);
        if (data.kind === CachedRouteKind.APP_ROUTE) {
            const filePath = this.getFilePath(`${key}.body`, IncrementalCacheKind.APP_ROUTE);
            writer.append(filePath, data.body);
            const meta = {
                headers: data.headers,
                status: data.status,
                postponed: undefined,
                segmentPaths: undefined,
                prefetchHints: undefined
            };
            writer.append(filePath.replace(/\.body$/, NEXT_META_SUFFIX), JSON.stringify(meta, null, 2));
        } else if (data.kind === CachedRouteKind.PAGES || data.kind === CachedRouteKind.APP_PAGE) {
            const isAppPath = data.kind === CachedRouteKind.APP_PAGE;
            const htmlPath = this.getFilePath(`${key}.html`, isAppPath ? IncrementalCacheKind.APP_PAGE : IncrementalCacheKind.PAGES);
            writer.append(htmlPath, data.html);
            // Fallbacks don't generate a data file.
            if (!ctx.fetchCache && !ctx.isFallback && !ctx.isRoutePPREnabled) {
                writer.append(this.getFilePath(`${key}${isAppPath ? RSC_SUFFIX : NEXT_DATA_SUFFIX}`, isAppPath ? IncrementalCacheKind.APP_PAGE : IncrementalCacheKind.PAGES), isAppPath ? data.rscData : JSON.stringify(data.pageData));
            }
            if ((data == null ? void 0 : data.kind) === CachedRouteKind.APP_PAGE) {
                let segmentPaths;
                if (data.segmentData) {
                    segmentPaths = [];
                    const segmentsDir = htmlPath.replace(/\.html$/, RSC_SEGMENTS_DIR_SUFFIX);
                    for (const [segmentPath, buffer] of data.segmentData){
                        segmentPaths.push(segmentPath);
                        const segmentDataFilePath = segmentsDir + segmentPath + RSC_SEGMENT_SUFFIX;
                        writer.append(segmentDataFilePath, buffer);
                    }
                }
                const meta = {
                    headers: data.headers,
                    status: data.status,
                    postponed: data.postponed,
                    segmentPaths,
                    prefetchHints: undefined
                };
                writer.append(htmlPath.replace(/\.html$/, NEXT_META_SUFFIX), JSON.stringify(meta));
            }
        } else if (data.kind === CachedRouteKind.FETCH) {
            const filePath = this.getFilePath(key, IncrementalCacheKind.FETCH);
            writer.append(filePath, JSON.stringify({
                ...data,
                tags: ctx.fetchCache ? ctx.tags : []
            }));
        }
        // Wait for all FS operations to complete.
        await writer.wait();
    }
    getFilePath(pathname, kind) {
        switch(kind){
            case IncrementalCacheKind.FETCH:
                // we store in .next/cache/fetch-cache so it can be persisted
                // across deploys
                return path.join(this.serverDistDir, '..', 'cache', 'fetch-cache', pathname);
            case IncrementalCacheKind.PAGES:
                return path.join(this.serverDistDir, 'pages', pathname);
            case IncrementalCacheKind.IMAGE:
            case IncrementalCacheKind.APP_PAGE:
            case IncrementalCacheKind.APP_ROUTE:
                return path.join(this.serverDistDir, 'app', pathname);
            default:
                throw Object.defineProperty(new Error(`Unexpected file path kind: ${kind}`), "__NEXT_ERROR_CODE", {
                    value: "E479",
                    enumerable: false,
                    configurable: true
                });
        }
    }
}

//# sourceMappingURL=file-system-cache.js.map
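Usage sketch (illustrative, not part of the commit): the `nodeFs` object is an assumed stand-in for the CacheFs implementation Next.js normally injects; only the methods this read path touches are stubbed.

// Hypothetical caller of FileSystemCache.
import { promises as fsp } from 'fs';
import FileSystemCache from './file-system-cache';
import { IncrementalCacheKind } from '../../response-cache';

const nodeFs = {
    readFile: fsp.readFile,
    stat: fsp.stat
    // writes would also need the mkdir/writeFile methods MultiFileWriter expects
};

const cache = new FileSystemCache({
    fs: nodeFs,
    flushToDisk: true,
    serverDistDir: '/project/.next/server',
    revalidatedTags: [],
    maxMemoryCacheSize: 50 * 1024 * 1024
});

// Reads <serverDistDir>/app/blog/hello.html (+ meta) or resolves to null.
const entry = await cache.get('/blog/hello', { kind: IncrementalCacheKind.APP_PAGE });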
+1
File diff suppressed because one or more lines are too long
+474
@@ -0,0 +1,474 @@
import { IncrementalCacheKind, CachedRouteKind } from '../../response-cache';
import FileSystemCache from './file-system-cache';
import { normalizePagePath } from '../../../shared/lib/page-path/normalize-page-path';
import { CACHE_ONE_YEAR_SECONDS, NEXT_CACHE_TAGS_HEADER, PRERENDER_REVALIDATE_HEADER } from '../../../lib/constants';
import { toRoute } from '../to-route';
import { SharedCacheControls } from './shared-cache-controls.external';
import { getPrerenderResumeDataCache, getRenderResumeDataCache, workUnitAsyncStorage } from '../../app-render/work-unit-async-storage.external';
import { InvariantError } from '../../../shared/lib/invariant-error';
import { getPreviouslyRevalidatedTags } from '../../server-utils';
import { workAsyncStorage } from '../../app-render/work-async-storage.external';
import { DetachedPromise } from '../../../lib/detached-promise';
import { areTagsExpired, areTagsStale } from './tags-manifest.external';
export class CacheHandler {
    // eslint-disable-next-line
    constructor(_ctx){}
    async get(_cacheKey, _ctx) {
        return {};
    }
    async set(_cacheKey, _data, _ctx) {}
    async revalidateTag(_tags, _durations) {}
    resetRequestCache() {}
}
export class IncrementalCache {
    static #_ = this.debug = !!process.env.NEXT_PRIVATE_DEBUG_CACHE;
    constructor({ fs, dev, flushToDisk, minimalMode, serverDistDir, requestHeaders, maxMemoryCacheSize, getPrerenderManifest, fetchCacheKeyPrefix, CurCacheHandler, allowedRevalidateHeaderKeys }){
        var _this_prerenderManifest_preview, _this_prerenderManifest;
        this.locks = new Map();
        this.hasCustomCacheHandler = Boolean(CurCacheHandler);
        const cacheHandlersSymbol = Symbol.for('@next/cache-handlers');
        const _globalThis = globalThis;
        if (!CurCacheHandler) {
            // if we have a global cache handler available leverage it
            const globalCacheHandler = _globalThis[cacheHandlersSymbol];
            if (globalCacheHandler == null ? void 0 : globalCacheHandler.FetchCache) {
                CurCacheHandler = globalCacheHandler.FetchCache;
                if (IncrementalCache.debug) {
                    console.log('IncrementalCache: using global FetchCache cache handler');
                }
            } else {
                if (fs && serverDistDir) {
                    if (IncrementalCache.debug) {
                        console.log('IncrementalCache: using filesystem cache handler');
                    }
                    CurCacheHandler = FileSystemCache;
                }
            }
        } else if (IncrementalCache.debug) {
            console.log('IncrementalCache: using custom cache handler', CurCacheHandler.name);
        }
        if (process.env.__NEXT_TEST_MAX_ISR_CACHE) {
            // Allow cache size to be overridden for testing purposes
            maxMemoryCacheSize = parseInt(process.env.__NEXT_TEST_MAX_ISR_CACHE, 10);
        }
        this.dev = dev;
        this.disableForTestmode = process.env.NEXT_PRIVATE_TEST_PROXY === 'true';
        // this is a hack to avoid Webpack knowing this is equal to this.minimalMode
        // because we replace this.minimalMode to true in production bundles.
        const minimalModeKey = 'minimalMode';
        this[minimalModeKey] = minimalMode;
        this.requestHeaders = requestHeaders;
        this.allowedRevalidateHeaderKeys = allowedRevalidateHeaderKeys;
        this.prerenderManifest = getPrerenderManifest();
        this.cacheControls = new SharedCacheControls(this.prerenderManifest);
        this.fetchCacheKeyPrefix = fetchCacheKeyPrefix;
        let revalidatedTags = [];
        if (requestHeaders[PRERENDER_REVALIDATE_HEADER] === ((_this_prerenderManifest = this.prerenderManifest) == null ? void 0 : (_this_prerenderManifest_preview = _this_prerenderManifest.preview) == null ? void 0 : _this_prerenderManifest_preview.previewModeId)) {
            this.isOnDemandRevalidate = true;
        }
        if (minimalMode) {
            var _this_prerenderManifest_preview1, _this_prerenderManifest1;
            revalidatedTags = this.revalidatedTags = getPreviouslyRevalidatedTags(requestHeaders, (_this_prerenderManifest1 = this.prerenderManifest) == null ? void 0 : (_this_prerenderManifest_preview1 = _this_prerenderManifest1.preview) == null ? void 0 : _this_prerenderManifest_preview1.previewModeId);
        }
        if (CurCacheHandler) {
            this.cacheHandler = new CurCacheHandler({
                dev,
                fs,
                flushToDisk,
                serverDistDir,
                revalidatedTags,
                maxMemoryCacheSize,
                _requestHeaders: requestHeaders,
                fetchCacheKeyPrefix
            });
        }
    }
    calculateRevalidate(pathname, fromTime, dev, isFallback) {
        // in development we don't have a prerender-manifest
        // and default to always revalidating to allow easier debugging
        if (dev) return Math.floor(performance.timeOrigin + performance.now() - 1000);
        const cacheControl = this.cacheControls.get(toRoute(pathname));
        // if an entry isn't present in routes we fallback to a default
        // of revalidating after 1 second unless it's a fallback request.
        const initialRevalidateSeconds = cacheControl ? cacheControl.revalidate : isFallback ? false : 1;
        const revalidateAfter = typeof initialRevalidateSeconds === 'number' ? initialRevalidateSeconds * 1000 + fromTime : initialRevalidateSeconds;
        return revalidateAfter;
    }
    _getPathname(pathname, fetchCache) {
        return fetchCache ? pathname : normalizePagePath(pathname);
    }
    resetRequestCache() {
        var _this_cacheHandler_resetRequestCache, _this_cacheHandler;
        (_this_cacheHandler = this.cacheHandler) == null ? void 0 : (_this_cacheHandler_resetRequestCache = _this_cacheHandler.resetRequestCache) == null ? void 0 : _this_cacheHandler_resetRequestCache.call(_this_cacheHandler);
    }
    async lock(cacheKey) {
        // Wait for any existing lock on this cache key to be released
        // This implements a simple queue-based locking mechanism
        while(true){
            const lock = this.locks.get(cacheKey);
            if (IncrementalCache.debug) {
                console.log('IncrementalCache: lock get', cacheKey, !!lock);
            }
            // If no lock exists, we can proceed to acquire it
            if (!lock) break;
            // Wait for the existing lock to be released before trying again
            await lock;
        }
        // Create a new detached promise that will represent this lock
        // The resolve function (unlock) will be returned to the caller
        const { resolve, promise } = new DetachedPromise();
        if (IncrementalCache.debug) {
            console.log('IncrementalCache: successfully locked', cacheKey);
        }
        // Store the lock promise in the locks map
        this.locks.set(cacheKey, promise);
        return ()=>{
            // Resolve the promise to release the lock.
            resolve();
            // Remove the lock from the map once it's released so that future gets
            // can acquire the lock.
            this.locks.delete(cacheKey);
        };
    }
    async revalidateTag(tags, durations) {
        var _this_cacheHandler;
        return (_this_cacheHandler = this.cacheHandler) == null ? void 0 : _this_cacheHandler.revalidateTag(tags, durations);
    }
    // x-ref: https://github.com/facebook/react/blob/2655c9354d8e1c54ba888444220f63e836925caa/packages/react/src/ReactFetch.js#L23
    async generateCacheKey(url, init = {}) {
        // this should be bumped anytime a fix is made to cache entries
        // that should bust the cache
        const MAIN_KEY_PREFIX = 'v3';
        const bodyChunks = [];
        const encoder = new TextEncoder();
        const decoder = new TextDecoder();
        if (init.body) {
            // handle Uint8Array body
            if (init.body instanceof Uint8Array) {
                bodyChunks.push(decoder.decode(init.body));
                init._ogBody = init.body;
            } else if (typeof init.body.getReader === 'function') {
                const readableBody = init.body;
                const chunks = [];
                try {
                    await readableBody.pipeTo(new WritableStream({
                        write (chunk) {
                            if (typeof chunk === 'string') {
                                chunks.push(encoder.encode(chunk));
                                bodyChunks.push(chunk);
                            } else {
                                chunks.push(chunk);
                                bodyChunks.push(decoder.decode(chunk, {
                                    stream: true
                                }));
                            }
                        }
                    }));
                    // Flush the decoder.
                    bodyChunks.push(decoder.decode());
                    // Create a new buffer with all the chunks.
                    const length = chunks.reduce((total, arr)=>total + arr.length, 0);
                    const arrayBuffer = new Uint8Array(length);
                    // Push each of the chunks into the new array buffer.
                    let offset = 0;
                    for (const chunk of chunks){
                        arrayBuffer.set(chunk, offset);
                        offset += chunk.length;
                    }
                    init._ogBody = arrayBuffer;
                } catch (err) {
                    console.error('Problem reading body', err);
                }
            } else if (typeof init.body.keys === 'function') {
                const formData = init.body;
                init._ogBody = init.body;
                for (const key of new Set([
                    ...formData.keys()
                ])){
                    const values = formData.getAll(key);
                    bodyChunks.push(`${key}=${(await Promise.all(values.map(async (val)=>{
                        if (typeof val === 'string') {
                            return val;
                        } else {
                            return await val.text();
                        }
                    }))).join(',')}`);
                }
            // handle blob body
            } else if (typeof init.body.arrayBuffer === 'function') {
                const blob = init.body;
                const arrayBuffer = await blob.arrayBuffer();
                bodyChunks.push(await blob.text());
                init._ogBody = new Blob([
                    arrayBuffer
                ], {
                    type: blob.type
                });
            } else if (typeof init.body === 'string') {
                bodyChunks.push(init.body);
                init._ogBody = init.body;
            }
        }
        const headers = typeof (init.headers || {}).keys === 'function' ? Object.fromEntries(init.headers) : Object.assign({}, init.headers);
        // w3c trace context headers can break request caching and deduplication
        // so we remove them from the cache key
        if ('traceparent' in headers) delete headers['traceparent'];
        if ('tracestate' in headers) delete headers['tracestate'];
        const cacheString = JSON.stringify([
            MAIN_KEY_PREFIX,
            this.fetchCacheKeyPrefix || '',
            url,
            init.method,
            headers,
            init.mode,
            init.redirect,
            init.credentials,
            init.referrer,
            init.referrerPolicy,
            init.integrity,
            init.cache,
            bodyChunks
        ]);
        if (process.env.NEXT_RUNTIME === 'edge') {
            function bufferToHex(buffer) {
                return Array.prototype.map.call(new Uint8Array(buffer), (b)=>b.toString(16).padStart(2, '0')).join('');
            }
            const buffer = encoder.encode(cacheString);
            return bufferToHex(await crypto.subtle.digest('SHA-256', buffer));
        } else {
            const crypto1 = require('crypto');
            return crypto1.createHash('sha256').update(cacheString).digest('hex');
        }
    }
    async get(cacheKey, ctx) {
        var _this_cacheHandler, _cacheData_value;
        // Unlike other caches if we have a resume data cache, we use it even if
        // testmode would normally disable it or if requestHeaders say 'no-cache'.
        if (ctx.kind === IncrementalCacheKind.FETCH) {
            const workUnitStore = workUnitAsyncStorage.getStore();
            const resumeDataCache = workUnitStore ? getRenderResumeDataCache(workUnitStore) : null;
            if (resumeDataCache) {
                const memoryCacheData = resumeDataCache.fetch.get(cacheKey);
                if ((memoryCacheData == null ? void 0 : memoryCacheData.kind) === CachedRouteKind.FETCH) {
                    // Check if any tags were recently revalidated before returning RDC entry.
                    // When a server action calls updateTag(), the re-render should see fresh
                    // data instead of stale RDC data.
                    const workStore = workAsyncStorage.getStore();
                    const combinedTags = [
                        ...ctx.tags || [],
                        ...ctx.softTags || []
                    ];
                    const hasRevalidatedTag = combinedTags.some((tag)=>{
                        var _this_revalidatedTags, _workStore_pendingRevalidatedTags;
                        return ((_this_revalidatedTags = this.revalidatedTags) == null ? void 0 : _this_revalidatedTags.includes(tag)) || (workStore == null ? void 0 : (_workStore_pendingRevalidatedTags = workStore.pendingRevalidatedTags) == null ? void 0 : _workStore_pendingRevalidatedTags.some((item)=>item.tag === tag));
                    });
                    if (hasRevalidatedTag) {
                        if (IncrementalCache.debug) {
                            console.log('IncrementalCache: rdc:revalidated-tag', cacheKey);
                        }
                    // Fall through to cacheHandler lookup
                    } else {
                        if (IncrementalCache.debug) {
                            console.log('IncrementalCache: rdc:hit', cacheKey);
                        }
                        return {
                            isStale: false,
                            value: memoryCacheData
                        };
                    }
                } else if (IncrementalCache.debug) {
                    console.log('IncrementalCache: rdc:miss', cacheKey);
                }
            } else {
                if (IncrementalCache.debug) {
                    console.log('IncrementalCache: rdc:no-resume-data');
                }
            }
        }
        // we don't leverage the prerender cache in dev mode
        // so that getStaticProps is always called for easier debugging
        if (this.disableForTestmode || this.dev && (ctx.kind !== IncrementalCacheKind.FETCH || this.requestHeaders['cache-control'] === 'no-cache')) {
            return null;
        }
        cacheKey = this._getPathname(cacheKey, ctx.kind === IncrementalCacheKind.FETCH);
        const cacheData = await ((_this_cacheHandler = this.cacheHandler) == null ? void 0 : _this_cacheHandler.get(cacheKey, ctx));
        if (ctx.kind === IncrementalCacheKind.FETCH) {
            var _cacheData_value1;
            if (!cacheData) {
                return null;
            }
            if (((_cacheData_value1 = cacheData.value) == null ? void 0 : _cacheData_value1.kind) !== CachedRouteKind.FETCH) {
                var _cacheData_value2;
                throw Object.defineProperty(new InvariantError(`Expected cached value for cache key ${JSON.stringify(cacheKey)} to be a "FETCH" kind, got ${JSON.stringify((_cacheData_value2 = cacheData.value) == null ? void 0 : _cacheData_value2.kind)} instead.`), "__NEXT_ERROR_CODE", {
                    value: "E653",
                    enumerable: false,
                    configurable: true
                });
            }
            const workStore = workAsyncStorage.getStore();
            const combinedTags = [
                ...ctx.tags || [],
                ...ctx.softTags || []
            ];
            // if a tag was revalidated we don't return stale data
            if (combinedTags.some((tag)=>{
                var _this_revalidatedTags, _workStore_pendingRevalidatedTags;
                return ((_this_revalidatedTags = this.revalidatedTags) == null ? void 0 : _this_revalidatedTags.includes(tag)) || (workStore == null ? void 0 : (_workStore_pendingRevalidatedTags = workStore.pendingRevalidatedTags) == null ? void 0 : _workStore_pendingRevalidatedTags.some((item)=>item.tag === tag));
            })) {
                if (IncrementalCache.debug) {
                    console.log('IncrementalCache: expired tag', cacheKey);
                }
                return null;
            }
            // As we're able to get the cache entry for this fetch, and the prerender
            // resume data cache (RDC) is available, it must have been populated by a
            // previous fetch, but was not yet present in the in-memory cache. This
            // could be the case when performing multiple renders in parallel during
            // build time where we de-duplicate the fetch calls.
            //
            // We add it to the RDC so that the next fetch call will be able to use it
            // and it won't have to reach into the fetch cache implementation.
            const workUnitStore = workUnitAsyncStorage.getStore();
            if (workUnitStore) {
                const prerenderResumeDataCache = getPrerenderResumeDataCache(workUnitStore);
                if (prerenderResumeDataCache) {
                    if (IncrementalCache.debug) {
                        console.log('IncrementalCache: rdc:set', cacheKey);
                    }
                    prerenderResumeDataCache.fetch.set(cacheKey, cacheData.value);
                }
            }
            const revalidate = ctx.revalidate || cacheData.value.revalidate;
            const age = (performance.timeOrigin + performance.now() - (cacheData.lastModified || 0)) / 1000;
            let isStale = age > revalidate;
            const data = cacheData.value.data;
            if (areTagsExpired(combinedTags, cacheData.lastModified)) {
                return null;
            } else if (areTagsStale(combinedTags, cacheData.lastModified)) {
                isStale = true;
            }
            return {
                isStale,
                value: {
                    kind: CachedRouteKind.FETCH,
                    data,
                    revalidate
                }
            };
        } else if ((cacheData == null ? void 0 : (_cacheData_value = cacheData.value) == null ? void 0 : _cacheData_value.kind) === CachedRouteKind.FETCH) {
            throw Object.defineProperty(new InvariantError(`Expected cached value for cache key ${JSON.stringify(cacheKey)} not to be a ${JSON.stringify(ctx.kind)} kind, got "FETCH" instead.`), "__NEXT_ERROR_CODE", {
                value: "E652",
                enumerable: false,
                configurable: true
            });
        }
        let entry = null;
        const { isFallback } = ctx;
        const cacheControl = this.cacheControls.get(toRoute(cacheKey));
        let isStale;
        let revalidateAfter;
        if ((cacheData == null ? void 0 : cacheData.lastModified) === -1) {
            isStale = -1;
            revalidateAfter = -1 * CACHE_ONE_YEAR_SECONDS * 1000;
        } else {
            var _cacheData_value3, _cacheData_value4;
            const now = performance.timeOrigin + performance.now();
            const lastModified = (cacheData == null ? void 0 : cacheData.lastModified) || now;
            revalidateAfter = this.calculateRevalidate(cacheKey, lastModified, this.dev ?? false, ctx.isFallback);
            isStale = revalidateAfter !== false && revalidateAfter < now ? true : undefined;
            // If the stale time couldn't be determined based on the revalidation
            // time, we check if the tags are expired or stale.
            if (isStale === undefined && ((cacheData == null ? void 0 : (_cacheData_value3 = cacheData.value) == null ? void 0 : _cacheData_value3.kind) === CachedRouteKind.APP_PAGE || (cacheData == null ? void 0 : (_cacheData_value4 = cacheData.value) == null ? void 0 : _cacheData_value4.kind) === CachedRouteKind.APP_ROUTE)) {
                var _cacheData_value_headers;
                const tagsHeader = (_cacheData_value_headers = cacheData.value.headers) == null ? void 0 : _cacheData_value_headers[NEXT_CACHE_TAGS_HEADER];
                if (typeof tagsHeader === 'string') {
                    const cacheTags = tagsHeader.split(',');
                    if (cacheTags.length > 0) {
                        if (areTagsExpired(cacheTags, lastModified)) {
                            isStale = -1;
                        } else if (areTagsStale(cacheTags, lastModified)) {
|
||||
isStale = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (cacheData) {
|
||||
entry = {
|
||||
isStale,
|
||||
cacheControl,
|
||||
revalidateAfter,
|
||||
value: cacheData.value,
|
||||
isFallback
|
||||
};
|
||||
}
|
||||
if (!cacheData && this.prerenderManifest.notFoundRoutes.includes(cacheKey)) {
|
||||
// for the first hit after starting the server the cache
|
||||
// may not have a way to save notFound: true so if
|
||||
// the prerender-manifest marks this as notFound then we
|
||||
// return that entry and trigger a cache set to give it a
|
||||
// chance to update in-memory entries
|
||||
entry = {
|
||||
isStale,
|
||||
value: null,
|
||||
cacheControl,
|
||||
revalidateAfter,
|
||||
isFallback
|
||||
};
|
||||
this.set(cacheKey, entry.value, {
|
||||
...ctx,
|
||||
cacheControl
|
||||
});
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
async set(pathname, data, ctx) {
|
||||
// Even if we otherwise disable caching for testMode or if no fetchCache is
|
||||
// configured we still always stash results in the resume data cache if one
|
||||
// exists. This is because this is a transient in memory cache that
|
||||
// populates caches ahead of a dynamic render in dev mode to allow the RSC
|
||||
// debug info to have the right environment associated to it.
|
||||
if ((data == null ? void 0 : data.kind) === CachedRouteKind.FETCH) {
|
||||
const workUnitStore = workUnitAsyncStorage.getStore();
|
||||
const prerenderResumeDataCache = workUnitStore ? getPrerenderResumeDataCache(workUnitStore) : null;
|
||||
if (prerenderResumeDataCache) {
|
||||
if (IncrementalCache.debug) {
|
||||
console.log('IncrementalCache: rdc:set', pathname);
|
||||
}
|
||||
prerenderResumeDataCache.fetch.set(pathname, data);
|
||||
}
|
||||
}
|
||||
if (this.disableForTestmode || this.dev && !ctx.fetchCache) return;
|
||||
pathname = this._getPathname(pathname, ctx.fetchCache);
|
||||
// FetchCache has upper limit of 2MB per-entry currently
|
||||
const itemSize = JSON.stringify(data).length;
|
||||
if (ctx.fetchCache && itemSize > 2 * 1024 * 1024 && // We ignore the size limit when custom cache handler is being used, as it
|
||||
// might not have this limit
|
||||
!this.hasCustomCacheHandler && // We also ignore the size limit when it's an implicit build-time-only
|
||||
// caching that the user isn't even aware of.
|
||||
!ctx.isImplicitBuildTimeCache) {
|
||||
const warningText = `Failed to set Next.js data cache for ${ctx.fetchUrl || pathname}, items over 2MB can not be cached (${itemSize} bytes)`;
|
||||
if (this.dev) {
|
||||
throw Object.defineProperty(new Error(warningText), "__NEXT_ERROR_CODE", {
|
||||
value: "E1003",
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
console.warn(warningText);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
var _this_cacheHandler;
|
||||
if (!ctx.fetchCache && ctx.cacheControl) {
|
||||
this.cacheControls.set(toRoute(pathname), ctx.cacheControl);
|
||||
}
|
||||
await ((_this_cacheHandler = this.cacheHandler) == null ? void 0 : _this_cacheHandler.set(pathname, data, ctx));
|
||||
} catch (error) {
|
||||
console.warn('Failed to update prerender cache for', pathname, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//# sourceMappingURL=index.js.map
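The FETCH staleness check above reduces to simple arithmetic over the entry's age; a minimal standalone sketch with hypothetical numbers, using Date.now() in place of performance.timeOrigin + performance.now():

// Entry written 90 seconds ago, with a 60-second revalidate window.
const lastModified = Date.now() - 90_000;
const revalidate = 60;
const age = (Date.now() - lastModified) / 1000; // ~90 seconds
const isStale = age > revalidate; // true: the 60-second window has passed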
+1
File diff suppressed because one or more lines are too long
+47
@@ -0,0 +1,47 @@
import { CachedRouteKind } from '../../response-cache/types';
import { LRUCache } from '../lru-cache';
let memoryCache;
function getBufferSize(buffer) {
return (buffer == null ? void 0 : buffer.length) || 0;
}
function getSegmentDataSize(segmentData) {
if (!segmentData) {
return 0;
}
let size = 0;
for (const [segmentPath, buffer] of segmentData){
size += segmentPath.length + getBufferSize(buffer);
}
return size;
}
export function getMemoryCache(maxMemoryCacheSize) {
if (!memoryCache) {
memoryCache = new LRUCache(maxMemoryCacheSize, function length({ value }) {
var _JSON_stringify;
if (!value) {
return 25;
} else if (value.kind === CachedRouteKind.REDIRECT) {
return JSON.stringify(value.props).length;
} else if (value.kind === CachedRouteKind.IMAGE) {
throw Object.defineProperty(new Error('invariant image should not be incremental-cache'), "__NEXT_ERROR_CODE", {
value: "E501",
enumerable: false,
configurable: true
});
} else if (value.kind === CachedRouteKind.FETCH) {
return JSON.stringify(value.data || '').length;
} else if (value.kind === CachedRouteKind.APP_ROUTE) {
return value.body.length;
}
// rough estimate of size of cache value
if (value.kind === CachedRouteKind.APP_PAGE) {
var _value_postponed;
return Math.max(1, value.html.length + getBufferSize(value.rscData) + (((_value_postponed = value.postponed) == null ? void 0 : _value_postponed.length) || 0) + getSegmentDataSize(value.segmentData));
}
return value.html.length + (((_JSON_stringify = JSON.stringify(value.pageData)) == null ? void 0 : _JSON_stringify.length) || 0);
});
}
return memoryCache;
}

//# sourceMappingURL=memory-cache.external.js.map
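getMemoryCache is a module-level singleton: the LRUCache is created once with the first caller's limit, and the size function weighs each entry by an approximate byte count. A minimal usage sketch (the 50 MB figure is hypothetical):

const cache = getMemoryCache(50 * 1024 * 1024); // first call creates the cache
const again = getMemoryCache(1024); // later limits are ignored
console.log(cache === again); // true: all callers share one instance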
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/incremental-cache/memory-cache.external.ts"],"sourcesContent":["import type { CacheHandlerValue } from '.'\nimport { CachedRouteKind } from '../../response-cache/types'\nimport { LRUCache } from '../lru-cache'\n\nlet memoryCache: LRUCache<CacheHandlerValue> | undefined\n\nfunction getBufferSize(buffer: Buffer | undefined) {\n return buffer?.length || 0\n}\n\nfunction getSegmentDataSize(segmentData: Map<string, Buffer> | undefined) {\n if (!segmentData) {\n return 0\n }\n\n let size = 0\n\n for (const [segmentPath, buffer] of segmentData) {\n size += segmentPath.length + getBufferSize(buffer)\n }\n\n return size\n}\n\nexport function getMemoryCache(maxMemoryCacheSize: number) {\n if (!memoryCache) {\n memoryCache = new LRUCache(maxMemoryCacheSize, function length({ value }) {\n if (!value) {\n return 25\n } else if (value.kind === CachedRouteKind.REDIRECT) {\n return JSON.stringify(value.props).length\n } else if (value.kind === CachedRouteKind.IMAGE) {\n throw new Error('invariant image should not be incremental-cache')\n } else if (value.kind === CachedRouteKind.FETCH) {\n return JSON.stringify(value.data || '').length\n } else if (value.kind === CachedRouteKind.APP_ROUTE) {\n return value.body.length\n }\n // rough estimate of size of cache value\n if (value.kind === CachedRouteKind.APP_PAGE) {\n return Math.max(\n 1,\n value.html.length +\n getBufferSize(value.rscData) +\n (value.postponed?.length || 0) +\n getSegmentDataSize(value.segmentData)\n )\n }\n\n return value.html.length + (JSON.stringify(value.pageData)?.length || 0)\n })\n }\n\n return memoryCache\n}\n"],"names":["CachedRouteKind","LRUCache","memoryCache","getBufferSize","buffer","length","getSegmentDataSize","segmentData","size","segmentPath","getMemoryCache","maxMemoryCacheSize","value","JSON","kind","REDIRECT","stringify","props","IMAGE","Error","FETCH","data","APP_ROUTE","body","APP_PAGE","Math","max","html","rscData","postponed","pageData"],"mappings":"AACA,SAASA,eAAe,QAAQ,6BAA4B;AAC5D,SAASC,QAAQ,QAAQ,eAAc;AAEvC,IAAIC;AAEJ,SAASC,cAAcC,MAA0B;IAC/C,OAAOA,CAAAA,0BAAAA,OAAQC,MAAM,KAAI;AAC3B;AAEA,SAASC,mBAAmBC,WAA4C;IACtE,IAAI,CAACA,aAAa;QAChB,OAAO;IACT;IAEA,IAAIC,OAAO;IAEX,KAAK,MAAM,CAACC,aAAaL,OAAO,IAAIG,YAAa;QAC/CC,QAAQC,YAAYJ,MAAM,GAAGF,cAAcC;IAC7C;IAEA,OAAOI;AACT;AAEA,OAAO,SAASE,eAAeC,kBAA0B;IACvD,IAAI,CAACT,aAAa;QAChBA,cAAc,IAAID,SAASU,oBAAoB,SAASN,OAAO,EAAEO,KAAK,EAAE;gBAuB1CC;YAtB5B,IAAI,CAACD,OAAO;gBACV,OAAO;YACT,OAAO,IAAIA,MAAME,IAAI,KAAKd,gBAAgBe,QAAQ,EAAE;gBAClD,OAAOF,KAAKG,SAAS,CAACJ,MAAMK,KAAK,EAAEZ,MAAM;YAC3C,OAAO,IAAIO,MAAME,IAAI,KAAKd,gBAAgBkB,KAAK,EAAE;gBAC/C,MAAM,qBAA4D,CAA5D,IAAIC,MAAM,oDAAV,qBAAA;2BAAA;gCAAA;kCAAA;gBAA2D;YACnE,OAAO,IAAIP,MAAME,IAAI,KAAKd,gBAAgBoB,KAAK,EAAE;gBAC/C,OAAOP,KAAKG,SAAS,CAACJ,MAAMS,IAAI,IAAI,IAAIhB,MAAM;YAChD,OAAO,IAAIO,MAAME,IAAI,KAAKd,gBAAgBsB,SAAS,EAAE;gBACnD,OAAOV,MAAMW,IAAI,CAAClB,MAAM;YAC1B;YACA,wCAAwC;YACxC,IAAIO,MAAME,IAAI,KAAKd,gBAAgBwB,QAAQ,EAAE;oBAKtCZ;gBAJL,OAAOa,KAAKC,GAAG,CACb,GACAd,MAAMe,IAAI,CAACtB,MAAM,GACfF,cAAcS,MAAMgB,OAAO,IAC1BhB,CAAAA,EAAAA,mBAAAA,MAAMiB,SAAS,qBAAfjB,iBAAiBP,MAAM,KAAI,CAAA,IAC5BC,mBAAmBM,MAAML,WAAW;YAE1C;YAEA,OAAOK,MAAMe,IAAI,CAACtB,MAAM,GAAIQ,CAAAA,EAAAA,kBAAAA,KAAKG,SAAS,CAACJ,MAAMkB,QAAQ,sBAA7BjB,gBAAgCR,MAAM,KAAI,CAAA;QACxE;IACF;IAEA,OAAOH;AACT","ignoreList":[0]}
+67
@@ -0,0 +1,67 @@
/**
* A shared cache of cache controls for routes. This cache is used so we don't
* have to modify the prerender manifest when we want to update the cache
* control for a route.
*/ export class SharedCacheControls {
static #_ = /**
* The in-memory cache of cache lives for routes. This cache is populated when
* the cache is updated with new cache lives.
*/ this.cacheControls = new Map();
constructor(/**
* The prerender manifest that contains the initial cache controls for
* routes.
*/ prerenderManifest){
this.prerenderManifest = prerenderManifest;
}
/**
* Try to get the cache control value for a route. This will first try to get
* the value from the in-memory cache. If the value is not present in the
* in-memory cache, it will be sourced from the prerender manifest.
*
* @param route the route to get the cache control for
* @returns the cache control for the route, or undefined if the values
* are not present in the in-memory cache or the prerender manifest
*/ get(route) {
// This is a copy on write cache that is updated when the cache is updated.
// If the cache is never written to, then the values will be sourced from
// the prerender manifest.
let cacheControl = SharedCacheControls.cacheControls.get(route);
if (cacheControl) return cacheControl;
let prerenderData = this.prerenderManifest.routes[route];
if (prerenderData) {
const { initialRevalidateSeconds, initialExpireSeconds } = prerenderData;
if (typeof initialRevalidateSeconds !== 'undefined') {
return {
revalidate: initialRevalidateSeconds,
expire: initialExpireSeconds
};
}
}
const dynamicPrerenderData = this.prerenderManifest.dynamicRoutes[route];
if (dynamicPrerenderData) {
const { fallbackRevalidate, fallbackExpire } = dynamicPrerenderData;
if (typeof fallbackRevalidate !== 'undefined') {
return {
revalidate: fallbackRevalidate,
expire: fallbackExpire
};
}
}
return undefined;
}
/**
* Set the cache control for a route.
*
* @param route the route to set the cache control for
* @param cacheControl the cache control for the route
*/ set(route, cacheControl) {
SharedCacheControls.cacheControls.set(route, cacheControl);
}
/**
* Clear the in-memory cache of cache controls for routes.
*/ clear() {
SharedCacheControls.cacheControls.clear();
}
}

//# sourceMappingURL=shared-cache-controls.external.js.map
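Because the backing Map is static, cache controls set through one SharedCacheControls instance are visible through every other instance in the process. A minimal sketch, assuming a hand-written manifest fragment (the real shape comes from the build's prerender manifest):

const controls = new SharedCacheControls({
  routes: {
    '/blog': { initialRevalidateSeconds: 60, initialExpireSeconds: 300 }
  },
  dynamicRoutes: {}
});
controls.get('/blog'); // { revalidate: 60, expire: 300 } - sourced from the manifest
controls.set('/blog', { revalidate: 10, expire: 60 });
controls.get('/blog'); // { revalidate: 10, expire: 60 } - the in-memory copy now wins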
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/incremental-cache/shared-cache-controls.external.ts"],"sourcesContent":["import type { PrerenderManifest } from '../../../build'\nimport type { DeepReadonly } from '../../../shared/lib/deep-readonly'\nimport type { CacheControl } from '../cache-control'\n\n/**\n * A shared cache of cache controls for routes. This cache is used so we don't\n * have to modify the prerender manifest when we want to update the cache\n * control for a route.\n */\nexport class SharedCacheControls {\n /**\n * The in-memory cache of cache lives for routes. This cache is populated when\n * the cache is updated with new cache lives.\n */\n private static readonly cacheControls = new Map<string, CacheControl>()\n\n constructor(\n /**\n * The prerender manifest that contains the initial cache controls for\n * routes.\n */\n private readonly prerenderManifest: DeepReadonly<\n Pick<PrerenderManifest, 'routes' | 'dynamicRoutes'>\n >\n ) {}\n\n /**\n * Try to get the cache control value for a route. This will first try to get\n * the value from the in-memory cache. If the value is not present in the\n * in-memory cache, it will be sourced from the prerender manifest.\n *\n * @param route the route to get the cache control for\n * @returns the cache control for the route, or undefined if the values\n * are not present in the in-memory cache or the prerender manifest\n */\n public get(route: string): CacheControl | undefined {\n // This is a copy on write cache that is updated when the cache is updated.\n // If the cache is never written to, then the values will be sourced from\n // the prerender manifest.\n let cacheControl = SharedCacheControls.cacheControls.get(route)\n if (cacheControl) return cacheControl\n\n let prerenderData = this.prerenderManifest.routes[route]\n\n if (prerenderData) {\n const { initialRevalidateSeconds, initialExpireSeconds } = prerenderData\n\n if (typeof initialRevalidateSeconds !== 'undefined') {\n return {\n revalidate: initialRevalidateSeconds,\n expire: initialExpireSeconds,\n }\n }\n }\n\n const dynamicPrerenderData = this.prerenderManifest.dynamicRoutes[route]\n\n if (dynamicPrerenderData) {\n const { fallbackRevalidate, fallbackExpire } = dynamicPrerenderData\n\n if (typeof fallbackRevalidate !== 'undefined') {\n return { revalidate: fallbackRevalidate, expire: fallbackExpire }\n }\n }\n\n return undefined\n }\n\n /**\n * Set the cache control for a route.\n *\n * @param route the route to set the cache control for\n * @param cacheControl the cache control for the route\n */\n public set(route: string, cacheControl: CacheControl) {\n SharedCacheControls.cacheControls.set(route, cacheControl)\n }\n\n /**\n * Clear the in-memory cache of cache controls for routes.\n */\n public clear() {\n SharedCacheControls.cacheControls.clear()\n 
}\n}\n"],"names":["SharedCacheControls","cacheControls","Map","constructor","prerenderManifest","get","route","cacheControl","prerenderData","routes","initialRevalidateSeconds","initialExpireSeconds","revalidate","expire","dynamicPrerenderData","dynamicRoutes","fallbackRevalidate","fallbackExpire","undefined","set","clear"],"mappings":"AAIA;;;;CAIC,GACD,OAAO,MAAMA;gBACX;;;GAGC,QACuBC,gBAAgB,IAAIC;IAE5CC,YACE;;;KAGC,GACD,AAAiBC,iBAEhB,CACD;aAHiBA,oBAAAA;IAGhB;IAEH;;;;;;;;GAQC,GACD,AAAOC,IAAIC,KAAa,EAA4B;QAClD,2EAA2E;QAC3E,yEAAyE;QACzE,0BAA0B;QAC1B,IAAIC,eAAeP,oBAAoBC,aAAa,CAACI,GAAG,CAACC;QACzD,IAAIC,cAAc,OAAOA;QAEzB,IAAIC,gBAAgB,IAAI,CAACJ,iBAAiB,CAACK,MAAM,CAACH,MAAM;QAExD,IAAIE,eAAe;YACjB,MAAM,EAAEE,wBAAwB,EAAEC,oBAAoB,EAAE,GAAGH;YAE3D,IAAI,OAAOE,6BAA6B,aAAa;gBACnD,OAAO;oBACLE,YAAYF;oBACZG,QAAQF;gBACV;YACF;QACF;QAEA,MAAMG,uBAAuB,IAAI,CAACV,iBAAiB,CAACW,aAAa,CAACT,MAAM;QAExE,IAAIQ,sBAAsB;YACxB,MAAM,EAAEE,kBAAkB,EAAEC,cAAc,EAAE,GAAGH;YAE/C,IAAI,OAAOE,uBAAuB,aAAa;gBAC7C,OAAO;oBAAEJ,YAAYI;oBAAoBH,QAAQI;gBAAe;YAClE;QACF;QAEA,OAAOC;IACT;IAEA;;;;;GAKC,GACD,AAAOC,IAAIb,KAAa,EAAEC,YAA0B,EAAE;QACpDP,oBAAoBC,aAAa,CAACkB,GAAG,CAACb,OAAOC;IAC/C;IAEA;;GAEC,GACD,AAAOa,QAAQ;QACbpB,oBAAoBC,aAAa,CAACmB,KAAK;IACzC;AACF","ignoreList":[0]}
+31
@@ -0,0 +1,31 @@
// We share the tags manifest between the "use cache" handlers and the previous
// file-system cache.
export const tagsManifest = new Map();
export const areTagsExpired = (tags, timestamp)=>{
for (const tag of tags){
const entry = tagsManifest.get(tag);
const expiredAt = entry == null ? void 0 : entry.expired;
if (typeof expiredAt === 'number') {
const now = Date.now();
// For immediate expiration (expiredAt <= now) and tag was invalidated after entry was created
// OR for future expiration that has now passed (expiredAt > timestamp && expiredAt <= now)
const isImmediatelyExpired = expiredAt <= now && expiredAt > timestamp;
if (isImmediatelyExpired) {
return true;
}
}
}
return false;
};
export const areTagsStale = (tags, timestamp)=>{
for (const tag of tags){
const entry = tagsManifest.get(tag);
const staleAt = (entry == null ? void 0 : entry.stale) ?? 0;
if (typeof staleAt === 'number' && staleAt > timestamp) {
return true;
}
}
return false;
};

//# sourceMappingURL=tags-manifest.external.js.map
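The timestamp argument in both helpers is the cache entry's write time, so a tag invalidation only affects entries written before it. A small worked example with hypothetical timestamps:

// Tag 'posts' invalidated at t=2000 (both stale and expired).
tagsManifest.set('posts', { stale: 2000, expired: 2000 });
areTagsExpired(['posts'], 1000); // true: 2000 <= Date.now() and 2000 > 1000
areTagsExpired(['posts'], 3000); // false: the entry was written after the invalidation
areTagsStale(['posts'], 1000); // true: staleAt (2000) > entry timestamp (1000)
areTagsStale(['posts'], 3000); // false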
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/incremental-cache/tags-manifest.external.ts"],"sourcesContent":["import type { Timestamp } from '../cache-handlers/types'\n\nexport interface TagManifestEntry {\n stale?: number\n expired?: number\n}\n\n// We share the tags manifest between the \"use cache\" handlers and the previous\n// file-system cache.\nexport const tagsManifest = new Map<string, TagManifestEntry>()\n\nexport const areTagsExpired = (tags: string[], timestamp: Timestamp) => {\n for (const tag of tags) {\n const entry = tagsManifest.get(tag)\n const expiredAt = entry?.expired\n\n if (typeof expiredAt === 'number') {\n const now = Date.now()\n // For immediate expiration (expiredAt <= now) and tag was invalidated after entry was created\n // OR for future expiration that has now passed (expiredAt > timestamp && expiredAt <= now)\n const isImmediatelyExpired = expiredAt <= now && expiredAt > timestamp\n\n if (isImmediatelyExpired) {\n return true\n }\n }\n }\n\n return false\n}\n\nexport const areTagsStale = (tags: string[], timestamp: Timestamp) => {\n for (const tag of tags) {\n const entry = tagsManifest.get(tag)\n const staleAt = entry?.stale ?? 0\n\n if (typeof staleAt === 'number' && staleAt > timestamp) {\n return true\n }\n }\n\n return false\n}\n"],"names":["tagsManifest","Map","areTagsExpired","tags","timestamp","tag","entry","get","expiredAt","expired","now","Date","isImmediatelyExpired","areTagsStale","staleAt","stale"],"mappings":"AAOA,+EAA+E;AAC/E,qBAAqB;AACrB,OAAO,MAAMA,eAAe,IAAIC,MAA+B;AAE/D,OAAO,MAAMC,iBAAiB,CAACC,MAAgBC;IAC7C,KAAK,MAAMC,OAAOF,KAAM;QACtB,MAAMG,QAAQN,aAAaO,GAAG,CAACF;QAC/B,MAAMG,YAAYF,yBAAAA,MAAOG,OAAO;QAEhC,IAAI,OAAOD,cAAc,UAAU;YACjC,MAAME,MAAMC,KAAKD,GAAG;YACpB,8FAA8F;YAC9F,2FAA2F;YAC3F,MAAME,uBAAuBJ,aAAaE,OAAOF,YAAYJ;YAE7D,IAAIQ,sBAAsB;gBACxB,OAAO;YACT;QACF;IACF;IAEA,OAAO;AACT,EAAC;AAED,OAAO,MAAMC,eAAe,CAACV,MAAgBC;IAC3C,KAAK,MAAMC,OAAOF,KAAM;QACtB,MAAMG,QAAQN,aAAaO,GAAG,CAACF;QAC/B,MAAMS,UAAUR,CAAAA,yBAAAA,MAAOS,KAAK,KAAI;QAEhC,IAAI,OAAOD,YAAY,YAAYA,UAAUV,WAAW;YACtD,OAAO;QACT;IACF;IAEA,OAAO;AACT,EAAC","ignoreList":[0]}
+12
@@ -0,0 +1,12 @@
/**
* Installs code frame rendering support by injecting the renderer into patch-error-inspect.
* This uses dependency injection to avoid hard dependencies on native bindings in runtime code.
*
* Should be called early in dev/build initialization, after native bindings are loaded.
*/ export function installCodeFrameSupport() {
const { setCodeFrameRenderer } = require('../patch-error-inspect');
const { getOriginalCodeFrame } = require('../../next-devtools/server/shared');
setCodeFrameRenderer(getOriginalCodeFrame);
}

//# sourceMappingURL=install-code-frame.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/install-code-frame.ts"],"sourcesContent":["/**\n * Installs code frame rendering support by injecting the renderer into patch-error-inspect.\n * This uses dependency injection to avoid hard dependencies on native bindings in runtime code.\n *\n * Should be called early in dev/build initialization, after native bindings are loaded.\n */\nexport function installCodeFrameSupport(): void {\n const { setCodeFrameRenderer } =\n require('../patch-error-inspect') as typeof import('../patch-error-inspect')\n const { getOriginalCodeFrame } =\n require('../../next-devtools/server/shared') as typeof import('../../next-devtools/server/shared')\n setCodeFrameRenderer(getOriginalCodeFrame)\n}\n"],"names":["installCodeFrameSupport","setCodeFrameRenderer","require","getOriginalCodeFrame"],"mappings":"AAAA;;;;;CAKC,GACD,OAAO,SAASA;IACd,MAAM,EAAEC,oBAAoB,EAAE,GAC5BC,QAAQ;IACV,MAAM,EAAEC,oBAAoB,EAAE,GAC5BD,QAAQ;IACVD,qBAAqBE;AACvB","ignoreList":[0]}
+31
@@ -0,0 +1,31 @@
// Regex from `node/lib/internal/net.js`: https://github.com/nodejs/node/blob/9fc57006c27564ed7f75eee090eca86786508f51/lib/internal/net.js#L19-L29
// License included below:
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
const v4Seg = '(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])';
const v4Str = `(${v4Seg}[.]){3}${v4Seg}`;
const v6Seg = '(?:[0-9a-fA-F]{1,4})';
const IPv6Reg = new RegExp('^(' + `(?:${v6Seg}:){7}(?:${v6Seg}|:)|` + `(?:${v6Seg}:){6}(?:${v4Str}|:${v6Seg}|:)|` + `(?:${v6Seg}:){5}(?::${v4Str}|(:${v6Seg}){1,2}|:)|` + `(?:${v6Seg}:){4}(?:(:${v6Seg}){0,1}:${v4Str}|(:${v6Seg}){1,3}|:)|` + `(?:${v6Seg}:){3}(?:(:${v6Seg}){0,2}:${v4Str}|(:${v6Seg}){1,4}|:)|` + `(?:${v6Seg}:){2}(?:(:${v6Seg}){0,3}:${v4Str}|(:${v6Seg}){1,5}|:)|` + `(?:${v6Seg}:){1}(?:(:${v6Seg}){0,4}:${v4Str}|(:${v6Seg}){1,6}|:)|` + `(?::((?::${v6Seg}){0,5}:${v4Str}|(?::${v6Seg}){1,7}|:))` + ')(%[0-9a-zA-Z-.:]{1,})?$');
export function isIPv6(s) {
return IPv6Reg.test(s);
}

//# sourceMappingURL=is-ipv6.js.map
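isIPv6 is a full-string test, including the optional zone suffix allowed by the final group. For example:

isIPv6('::1'); // true
isIPv6('2001:db8::8a2e:370:7334'); // true
isIPv6('fe80::1%eth0'); // true: zone IDs match the trailing (%...) group
isIPv6('127.0.0.1'); // false: plain IPv4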
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/is-ipv6.ts"],"sourcesContent":["// Regex from `node/lib/internal/net.js`: https://github.com/nodejs/node/blob/9fc57006c27564ed7f75eee090eca86786508f51/lib/internal/net.js#L19-L29\n// License included below:\n// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nconst v4Seg = '(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])'\nconst v4Str = `(${v4Seg}[.]){3}${v4Seg}`\nconst v6Seg = '(?:[0-9a-fA-F]{1,4})'\nconst IPv6Reg = new RegExp(\n '^(' +\n `(?:${v6Seg}:){7}(?:${v6Seg}|:)|` +\n `(?:${v6Seg}:){6}(?:${v4Str}|:${v6Seg}|:)|` +\n `(?:${v6Seg}:){5}(?::${v4Str}|(:${v6Seg}){1,2}|:)|` +\n `(?:${v6Seg}:){4}(?:(:${v6Seg}){0,1}:${v4Str}|(:${v6Seg}){1,3}|:)|` +\n `(?:${v6Seg}:){3}(?:(:${v6Seg}){0,2}:${v4Str}|(:${v6Seg}){1,4}|:)|` +\n `(?:${v6Seg}:){2}(?:(:${v6Seg}){0,3}:${v4Str}|(:${v6Seg}){1,5}|:)|` +\n `(?:${v6Seg}:){1}(?:(:${v6Seg}){0,4}:${v4Str}|(:${v6Seg}){1,6}|:)|` +\n `(?::((?::${v6Seg}){0,5}:${v4Str}|(?::${v6Seg}){1,7}|:))` +\n ')(%[0-9a-zA-Z-.:]{1,})?$'\n)\n\nexport function isIPv6(s: string) {\n return IPv6Reg.test(s)\n}\n"],"names":["v4Seg","v4Str","v6Seg","IPv6Reg","RegExp","isIPv6","s","test"],"mappings":"AAAA,kJAAkJ;AAClJ,0BAA0B;AAC1B,sDAAsD;AACtD,EAAE;AACF,0EAA0E;AAC1E,gEAAgE;AAChE,sEAAsE;AACtE,sEAAsE;AACtE,4EAA4E;AAC5E,qEAAqE;AACrE,wBAAwB;AACxB,EAAE;AACF,0EAA0E;AAC1E,yDAAyD;AACzD,EAAE;AACF,0EAA0E;AAC1E,6DAA6D;AAC7D,4EAA4E;AAC5E,2EAA2E;AAC3E,wEAAwE;AACxE,4EAA4E;AAC5E,yCAAyC;AAEzC,MAAMA,QAAQ;AACd,MAAMC,QAAQ,CAAC,CAAC,EAAED,MAAM,OAAO,EAAEA,OAAO;AACxC,MAAME,QAAQ;AACd,MAAMC,UAAU,IAAIC,OAClB,OACE,CAAC,GAAG,EAAEF,MAAM,QAAQ,EAAEA,MAAM,IAAI,CAAC,GACjC,CAAC,GAAG,EAAEA,MAAM,QAAQ,EAAED,MAAM,EAAE,EAAEC,MAAM,IAAI,CAAC,GAC3C,CAAC,GAAG,EAAEA,MAAM,SAAS,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnD,CAAC,GAAG,EAAEA,MAAM,UAAU,EAAEA,MAAM,OAAO,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnE,CAAC,GAAG,EAAEA,MAAM,UAAU,EAAEA,MAAM,OAAO,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnE,CAAC,GAAG,EAAEA,MAAM,UAAU,EAAEA,MAAM,OAAO,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnE,CAAC,GAAG,EAAEA,MAAM,UAAU,EAAEA,MAAM,OAAO,EAAED,MAAM,GAAG,EAAEC,MAAM,UAAU,CAAC,GACnE,CAAC,SAAS,EAAEA,MAAM,OAAO,EAAED,MAAM,KAAK,EAAEC,MAAM,UAAU,CAAC,GACzD;AAGJ,OAAO,SAASG,OAAOC,CAAS;IAC9B,OAAOH,QAAQI,IAAI,CAACD;AACtB","ignoreList":[0]}
+28
@@ -0,0 +1,28 @@
/**
* Calls the given function only when the returned promise-like object is
* awaited. Afterwards, it provides the resolved value synchronously as `value`
* property.
*/ export function createLazyResult(fn) {
let pendingResult;
const result = {
then (onfulfilled, onrejected) {
if (!pendingResult) {
pendingResult = Promise.resolve(fn());
}
pendingResult.then((value)=>{
result.value = value;
}).catch(()=>{
// The externally awaited result will be rejected via `onrejected`. We
// don't need to handle it here. But we do want to avoid an unhandled
// rejection.
});
return pendingResult.then(onfulfilled, onrejected);
}
};
return result;
}
export function isResolvedLazyResult(result) {
return result.hasOwnProperty('value');
}

//# sourceMappingURL=lazy-result.js.map
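A minimal usage sketch of the lazy result above, inside an async context: fn does not run until the first await, after which the value is also exposed synchronously:

const result = createLazyResult(async () => 42); // fn has not run yet
isResolvedLazyResult(result); // false: no `value` property before the first await
await result; // 42 - triggers fn exactly once
isResolvedLazyResult(result); // true
result.value; // 42, now available without awaiting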
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/lazy-result.ts"],"sourcesContent":["export type LazyResult<TValue> = PromiseLike<TValue> & { value?: TValue }\nexport type ResolvedLazyResult<TValue> = PromiseLike<TValue> & { value: TValue }\n\n/**\n * Calls the given function only when the returned promise-like object is\n * awaited. Afterwards, it provides the resolved value synchronously as `value`\n * property.\n */\nexport function createLazyResult<TValue>(\n fn: () => Promise<TValue> | TValue\n): LazyResult<TValue> {\n let pendingResult: Promise<TValue> | undefined\n\n const result: LazyResult<TValue> = {\n then(onfulfilled, onrejected) {\n if (!pendingResult) {\n pendingResult = Promise.resolve(fn())\n }\n\n pendingResult\n .then((value) => {\n result.value = value\n })\n .catch(() => {\n // The externally awaited result will be rejected via `onrejected`. We\n // don't need to handle it here. But we do want to avoid an unhandled\n // rejection.\n })\n\n return pendingResult.then(onfulfilled, onrejected)\n },\n }\n\n return result\n}\n\nexport function isResolvedLazyResult<TValue>(\n result: LazyResult<TValue>\n): result is ResolvedLazyResult<TValue> {\n return result.hasOwnProperty('value')\n}\n"],"names":["createLazyResult","fn","pendingResult","result","then","onfulfilled","onrejected","Promise","resolve","value","catch","isResolvedLazyResult","hasOwnProperty"],"mappings":"AAGA;;;;CAIC,GACD,OAAO,SAASA,iBACdC,EAAkC;IAElC,IAAIC;IAEJ,MAAMC,SAA6B;QACjCC,MAAKC,WAAW,EAAEC,UAAU;YAC1B,IAAI,CAACJ,eAAe;gBAClBA,gBAAgBK,QAAQC,OAAO,CAACP;YAClC;YAEAC,cACGE,IAAI,CAAC,CAACK;gBACLN,OAAOM,KAAK,GAAGA;YACjB,GACCC,KAAK,CAAC;YACL,sEAAsE;YACtE,qEAAqE;YACrE,aAAa;YACf;YAEF,OAAOR,cAAcE,IAAI,CAACC,aAAaC;QACzC;IACF;IAEA,OAAOH;AACT;AAEA,OAAO,SAASQ,qBACdR,MAA0B;IAE1B,OAAOA,OAAOS,cAAc,CAAC;AAC/B","ignoreList":[0]}
+198
@@ -0,0 +1,198 @@
/**
* Node in the doubly-linked list used for LRU tracking.
* Each node represents a cache entry with bidirectional pointers.
*/ class LRUNode {
constructor(key, data, size){
this.prev = null;
this.next = null;
this.key = key;
this.data = data;
this.size = size;
}
}
/**
* Sentinel node used for head/tail boundaries.
* These nodes don't contain actual cache data but simplify list operations.
*/ class SentinelNode {
constructor(){
this.prev = null;
this.next = null;
}
}
/**
* LRU (Least Recently Used) Cache implementation using a doubly-linked list
* and hash map for O(1) operations.
*
* Algorithm:
* - Uses a doubly-linked list to maintain access order (most recent at head)
* - Hash map provides O(1) key-to-node lookup
* - Sentinel head/tail nodes simplify edge case handling
* - Size-based eviction supports custom size calculation functions
*
* Data Structure Layout:
* HEAD <-> [most recent] <-> ... <-> [least recent] <-> TAIL
*
* Operations:
* - get(): Move accessed node to head (mark as most recent)
* - set(): Add new node at head, evict from tail if over capacity
* - Eviction: Remove least recent node (tail.prev) when size exceeds limit
*/ export class LRUCache {
constructor(maxSize, calculateSize, onEvict){
this.cache = new Map();
this.totalSize = 0;
this.maxSize = maxSize;
this.calculateSize = calculateSize;
this.onEvict = onEvict;
// Create sentinel nodes to simplify doubly-linked list operations
// HEAD <-> TAIL (empty list)
this.head = new SentinelNode();
this.tail = new SentinelNode();
this.head.next = this.tail;
this.tail.prev = this.head;
}
/**
* Adds a node immediately after the head (marks as most recently used).
* Used when inserting new items or when an item is accessed.
* PRECONDITION: node must be disconnected (prev/next should be null)
*/ addToHead(node) {
node.prev = this.head;
node.next = this.head.next;
// head.next is always non-null (points to tail or another node)
this.head.next.prev = node;
this.head.next = node;
}
/**
* Removes a node from its current position in the doubly-linked list.
* Updates the prev/next pointers of adjacent nodes to maintain list integrity.
* PRECONDITION: node must be connected (prev/next are non-null)
*/ removeNode(node) {
// Connected nodes always have non-null prev/next
node.prev.next = node.next;
node.next.prev = node.prev;
}
/**
* Moves an existing node to the head position (marks as most recently used).
* This is the core LRU operation - accessed items become most recent.
*/ moveToHead(node) {
this.removeNode(node);
this.addToHead(node);
}
/**
* Removes and returns the least recently used node (the one before tail).
* This is called during eviction when the cache exceeds capacity.
* PRECONDITION: cache is not empty (ensured by caller)
*/ removeTail() {
const lastNode = this.tail.prev;
// tail.prev is always non-null and always LRUNode when cache is not empty
this.removeNode(lastNode);
return lastNode;
}
/**
* Sets a key-value pair in the cache.
* If the key exists, updates the value and moves to head.
* If new, adds at head and evicts from tail if necessary.
*
* Time Complexity:
* - O(1) for uniform item sizes
* - O(k) where k is the number of items evicted (can be O(N) for variable sizes)
*/ set(key, value) {
const size = (this.calculateSize == null ? void 0 : this.calculateSize.call(this, value)) ?? 1;
if (size <= 0) {
throw Object.defineProperty(new Error(`LRUCache: calculateSize returned ${size}, but size must be > 0. ` + `Items with size 0 would never be evicted, causing unbounded cache growth.`), "__NEXT_ERROR_CODE", {
value: "E1045",
enumerable: false,
configurable: true
});
}
if (size > this.maxSize) {
console.warn('Single item size exceeds maxSize');
return false;
}
const existing = this.cache.get(key);
if (existing) {
// Update existing node: adjust size and move to head (most recent)
existing.data = value;
this.totalSize = this.totalSize - existing.size + size;
existing.size = size;
this.moveToHead(existing);
} else {
// Add new node at head (most recent position)
const newNode = new LRUNode(key, value, size);
this.cache.set(key, newNode);
this.addToHead(newNode);
this.totalSize += size;
}
// Evict least recently used items until under capacity
while(this.totalSize > this.maxSize && this.cache.size > 0){
const tail = this.removeTail();
this.cache.delete(tail.key);
this.totalSize -= tail.size;
this.onEvict == null ? void 0 : this.onEvict.call(this, tail.key, tail.data);
}
return true;
}
/**
* Checks if a key exists in the cache.
* This is a pure query operation - does NOT update LRU order.
*
* Time Complexity: O(1)
*/ has(key) {
return this.cache.has(key);
}
/**
* Retrieves a value by key and marks it as most recently used.
* Moving to head maintains the LRU property for future evictions.
*
* Time Complexity: O(1)
*/ get(key) {
const node = this.cache.get(key);
if (!node) return undefined;
// Mark as most recently used by moving to head
this.moveToHead(node);
return node.data;
}
/**
* Returns an iterator over the cache entries. The order is outputted in the
* order of most recently used to least recently used.
*/ *[Symbol.iterator]() {
let current = this.head.next;
while(current && current !== this.tail){
// Between head and tail, current is always LRUNode
const node = current;
yield [
node.key,
node.data
];
current = current.next;
}
}
/**
* Removes a specific key from the cache.
* Updates both the hash map and doubly-linked list.
*
* Note: This is an explicit removal and does NOT trigger the `onEvict`
* callback. Use this for intentional deletions where eviction tracking
* is not needed.
*
* Time Complexity: O(1)
*/ remove(key) {
const node = this.cache.get(key);
if (!node) return;
this.removeNode(node);
this.cache.delete(key);
this.totalSize -= node.size;
}
/**
* Returns the number of items in the cache.
*/ get size() {
return this.cache.size;
}
/**
* Returns the current total size of all cached items.
* This uses the custom size calculation if provided.
*/ get currentSize() {
return this.totalSize;
}
}

//# sourceMappingURL=lru-cache.js.map
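A minimal usage sketch of LRUCache with a byte-length sizer and an eviction callback (all values hypothetical):

const cache = new LRUCache(
  10, // maxSize, in the sizer's units
  (value) => value.length, // calculateSize
  (key, value) => console.log('evicted', key, value) // onEvict
);
cache.set('a', 'xxxx'); // totalSize: 4
cache.set('b', 'xxxx'); // totalSize: 8
cache.get('a'); // 'xxxx' - 'a' is now most recently used
cache.set('c', 'xxxx'); // would be 12 > 10, so 'b' (least recent) is evicted
[...cache].map(([key]) => key); // ['c', 'a'] - most to least recent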
+1
File diff suppressed because one or more lines are too long
+8
@@ -0,0 +1,8 @@
import { getPathMatch } from '../../shared/lib/router/utils/path-match';
const matcher = getPathMatch('/_next/data/:path*');
export function matchNextDataPathname(pathname) {
if (typeof pathname !== 'string') return false;
return matcher(pathname);
}

//# sourceMappingURL=match-next-data-pathname.js.map
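matchNextDataPathname guards against non-string input and otherwise defers to the path matcher; non-matching paths come back as false, and matching paths yield the captured params (the exact shape below is an assumption about getPathMatch's output):

matchNextDataPathname(undefined); // false: non-string input
matchNextDataPathname('/about'); // false: not under /_next/data
// Roughly { path: ['BUILD_ID', 'index.json'] } for a matching pathname:
matchNextDataPathname('/_next/data/BUILD_ID/index.json');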
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/match-next-data-pathname.ts"],"sourcesContent":["import { getPathMatch } from '../../shared/lib/router/utils/path-match'\n\nconst matcher = getPathMatch('/_next/data/:path*')\n\nexport function matchNextDataPathname(pathname: string | null | undefined) {\n if (typeof pathname !== 'string') return false\n\n return matcher(pathname)\n}\n"],"names":["getPathMatch","matcher","matchNextDataPathname","pathname"],"mappings":"AAAA,SAASA,YAAY,QAAQ,2CAA0C;AAEvE,MAAMC,UAAUD,aAAa;AAE7B,OAAO,SAASE,sBAAsBC,QAAmC;IACvE,IAAI,OAAOA,aAAa,UAAU,OAAO;IAEzC,OAAOF,QAAQE;AACjB","ignoreList":[0]}
+385
@@ -0,0 +1,385 @@
import Stream from 'stream';
import { fromNodeOutgoingHttpHeaders, toNodeOutgoingHttpHeaders } from '../web/utils';
export class MockedRequest extends Stream.Readable {
constructor({ url, headers, method, socket = null, readable }){
super(), // This is hardcoded for now, but can be updated to be configurable if needed.
this.httpVersion = '1.0', this.httpVersionMajor = 1, this.httpVersionMinor = 0, // If we don't actually have a socket, we'll just use a mock one that
// always returns false for the `encrypted` property and undefined for the
// `remoteAddress` property.
this.socket = new Proxy({}, {
get: (_target, prop)=>{
if (prop !== 'encrypted' && prop !== 'remoteAddress') {
throw Object.defineProperty(new Error('Method not implemented'), "__NEXT_ERROR_CODE", {
value: "E52",
enumerable: false,
configurable: true
});
}
if (prop === 'remoteAddress') return undefined;
// For this mock request, always ensure we just respond with the encrypted
// set to false to ensure there's no odd leakages.
return false;
}
});
this.url = url;
this.headers = headers;
this.method = method;
if (readable) {
this.bodyReadable = readable;
this.bodyReadable.on('end', ()=>this.emit('end'));
this.bodyReadable.on('close', ()=>this.emit('close'));
}
if (socket) {
this.socket = socket;
}
}
get headersDistinct() {
const headers = {};
for (const [key, value] of Object.entries(this.headers)){
if (!value) continue;
headers[key] = Array.isArray(value) ? value : [
value
];
}
return headers;
}
_read(size) {
if (this.bodyReadable) {
return this.bodyReadable._read(size);
} else {
this.emit('end');
this.emit('close');
}
}
/**
* The `connection` property is just an alias for the `socket` property.
*
* @deprecated — since v13.0.0 - Use socket instead.
*/ get connection() {
return this.socket;
}
// The following methods are not implemented as they are not used in the
// Next.js codebase.
get aborted() {
throw Object.defineProperty(new Error('Method not implemented'), "__NEXT_ERROR_CODE", {
value: "E52",
enumerable: false,
configurable: true
});
}
get complete() {
throw Object.defineProperty(new Error('Method not implemented'), "__NEXT_ERROR_CODE", {
value: "E52",
enumerable: false,
configurable: true
});
}
get trailers() {
throw Object.defineProperty(new Error('Method not implemented'), "__NEXT_ERROR_CODE", {
value: "E52",
enumerable: false,
configurable: true
});
}
get trailersDistinct() {
throw Object.defineProperty(new Error('Method not implemented'), "__NEXT_ERROR_CODE", {
value: "E52",
enumerable: false,
configurable: true
});
}
get rawTrailers() {
throw Object.defineProperty(new Error('Method not implemented'), "__NEXT_ERROR_CODE", {
value: "E52",
enumerable: false,
configurable: true
});
}
get rawHeaders() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
setTimeout() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
}
export class MockedResponse extends Stream.Writable {
constructor(res = {}){
super(), this.statusMessage = '', this.finished = false, this.headersSent = false, /**
* A list of buffers that have been written to the response.
*
* @internal - used internally by Next.js
*/ this.buffers = [];
this.statusCode = res.statusCode ?? 200;
this.socket = res.socket ?? null;
this.headers = res.headers ? fromNodeOutgoingHttpHeaders(res.headers) : new Headers();
this.headPromise = new Promise((resolve)=>{
this.headPromiseResolve = resolve;
});
// Attach listeners for the `finish`, `end`, and `error` events to the
// `MockedResponse` instance.
this.hasStreamed = new Promise((resolve, reject)=>{
this.on('finish', ()=>resolve(true));
this.on('end', ()=>resolve(true));
this.on('error', (err)=>reject(err));
}).then((val)=>{
this.headPromiseResolve == null ? void 0 : this.headPromiseResolve.call(this);
return val;
});
if (res.resWriter) {
this.resWriter = res.resWriter;
}
}
appendHeader(name, value) {
const values = Array.isArray(value) ? value : [
value
];
for (const v of values){
this.headers.append(name, v);
}
return this;
}
/**
* Returns true if the response has been sent, false otherwise.
*
* @internal - used internally by Next.js
*/ get isSent() {
return this.finished || this.headersSent;
}
/**
* The `connection` property is just an alias for the `socket` property.
*
* @deprecated — since v13.0.0 - Use socket instead.
*/ get connection() {
return this.socket;
}
write(chunk) {
if (this.resWriter) {
return this.resWriter(chunk);
}
this.buffers.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
return true;
}
end() {
this.finished = true;
return super.end(...arguments);
}
/**
* This method is a no-op because the `MockedResponse` instance is not
* actually connected to a socket. This method is not specified on the
* interface type for `ServerResponse` but is called by Node.js.
*
* @see https://github.com/nodejs/node/pull/7949
*/ _implicitHeader() {}
_write(chunk, _encoding, callback) {
this.write(chunk);
// According to Node.js documentation, the callback MUST be invoked to
// signal that the write completed successfully. If this callback is not
// invoked, the 'finish' event will not be emitted.
//
// https://nodejs.org/docs/latest-v16.x/api/stream.html#writable_writechunk-encoding-callback
callback();
}
writeHead(statusCode, statusMessage, headers) {
if (!headers && typeof statusMessage !== 'string') {
headers = statusMessage;
} else if (typeof statusMessage === 'string' && statusMessage.length > 0) {
this.statusMessage = statusMessage;
}
if (headers) {
// When headers have been set with response.setHeader(), they will be
// merged with any headers passed to response.writeHead(), with the
// headers passed to response.writeHead() given precedence.
//
// https://nodejs.org/api/http.html#responsewriteheadstatuscode-statusmessage-headers
//
// For this reason, we need to only call `set` to ensure that this will
// overwrite any existing headers.
if (Array.isArray(headers)) {
// headers may be an Array where the keys and values are in the same list.
// It is not a list of tuples. So, the even-numbered offsets are key
// values, and the odd-numbered offsets are the associated values. The
// array is in the same format as request.rawHeaders.
for(let i = 0; i < headers.length; i += 2){
// The header key is always a string according to the spec.
this.setHeader(headers[i], headers[i + 1]);
}
} else {
for (const [key, value] of Object.entries(headers)){
// Skip undefined values
if (typeof value === 'undefined') continue;
this.setHeader(key, value);
}
}
}
this.statusCode = statusCode;
this.headersSent = true;
this.headPromiseResolve == null ? void 0 : this.headPromiseResolve.call(this);
return this;
}
hasHeader(name) {
return this.headers.has(name);
}
getHeader(name) {
return this.headers.get(name) ?? undefined;
}
getHeaders() {
return toNodeOutgoingHttpHeaders(this.headers);
}
getHeaderNames() {
return Array.from(this.headers.keys());
}
setHeader(name, value) {
if (Array.isArray(value)) {
// Because `set` here should override any existing values, we need to
// delete the existing values before setting the new ones via `append`.
this.headers.delete(name);
for (const v of value){
this.headers.append(name, v);
}
} else if (typeof value === 'number') {
this.headers.set(name, value.toString());
} else {
this.headers.set(name, value);
}
return this;
}
removeHeader(name) {
this.headers.delete(name);
}
flushHeaders() {
// This is a no-op because we don't actually have a socket to flush the
// headers to.
}
// The following methods are not implemented as they are not used in the
// Next.js codebase.
get strictContentLength() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
writeEarlyHints() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
get req() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
assignSocket() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
detachSocket() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
writeContinue() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
writeProcessing() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
get upgrading() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
get chunkedEncoding() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
get shouldKeepAlive() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
get useChunkedEncodingByDefault() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
get sendDate() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
setTimeout() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
addTrailers() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
setHeaders() {
throw Object.defineProperty(new Error('Method not implemented.'), "__NEXT_ERROR_CODE", {
value: "E41",
enumerable: false,
configurable: true
});
}
}
export function createRequestResponseMocks({ url, headers = {}, method = 'GET', bodyReadable, resWriter, socket = null }) {
return {
req: new MockedRequest({
url,
headers,
method,
socket,
readable: bodyReadable
}),
res: new MockedResponse({
socket,
resWriter
})
};
}

//# sourceMappingURL=mock-request.js.map
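A minimal sketch of driving the mocks above from an async test context; the handler call is hypothetical, but writeHead/end/hasStreamed behave as defined in this file:

const { req, res } = createRequestResponseMocks({
  url: '/hello',
  headers: { accept: 'text/html' },
});
// await someHandler(req, res) would run here (hypothetical handler)
res.writeHead(200, { 'content-type': 'text/html' });
res.end();
await res.hasStreamed; // resolves once 'finish' or 'end' has fired
res.statusCode; // 200
res.getHeader('content-type'); // 'text/html'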
+1
File diff suppressed because one or more lines are too long
+5
@@ -0,0 +1,5 @@
/**
* Loads a given module for a given ID.
*/ export { };

//# sourceMappingURL=module-loader.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/module-loader/module-loader.ts"],"sourcesContent":["/**\n * Loads a given module for a given ID.\n */\nexport interface ModuleLoader {\n load<M = any>(id: string): Promise<M>\n}\n"],"names":[],"mappings":"AAAA;;CAEC,GACD,WAEC","ignoreList":[0]}
+17
@@ -0,0 +1,17 @@
/**
* Loads a module using `await require(id)`.
*/ export class NodeModuleLoader {
async load(id) {
if (process.env.NEXT_RUNTIME !== 'edge') {
// Need to `await` to cover the case where the route is marked as an ESM module by ESM escalation.
return await (process.env.NEXT_MINIMAL ? __non_webpack_require__(id) : require(id));
}
throw Object.defineProperty(new Error('NodeModuleLoader is not supported in edge runtime.'), "__NEXT_ERROR_CODE", {
value: "E25",
enumerable: false,
configurable: true
});
}
}

//# sourceMappingURL=node-module-loader.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/module-loader/node-module-loader.ts"],"sourcesContent":["import type { ModuleLoader } from './module-loader'\n\n/**\n * Loads a module using `await require(id)`.\n */\nexport class NodeModuleLoader implements ModuleLoader {\n public async load<M>(id: string): Promise<M> {\n if (process.env.NEXT_RUNTIME !== 'edge') {\n // Need to `await` to cover the case that route is marked ESM modules by ESM escalation.\n return await (process.env.NEXT_MINIMAL\n ? // @ts-ignore\n __non_webpack_require__(id)\n : require(id))\n }\n\n throw new Error('NodeModuleLoader is not supported in edge runtime.')\n }\n}\n"],"names":["NodeModuleLoader","load","id","process","env","NEXT_RUNTIME","NEXT_MINIMAL","__non_webpack_require__","require","Error"],"mappings":"AAEA;;CAEC,GACD,OAAO,MAAMA;IACX,MAAaC,KAAQC,EAAU,EAAc;QAC3C,IAAIC,QAAQC,GAAG,CAACC,YAAY,KAAK,QAAQ;YACvC,wFAAwF;YACxF,OAAO,MAAOF,CAAAA,QAAQC,GAAG,CAACE,YAAY,GAElCC,wBAAwBL,MACxBM,QAAQN,GAAE;QAChB;QAEA,MAAM,qBAA+D,CAA/D,IAAIO,MAAM,uDAAV,qBAAA;mBAAA;wBAAA;0BAAA;QAA8D;IACtE;AACF","ignoreList":[0]}
+16
@@ -0,0 +1,16 @@
import { NodeModuleLoader } from './node-module-loader';
export class RouteModuleLoader {
    static async load(id, loader = new NodeModuleLoader()) {
        const module = await loader.load(id);
        if ('routeModule' in module) {
            return module.routeModule;
        }
        throw Object.defineProperty(new Error(`Module "${id}" does not export a routeModule.`), "__NEXT_ERROR_CODE", {
            value: "E53",
            enumerable: false,
            configurable: true
        });
    }
}

//# sourceMappingURL=route-module-loader.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/module-loader/route-module-loader.ts"],"sourcesContent":["import type { RouteModule } from '../../route-modules/route-module'\nimport type { ModuleLoader } from './module-loader'\n\nimport { NodeModuleLoader } from './node-module-loader'\n\nexport interface AppLoaderModule<M extends RouteModule = RouteModule> {\n routeModule: M\n}\n\nexport class RouteModuleLoader {\n static async load<M extends RouteModule>(\n id: string,\n loader: ModuleLoader = new NodeModuleLoader()\n ): Promise<M> {\n const module: AppLoaderModule<M> = await loader.load(id)\n if ('routeModule' in module) {\n return module.routeModule\n }\n\n throw new Error(`Module \"${id}\" does not export a routeModule.`)\n }\n}\n"],"names":["NodeModuleLoader","RouteModuleLoader","load","id","loader","module","routeModule","Error"],"mappings":"AAGA,SAASA,gBAAgB,QAAQ,uBAAsB;AAMvD,OAAO,MAAMC;IACX,aAAaC,KACXC,EAAU,EACVC,SAAuB,IAAIJ,kBAAkB,EACjC;QACZ,MAAMK,SAA6B,MAAMD,OAAOF,IAAI,CAACC;QACrD,IAAI,iBAAiBE,QAAQ;YAC3B,OAAOA,OAAOC,WAAW;QAC3B;QAEA,MAAM,qBAA0D,CAA1D,IAAIC,MAAM,CAAC,QAAQ,EAAEJ,GAAG,gCAAgC,CAAC,GAAzD,qBAAA;mBAAA;wBAAA;0BAAA;QAAyD;IACjE;AACF","ignoreList":[0]}
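A brief usage sketch of the loader pair above; the bundle path is a hypothetical build artifact, not something this file prescribes:

import { RouteModuleLoader } from './module-loader/route-module-loader';

// Loads a compiled route bundle and unwraps its `routeModule` export;
// throws E53 if the bundle does not expose one.
const routeModule = await RouteModuleLoader.load('.next/server/app/page.js');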
+13
@@ -0,0 +1,13 @@
import fs from 'fs';
export const nodeFs = {
    existsSync: fs.existsSync,
    readFile: fs.promises.readFile,
    readFileSync: fs.readFileSync,
    writeFile: (f, d)=>fs.promises.writeFile(f, d),
    mkdir: (dir)=>fs.promises.mkdir(dir, {
            recursive: true
        }),
    stat: (f)=>fs.promises.stat(f)
};

//# sourceMappingURL=node-fs-methods.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/node-fs-methods.ts"],"sourcesContent":["import type { CacheFs } from '../../shared/lib/utils'\n\nimport fs from 'fs'\n\nexport const nodeFs: CacheFs = {\n existsSync: fs.existsSync,\n readFile: fs.promises.readFile,\n readFileSync: fs.readFileSync,\n writeFile: (f, d) => fs.promises.writeFile(f, d),\n mkdir: (dir) => fs.promises.mkdir(dir, { recursive: true }),\n stat: (f) => fs.promises.stat(f),\n}\n"],"names":["fs","nodeFs","existsSync","readFile","promises","readFileSync","writeFile","f","d","mkdir","dir","recursive","stat"],"mappings":"AAEA,OAAOA,QAAQ,KAAI;AAEnB,OAAO,MAAMC,SAAkB;IAC7BC,YAAYF,GAAGE,UAAU;IACzBC,UAAUH,GAAGI,QAAQ,CAACD,QAAQ;IAC9BE,cAAcL,GAAGK,YAAY;IAC7BC,WAAW,CAACC,GAAGC,IAAMR,GAAGI,QAAQ,CAACE,SAAS,CAACC,GAAGC;IAC9CC,OAAO,CAACC,MAAQV,GAAGI,QAAQ,CAACK,KAAK,CAACC,KAAK;YAAEC,WAAW;QAAK;IACzDC,MAAM,CAACL,IAAMP,GAAGI,QAAQ,CAACQ,IAAI,CAACL;AAChC,EAAC","ignoreList":[0]}
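The `nodeFs` object adapts Node's fs module to the cache filesystem interface used elsewhere in the server. A minimal sketch of exercising it directly (paths are illustrative):

import { nodeFs } from './node-fs-methods';

// mkdir is recursive, so a nested cache directory is created in one call.
await nodeFs.mkdir('.next/cache/example');
await nodeFs.writeFile('.next/cache/example/entry.txt', 'hello');
const stats = await nodeFs.stat('.next/cache/example/entry.txt');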
+36
@@ -0,0 +1,36 @@
import { parse } from 'next/dist/compiled/stacktrace-parser';
const regexNextStatic = /\/_next(\/static\/.+)/;
export function parseStack(stack, distDir = process.env.__NEXT_DIST_DIR) {
    if (!stack) return [];
    // throw away eval information that stacktrace-parser doesn't support
    // adapted from https://github.com/stacktracejs/error-stack-parser/blob/9f33c224b5d7b607755eb277f9d51fcdb7287e24/error-stack-parser.js#L59C33-L59C62
    stack = stack.split('\n').map((line)=>{
        if (line.includes('(eval ')) {
            line = line.replace(/eval code/g, 'eval').replace(/\(eval at [^()]* \(/, '(file://').replace(/\),.*$/g, ')');
        }
        return line;
    }).join('\n');
    const frames = parse(stack);
    return frames.map((frame)=>{
        try {
            const url = new URL(frame.file);
            const res = regexNextStatic.exec(url.pathname);
            if (res) {
                var _distDir_replace;
                const effectiveDistDir = distDir == null ? void 0 : (_distDir_replace = distDir.replace(/\\/g, '/')) == null ? void 0 : _distDir_replace.replace(/\/$/, '');
                if (effectiveDistDir) {
                    frame.file = 'file://' + effectiveDistDir.concat(res.pop()) + url.search;
                }
            }
        } catch {}
        return {
            file: frame.file,
            line1: frame.lineNumber,
            column1: frame.column,
            methodName: frame.methodName,
            arguments: frame.arguments
        };
    });
}

//# sourceMappingURL=parse-stack.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/parse-stack.ts"],"sourcesContent":["import { parse } from 'next/dist/compiled/stacktrace-parser'\n\nconst regexNextStatic = /\\/_next(\\/static\\/.+)/\n\nexport interface StackFrame {\n file: string | null\n methodName: string\n arguments: string[]\n /** 1-based */\n line1: number | null\n /** 1-based */\n column1: number | null\n}\n\nexport function parseStack(\n stack: string,\n distDir = process.env.__NEXT_DIST_DIR\n): StackFrame[] {\n if (!stack) return []\n\n // throw away eval information that stacktrace-parser doesn't support\n // adapted from https://github.com/stacktracejs/error-stack-parser/blob/9f33c224b5d7b607755eb277f9d51fcdb7287e24/error-stack-parser.js#L59C33-L59C62\n stack = stack\n .split('\\n')\n .map((line) => {\n if (line.includes('(eval ')) {\n line = line\n .replace(/eval code/g, 'eval')\n .replace(/\\(eval at [^()]* \\(/, '(file://')\n .replace(/\\),.*$/g, ')')\n }\n\n return line\n })\n .join('\\n')\n\n const frames = parse(stack)\n return frames.map((frame) => {\n try {\n const url = new URL(frame.file!)\n const res = regexNextStatic.exec(url.pathname)\n if (res) {\n const effectiveDistDir = distDir\n ?.replace(/\\\\/g, '/')\n ?.replace(/\\/$/, '')\n if (effectiveDistDir) {\n frame.file =\n 'file://' + effectiveDistDir.concat(res.pop()!) + url.search\n }\n }\n } catch {}\n return {\n file: frame.file,\n line1: frame.lineNumber,\n column1: frame.column,\n methodName: frame.methodName,\n arguments: frame.arguments,\n }\n })\n}\n"],"names":["parse","regexNextStatic","parseStack","stack","distDir","process","env","__NEXT_DIST_DIR","split","map","line","includes","replace","join","frames","frame","url","URL","file","res","exec","pathname","effectiveDistDir","concat","pop","search","line1","lineNumber","column1","column","methodName","arguments"],"mappings":"AAAA,SAASA,KAAK,QAAQ,uCAAsC;AAE5D,MAAMC,kBAAkB;AAYxB,OAAO,SAASC,WACdC,KAAa,EACbC,UAAUC,QAAQC,GAAG,CAACC,eAAe;IAErC,IAAI,CAACJ,OAAO,OAAO,EAAE;IAErB,qEAAqE;IACrE,oJAAoJ;IACpJA,QAAQA,MACLK,KAAK,CAAC,MACNC,GAAG,CAAC,CAACC;QACJ,IAAIA,KAAKC,QAAQ,CAAC,WAAW;YAC3BD,OAAOA,KACJE,OAAO,CAAC,cAAc,QACtBA,OAAO,CAAC,uBAAuB,YAC/BA,OAAO,CAAC,WAAW;QACxB;QAEA,OAAOF;IACT,GACCG,IAAI,CAAC;IAER,MAAMC,SAASd,MAAMG;IACrB,OAAOW,OAAOL,GAAG,CAAC,CAACM;QACjB,IAAI;YACF,MAAMC,MAAM,IAAIC,IAAIF,MAAMG,IAAI;YAC9B,MAAMC,MAAMlB,gBAAgBmB,IAAI,CAACJ,IAAIK,QAAQ;YAC7C,IAAIF,KAAK;oBACkBf;gBAAzB,MAAMkB,mBAAmBlB,4BAAAA,mBAAAA,QACrBQ,OAAO,CAAC,OAAO,yBADMR,iBAErBQ,OAAO,CAAC,OAAO;gBACnB,IAAIU,kBAAkB;oBACpBP,MAAMG,IAAI,GACR,YAAYI,iBAAiBC,MAAM,CAACJ,IAAIK,GAAG,MAAOR,IAAIS,MAAM;gBAChE;YACF;QACF,EAAE,OAAM,CAAC;QACT,OAAO;YACLP,MAAMH,MAAMG,IAAI;YAChBQ,OAAOX,MAAMY,UAAU;YACvBC,SAASb,MAAMc,MAAM;YACrBC,YAAYf,MAAMe,UAAU;YAC5BC,WAAWhB,MAAMgB,SAAS;QAC5B;IACF;AACF","ignoreList":[0]}
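To make the rewriting concrete, a sketch of feeding parseStack a browser-style stack; the frame values and expected output shape are illustrative assumptions:

import { parseStack } from './parse-stack';

const stack = [
    'Error: boom',
    '    at render (http://localhost:3000/_next/static/chunks/app/page.js:10:5)'
].join('\n');

// With distDir '/repo/.next', the /_next/static/... URL is rewritten to a
// file:// path under the dist directory; line1/column1 are 1-based.
const frames = parseStack(stack, '/repo/.next');
// frames[0].file would be 'file:///repo/.next/static/chunks/app/page.js'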
+946
@@ -0,0 +1,946 @@
import { AppRenderSpan, NextNodeServerSpan } from './trace/constants';
import { getTracer, SpanKind } from './trace/tracer';
import { CACHE_ONE_YEAR_SECONDS, INFINITE_CACHE, NEXT_CACHE_TAG_MAX_ITEMS, NEXT_CACHE_TAG_MAX_LENGTH } from '../../lib/constants';
import { markCurrentScopeAsDynamic } from '../app-render/dynamic-rendering';
import { makeHangingPromise } from '../dynamic-rendering-utils';
import { createDedupeFetch } from './dedupe-fetch';
import { getCacheSignal } from '../app-render/work-unit-async-storage.external';
import { CachedRouteKind, IncrementalCacheKind } from '../response-cache';
import { cloneResponse } from './clone-response';
import { RenderStage } from '../app-render/staged-rendering';
const isEdgeRuntime = process.env.NEXT_RUNTIME === 'edge';
export const NEXT_PATCH_SYMBOL = Symbol.for('next-patch');
function isFetchPatched() {
    return globalThis[NEXT_PATCH_SYMBOL] === true;
}
export function validateRevalidate(revalidateVal, route) {
    try {
        let normalizedRevalidate = undefined;
        if (revalidateVal === false) {
            normalizedRevalidate = INFINITE_CACHE;
        } else if (typeof revalidateVal === 'number' && !isNaN(revalidateVal) && revalidateVal > -1) {
            normalizedRevalidate = revalidateVal;
        } else if (typeof revalidateVal !== 'undefined') {
            throw Object.defineProperty(new Error(`Invalid revalidate value "${revalidateVal}" on "${route}", must be a non-negative number or false`), "__NEXT_ERROR_CODE", {
                value: "E179",
                enumerable: false,
                configurable: true
            });
        }
        return normalizedRevalidate;
    } catch (err) {
        // handle client component error from attempting to check revalidate value
        if (err instanceof Error && err.message.includes('Invalid revalidate')) {
            throw err;
        }
        return undefined;
    }
}
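// Illustrative normalization results (annotation, not part of the compiled output):
//   validateRevalidate(false, '/page')     -> INFINITE_CACHE (cache indefinitely)
//   validateRevalidate(60, '/page')        -> 60
//   validateRevalidate(undefined, '/page') -> undefined (no explicit config)
//   validateRevalidate(-1, '/page')        -> throws E179 (must be >= 0 or false)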
export function validateTags(tags, description) {
    const validTags = [];
    const invalidTags = [];
    for(let i = 0; i < tags.length; i++){
        const tag = tags[i];
        if (typeof tag !== 'string') {
            invalidTags.push({
                tag,
                reason: 'invalid type, must be a string'
            });
        } else if (tag.length > NEXT_CACHE_TAG_MAX_LENGTH) {
            invalidTags.push({
                tag,
                reason: `exceeded max length of ${NEXT_CACHE_TAG_MAX_LENGTH}`
            });
        } else {
            validTags.push(tag);
        }
        if (validTags.length > NEXT_CACHE_TAG_MAX_ITEMS) {
            console.warn(`Warning: exceeded max tag count for ${description}, dropped tags:`, tags.slice(i).join(', '));
            break;
        }
    }
    if (invalidTags.length > 0) {
        console.warn(`Warning: invalid tags passed to ${description}: `);
        for (const { tag, reason } of invalidTags){
            console.log(`tag: "${tag}" ${reason}`);
        }
    }
    return validTags;
}
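// Illustrative behavior (annotation, not part of the compiled output): non-string
// and over-long tags are warned about and dropped, valid ones pass through, and
// anything beyond NEXT_CACHE_TAG_MAX_ITEMS is truncated with a warning, e.g.
//   validateTags(['posts', 42], 'fetch /api') -> ['posts']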
function trackFetchMetric(workStore, ctx) {
    if (!workStore.shouldTrackFetchMetrics) {
        return;
    }
    workStore.fetchMetrics ??= [];
    workStore.fetchMetrics.push({
        ...ctx,
        end: performance.timeOrigin + performance.now(),
        idx: workStore.nextFetchId || 0
    });
}
async function createCachedPrerenderResponse(res, cacheKey, incrementalCacheContext, incrementalCache, revalidate, handleUnlock) {
    // We are prerendering at build time or revalidate time with cacheComponents so we
    // need to buffer the response so we can guarantee it can be read in a
    // microtask.
    const bodyBuffer = await res.arrayBuffer();
    const fetchedData = {
        headers: Object.fromEntries(res.headers.entries()),
        body: Buffer.from(bodyBuffer).toString('base64'),
        status: res.status,
        url: res.url
    };
    // We can skip setting the serverComponentsHmrCache because we aren't in dev
    // mode.
    if (incrementalCacheContext) {
        await incrementalCache.set(cacheKey, {
            kind: CachedRouteKind.FETCH,
            data: fetchedData,
            revalidate
        }, incrementalCacheContext);
    }
    await handleUnlock();
    // We return a new Response to the caller.
    return new Response(bodyBuffer, {
        headers: res.headers,
        status: res.status,
        statusText: res.statusText
    });
}
async function createCachedDynamicResponse(workStore, res, cacheKey, incrementalCacheContext, incrementalCache, serverComponentsHmrCache, revalidate, input, handleUnlock, signal) {
    // We're cloning the response using this utility because there exists a bug in
    // the undici library around response cloning. See the following pull request
    // for more details: https://github.com/vercel/next.js/pull/73274
    const [cloned1, cloned2] = cloneResponse(res);
    // We are dynamically rendering including dev mode. We want to return the
    // response to the caller as soon as possible because it might stream over a
    // very long time.
    const cacheSetPromise = cloned1.arrayBuffer().then(async (arrayBuffer)=>{
        const bodyBuffer = Buffer.from(arrayBuffer);
        const fetchedData = {
            headers: Object.fromEntries(cloned1.headers.entries()),
            body: bodyBuffer.toString('base64'),
            status: cloned1.status,
            url: cloned1.url
        };
        serverComponentsHmrCache == null ? void 0 : serverComponentsHmrCache.set(cacheKey, fetchedData);
        if (incrementalCacheContext) {
            await incrementalCache.set(cacheKey, {
                kind: CachedRouteKind.FETCH,
                data: fetchedData,
                revalidate
            }, incrementalCacheContext);
        }
    }).catch((error)=>{
        // Don't warn if the request was aborted intentionally.
        if (!(signal == null ? void 0 : signal.aborted)) {
            console.warn(`Failed to set fetch cache`, input, error);
        }
    }).finally(handleUnlock);
    const pendingRevalidateKey = `cache-set-${cacheKey}`;
    const pendingRevalidates = workStore.pendingRevalidates ??= {};
    let pendingRevalidatePromise = Promise.resolve();
    if (pendingRevalidateKey in pendingRevalidates) {
        // There is already a pending revalidate entry that we need to await to
        // avoid race conditions.
        pendingRevalidatePromise = pendingRevalidates[pendingRevalidateKey];
    }
    pendingRevalidates[pendingRevalidateKey] = pendingRevalidatePromise.then(()=>cacheSetPromise).finally(()=>{
        // If the pending revalidate is not present in the store, then we have
        // nothing to delete.
        if (!(pendingRevalidates == null ? void 0 : pendingRevalidates[pendingRevalidateKey])) {
            return;
        }
        delete pendingRevalidates[pendingRevalidateKey];
    });
    return cloned2;
}
export function createPatchedFetcher(originFetch, { workAsyncStorage, workUnitAsyncStorage }) {
    // Create the patched fetch function.
    const patched = async function fetch(input, init) {
        var _init_method, _init_next;
        let url;
        try {
            url = new URL(input instanceof Request ? input.url : input);
            url.username = '';
            url.password = '';
        } catch {
            // Error caused by malformed URL should be handled by native fetch
            url = undefined;
        }
        const fetchUrl = (url == null ? void 0 : url.href) ?? '';
        const method = (init == null ? void 0 : (_init_method = init.method) == null ? void 0 : _init_method.toUpperCase()) || 'GET';
        // Do create a new span trace for internal fetches in the
        // non-verbose mode.
        const isInternal = (init == null ? void 0 : (_init_next = init.next) == null ? void 0 : _init_next.internal) === true;
        const hideSpan = process.env.NEXT_OTEL_FETCH_DISABLED === '1';
        // We don't track fetch metrics for internal fetches
        // so it's not critical that we have a start time, as it won't be recorded.
        // This is to workaround a flaky issue where performance APIs might
        // not be available and will require follow-up investigation.
        const fetchStart = isInternal ? undefined : performance.timeOrigin + performance.now();
        const workStore = workAsyncStorage.getStore();
        const workUnitStore = workUnitAsyncStorage.getStore();
        let cacheSignal = workUnitStore ? getCacheSignal(workUnitStore) : null;
        if (cacheSignal) {
            cacheSignal.beginRead();
        }
        const result = getTracer().trace(isInternal ? NextNodeServerSpan.internalFetch : AppRenderSpan.fetch, {
            hideSpan,
            kind: SpanKind.CLIENT,
            spanName: [
                'fetch',
                method,
                fetchUrl
            ].filter(Boolean).join(' '),
            attributes: {
                'http.url': fetchUrl,
                'http.method': method,
                'net.peer.name': url == null ? void 0 : url.hostname,
                'net.peer.port': (url == null ? void 0 : url.port) || undefined
            }
        }, async ()=>{
            var _getRequestMeta;
            // If this is an internal fetch, we should not do any special treatment.
            if (isInternal) {
                return originFetch(input, init);
            }
            // If the workStore is not available, we can't do any
            // special treatment of fetch, therefore fallback to the original
            // fetch implementation.
            if (!workStore) {
                return originFetch(input, init);
            }
            // We should also fallback to the original fetch implementation if we
            // are in draft mode, it does not constitute a static generation.
            if (workStore.isDraftMode) {
                return originFetch(input, init);
            }
            const isRequestInput = input && typeof input === 'object' && typeof input.method === 'string';
            const getRequestMeta = (field)=>{
                // If request input is present but init is not, retrieve from input first.
                const value = init == null ? void 0 : init[field];
                return value || (isRequestInput ? input[field] : null);
            };
            let finalRevalidate = undefined;
            const getNextField = (field)=>{
                var _init_next, _init_next1, _input_next;
                return typeof (init == null ? void 0 : (_init_next = init.next) == null ? void 0 : _init_next[field]) !== 'undefined' ? init == null ? void 0 : (_init_next1 = init.next) == null ? void 0 : _init_next1[field] : isRequestInput ? (_input_next = input.next) == null ? void 0 : _input_next[field] : undefined;
            };
            // RequestInit doesn't keep extra fields e.g. next so it's
            // only available if init is used separate
            const originalFetchRevalidate = getNextField('revalidate');
            let currentFetchRevalidate = originalFetchRevalidate;
            const tags = validateTags(getNextField('tags') || [], `fetch ${input.toString()}`);
            let revalidateStore;
            if (workUnitStore) {
                switch(workUnitStore.type){
                    case 'prerender':
                    case 'prerender-runtime':
                    // TODO: Stop accumulating tags in client prerender. (fallthrough)
                    case 'prerender-client':
                    case 'validation-client':
                    case 'prerender-ppr':
                    case 'prerender-legacy':
                    case 'cache':
                    case 'private-cache':
                        revalidateStore = workUnitStore;
                        break;
                    case 'request':
                    case 'unstable-cache':
                    case 'generate-static-params':
                        break;
                    default:
                        workUnitStore;
                }
            }
            if (revalidateStore) {
                if (Array.isArray(tags)) {
                    // Collect tags onto parent caches or parent prerenders.
                    const collectedTags = revalidateStore.tags ?? (revalidateStore.tags = []);
                    for (const tag of tags){
                        if (!collectedTags.includes(tag)) {
                            collectedTags.push(tag);
                        }
                    }
                }
            }
            const implicitTags = workUnitStore == null ? void 0 : workUnitStore.implicitTags;
            let pageFetchCacheMode = workStore.fetchCache;
            if (workUnitStore) {
                switch(workUnitStore.type){
                    case 'unstable-cache':
                        // Inside unstable-cache we treat it the same as force-no-store on
                        // the page.
                        pageFetchCacheMode = 'force-no-store';
                        break;
                    case 'prerender':
                    case 'prerender-client':
                    case 'validation-client':
                    case 'prerender-runtime':
                    case 'prerender-ppr':
                    case 'prerender-legacy':
                    case 'request':
                    case 'cache':
                    case 'private-cache':
                    case 'generate-static-params':
                        break;
                    default:
                        workUnitStore;
                }
            }
            const isUsingNoStore = !!workStore.isUnstableNoStore;
            let currentFetchCacheConfig = getRequestMeta('cache');
            let cacheReason = '';
            let cacheWarning;
            if (typeof currentFetchCacheConfig === 'string' && typeof currentFetchRevalidate !== 'undefined') {
                // If the revalidate value conflicts with the cache value, we should warn the user and unset the conflicting values.
                const isConflictingRevalidate = // revalidate: 0 and cache: force-cache
                currentFetchCacheConfig === 'force-cache' && currentFetchRevalidate === 0 || // revalidate: >0 or revalidate: false and cache: no-store
                currentFetchCacheConfig === 'no-store' && (currentFetchRevalidate > 0 || currentFetchRevalidate === false);
                if (isConflictingRevalidate) {
                    cacheWarning = `Specified "cache: ${currentFetchCacheConfig}" and "revalidate: ${currentFetchRevalidate}", only one should be specified.`;
                    currentFetchCacheConfig = undefined;
                    currentFetchRevalidate = undefined;
                }
            }
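            // Illustrative conflicts (annotation, not part of the compiled output):
            // both of these trip cacheWarning above and unset the two options.
            //   fetch(url, { cache: 'force-cache', next: { revalidate: 0 } })
            //   fetch(url, { cache: 'no-store', next: { revalidate: 60 } })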
            const hasExplicitFetchCacheOptOut = // fetch config itself signals not to cache
            currentFetchCacheConfig === 'no-cache' || currentFetchCacheConfig === 'no-store' || // the fetch isn't explicitly caching and the segment level cache config signals not to cache
            // note: `pageFetchCacheMode` is also set by being in an unstable_cache context.
            pageFetchCacheMode === 'force-no-store' || pageFetchCacheMode === 'only-no-store';
            // If no explicit fetch cache mode is set, but dynamic = `force-dynamic` is set,
            // we shouldn't consider caching the fetch. This is because the `dynamic` cache
            // is considered a "top-level" cache mode, whereas something like `fetchCache` is more
            // fine-grained. Top-level modes are responsible for setting reasonable defaults for the
            // other configurations.
            const noFetchConfigAndForceDynamic = !pageFetchCacheMode && !currentFetchCacheConfig && !currentFetchRevalidate && workStore.forceDynamic;
            if (// force-cache was specified without a revalidate value. We set the revalidate value to false
            // which will signal the cache to not revalidate
            currentFetchCacheConfig === 'force-cache' && typeof currentFetchRevalidate === 'undefined') {
                currentFetchRevalidate = false;
            } else if (hasExplicitFetchCacheOptOut || noFetchConfigAndForceDynamic) {
                currentFetchRevalidate = 0;
            }
            if (currentFetchCacheConfig === 'no-cache' || currentFetchCacheConfig === 'no-store') {
                cacheReason = `cache: ${currentFetchCacheConfig}`;
            }
            finalRevalidate = validateRevalidate(currentFetchRevalidate, workStore.route);
            const _headers = getRequestMeta('headers');
            const initHeaders = typeof (_headers == null ? void 0 : _headers.get) === 'function' ? _headers : new Headers(_headers || {});
            const hasUnCacheableHeader = initHeaders.get('authorization') || initHeaders.get('cookie');
            const isUnCacheableMethod = ![
                'get',
                'head'
            ].includes(((_getRequestMeta = getRequestMeta('method')) == null ? void 0 : _getRequestMeta.toLowerCase()) || 'get');
            /**
             * We automatically disable fetch caching under the following conditions:
             * - Fetch cache configs are not set. Specifically:
             *    - A page fetch cache mode is not set (export const fetchCache=...)
             *    - A fetch cache mode is not set in the fetch call (fetch(url, { cache: ... }))
             *      or the fetch cache mode is set to 'default'
             *    - A fetch revalidate value is not set in the fetch call (fetch(url, { revalidate: ... }))
             * - OR the fetch comes after a configuration that triggered dynamic rendering (e.g., reading cookies())
             *   and the fetch was considered uncacheable (e.g., POST method or has authorization headers)
             */ const hasNoExplicitCacheConfig = // eslint-disable-next-line eqeqeq
            pageFetchCacheMode == undefined && // eslint-disable-next-line eqeqeq
            (currentFetchCacheConfig == undefined || // when considering whether to opt into the default "no-cache" fetch semantics,
            // a "default" cache config should be treated the same as no cache config
            currentFetchCacheConfig === 'default') && // eslint-disable-next-line eqeqeq
            currentFetchRevalidate == undefined;
            let autoNoCache = Boolean((hasUnCacheableHeader || isUnCacheableMethod) && (revalidateStore == null ? void 0 : revalidateStore.revalidate) === 0);
            let isImplicitBuildTimeCache = false;
            if (!autoNoCache && hasNoExplicitCacheConfig) {
                // We don't enable automatic no-cache behavior during build-time
                // prerendering so that we can still leverage the fetch cache between
                // export workers.
                if (workStore.isBuildTimePrerendering) {
                    isImplicitBuildTimeCache = true;
                } else {
                    autoNoCache = true;
                }
            }
            // If we have no cache config, and we're in Dynamic I/O prerendering,
            // it'll be a dynamic call. We don't have to issue that dynamic call.
            if (hasNoExplicitCacheConfig && workUnitStore !== undefined) {
                switch(workUnitStore.type){
                    case 'prerender':
                    case 'prerender-runtime':
                    // While we don't want to do caching in the client scope we know the
                    // fetch will be dynamic for cacheComponents so we may as well avoid the
                    // call here. (fallthrough)
                    case 'prerender-client':
                        if (cacheSignal) {
                            cacheSignal.endRead();
                            cacheSignal = null;
                        }
                        return makeHangingPromise(workUnitStore.renderSignal, workStore.route, 'fetch()');
                    case 'validation-client':
                        break;
                    case 'request':
                        if (process.env.NODE_ENV === 'development' && workUnitStore.stagedRendering) {
                            if (cacheSignal) {
                                cacheSignal.endRead();
                                cacheSignal = null;
                            }
                            await workUnitStore.stagedRendering.waitForStage(RenderStage.Dynamic);
                        }
                        break;
                    case 'prerender-ppr':
                    case 'prerender-legacy':
                    case 'cache':
                    case 'private-cache':
                    case 'unstable-cache':
                    case 'generate-static-params':
                        break;
                    default:
                        workUnitStore;
                }
            }
            switch(pageFetchCacheMode){
                case 'force-no-store':
                    {
                        cacheReason = 'fetchCache = force-no-store';
                        break;
                    }
                case 'only-no-store':
                    {
                        if (currentFetchCacheConfig === 'force-cache' || typeof finalRevalidate !== 'undefined' && finalRevalidate > 0) {
                            throw Object.defineProperty(new Error(`cache: 'force-cache' used on fetch for ${fetchUrl} with 'export const fetchCache = 'only-no-store'`), "__NEXT_ERROR_CODE", {
                                value: "E448",
                                enumerable: false,
                                configurable: true
                            });
                        }
                        cacheReason = 'fetchCache = only-no-store';
                        break;
                    }
                case 'only-cache':
                    {
                        if (currentFetchCacheConfig === 'no-store') {
                            throw Object.defineProperty(new Error(`cache: 'no-store' used on fetch for ${fetchUrl} with 'export const fetchCache = 'only-cache'`), "__NEXT_ERROR_CODE", {
                                value: "E521",
                                enumerable: false,
                                configurable: true
                            });
                        }
                        break;
                    }
                case 'force-cache':
                    {
                        if (typeof currentFetchRevalidate === 'undefined' || currentFetchRevalidate === 0) {
                            cacheReason = 'fetchCache = force-cache';
                            finalRevalidate = INFINITE_CACHE;
                        }
                        break;
                    }
                case 'default-cache':
                case 'default-no-store':
                case 'auto':
                case undefined:
                    break;
                default:
                    pageFetchCacheMode;
            }
            if (typeof finalRevalidate === 'undefined') {
                if (pageFetchCacheMode === 'default-cache' && !isUsingNoStore) {
                    finalRevalidate = INFINITE_CACHE;
                    cacheReason = 'fetchCache = default-cache';
                } else if (pageFetchCacheMode === 'default-no-store') {
                    finalRevalidate = 0;
                    cacheReason = 'fetchCache = default-no-store';
                } else if (isUsingNoStore) {
                    finalRevalidate = 0;
                    cacheReason = 'noStore call';
                } else if (autoNoCache) {
                    finalRevalidate = 0;
                    cacheReason = 'auto no cache';
                } else {
                    // TODO: should we consider this case an invariant?
                    cacheReason = 'auto cache';
                    finalRevalidate = revalidateStore ? revalidateStore.revalidate : INFINITE_CACHE;
                }
            } else if (!cacheReason) {
                cacheReason = `revalidate: ${finalRevalidate}`;
            }
            if (// when force static is configured we don't bail from
            // `revalidate: 0` values
            !(workStore.forceStatic && finalRevalidate === 0) && // we don't consider autoNoCache to switch to dynamic for ISR
            !autoNoCache && // If the revalidate value isn't currently set or the value is less
            // than the current revalidate value, we should update the revalidate
            // value.
            revalidateStore && finalRevalidate < revalidateStore.revalidate) {
                // If we were setting the revalidate value to 0, we should try to
                // postpone instead first.
                if (finalRevalidate === 0) {
                    if (workUnitStore) {
                        switch(workUnitStore.type){
                            case 'prerender':
                            case 'prerender-client':
                            case 'prerender-runtime':
                            // If we're in an instant validation, a dynamic fetch won't
                            // have time to resolve during the validation prerender anyway,
                            // so we leave it hanging. This can cause false negatives in shared parents,
                            // but we accept that for now, because client data fetching is non-idiomatic.
                            // eslint-disable-next-line no-fallthrough
                            case 'validation-client':
                                if (cacheSignal) {
                                    cacheSignal.endRead();
                                    cacheSignal = null;
                                }
                                return makeHangingPromise(workUnitStore.renderSignal, workStore.route, 'fetch()');
                            case 'request':
                                if (process.env.NODE_ENV === 'development' && workUnitStore.stagedRendering) {
                                    if (cacheSignal) {
                                        cacheSignal.endRead();
                                        cacheSignal = null;
                                    }
                                    await workUnitStore.stagedRendering.waitForStage(RenderStage.Dynamic);
                                }
                                break;
                            case 'prerender-ppr':
                            case 'prerender-legacy':
                            case 'cache':
                            case 'private-cache':
                            case 'unstable-cache':
                            case 'generate-static-params':
                                break;
                            default:
                                workUnitStore;
                        }
                    }
                    markCurrentScopeAsDynamic(workStore, workUnitStore, `revalidate: 0 fetch ${input} ${workStore.route}`);
                }
                // We only want to set the revalidate store's revalidate time if it
                // was explicitly set for the fetch call, i.e.
                // originalFetchRevalidate.
                if (revalidateStore && originalFetchRevalidate === finalRevalidate) {
                    revalidateStore.revalidate = finalRevalidate;
                }
            }
            const isCacheableRevalidate = typeof finalRevalidate === 'number' && finalRevalidate > 0;
            let cacheKey;
            const { incrementalCache } = workStore;
            let isHmrRefresh = false;
            let serverComponentsHmrCache;
            if (workUnitStore) {
                switch(workUnitStore.type){
                    case 'request':
                    case 'cache':
                    case 'private-cache':
                        isHmrRefresh = workUnitStore.isHmrRefresh ?? false;
                        serverComponentsHmrCache = workUnitStore.serverComponentsHmrCache;
                        break;
                    case 'prerender':
                    case 'prerender-client':
                    case 'validation-client':
                    case 'prerender-runtime':
                    case 'prerender-ppr':
                    case 'prerender-legacy':
                    case 'unstable-cache':
                    case 'generate-static-params':
                        break;
                    default:
                        workUnitStore;
                }
            }
            if (incrementalCache && (isCacheableRevalidate || serverComponentsHmrCache)) {
                try {
                    cacheKey = await incrementalCache.generateCacheKey(fetchUrl, isRequestInput ? input : init);
                } catch (err) {
                    console.error(`Failed to generate cache key for`, input);
                }
            }
            const fetchIdx = workStore.nextFetchId ?? 1;
            workStore.nextFetchId = fetchIdx + 1;
            let handleUnlock = ()=>{};
            const doOriginalFetch = async (isStale, cacheReasonOverride)=>{
                const requestInputFields = [
                    'cache',
                    'credentials',
                    'headers',
                    'integrity',
                    'keepalive',
                    'method',
                    'mode',
                    'redirect',
                    'referrer',
                    'referrerPolicy',
                    'window',
                    'duplex',
                    // don't pass through signal when revalidating
                    ...isStale ? [] : [
                        'signal'
                    ]
                ];
                if (isRequestInput) {
                    const reqInput = input;
                    const reqOptions = {
                        body: reqInput._ogBody || reqInput.body
                    };
                    for (const field of requestInputFields){
                        // @ts-expect-error custom fields
                        reqOptions[field] = reqInput[field];
                    }
                    input = new Request(reqInput.url, reqOptions);
                } else if (init) {
                    const { _ogBody, body, signal, ...otherInput } = init;
                    init = {
                        ...otherInput,
                        body: _ogBody || body,
                        signal: isStale ? undefined : signal
                    };
                }
                // add metadata to init without editing the original
                const clonedInit = {
                    ...init,
                    next: {
                        ...init == null ? void 0 : init.next,
                        fetchType: 'origin',
                        fetchIdx
                    }
                };
                return originFetch(input, clonedInit).then(async (res)=>{
                    if (!isStale && fetchStart) {
                        trackFetchMetric(workStore, {
                            start: fetchStart,
                            url: fetchUrl,
                            cacheReason: cacheReasonOverride || cacheReason,
                            cacheStatus: finalRevalidate === 0 || cacheReasonOverride ? 'skip' : 'miss',
                            cacheWarning,
                            status: res.status,
                            method: clonedInit.method || 'GET'
                        });
                    }
                    if (res.status === 200 && incrementalCache && cacheKey && (isCacheableRevalidate || serverComponentsHmrCache)) {
                        const normalizedRevalidate = finalRevalidate >= INFINITE_CACHE ? CACHE_ONE_YEAR_SECONDS : finalRevalidate;
                        const incrementalCacheConfig = isCacheableRevalidate ? {
                            fetchCache: true,
                            fetchUrl,
                            fetchIdx,
                            tags,
                            isImplicitBuildTimeCache
                        } : undefined;
                        switch(workUnitStore == null ? void 0 : workUnitStore.type){
                            case 'prerender':
                            case 'prerender-client':
                            case 'validation-client':
                            case 'prerender-runtime':
                                return createCachedPrerenderResponse(res, cacheKey, incrementalCacheConfig, incrementalCache, normalizedRevalidate, handleUnlock);
                            case 'request':
                                if (process.env.NODE_ENV === 'development' && workUnitStore.stagedRendering && workUnitStore.cacheSignal && isCacheableRevalidate) {
                                    // We're filling caches for a staged render with an
                                    // explicit cache config, so we need to wait for the
                                    // response to finish instead of streaming. For HMR-only
                                    // caching (no explicit revalidate), we fall through to
                                    // createCachedDynamicResponse which handles streaming
                                    // and abort gracefully.
                                    return createCachedPrerenderResponse(res, cacheKey, incrementalCacheConfig, incrementalCache, normalizedRevalidate, handleUnlock);
                                }
                            // fallthrough
                            case 'prerender-ppr':
                            case 'prerender-legacy':
                            case 'cache':
                            case 'private-cache':
                            case 'unstable-cache':
                            case 'generate-static-params':
                            case undefined:
                                return createCachedDynamicResponse(workStore, res, cacheKey, incrementalCacheConfig, incrementalCache, serverComponentsHmrCache, normalizedRevalidate, input, handleUnlock, getRequestMeta('signal'));
                            default:
                                workUnitStore;
                        }
                    }
                    // we had response that we determined shouldn't be cached so we return it
                    // and don't cache it. This also needs to unlock the cache lock we acquired.
                    await handleUnlock();
                    return res;
                }).catch((error)=>{
                    handleUnlock();
                    throw error;
                });
            };
            let cacheReasonOverride;
            let isForegroundRevalidate = false;
            let isHmrRefreshCache = false;
            if (cacheKey && incrementalCache) {
                let cachedFetchData;
                if (isHmrRefresh && serverComponentsHmrCache) {
                    cachedFetchData = serverComponentsHmrCache.get(cacheKey);
                    isHmrRefreshCache = true;
                }
                if (isCacheableRevalidate && !cachedFetchData) {
                    handleUnlock = await incrementalCache.lock(cacheKey);
                    const entry = workStore.isOnDemandRevalidate ? null : await incrementalCache.get(cacheKey, {
                        kind: IncrementalCacheKind.FETCH,
                        revalidate: finalRevalidate,
                        fetchUrl,
                        fetchIdx,
                        tags,
                        softTags: implicitTags == null ? void 0 : implicitTags.tags
                    });
                    if (hasNoExplicitCacheConfig && workUnitStore) {
                        switch(workUnitStore.type){
                            case 'prerender':
                            case 'prerender-client':
                            case 'validation-client':
                            case 'prerender-runtime':
                                // We sometimes use the cache to dedupe fetches that do not
                                // specify a cache configuration. In these cases we want to
                                // make sure we still exclude them from prerenders if
                                // cacheComponents is on so we introduce an artificial task boundary
                                // here.
                                await getTimeoutBoundary();
                                break;
                            case 'request':
                                if (process.env.NODE_ENV === 'development' && workUnitStore.stagedRendering) {
                                    await workUnitStore.stagedRendering.waitForStage(RenderStage.Dynamic);
                                }
                                break;
                            case 'prerender-ppr':
                            case 'prerender-legacy':
                            case 'cache':
                            case 'private-cache':
                            case 'unstable-cache':
                            case 'generate-static-params':
                                break;
                            default:
                                workUnitStore;
                        }
                    }
                    if (entry) {
                        await handleUnlock();
                    } else {
                        // in dev, incremental cache response will be null in case the browser adds `cache-control: no-cache` in the request headers
                        // TODO: it seems like we also hit this after revalidates in dev?
                        cacheReasonOverride = 'cache-control: no-cache (hard refresh)';
                    }
                    if ((entry == null ? void 0 : entry.value) && entry.value.kind === CachedRouteKind.FETCH) {
                        // when stale and is revalidating we wait for fresh data
                        // so the revalidated entry has the updated data
                        if (workStore.isStaticGeneration && entry.isStale) {
                            isForegroundRevalidate = true;
                        } else {
                            if (entry.isStale) {
                                workStore.pendingRevalidates ??= {};
                                if (!workStore.pendingRevalidates[cacheKey]) {
                                    const pendingRevalidate = doOriginalFetch(true).then(async (response)=>({
                                            body: await response.arrayBuffer(),
                                            headers: response.headers,
                                            status: response.status,
                                            statusText: response.statusText
                                        })).finally(()=>{
                                        workStore.pendingRevalidates ??= {};
                                        delete workStore.pendingRevalidates[cacheKey || ''];
                                    });
                                    // Attach the empty catch here so we don't get a "unhandled
                                    // promise rejection" warning.
                                    pendingRevalidate.catch(console.error);
                                    workStore.pendingRevalidates[cacheKey] = pendingRevalidate;
                                }
                            }
                            cachedFetchData = entry.value.data;
                        }
                    }
                }
                if (cachedFetchData) {
                    if (fetchStart) {
                        trackFetchMetric(workStore, {
                            start: fetchStart,
                            url: fetchUrl,
                            cacheReason,
                            cacheStatus: isHmrRefreshCache ? 'hmr' : 'hit',
                            cacheWarning,
                            status: cachedFetchData.status || 200,
                            method: (init == null ? void 0 : init.method) || 'GET'
                        });
                    }
                    const response = new Response(Buffer.from(cachedFetchData.body, 'base64'), {
                        headers: cachedFetchData.headers,
                        status: cachedFetchData.status
                    });
                    Object.defineProperty(response, 'url', {
                        value: cachedFetchData.url
                    });
                    return response;
                }
            }
            if ((workStore.isStaticGeneration || process.env.NODE_ENV === 'development' && process.env.__NEXT_CACHE_COMPONENTS && workUnitStore && // eslint-disable-next-line no-restricted-syntax
            workUnitStore.type === 'request' && workUnitStore.stagedRendering) && init && typeof init === 'object') {
                const { cache } = init;
                // Delete `cache` property as Cloudflare Workers will throw an error
                if (isEdgeRuntime) delete init.cache;
                if (cache === 'no-store') {
                    // If enabled, we should bail out of static generation.
                    if (workUnitStore) {
                        switch(workUnitStore.type){
                            case 'prerender':
                            case 'prerender-client':
                            case 'prerender-runtime':
                            case 'validation-client':
                                if (cacheSignal) {
                                    cacheSignal.endRead();
                                    cacheSignal = null;
                                }
                                return makeHangingPromise(workUnitStore.renderSignal, workStore.route, 'fetch()');
                            case 'request':
                                if (process.env.NODE_ENV === 'development' && workUnitStore.stagedRendering) {
                                    if (cacheSignal) {
                                        cacheSignal.endRead();
                                        cacheSignal = null;
                                    }
                                    await workUnitStore.stagedRendering.waitForStage(RenderStage.Dynamic);
                                }
                                break;
                            case 'prerender-ppr':
                            case 'prerender-legacy':
                            case 'cache':
                            case 'private-cache':
                            case 'unstable-cache':
                            case 'generate-static-params':
                                break;
                            default:
                                workUnitStore;
                        }
                    }
                    markCurrentScopeAsDynamic(workStore, workUnitStore, `no-store fetch ${input} ${workStore.route}`);
                }
                const hasNextConfig = 'next' in init;
                const { next = {} } = init;
                if (typeof next.revalidate === 'number' && revalidateStore && next.revalidate < revalidateStore.revalidate) {
                    if (next.revalidate === 0) {
                        // If enabled, we should bail out of static generation.
                        if (workUnitStore) {
                            switch(workUnitStore.type){
                                case 'prerender':
                                case 'prerender-client':
                                case 'prerender-runtime':
                                case 'validation-client':
                                    return makeHangingPromise(workUnitStore.renderSignal, workStore.route, 'fetch()');
                                case 'request':
                                    if (process.env.NODE_ENV === 'development' && workUnitStore.stagedRendering) {
                                        await workUnitStore.stagedRendering.waitForStage(RenderStage.Dynamic);
                                    }
                                    break;
                                case 'cache':
                                case 'private-cache':
                                case 'unstable-cache':
                                case 'prerender-legacy':
                                case 'prerender-ppr':
                                case 'generate-static-params':
                                    break;
                                default:
                                    workUnitStore;
                            }
                        }
                        markCurrentScopeAsDynamic(workStore, workUnitStore, `revalidate: 0 fetch ${input} ${workStore.route}`);
                    }
                    if (!workStore.forceStatic || next.revalidate !== 0) {
                        revalidateStore.revalidate = next.revalidate;
                    }
                }
                if (hasNextConfig) delete init.next;
            }
            // if we are revalidating the whole page via time or on-demand and
            // the fetch cache entry is stale we should still de-dupe the
            // origin hit if it's a cache-able entry
            if (cacheKey && isForegroundRevalidate) {
                const pendingRevalidateKey = cacheKey;
                workStore.pendingRevalidates ??= {};
                let pendingRevalidate = workStore.pendingRevalidates[pendingRevalidateKey];
                if (pendingRevalidate) {
                    const revalidatedResult = await pendingRevalidate;
                    return new Response(revalidatedResult.body, {
                        headers: revalidatedResult.headers,
                        status: revalidatedResult.status,
                        statusText: revalidatedResult.statusText
                    });
                }
                // We used to just resolve the Response and clone it however for
                // static generation with cacheComponents we need the response to be able to
                // be resolved in a microtask and cloning the response will never have
                // a body that can resolve in a microtask in node (as observed through
                // experimentation) So instead we await the body and then when it is
                // available we construct manually cloned Response objects with the
                // body as an ArrayBuffer. This will be resolvable in a microtask
                // making it compatible with cacheComponents.
                const pendingResponse = doOriginalFetch(true, cacheReasonOverride)// We're cloning the response using this utility because there
                // exists a bug in the undici library around response cloning.
                // See the following pull request for more details:
                // https://github.com/vercel/next.js/pull/73274
                .then(cloneResponse);
                pendingRevalidate = pendingResponse.then(async (responses)=>{
                    const response = responses[0];
                    return {
                        body: await response.arrayBuffer(),
                        headers: response.headers,
                        status: response.status,
                        statusText: response.statusText
                    };
                }).finally(()=>{
                    var _workStore_pendingRevalidates;
                    // If the pending revalidate is not present in the store, then
                    // we have nothing to delete.
                    if (!((_workStore_pendingRevalidates = workStore.pendingRevalidates) == null ? void 0 : _workStore_pendingRevalidates[pendingRevalidateKey])) {
                        return;
                    }
                    delete workStore.pendingRevalidates[pendingRevalidateKey];
                });
                // Attach the empty catch here so we don't get a "unhandled promise
                // rejection" warning
                pendingRevalidate.catch(()=>{});
                workStore.pendingRevalidates[pendingRevalidateKey] = pendingRevalidate;
                return pendingResponse.then((responses)=>responses[1]);
            } else {
                return doOriginalFetch(false, cacheReasonOverride);
            }
        });
        if (cacheSignal) {
            try {
                return await result;
            } finally{
                if (cacheSignal) {
                    cacheSignal.endRead();
                }
            }
        }
        return result;
    };
    // Attach the necessary properties to the patched fetch function.
    // We don't use this to determine if the fetch function has been patched,
    // but for external consumers to determine if the fetch function has been
    // patched.
    patched.__nextPatched = true;
    patched.__nextGetStaticStore = ()=>workAsyncStorage;
    patched._nextOriginalFetch = originFetch;
    globalThis[NEXT_PATCH_SYMBOL] = true;
    // Assign the function name also as a name property, so that it's preserved
    // even when mangling is enabled.
    Object.defineProperty(patched, 'name', {
        value: 'fetch',
        writable: false
    });
    return patched;
}
// we patch fetch to collect cache information used for
// determining if a page is static or not
export function patchFetch(options) {
    // If we've already patched fetch, we should not patch it again.
    if (isFetchPatched()) return;
    // Grab the original fetch function. We'll attach this so we can use it in
    // the patched fetch function.
    const original = createDedupeFetch(globalThis.fetch);
    // Set the global fetch to the patched fetch.
    globalThis.fetch = createPatchedFetcher(original, options);
}
let currentTimeoutBoundary = null;
function getTimeoutBoundary() {
    if (!currentTimeoutBoundary) {
        currentTimeoutBoundary = new Promise((r)=>{
            setTimeout(()=>{
                currentTimeoutBoundary = null;
                r();
            }, 0);
        });
    }
    return currentTimeoutBoundary;
}

//# sourceMappingURL=patch-fetch.js.map
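To tie the pieces together, a sketch of the caller side: patchFetch is invoked once at server startup, after which every fetch in app code flows through the caching logic above. The storage import paths are assumptions standing in for however the server wires its async storages:

import { patchFetch } from './patch-fetch';
import { workAsyncStorage } from '../app-render/work-async-storage.external';
import { workUnitAsyncStorage } from '../app-render/work-unit-async-storage.external';

// Idempotent: a second call is a no-op thanks to NEXT_PATCH_SYMBOL.
patchFetch({ workAsyncStorage, workUnitAsyncStorage });

// Application code can then opt into caching per request:
await fetch('https://api.example.com/posts', { next: { revalidate: 60, tags: ['posts'] } });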
+1
File diff suppressed because one or more lines are too long
+34
@@ -0,0 +1,34 @@
import { getRequestMeta } from '../request-meta';
/**
 * Ensure cookies set in middleware are merged and not overridden by API
 * routes/getServerSideProps.
 *
 * @param req Incoming request
 * @param res Outgoing response
 */ export function patchSetHeaderWithCookieSupport(req, res) {
    const setHeader = res.setHeader.bind(res);
    res.setHeader = (name, value)=>{
        // When rendering /_error after a page has failed, it could attempt to
        // set headers after they have already been sent.
        if ('headersSent' in res && res.headersSent) {
            return res;
        }
        if (name.toLowerCase() === 'set-cookie') {
            const middlewareValue = getRequestMeta(req, 'middlewareCookie');
            if (!middlewareValue || !Array.isArray(value) || !value.every((item, idx)=>item === middlewareValue[idx])) {
                value = [
                    // TODO: (wyattjoh) find out why this is called multiple times resulting in duplicate cookies being added
                    ...new Set([
                        ...middlewareValue || [],
                        ...typeof value === 'string' ? [
                            value
                        ] : Array.isArray(value) ? value : []
                    ])
                ];
            }
        }
        return setHeader(name, value);
    };
}

//# sourceMappingURL=patch-set-header.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/patch-set-header.ts"],"sourcesContent":["import { getRequestMeta, type NextIncomingMessage } from '../request-meta'\n\ntype PatchableResponse = {\n setHeader(key: string, value: string | string[]): PatchableResponse\n}\n\n/**\n * Ensure cookies set in middleware are merged and not overridden by API\n * routes/getServerSideProps.\n *\n * @param req Incoming request\n * @param res Outgoing response\n */\nexport function patchSetHeaderWithCookieSupport(\n req: NextIncomingMessage,\n res: PatchableResponse\n) {\n const setHeader = res.setHeader.bind(res)\n res.setHeader = (\n name: string,\n value: string | string[]\n ): PatchableResponse => {\n // When renders /_error after page is failed, it could attempt to set\n // headers after headers.\n if ('headersSent' in res && res.headersSent) {\n return res\n }\n\n if (name.toLowerCase() === 'set-cookie') {\n const middlewareValue = getRequestMeta(req, 'middlewareCookie')\n\n if (\n !middlewareValue ||\n !Array.isArray(value) ||\n !value.every((item, idx) => item === middlewareValue[idx])\n ) {\n value = [\n // TODO: (wyattjoh) find out why this is called multiple times resulting in duplicate cookies being added\n ...new Set([\n ...(middlewareValue || []),\n ...(typeof value === 'string'\n ? [value]\n : Array.isArray(value)\n ? value\n : []),\n ]),\n ]\n }\n }\n\n return setHeader(name, value)\n }\n}\n"],"names":["getRequestMeta","patchSetHeaderWithCookieSupport","req","res","setHeader","bind","name","value","headersSent","toLowerCase","middlewareValue","Array","isArray","every","item","idx","Set"],"mappings":"AAAA,SAASA,cAAc,QAAkC,kBAAiB;AAM1E;;;;;;CAMC,GACD,OAAO,SAASC,gCACdC,GAAwB,EACxBC,GAAsB;IAEtB,MAAMC,YAAYD,IAAIC,SAAS,CAACC,IAAI,CAACF;IACrCA,IAAIC,SAAS,GAAG,CACdE,MACAC;QAEA,qEAAqE;QACrE,yBAAyB;QACzB,IAAI,iBAAiBJ,OAAOA,IAAIK,WAAW,EAAE;YAC3C,OAAOL;QACT;QAEA,IAAIG,KAAKG,WAAW,OAAO,cAAc;YACvC,MAAMC,kBAAkBV,eAAeE,KAAK;YAE5C,IACE,CAACQ,mBACD,CAACC,MAAMC,OAAO,CAACL,UACf,CAACA,MAAMM,KAAK,CAAC,CAACC,MAAMC,MAAQD,SAASJ,eAAe,CAACK,IAAI,GACzD;gBACAR,QAAQ;oBACN,yGAAyG;uBACtG,IAAIS,IAAI;2BACLN,mBAAmB,EAAE;2BACrB,OAAOH,UAAU,WACjB;4BAACA;yBAAM,GACPI,MAAMC,OAAO,CAACL,SACZA,QACA,EAAE;qBACT;iBACF;YACH;QACF;QAEA,OAAOH,UAAUE,MAAMC;IACzB;AACF","ignoreList":[0]}
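A quick sketch of the merge behavior; the cookie values are illustrative:

// Suppose middleware already recorded a cookie in the request metadata:
//   middlewareCookie = ['session=abc; Path=/']
// A route handler later calls:
res.setHeader('Set-Cookie', 'theme=dark; Path=/');
// The patched setHeader forwards the deduplicated union instead:
//   ['session=abc; Path=/', 'theme=dark; Path=/']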
+38
@@ -0,0 +1,38 @@
import { DEFAULT_MAX_POSTPONED_STATE_SIZE, parseMaxPostponedStateSize } from '../../shared/lib/size-limit';
const INVALID_MAX_POSTPONED_STATE_SIZE_ERROR_MESSAGE = 'maxPostponedStateSize must be a valid number (bytes) or filesize format string (e.g., "5mb")';
export function getMaxPostponedStateSize(configuredMaxPostponedStateSize) {
    const maxPostponedStateSize = configuredMaxPostponedStateSize ?? DEFAULT_MAX_POSTPONED_STATE_SIZE;
    const maxPostponedStateSizeBytes = parseMaxPostponedStateSize(configuredMaxPostponedStateSize);
    if (maxPostponedStateSizeBytes === undefined) {
        throw Object.defineProperty(new Error(INVALID_MAX_POSTPONED_STATE_SIZE_ERROR_MESSAGE), "__NEXT_ERROR_CODE", {
            value: "E977",
            enumerable: false,
            configurable: true
        });
    }
    return {
        maxPostponedStateSize,
        maxPostponedStateSizeBytes
    };
}
export function getPostponedStateExceededErrorMessage(maxPostponedStateSize) {
    return `Postponed state exceeded ${maxPostponedStateSize} limit. ` + `To configure the limit, see: https://nextjs.org/docs/app/api-reference/config/next-config-js/max-postponed-state-size`;
}
function toBuffer(chunk) {
    return Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
}
export async function readBodyWithSizeLimit(body, maxBodySizeBytes) {
    const chunks = [];
    let size = 0;
    for await (const chunk of body){
        const buffer = toBuffer(chunk);
        size += buffer.byteLength;
        if (size > maxBodySizeBytes) {
            return null;
        }
        chunks.push(buffer);
    }
    return Buffer.concat(chunks);
}

//# sourceMappingURL=postponed-request-body.js.map
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/server/lib/postponed-request-body.ts"],"sourcesContent":["import {\n DEFAULT_MAX_POSTPONED_STATE_SIZE,\n parseMaxPostponedStateSize,\n} from '../../shared/lib/size-limit'\nimport type { SizeLimit } from '../../types'\n\nconst INVALID_MAX_POSTPONED_STATE_SIZE_ERROR_MESSAGE =\n 'maxPostponedStateSize must be a valid number (bytes) or filesize format string (e.g., \"5mb\")'\n\nexport type PostponedRequestBodyChunk = Buffer | Uint8Array | string\n\nexport function getMaxPostponedStateSize(\n configuredMaxPostponedStateSize: SizeLimit | undefined\n): {\n maxPostponedStateSize: SizeLimit\n maxPostponedStateSizeBytes: number\n} {\n const maxPostponedStateSize =\n configuredMaxPostponedStateSize ?? DEFAULT_MAX_POSTPONED_STATE_SIZE\n const maxPostponedStateSizeBytes = parseMaxPostponedStateSize(\n configuredMaxPostponedStateSize\n )\n\n if (maxPostponedStateSizeBytes === undefined) {\n throw new Error(INVALID_MAX_POSTPONED_STATE_SIZE_ERROR_MESSAGE)\n }\n\n return { maxPostponedStateSize, maxPostponedStateSizeBytes }\n}\n\nexport function getPostponedStateExceededErrorMessage(\n maxPostponedStateSize: SizeLimit\n): string {\n return (\n `Postponed state exceeded ${maxPostponedStateSize} limit. ` +\n `To configure the limit, see: https://nextjs.org/docs/app/api-reference/config/next-config-js/max-postponed-state-size`\n )\n}\n\nfunction toBuffer(chunk: PostponedRequestBodyChunk): Buffer {\n return Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)\n}\n\nexport async function readBodyWithSizeLimit(\n body: AsyncIterable<PostponedRequestBodyChunk>,\n maxBodySizeBytes: number\n): Promise<Buffer | null> {\n const chunks: Array<Buffer> = []\n let size = 0\n\n for await (const chunk of body) {\n const buffer = toBuffer(chunk)\n size += buffer.byteLength\n if (size > maxBodySizeBytes) {\n return null\n }\n chunks.push(buffer)\n }\n\n return Buffer.concat(chunks)\n}\n"],"names":["DEFAULT_MAX_POSTPONED_STATE_SIZE","parseMaxPostponedStateSize","INVALID_MAX_POSTPONED_STATE_SIZE_ERROR_MESSAGE","getMaxPostponedStateSize","configuredMaxPostponedStateSize","maxPostponedStateSize","maxPostponedStateSizeBytes","undefined","Error","getPostponedStateExceededErrorMessage","toBuffer","chunk","Buffer","isBuffer","from","readBodyWithSizeLimit","body","maxBodySizeBytes","chunks","size","buffer","byteLength","push","concat"],"mappings":"AAAA,SACEA,gCAAgC,EAChCC,0BAA0B,QACrB,8BAA6B;AAGpC,MAAMC,iDACJ;AAIF,OAAO,SAASC,yBACdC,+BAAsD;IAKtD,MAAMC,wBACJD,mCAAmCJ;IACrC,MAAMM,6BAA6BL,2BACjCG;IAGF,IAAIE,+BAA+BC,WAAW;QAC5C,MAAM,qBAAyD,CAAzD,IAAIC,MAAMN,iDAAV,qBAAA;mBAAA;wBAAA;0BAAA;QAAwD;IAChE;IAEA,OAAO;QAAEG;QAAuBC;IAA2B;AAC7D;AAEA,OAAO,SAASG,sCACdJ,qBAAgC;IAEhC,OACE,CAAC,yBAAyB,EAAEA,sBAAsB,QAAQ,CAAC,GAC3D,CAAC,qHAAqH,CAAC;AAE3H;AAEA,SAASK,SAASC,KAAgC;IAChD,OAAOC,OAAOC,QAAQ,CAACF,SAASA,QAAQC,OAAOE,IAAI,CAACH;AACtD;AAEA,OAAO,eAAeI,sBACpBC,IAA8C,EAC9CC,gBAAwB;IAExB,MAAMC,SAAwB,EAAE;IAChC,IAAIC,OAAO;IAEX,WAAW,MAAMR,SAASK,KAAM;QAC9B,MAAMI,SAASV,SAASC;QACxBQ,QAAQC,OAAOC,UAAU;QACzB,IAAIF,OAAOF,kBAAkB;YAC3B,OAAO;QACT;QACAC,OAAOI,IAAI,CAACF;IACd;IAEA,OAAOR,OAAOW,MAAM,CAACL;AACvB","ignoreList":[0]}
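A minimal sketch of enforcing the configured limit while reading a request body; the source stream is an assumption (any async iterable of Buffer/Uint8Array/string chunks works):

import { getMaxPostponedStateSize, getPostponedStateExceededErrorMessage, readBodyWithSizeLimit } from './postponed-request-body';

const { maxPostponedStateSize, maxPostponedStateSizeBytes } = getMaxPostponedStateSize('5mb');
const body = await readBodyWithSizeLimit(req, maxPostponedStateSizeBytes);
if (body === null) {
    // readBodyWithSizeLimit returns null once the running total exceeds the limit.
    console.error(getPostponedStateExceededErrorMessage(maxPostponedStateSize));
}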
+116
@@ -0,0 +1,116 @@
|
||||
import next from '../next';
import { interopDefault } from '../../lib/interop-default';
import { formatDynamicImportPath } from '../../lib/format-dynamic-import-path';
let initializations = {};
let sandboxContext;
if (process.env.NODE_ENV !== 'production') {
    sandboxContext = require('../web/sandbox/context');
}
export function clearAllModuleContexts() {
    return sandboxContext == null ? void 0 : sandboxContext.clearAllModuleContexts();
}
export function clearModuleContext(target) {
    return sandboxContext == null ? void 0 : sandboxContext.clearModuleContext(target);
}
export async function getServerField(dir, field) {
    const initialization = await initializations[dir];
    if (!initialization) {
        throw Object.defineProperty(new Error('Invariant: cannot propagate server field, no app initialized'), "__NEXT_ERROR_CODE", {
            value: "E116",
            enumerable: false,
            configurable: true
        });
    }
    const { server } = initialization;
    let wrappedServer = server['server']; // NextServer.server is private
    return wrappedServer[field];
}
export async function propagateServerField(dir, field, value) {
    const initialization = await initializations[dir];
    if (!initialization) {
        throw Object.defineProperty(new Error('Invariant: cannot propagate server field, no app initialized'), "__NEXT_ERROR_CODE", {
            value: "E116",
            enumerable: false,
            configurable: true
        });
    }
    const { server } = initialization;
    let wrappedServer = server['server'];
    const _field = field;
    if (wrappedServer) {
        if (typeof wrappedServer[_field] === 'function') {
            // @ts-expect-error
            await wrappedServer[_field].apply(wrappedServer, Array.isArray(value) ? value : []);
        } else {
            // @ts-expect-error
            wrappedServer[_field] = value;
        }
    }
}
async function initializeImpl(opts) {
    const type = process.env.__NEXT_PRIVATE_RENDER_WORKER;
    if (type) {
        process.title = 'next-render-worker-' + type;
    }
    let requestHandler;
    let upgradeHandler;
    const server = next({
        ...opts,
        hostname: opts.hostname || 'localhost',
        customServer: false,
        httpServer: opts.server,
        port: opts.port
    }); // should return a NextServer when `customServer: false`
    // If we're in test mode and there's a debug cache entry handler available,
    // then use it to wrap the request handler instead of using the default one.
    if (process.env.__NEXT_TEST_MODE && process.env.NEXT_PRIVATE_DEBUG_CACHE_ENTRY_HANDLERS) {
        // This mirrors the sole implementation of this over in:
        // test/production/standalone-mode/required-server-files/cache-entry-handler.js
        const createOnCacheEntryHandlers = interopDefault(await import(formatDynamicImportPath(opts.dir, process.env.NEXT_PRIVATE_DEBUG_CACHE_ENTRY_HANDLERS)));
        // This is not to be used in any environment other than testing, as it is
        // not memoized and is subject to constant change.
        requestHandler = async (req, res, parsedUrl)=>{
            // Re-create the entry handler for each request. This is not
            // performant, and is only used in testing environments.
            const { // TODO: remove onCacheEntry once onCacheEntryV2 is the default.
            onCacheEntry, onCacheEntryV2 } = createOnCacheEntryHandlers(res);
            // Get the request handler, using the entry handler as the metadata
            // for each request.
            const handler = server.getRequestHandlerWithMetadata({
                // TODO: remove onCacheEntry once onCacheEntryV2 is the default.
                onCacheEntry,
                onCacheEntryV2
            });
            return handler(req, res, parsedUrl);
        };
        upgradeHandler = server.getUpgradeHandler();
    } else {
        requestHandler = server.getRequestHandler();
        upgradeHandler = server.getUpgradeHandler();
    }
    await server.prepare(opts.serverFields);
    return {
        requestHandler,
        upgradeHandler,
        server,
        closeUpgraded () {
            var _opts_bundlerService;
            (_opts_bundlerService = opts.bundlerService) == null ? void 0 : _opts_bundlerService.close();
        },
        distDir: opts.distDir,
        experimentalFeatures: opts.experimentalFeatures,
        cacheComponents: opts.cacheComponents
    };
}
export async function initialize(opts) {
    // if we already set up the server, return early, as we only need to do
    // this on first worker boot
    if (initializations[opts.dir]) {
        return initializations[opts.dir];
    }
    return initializations[opts.dir] = initializeImpl(opts);
}

//# sourceMappingURL=render-server.js.map
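The initialize() wrapper above stores the pending promise returned by initializeImpl() in the per-directory map, so a later (or concurrent) caller for the same project dir awaits the same boot instead of spawning a second server. A minimal sketch of that memoization pattern; bootPromises and bootOnce are illustrative names, not part of this module:

const bootPromises = {};
async function bootOnce(dir, boot) {
    // Caching the promise itself (not the resolved value) means a second
    // caller that arrives mid-boot reuses the in-flight initialization.
    if (!bootPromises[dir]) {
        bootPromises[dir] = boot(dir);
    }
    return bootPromises[dir];
}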
+1
File diff suppressed because one or more lines are too long
+628
@@ -0,0 +1,628 @@
// this must come first as it includes require hooks
// This is required before other imports to ensure the require hook is set up.
import '../node-environment';
import '../require-hook';
import url from 'url';
import path from 'path';
import loadConfig from '../config';
import { serveStatic } from '../serve-static';
import setupDebug from 'next/dist/compiled/debug';
import * as Log from '../../build/output/log';
import { DecodeError } from '../../shared/lib/utils';
import { findPagesDir } from '../../lib/find-pages-dir';
import { setupFsCheck } from './router-utils/filesystem';
import { proxyRequest } from './router-utils/proxy-request';
import { isAbortError, pipeToNodeResponse } from '../pipe-readable';
import { getResolveRoutes } from './router-utils/resolve-routes';
import { addRequestMeta, getRequestMeta } from '../request-meta';
import { pathHasPrefix } from '../../shared/lib/router/utils/path-has-prefix';
import { removePathPrefix } from '../../shared/lib/router/utils/remove-path-prefix';
import setupCompression from 'next/dist/compiled/compression';
import { signalFromNodeResponse } from '../web/spec-extension/adapters/next-request';
import { isPostpone } from './router-utils/is-postpone';
import { parseUrl as parseUrlUtil } from '../../shared/lib/router/utils/parse-url';
import { PHASE_PRODUCTION_SERVER, PHASE_DEVELOPMENT_SERVER, UNDERSCORE_NOT_FOUND_ROUTE } from '../../shared/lib/constants';
import { RedirectStatusCode } from '../../client/components/redirect-status-code';
import { DevBundlerService } from './dev-bundler-service';
import { trace } from '../../trace';
import { ensureLeadingSlash } from '../../shared/lib/page-path/ensure-leading-slash';
import { getNextPathnameInfo } from '../../shared/lib/router/utils/get-next-pathname-info';
import { getHostname } from '../../shared/lib/get-hostname';
import { detectDomainLocale } from '../../shared/lib/i18n/detect-domain-locale';
import { MockedResponse } from './mock-request';
import { HMR_MESSAGE_SENT_TO_BROWSER } from '../dev/hot-reloader-types';
import { normalizedAssetPrefix } from '../../shared/lib/normalized-asset-prefix';
import { NEXT_PATCH_SYMBOL } from './patch-fetch';
import { filterInternalHeaders } from './server-ipc/utils';
import { blockCrossSiteDEV } from './router-utils/block-cross-site-dev';
import { traceGlobals } from '../../trace/shared';
import { NoFallbackError } from '../../shared/lib/no-fallback-error.external';
import { RouterServerContextSymbol, routerServerGlobal } from './router-utils/router-server-context';
import { handleChromeDevtoolsWorkspaceRequest, isChromeDevtoolsWorkspaceUrl } from './chrome-devtools-workspace';
import { getNextConfigRuntime } from '../config-shared';
const debug = setupDebug('next:router-server:main');
const isNextFont = (pathname)=>pathname && /\/media\/[^/]+\.(woff|woff2|eot|ttf|otf)$/.test(pathname);
const requestHandlers = {};
export async function initialize(opts) {
    var _development_bundler, _development_service, _development_service1, _development_service2, _development_service3, _development_service4, _development_bundler1;
    if (!process.env.NODE_ENV) {
        // @ts-ignore not readonly
        process.env.NODE_ENV = opts.dev ? 'development' : 'production';
    }
    let experimentalFeatures = [];
    const config = await loadConfig(opts.dev ? PHASE_DEVELOPMENT_SERVER : PHASE_PRODUCTION_SERVER, opts.dir, {
        silent: false,
        reportExperimentalFeatures (features) {
            experimentalFeatures = features.toSorted(({ key: a }, { key: b })=>a.localeCompare(b));
        }
    });
    let compress;
    if ((config == null ? void 0 : config.compress) !== false) {
        compress = setupCompression();
    }
    const fsChecker = await setupFsCheck({
        dev: opts.dev,
        dir: opts.dir,
        config,
        minimalMode: opts.minimalMode
    });
    const renderServer = {};
    let development = undefined;
    let originalFetch = globalThis.fetch;
    if (opts.dev) {
        const { Telemetry } = require('../../telemetry/storage');
        const telemetry = new Telemetry({
            distDir: path.join(opts.dir, config.distDir)
        });
        traceGlobals.set('telemetry', telemetry);
        const { pagesDir, appDir } = findPagesDir(opts.dir);
        const { setupDevBundler } = require('./router-utils/setup-dev-bundler');
        const resetFetch = ()=>{
            globalThis.fetch = originalFetch;
            globalThis[NEXT_PATCH_SYMBOL] = false;
        };
        const setupDevBundlerSpan = opts.startServerSpan ? opts.startServerSpan.traceChild('setup-dev-bundler') : trace('setup-dev-bundler');
        // In development, it's always the complete config.
        let developmentConfig = config;
        let developmentBundler = await setupDevBundlerSpan.traceAsyncFn(()=>setupDevBundler({
            // Passed here, but the initialization of this object happens below; doing the initialization before the setupDev call breaks.
            renderServer,
            appDir,
            pagesDir,
            telemetry,
            fsChecker,
            dir: opts.dir,
            nextConfig: developmentConfig,
            isCustomServer: opts.customServer,
            turbo: !!process.env.TURBOPACK,
            port: opts.port,
            onDevServerCleanup: opts.onDevServerCleanup,
            resetFetch,
            serverFastRefresh: opts.serverFastRefresh
        }));
        let devBundlerService = new DevBundlerService(developmentBundler, // The request handler is assigned below; this allows us to create a lazy
        // reference to it.
        (req, res)=>{
            return requestHandlers[opts.dir](req, res);
        });
        development = {
            bundler: developmentBundler,
            service: devBundlerService,
            config: developmentConfig
        };
    }
    renderServer.instance = require('./render-server');
    const requestHandlerImpl = async (req, res)=>{
        addRequestMeta(req, 'relativeProjectDir', relativeProjectDir);
        // internal headers should not be honored by the request handler
        if (!process.env.NEXT_PRIVATE_TEST_HEADERS) {
            filterInternalHeaders(req.headers);
        }
        if (!opts.minimalMode && config.i18n && config.i18n.localeDetection !== false) {
            var _this;
            const urlParts = (req.url || '').split('?', 1);
            let urlNoQuery = urlParts[0] || '';
            if (config.basePath) {
                urlNoQuery = removePathPrefix(urlNoQuery, config.basePath);
            }
            const pathnameInfo = getNextPathnameInfo(urlNoQuery, {
                nextConfig: config
            });
            const domainLocale = detectDomainLocale(config.i18n.domains, getHostname({
                hostname: urlNoQuery
            }, req.headers));
            const defaultLocale = (domainLocale == null ? void 0 : domainLocale.defaultLocale) || config.i18n.defaultLocale;
            const { getLocaleRedirect } = require('../../shared/lib/i18n/get-locale-redirect');
            const parsedUrl = parseUrlUtil((_this = req.url || '') == null ? void 0 : _this.replace(/^\/+/, '/'));
            const redirect = getLocaleRedirect({
                defaultLocale,
                domainLocale,
                headers: req.headers,
                nextConfig: config,
                pathLocale: pathnameInfo.locale,
                urlParsed: {
                    ...parsedUrl,
                    pathname: pathnameInfo.locale ? `/${pathnameInfo.locale}${urlNoQuery}` : urlNoQuery
                }
            });
            if (redirect) {
                res.setHeader('Location', redirect);
                res.statusCode = RedirectStatusCode.TemporaryRedirect;
                res.end(redirect);
                return;
            }
        }
        if (compress) {
            // @ts-expect-error not express req/res
            compress(req, res, ()=>{});
        }
        req.on('error', (_err)=>{
            // TODO: log socket errors?
        });
        res.on('error', (_err)=>{
            // TODO: log socket errors?
        });
        const invokedOutputs = new Set();
        async function invokeRender(parsedUrl, invokePath, handleIndex, additionalRequestMeta) {
            var _fsChecker_getMiddlewareMatchers;
            // invokeRender expects /api routes to not be locale prefixed,
            // so normalize here before continuing
            if (config.i18n && removePathPrefix(invokePath, config.basePath).startsWith(`/${getRequestMeta(req, 'locale')}/api`)) {
                invokePath = fsChecker.handleLocale(removePathPrefix(invokePath, config.basePath)).pathname;
            }
            if (req.headers['x-nextjs-data'] && ((_fsChecker_getMiddlewareMatchers = fsChecker.getMiddlewareMatchers()) == null ? void 0 : _fsChecker_getMiddlewareMatchers.length) && removePathPrefix(invokePath, config.basePath) === '/404') {
                res.setHeader('x-nextjs-matched-path', parsedUrl.pathname || '');
                res.statusCode = 404;
                res.setHeader('content-type', 'application/json');
                res.end('{}');
                return null;
            }
            if (!handlers) {
                throw Object.defineProperty(new Error('Failed to initialize render server'), "__NEXT_ERROR_CODE", {
                    value: "E90",
                    enumerable: false,
                    configurable: true
                });
            }
            addRequestMeta(req, 'invokePath', invokePath);
            addRequestMeta(req, 'invokeQuery', parsedUrl.query);
            addRequestMeta(req, 'middlewareInvoke', false);
            for(const key in additionalRequestMeta || {}){
                addRequestMeta(req, key, additionalRequestMeta[key]);
            }
            debug('invokeRender', req.url, req.headers);
            try {
                var _renderServer_instance;
                const initResult = await (renderServer == null ? void 0 : (_renderServer_instance = renderServer.instance) == null ? void 0 : _renderServer_instance.initialize(renderServerOpts));
                try {
                    await (initResult == null ? void 0 : initResult.requestHandler(req, res));
                } catch (err) {
                    if (err instanceof NoFallbackError) {
                        await handleRequest(handleIndex + 1);
                        return;
                    }
                    throw err;
                }
                return;
            } catch (e) {
                // If the client aborts before we can receive a response object (when
                // the headers are flushed), then we can early exit without further
                // processing.
                if (isAbortError(e)) {
                    return;
                }
                throw e;
            }
        }
        const handleRequest = async (handleIndex)=>{
            var _development_bundler;
            if (handleIndex > 5) {
                throw Object.defineProperty(new Error(`Attempted to handle request too many times ${req.url}`), "__NEXT_ERROR_CODE", {
                    value: "E283",
                    enumerable: false,
                    configurable: true
                });
            }
            // handle hot-reloader first
            if (development) {
                if (blockCrossSiteDEV(req, res, development.config.allowedDevOrigins, opts.hostname)) {
                    return;
                }
                const origUrl = req.url || '/';
                // both the basePath and assetPrefix need to be stripped from the URL
                // so that the development bundler can find the correct file
                if (config.basePath && pathHasPrefix(origUrl, config.basePath)) {
                    req.url = removePathPrefix(origUrl, config.basePath);
                } else if (config.assetPrefix && pathHasPrefix(origUrl, config.assetPrefix)) {
                    req.url = removePathPrefix(origUrl, config.assetPrefix);
                }
                const parsedUrl = parseUrlUtil(req.url || '/');
                const hotReloaderResult = await development.bundler.hotReloader.run(req, res, parsedUrl);
                if (hotReloaderResult.finished) {
                    return hotReloaderResult;
                }
                req.url = origUrl;
            }
            const { finished, parsedUrl, statusCode, resHeaders, bodyStream, matchedOutput } = await resolveRoutes({
                req,
                res,
                isUpgradeReq: false,
                signal: signalFromNodeResponse(res),
                invokedOutputs
            });
            if (res.closed || res.finished) {
                return;
            }
            if (development && (matchedOutput == null ? void 0 : matchedOutput.type) === 'devVirtualFsItem') {
                const origUrl = req.url || '/';
                if (config.basePath && pathHasPrefix(origUrl, config.basePath)) {
                    req.url = removePathPrefix(origUrl, config.basePath);
                } else if (config.assetPrefix && pathHasPrefix(origUrl, config.assetPrefix)) {
                    req.url = removePathPrefix(origUrl, config.assetPrefix);
                }
                if (resHeaders !== null) {
                    for (const key of Object.keys(resHeaders)){
                        res.setHeader(key, resHeaders[key]);
                    }
                }
                const result = await development.bundler.requestHandler(req, res);
                if (result.finished) {
                    return;
                }
                // TODO: throw invariant if we resolved to this but it wasn't handled?
                req.url = origUrl;
            }
            debug('requestHandler!', req.url, {
                matchedOutput,
                statusCode,
                resHeaders,
                bodyStream: !!bodyStream,
                parsedUrl: {
                    pathname: parsedUrl.pathname,
                    query: parsedUrl.query
                },
                finished
            });
            // apply any response headers from routing
            if (resHeaders !== null) {
                for (const key of Object.keys(resHeaders)){
                    res.setHeader(key, resHeaders[key]);
                }
            }
            // handle redirect
            if (!bodyStream && statusCode && statusCode > 300 && statusCode < 400) {
                const destination = url.format(parsedUrl);
                res.statusCode = statusCode;
                res.setHeader('location', destination);
                if (statusCode === RedirectStatusCode.PermanentRedirect) {
                    res.setHeader('Refresh', `0;url=${destination}`);
                }
                return res.end(destination);
            }
            // handle middleware body response
            if (bodyStream) {
                res.statusCode = statusCode || 200;
                return await pipeToNodeResponse(bodyStream, res);
            }
            if (finished && parsedUrl.protocol) {
                var _getRequestMeta;
                return await proxyRequest(req, res, parsedUrl, undefined, (_getRequestMeta = getRequestMeta(req, 'clonableBody')) == null ? void 0 : _getRequestMeta.cloneBodyStream(), config.experimental.proxyTimeout);
            }
            if ((matchedOutput == null ? void 0 : matchedOutput.fsPath) && matchedOutput.itemPath) {
                if (opts.dev && (fsChecker.appFiles.has(matchedOutput.itemPath) || fsChecker.pageFiles.has(matchedOutput.itemPath))) {
                    res.statusCode = 500;
                    const message = `A conflicting public file and page file was found for path ${matchedOutput.itemPath} https://nextjs.org/docs/messages/conflicting-public-file-page`;
                    await invokeRender(parsedUrl, '/_error', handleIndex, {
                        invokeStatus: 500,
                        invokeError: Object.defineProperty(new Error(message), "__NEXT_ERROR_CODE", {
                            value: "E212",
                            enumerable: false,
                            configurable: true
                        })
                    });
                    Log.error(message);
                    return;
                }
                if (!res.getHeader('cache-control') && matchedOutput.type === 'nextStaticFolder') {
                    if (opts.dev && !isNextFont(parsedUrl.pathname)) {
                        res.setHeader('Cache-Control', 'no-cache, must-revalidate');
                    } else {
                        res.setHeader('Cache-Control', 'public, max-age=31536000, immutable');
                    }
                }
                if (!(req.method === 'GET' || req.method === 'HEAD')) {
                    res.setHeader('Allow', [
                        'GET',
                        'HEAD'
                    ]);
                    res.statusCode = 405;
                    return await invokeRender(parseUrlUtil('/405'), '/405', handleIndex, {
                        invokeStatus: 405
                    });
                }
                try {
                    return await serveStatic(req, res, matchedOutput.itemPath, {
                        root: matchedOutput.itemsRoot,
                        // Ensures that etags are not generated for static files when disabled.
                        etag: config.generateEtags
                    });
                } catch (err) {
                    /**
                     * Hardcoded every possible error status code that could be thrown by the "serveStatic" method.
                     * This is done by searching for "this.error" inside the "send" module's source code:
                     * https://github.com/pillarjs/send/blob/master/index.js
                     * https://github.com/pillarjs/send/blob/develop/index.js
                     */ const POSSIBLE_ERROR_CODE_FROM_SERVE_STATIC = new Set([
                        // send module will throw 500 when header is already sent or fs.stat error happens
                        // https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L392
                        // Note: we will use Next.js built-in 500 page to handle 500 errors
                        // 500,
                        // send module will throw 404 when file is missing
                        // https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L421
                        // Note: we will use Next.js built-in 404 page to handle 404 errors
                        // 404,
                        // send module will throw 403 when redirecting to a directory without enabling directory listing
                        // https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L484
                        // Note: Next.js throws a different error (without status code) for directory listing
                        // 403,
                        // send module will throw 400 when it fails to normalize the path
                        // https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L520
                        400,
                        // send module will throw 412 with a conditional GET request
                        // https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L632
                        412,
                        // send module will throw 416 when the range is not satisfiable
                        // https://github.com/pillarjs/send/blob/53f0ab476145670a9bdd3dc722ab2fdc8d358fc6/index.js#L669
                        416
                    ]);
                    let validErrorStatus = POSSIBLE_ERROR_CODE_FROM_SERVE_STATIC.has(err.statusCode);
                    // normalize non-allowed status codes
                    if (!validErrorStatus) {
                        err.statusCode = 400;
                    }
                    if (typeof err.statusCode === 'number') {
                        const invokePath = `/${err.statusCode}`;
                        const invokeStatus = err.statusCode;
                        res.statusCode = err.statusCode;
                        return await invokeRender(parseUrlUtil(invokePath), invokePath, handleIndex, {
                            invokeStatus
                        });
                    }
                    throw err;
                }
            }
            if (matchedOutput) {
                invokedOutputs.add(matchedOutput.itemPath);
                return await invokeRender(parsedUrl, parsedUrl.pathname || '/', handleIndex, {
                    invokeOutput: matchedOutput.itemPath
                });
            }
            // We want the original pathname without any basePath or proxy rewrites.
            if (development && isChromeDevtoolsWorkspaceUrl(req.url)) {
                await handleChromeDevtoolsWorkspaceRequest(res, opts, config);
                return;
            }
            // 404 case
            res.setHeader('Cache-Control', 'private, no-cache, no-store, max-age=0, must-revalidate');
            let realRequestPathname = parsedUrl.pathname ?? '';
            if (realRequestPathname) {
                if (config.basePath) {
                    realRequestPathname = removePathPrefix(realRequestPathname, config.basePath);
                }
                if (config.assetPrefix) {
                    realRequestPathname = removePathPrefix(realRequestPathname, config.assetPrefix);
                }
                if (config.i18n) {
                    realRequestPathname = removePathPrefix(realRequestPathname, '/' + (getRequestMeta(req, 'locale') ?? ''));
                }
            }
            // For not-found static assets, return a plain-text 404 instead of
            // a full HTML 404 page to save bandwidth.
            if (realRequestPathname.startsWith('/_next/static/')) {
                res.statusCode = 404;
                res.setHeader('Content-Type', 'text/plain; charset=utf-8');
                res.end('Not Found');
                return null;
            }
            // Short-circuit favicon.ico serving so that the 404 page doesn't get built,
            // since the browser requests favicon.ico when loading any route.
            if (opts.dev && !matchedOutput && parsedUrl.pathname === '/favicon.ico') {
                res.statusCode = 404;
                res.end('');
                return null;
            }
            const appNotFound = opts.dev ? development == null ? void 0 : (_development_bundler = development.bundler) == null ? void 0 : _development_bundler.serverFields.hasAppNotFound : await fsChecker.getItem(UNDERSCORE_NOT_FOUND_ROUTE);
            res.statusCode = 404;
            if (appNotFound) {
                return await invokeRender(parsedUrl, UNDERSCORE_NOT_FOUND_ROUTE, handleIndex, {
                    invokeStatus: 404
                });
            }
            await invokeRender(parsedUrl, '/404', handleIndex, {
                invokeStatus: 404
            });
        };
        try {
            await handleRequest(0);
        } catch (err) {
            try {
                let invokePath = '/500';
                let invokeStatus = '500';
                if (err instanceof DecodeError) {
                    invokePath = '/400';
                    invokeStatus = '400';
                } else {
                    console.error(err);
                }
                res.statusCode = Number(invokeStatus);
                return await invokeRender(parseUrlUtil(invokePath), invokePath, 0, {
                    invokeStatus: res.statusCode
                });
            } catch (err2) {
                console.error(err2);
            }
            res.statusCode = 500;
            res.end('Internal Server Error');
        }
    };
    let requestHandler = requestHandlerImpl;
    if (config.experimental.testProxy) {
        // Intercept fetch and other testmode APIs.
        const { wrapRequestHandlerWorker, interceptTestApis } = // eslint-disable-next-line @next/internal/typechecked-require -- experimental/testmode is not built in next/dist/esm
        require('next/dist/experimental/testmode/server');
        requestHandler = wrapRequestHandlerWorker(requestHandler);
        interceptTestApis();
        // We treat the intercepted fetch as the "original" fetch that HMR resets to.
        originalFetch = globalThis.fetch;
    }
    requestHandlers[opts.dir] = requestHandler;
    const renderServerOpts = {
        port: opts.port,
        dir: opts.dir,
        hostname: opts.hostname,
        minimalMode: opts.minimalMode,
        dev: !!opts.dev,
        server: opts.server,
        serverFields: {
            ...(development == null ? void 0 : (_development_bundler = development.bundler) == null ? void 0 : _development_bundler.serverFields) || {},
            setIsrStatus: development == null ? void 0 : (_development_service = development.service) == null ? void 0 : _development_service.setIsrStatus.bind(development == null ? void 0 : development.service)
        },
        experimentalTestProxy: !!config.experimental.testProxy,
        experimentalHttpsServer: !!opts.experimentalHttpsServer,
        bundlerService: development == null ? void 0 : development.service,
        startServerSpan: opts.startServerSpan,
        quiet: opts.quiet,
        onDevServerCleanup: opts.onDevServerCleanup,
        distDir: config.distDir,
        experimentalFeatures,
        cacheComponents: config.cacheComponents
    };
    renderServerOpts.serverFields.routerServerHandler = requestHandlerImpl;
    // pre-initialize workers
    const handlers = await renderServer.instance.initialize(renderServerOpts);
    // this must come after initialization of the render server since it's
    // using initialized methods
    if (!routerServerGlobal[RouterServerContextSymbol]) {
        routerServerGlobal[RouterServerContextSymbol] = {};
    }
    const relativeProjectDir = path.relative(process.cwd(), opts.dir);
    routerServerGlobal[RouterServerContextSymbol][relativeProjectDir] = {
        nextConfig: getNextConfigRuntime(config),
        hostname: handlers.server.hostname,
        revalidate: handlers.server.revalidate.bind(handlers.server),
        render404: handlers.server.render404.bind(handlers.server),
        experimentalTestProxy: renderServerOpts.experimentalTestProxy,
        logErrorWithOriginalStack: opts.dev ? handlers.server.logErrorWithOriginalStack.bind(handlers.server) : (err)=>!opts.quiet && Log.error(err),
        setCacheStatus: config.cacheComponents ? development == null ? void 0 : (_development_service1 = development.service) == null ? void 0 : _development_service1.setCacheStatus.bind(development == null ? void 0 : development.service) : undefined,
        setIsrStatus: development == null ? void 0 : (_development_service2 = development.service) == null ? void 0 : _development_service2.setIsrStatus.bind(development == null ? void 0 : development.service),
        setReactDebugChannel: (development == null ? void 0 : development.config.experimental.reactDebugChannel) ? development == null ? void 0 : (_development_service3 = development.service) == null ? void 0 : _development_service3.setReactDebugChannel.bind(development == null ? void 0 : development.service) : undefined,
        sendErrorsToBrowser: development == null ? void 0 : (_development_service4 = development.service) == null ? void 0 : _development_service4.sendErrorsToBrowser.bind(development == null ? void 0 : development.service)
    };
    const logError = async (type, err)=>{
        if (isPostpone(err)) {
            // React postpones that are unhandled might end up logged here, but they're
            // not really errors. They're just part of rendering.
            return;
        }
        if (type === 'unhandledRejection') {
            Log.error('unhandledRejection: ', err);
        } else if (type === 'uncaughtException') {
            Log.error('uncaughtException: ', err);
        }
    };
    process.on('uncaughtException', logError.bind(null, 'uncaughtException'));
    process.on('unhandledRejection', logError.bind(null, 'unhandledRejection'));
    const resolveRoutes = getResolveRoutes(fsChecker, config, opts, renderServer.instance, renderServerOpts, development == null ? void 0 : (_development_bundler1 = development.bundler) == null ? void 0 : _development_bundler1.ensureMiddleware);
    const upgradeHandler = async (req, socket, head)=>{
        try {
            req.on('error', (_err)=>{
                // TODO: log socket errors?
                // console.error(_err);
            });
            socket.on('error', (_err)=>{
                // TODO: log socket errors?
                // console.error(_err);
            });
            if (opts.dev && development && req.url) {
                if (blockCrossSiteDEV(req, socket, development.config.allowedDevOrigins, opts.hostname)) {
                    return;
                }
                const { basePath, assetPrefix } = config;
                let hmrPrefix = basePath;
                // assetPrefix overrides basePath for the HMR path
                if (assetPrefix) {
                    hmrPrefix = normalizedAssetPrefix(assetPrefix);
                    if (URL.canParse(hmrPrefix)) {
                        // remove the trailing slash from the pathname,
                        // returning an empty string if the pathname is '/',
                        // to avoid conflicts with '/_next' below
                        hmrPrefix = new URL(hmrPrefix).pathname.replace(/\/$/, '');
                    }
                }
                const isHMRRequest = req.url.startsWith(ensureLeadingSlash(`${hmrPrefix}/_next/webpack-hmr`));
                // only handle HMR requests if the basePath in the request
                // matches the basePath for the handler responding to the request
                if (isHMRRequest) {
                    return development.bundler.hotReloader.onHMR(req, socket, head, (client, { isLegacyClient })=>{
                        if (isLegacyClient) {
                            var _development_service;
                            // Only send the ISR manifest to legacy clients, i.e. Pages
                            // Router clients, or App Router clients that have Cache
                            // Components disabled. The ISR manifest is only used to inform
                            // the static indicator, which currently does not provide useful
                            // information if Cache Components is enabled due to its binary
                            // nature (i.e. it does not support showing info for partially
                            // static pages).
                            client.send(JSON.stringify({
                                type: HMR_MESSAGE_SENT_TO_BROWSER.ISR_MANIFEST,
                                data: ((_development_service = development.service) == null ? void 0 : _development_service.appIsrManifest) || {}
                            }));
                        }
                    });
                }
            }
            const res = new MockedResponse({
                resWriter: ()=>{
                    throw Object.defineProperty(new Error('Invariant: did not expect response writer to be written to for upgrade request'), "__NEXT_ERROR_CODE", {
                        value: "E522",
                        enumerable: false,
                        configurable: true
                    });
                }
            });
            const { matchedOutput, parsedUrl } = await resolveRoutes({
                req,
                res,
                isUpgradeReq: true,
                signal: signalFromNodeResponse(socket)
            });
            // TODO: allow upgrade requests to pages/app paths?
            // this was not previously supported
            if (matchedOutput) {
                return socket.end();
            }
            if (parsedUrl.protocol) {
                return await proxyRequest(req, socket, parsedUrl, head);
            }
            // If there's no matched output, we don't handle the request, as the user's
            // custom WS server may be listening on the same path.
        } catch (err) {
            console.error('Error handling upgrade request', err);
            socket.end();
        }
    };
    return {
        requestHandler,
        upgradeHandler,
        server: handlers.server,
        closeUpgraded () {
            var _development_bundler_hotReloader, _development_bundler;
            development == null ? void 0 : (_development_bundler = development.bundler) == null ? void 0 : (_development_bundler_hotReloader = _development_bundler.hotReloader) == null ? void 0 : _development_bundler_hotReloader.close();
        },
        distDir: config.distDir,
        experimentalFeatures,
        cacheComponents: config.cacheComponents
    };
}

//# sourceMappingURL=router-server.js.map
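handleRequest above guards its own re-entry: invokeRender() calls back into handleRequest(handleIndex + 1) on NoFallbackError, and depths past 5 throw instead of looping. A minimal sketch of that bounded-retry shape, with illustrative names (resolveOnce is hypothetical, not part of this module):

const MAX_HANDLE_DEPTH = 5; // mirrors the `handleIndex > 5` check above
async function handle(req, res, depth, resolveOnce) {
    if (depth > MAX_HANDLE_DEPTH) {
        // Fail loudly rather than bouncing between fallbacks forever.
        throw new Error(`Attempted to handle request too many times ${req.url}`);
    }
    const result = await resolveOnce(req, res);
    // A fallback miss re-enters with an incremented depth; anything else is final.
    return result.retry ? handle(req, res, depth + 1, resolveOnce) : result;
}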
+1
File diff suppressed because one or more lines are too long
+100
@@ -0,0 +1,100 @@
import { parseUrl } from '../../../lib/url';
import { warnOnce } from '../../../build/output/log';
import { isCsrfOriginAllowed } from '../../app-render/csrf-protection';
const allowedDevOriginsDocs = 'https://nextjs.org/docs/app/api-reference/config/next-config-js/allowedDevOrigins';
function getBlockedResourcePath(req) {
    var _parseUrl;
    return ((_parseUrl = parseUrl(req.url ?? '')) == null ? void 0 : _parseUrl.pathname) ?? req.url ?? '/_next/*';
}
function formatBlockedCrossSiteMessage(source, resourcePath) {
    const lines = [
        `Blocked cross-origin request to Next.js dev resource ${resourcePath}${getBlockedSourceDescription(source)}.`,
        'Cross-origin access to Next.js dev resources is blocked by default for safety.'
    ];
    // `source` has 3 meanings here:
    // - `'null'`: the browser explicitly sent `Origin: null` for an opaque/sandboxed origin
    // - hostname string: we parsed an allowlistable host from Origin/Referer
    // - `undefined` (and effectively empty string): the request did not include a usable host
    if (source === 'null') {
        lines.push('', 'This request came from a privacy-sensitive or opaque origin, so Next.js cannot determine which host to allow.', 'If you need it to succeed, load the dev server from a normal origin and add that host to "allowedDevOrigins".');
    } else if (source) {
        lines.push('', 'To allow this host in development, add it to "allowedDevOrigins" in next.config.js and restart the dev server:', '', '// next.config.js', 'module.exports = {', `  allowedDevOrigins: ['${source}'],`, '}');
    } else {
        lines.push('', 'This request did not include an allowlistable source host.', 'If you need it to succeed, make sure the browser sends an Origin or Referer from a host listed in "allowedDevOrigins".');
    }
    lines.push('', `Read more: ${allowedDevOriginsDocs}`);
    return lines.join('\n');
}
function getBlockedSourceDescription(source) {
    if (source === 'null') {
        return ' from a privacy-sensitive or opaque origin';
    }
    if (source) {
        return ` from "${source}"`;
    }
    return ' from an unknown source';
}
function blockRequest(req, res, source) {
    warnOnce(formatBlockedCrossSiteMessage(source, getBlockedResourcePath(req)));
    if ('statusCode' in res) {
        res.statusCode = 403;
    }
    res.end('Unauthorized');
    return true;
}
function parseHostnameFromHeader(header) {
    const headerValue = Array.isArray(header) ? header[0] : header;
    if (!headerValue || headerValue === 'null') {
        return;
    }
    const parsedHeader = parseUrl(headerValue);
    return parsedHeader == null ? void 0 : parsedHeader.hostname.toLowerCase();
}
function isInternalEndpoint(req) {
    if (!req.url) return false;
    try {
        // TODO: We should standardize on a single prefix for this
        const isMiddlewareRequest = req.url.includes('/__nextjs');
        const isInternalAsset = req.url.includes('/_next');
        // Static media requests are excluded, as they might be loaded via CSS and would fail
        // CORS checks.
        const isIgnoredRequest = req.url.includes('/_next/image') || req.url.includes('/_next/static/media');
        return !isIgnoredRequest && (isInternalAsset || isMiddlewareRequest);
    } catch (err) {
        return false;
    }
}
export const blockCrossSiteDEV = (req, res, allowedDevOrigins, hostname)=>{
    const allowedOrigins = [
        '*.localhost',
        'localhost',
        ...allowedDevOrigins ?? []
    ];
    if (hostname) {
        allowedOrigins.push(hostname);
    }
    // only process internal URLs/middleware
    if (!isInternalEndpoint(req)) {
        return false;
    }
    // block non-CORS requests from a cross-site context, e.g. a script tag
    // on a different host
    if (req.headers['sec-fetch-mode'] === 'no-cors' && req.headers['sec-fetch-site'] === 'cross-site') {
        // no-cors requests do not send an Origin header, so fall back to Referer
        // when validating configured cross-site script loads.
        const refererHostname = parseHostnameFromHeader(req.headers['referer']);
        if (refererHostname && isCsrfOriginAllowed(refererHostname, allowedOrigins)) {
            return false;
        }
        return blockRequest(req, res, refererHostname);
    }
    // ensure websocket requests are only fulfilled from an allowed origin
    const rawOrigin = req.headers['origin'];
    const originHeader = Array.isArray(rawOrigin) ? rawOrigin[0] : rawOrigin;
    const parsedOrigin = originHeader && originHeader !== 'null' ? parseUrl(originHeader) : originHeader;
    const originLowerCase = parsedOrigin === undefined || typeof parsedOrigin === 'string' ? parsedOrigin : parsedOrigin.hostname.toLowerCase();
    // Allow requests with no origin, since those are just GET requests from the same site
    return originLowerCase !== undefined && !isCsrfOriginAllowed(originLowerCase, allowedOrigins) && blockRequest(req, res, originLowerCase);
};

//# sourceMappingURL=block-cross-site-dev.js.map
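The remediation that the blocked-request warning above prints is a plain next.config.js change; a sketch (the hostname is an example):

// next.config.js
module.exports = {
    // Hosts allowed to reach /_next/* dev resources cross-origin;
    // '*.localhost', 'localhost', and the server hostname are always allowed.
    allowedDevOrigins: ['dev.example.com'],
};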
+1
File diff suppressed because one or more lines are too long
+33
@@ -0,0 +1,33 @@
import path from '../../../shared/lib/isomorphic/path';
import { normalizePagePath } from '../../../shared/lib/page-path/normalize-page-path';
import { isDynamicRoute } from '../../../shared/lib/router/utils/is-dynamic';
import { getNamedRouteRegex } from '../../../shared/lib/router/utils/route-regex';
import { normalizeRouteRegex } from '../../../lib/load-custom-routes';
import { escapeStringRegexp } from '../../../shared/lib/escape-regexp';
export function buildDataRoute(page, buildId) {
    const pagePath = normalizePagePath(page);
    const dataRoute = path.posix.join('/_next/data', buildId, `${pagePath}.json`);
    let dataRouteRegex;
    let namedDataRouteRegex;
    let routeKeys;
    if (isDynamicRoute(page)) {
        const routeRegex = getNamedRouteRegex(dataRoute, {
            prefixRouteKeys: true,
            includeSuffix: true,
            excludeOptionalTrailingSlash: true
        });
        dataRouteRegex = normalizeRouteRegex(routeRegex.re.source);
        namedDataRouteRegex = routeRegex.namedRegex;
        routeKeys = routeRegex.routeKeys;
    } else {
        dataRouteRegex = normalizeRouteRegex(new RegExp(`^${path.posix.join('/_next/data', escapeStringRegexp(buildId), `${pagePath}\\.json`)}$`).source);
    }
    return {
        page,
        routeKeys,
        dataRouteRegex,
        namedDataRouteRegex
    };
}

//# sourceMappingURL=build-data-route.js.map
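An illustrative call to buildDataRoute above (the exact regex strings are produced by getNamedRouteRegex/normalizeRouteRegex, so only the shape is shown):

import { buildDataRoute } from 'next/dist/server/lib/router-utils/build-data-route';

const route = buildDataRoute('/posts/[id]', 'BUILD_ID');
// route.page           -> '/posts/[id]'
// route.dataRouteRegex -> a regex matching '/_next/data/BUILD_ID/posts/<id>.json'
// For a static page, only the plain dataRouteRegex is set; the named regex
// and routeKeys stay undefined.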
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/router-utils/build-data-route.ts"],"sourcesContent":["import path from '../../../shared/lib/isomorphic/path'\nimport { normalizePagePath } from '../../../shared/lib/page-path/normalize-page-path'\nimport { isDynamicRoute } from '../../../shared/lib/router/utils/is-dynamic'\nimport { getNamedRouteRegex } from '../../../shared/lib/router/utils/route-regex'\nimport { normalizeRouteRegex } from '../../../lib/load-custom-routes'\nimport { escapeStringRegexp } from '../../../shared/lib/escape-regexp'\n\nexport function buildDataRoute(page: string, buildId: string) {\n const pagePath = normalizePagePath(page)\n const dataRoute = path.posix.join('/_next/data', buildId, `${pagePath}.json`)\n\n let dataRouteRegex: string\n let namedDataRouteRegex: string | undefined\n let routeKeys: { [named: string]: string } | undefined\n\n if (isDynamicRoute(page)) {\n const routeRegex = getNamedRouteRegex(dataRoute, {\n prefixRouteKeys: true,\n includeSuffix: true,\n excludeOptionalTrailingSlash: true,\n })\n\n dataRouteRegex = normalizeRouteRegex(routeRegex.re.source)\n namedDataRouteRegex = routeRegex.namedRegex\n routeKeys = routeRegex.routeKeys\n } else {\n dataRouteRegex = normalizeRouteRegex(\n new RegExp(\n `^${path.posix.join(\n '/_next/data',\n escapeStringRegexp(buildId),\n `${pagePath}\\\\.json`\n )}$`\n ).source\n )\n }\n\n return {\n page,\n routeKeys,\n dataRouteRegex,\n namedDataRouteRegex,\n }\n}\n"],"names":["path","normalizePagePath","isDynamicRoute","getNamedRouteRegex","normalizeRouteRegex","escapeStringRegexp","buildDataRoute","page","buildId","pagePath","dataRoute","posix","join","dataRouteRegex","namedDataRouteRegex","routeKeys","routeRegex","prefixRouteKeys","includeSuffix","excludeOptionalTrailingSlash","re","source","namedRegex","RegExp"],"mappings":"AAAA,OAAOA,UAAU,sCAAqC;AACtD,SAASC,iBAAiB,QAAQ,oDAAmD;AACrF,SAASC,cAAc,QAAQ,8CAA6C;AAC5E,SAASC,kBAAkB,QAAQ,+CAA8C;AACjF,SAASC,mBAAmB,QAAQ,kCAAiC;AACrE,SAASC,kBAAkB,QAAQ,oCAAmC;AAEtE,OAAO,SAASC,eAAeC,IAAY,EAAEC,OAAe;IAC1D,MAAMC,WAAWR,kBAAkBM;IACnC,MAAMG,YAAYV,KAAKW,KAAK,CAACC,IAAI,CAAC,eAAeJ,SAAS,GAAGC,SAAS,KAAK,CAAC;IAE5E,IAAII;IACJ,IAAIC;IACJ,IAAIC;IAEJ,IAAIb,eAAeK,OAAO;QACxB,MAAMS,aAAab,mBAAmBO,WAAW;YAC/CO,iBAAiB;YACjBC,eAAe;YACfC,8BAA8B;QAChC;QAEAN,iBAAiBT,oBAAoBY,WAAWI,EAAE,CAACC,MAAM;QACzDP,sBAAsBE,WAAWM,UAAU;QAC3CP,YAAYC,WAAWD,SAAS;IAClC,OAAO;QACLF,iBAAiBT,oBACf,IAAImB,OACF,CAAC,CAAC,EAAEvB,KAAKW,KAAK,CAACC,IAAI,CACjB,eACAP,mBAAmBG,UACnB,GAAGC,SAAS,OAAO,CAAC,EACpB,CAAC,CAAC,EACJY,MAAM;IAEZ;IAEA,OAAO;QACLd;QACAQ;QACAF;QACAC;IACF;AACF","ignoreList":[0]}
+23
@@ -0,0 +1,23 @@
import path from '../../../shared/lib/isomorphic/path';
import { normalizePagePath } from '../../../shared/lib/page-path/normalize-page-path';
import { getNamedRouteRegex } from '../../../shared/lib/router/utils/route-regex';
import { RSC_SEGMENT_SUFFIX, RSC_SEGMENTS_DIR_SUFFIX } from '../../../lib/constants';
export const SEGMENT_PATH_KEY = 'nextSegmentPath';
export function buildPrefetchSegmentDataRoute(page, segmentPath) {
    const pagePath = normalizePagePath(page);
    const destination = path.posix.join(`${pagePath}${RSC_SEGMENTS_DIR_SUFFIX}`, `${segmentPath}${RSC_SEGMENT_SUFFIX}`);
    const { namedRegex, routeKeys } = getNamedRouteRegex(destination, {
        prefixRouteKeys: true,
        includePrefix: true,
        includeSuffix: true,
        excludeOptionalTrailingSlash: true,
        backreferenceDuplicateKeys: true
    });
    return {
        destination,
        source: namedRegex,
        routeKeys
    };
}

//# sourceMappingURL=build-prefetch-segment-data-route.js.map
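A sketch of the destination buildPrefetchSegmentDataRoute composes (the literal suffix values live in lib/constants; placeholders are shown here):

// For page '/blog/[slug]' and segmentPath '/children':
// destination -> '/blog/[slug]' + RSC_SEGMENTS_DIR_SUFFIX + '/children' + RSC_SEGMENT_SUFFIX
// source      -> the named regex getNamedRouteRegex derives from that destination
const { destination, source, routeKeys } = buildPrefetchSegmentDataRoute('/blog/[slug]', '/children');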
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/router-utils/build-prefetch-segment-data-route.ts"],"sourcesContent":["import path from '../../../shared/lib/isomorphic/path'\nimport { normalizePagePath } from '../../../shared/lib/page-path/normalize-page-path'\nimport { getNamedRouteRegex } from '../../../shared/lib/router/utils/route-regex'\nimport {\n RSC_SEGMENT_SUFFIX,\n RSC_SEGMENTS_DIR_SUFFIX,\n} from '../../../lib/constants'\n\nexport const SEGMENT_PATH_KEY = 'nextSegmentPath'\n\nexport type PrefetchSegmentDataRoute = {\n source: string\n destination: string\n routeKeys: { [key: string]: string }\n}\n\nexport function buildPrefetchSegmentDataRoute(\n page: string,\n segmentPath: string\n): PrefetchSegmentDataRoute {\n const pagePath = normalizePagePath(page)\n\n const destination = path.posix.join(\n `${pagePath}${RSC_SEGMENTS_DIR_SUFFIX}`,\n `${segmentPath}${RSC_SEGMENT_SUFFIX}`\n )\n\n const { namedRegex, routeKeys } = getNamedRouteRegex(destination, {\n prefixRouteKeys: true,\n includePrefix: true,\n includeSuffix: true,\n excludeOptionalTrailingSlash: true,\n backreferenceDuplicateKeys: true,\n })\n\n return {\n destination,\n source: namedRegex,\n routeKeys,\n }\n}\n"],"names":["path","normalizePagePath","getNamedRouteRegex","RSC_SEGMENT_SUFFIX","RSC_SEGMENTS_DIR_SUFFIX","SEGMENT_PATH_KEY","buildPrefetchSegmentDataRoute","page","segmentPath","pagePath","destination","posix","join","namedRegex","routeKeys","prefixRouteKeys","includePrefix","includeSuffix","excludeOptionalTrailingSlash","backreferenceDuplicateKeys","source"],"mappings":"AAAA,OAAOA,UAAU,sCAAqC;AACtD,SAASC,iBAAiB,QAAQ,oDAAmD;AACrF,SAASC,kBAAkB,QAAQ,+CAA8C;AACjF,SACEC,kBAAkB,EAClBC,uBAAuB,QAClB,yBAAwB;AAE/B,OAAO,MAAMC,mBAAmB,kBAAiB;AAQjD,OAAO,SAASC,8BACdC,IAAY,EACZC,WAAmB;IAEnB,MAAMC,WAAWR,kBAAkBM;IAEnC,MAAMG,cAAcV,KAAKW,KAAK,CAACC,IAAI,CACjC,GAAGH,WAAWL,yBAAyB,EACvC,GAAGI,cAAcL,oBAAoB;IAGvC,MAAM,EAAEU,UAAU,EAAEC,SAAS,EAAE,GAAGZ,mBAAmBQ,aAAa;QAChEK,iBAAiB;QACjBC,eAAe;QACfC,eAAe;QACfC,8BAA8B;QAC9BC,4BAA4B;IAC9B;IAEA,OAAO;QACLT;QACAU,QAAQP;QACRC;IACF;AACF","ignoreList":[0]}
+192
@@ -0,0 +1,192 @@
import fs from 'fs';
import path from 'path';
const MINUTE_IN_SECONDS = 60;
const HOUR_IN_SECONDS = MINUTE_IN_SECONDS * 60; // nevermind leap seconds
const DAY_IN_SECONDS = HOUR_IN_SECONDS * 24;
const WEEK_IN_SECONDS = DAY_IN_SECONDS * 7;
// Nevermind leap years, or you know, July
const MONTH_30_DAYS_IN_SECONDS = DAY_IN_SECONDS * 30;
function formatTimespan(seconds) {
    if (seconds > 0) {
        if (seconds === MONTH_30_DAYS_IN_SECONDS) {
            return '1 month';
        }
        if (seconds === WEEK_IN_SECONDS) {
            return '1 week';
        }
        if (seconds === DAY_IN_SECONDS) {
            return '1 day';
        }
        if (seconds === HOUR_IN_SECONDS) {
            return '1 hour';
        }
        if (seconds === MINUTE_IN_SECONDS) {
            return '1 minute';
        }
        if (seconds % MONTH_30_DAYS_IN_SECONDS === 0) {
            return seconds / MONTH_30_DAYS_IN_SECONDS + ' months';
        }
        if (seconds % 18144000 === 0) {
            return seconds / 18144000 + ' months';
        }
        if (seconds % WEEK_IN_SECONDS === 0) {
            return seconds / WEEK_IN_SECONDS + ' weeks';
        }
        if (seconds % DAY_IN_SECONDS === 0) {
            return seconds / DAY_IN_SECONDS + ' days';
        }
        if (seconds % HOUR_IN_SECONDS === 0) {
            return seconds / HOUR_IN_SECONDS + ' hours';
        }
        if (seconds % MINUTE_IN_SECONDS === 0) {
            return seconds / MINUTE_IN_SECONDS + ' minutes';
        }
    }
    return seconds + ' seconds';
}
function formatTimespanWithSeconds(seconds) {
    if (seconds === undefined) {
        return 'default';
    }
    if (seconds >= 0xfffffffe) {
        return 'never';
    }
    const text = seconds + ' seconds';
    const descriptive = formatTimespan(seconds);
    if (descriptive === text) {
        return text;
    }
    return text + ' (' + descriptive + ')';
}
/**
 * Generates TypeScript type definitions for custom cacheLife profiles.
 * This creates overloaded function signatures for the cacheLife() function
 * that provide autocomplete and documentation for each profile.
 */ export function generateCacheLifeTypes(cacheLife) {
    let overloads = '';
    const profileNames = Object.keys(cacheLife);
    for(let i = 0; i < profileNames.length; i++){
        const profileName = profileNames[i];
        const profile = cacheLife[profileName];
        if (typeof profile !== 'object' || profile === null) {
            continue;
        }
        let description = '';
        if (profile.stale === undefined) {
            description += `
 * This cache may be stale on clients for the default stale time of the scope before checking with the server.`;
        } else if (profile.stale >= 0xfffffffe) {
            description += `
 * This cache may be stale on clients indefinitely before checking with the server.`;
        } else {
            description += `
 * This cache may be stale on clients for ${formatTimespan(profile.stale)} before checking with the server.`;
        }
        if (profile.revalidate !== undefined && profile.expire !== undefined && profile.revalidate >= profile.expire) {
            description += `
 * This cache will expire after ${formatTimespan(profile.expire)}. The next request will recompute it.`;
        } else {
            if (profile.revalidate === undefined) {
                description += `
 * It will inherit the default revalidate time of its scope since it does not define its own.`;
            } else if (profile.revalidate >= 0xfffffffe) {
                // Nothing to mention.
            } else {
                description += `
 * If the server receives a new request after ${formatTimespan(profile.revalidate)}, start revalidating new values in the background.`;
            }
            if (profile.expire === undefined) {
                description += `
 * It will inherit the default expiration time of its scope since it does not define its own.`;
            } else if (profile.expire >= 0xfffffffe) {
                description += `
 * It lives for the maximum age of the server cache. If this entry has no traffic for a while, it may serve an old value the next request.`;
            } else {
                description += `
 * If this entry has no traffic for ${formatTimespan(profile.expire)} it will expire. The next request will recompute it.`;
            }
        }
        overloads += `
/**
 * Cache this \`"use cache"\` for a timespan defined by the \`${JSON.stringify(profileName)}\` profile.
 * \`\`\`
 * stale: ${formatTimespanWithSeconds(profile.stale)}
 * revalidate: ${formatTimespanWithSeconds(profile.revalidate)}
 * expire: ${formatTimespanWithSeconds(profile.expire)}
 * \`\`\`
 * ${description}
 */
export function cacheLife(profile: ${JSON.stringify(profileName)}): void
`;
    }
    overloads += `
/**
 * Cache this \`"use cache"\` using a custom timespan.
 * \`\`\`
 * stale: ... // seconds
 * revalidate: ... // seconds
 * expire: ... // seconds
 * \`\`\`
 *
 * This is similar to Cache-Control: max-age=\`stale\`,s-max-age=\`revalidate\`,stale-while-revalidate=\`expire-revalidate\`
 *
 * If a value is left out, the lowest of other cacheLife() calls or the default is used instead.
 */
export function cacheLife(profile: {
  /**
   * This cache may be stale on clients for ... seconds before checking with the server.
   */
  stale?: number,
  /**
   * If the server receives a new request after ... seconds, start revalidating new values in the background.
   */
  revalidate?: number,
  /**
   * If this entry has no traffic for ... seconds it will expire. The next request will recompute it.
   */
  expire?: number
}): void
`;
    // Redefine the cacheLife() accepted arguments.
    return `// Type definitions for Next.js cacheLife configs

declare module 'next/cache' {
  export { unstable_cache } from 'next/dist/server/web/spec-extension/unstable-cache'
  export {
    updateTag,
    revalidateTag,
    revalidatePath,
    refresh,
  } from 'next/dist/server/web/spec-extension/revalidate'
  export { unstable_noStore } from 'next/dist/server/web/spec-extension/unstable-no-store'

${overloads}

  import { cacheTag } from 'next/dist/server/use-cache/cache-tag'
  export { cacheTag }

  export const unstable_cacheTag: typeof cacheTag
  export const unstable_cacheLife: typeof cacheLife
}
`;
}
/**
 * Writes cache-life type definitions to a file if cacheLifeConfig exists.
 * This is used by both the CLI (next type-gen) and the dev server to generate
 * cache-life.d.ts in the types directory.
 */ export function writeCacheLifeTypes(cacheLifeConfig, filePath) {
    if (!cacheLifeConfig || Object.keys(cacheLifeConfig).length === 0) {
        return;
    }
    const dirname = path.dirname(filePath);
    if (!fs.existsSync(dirname)) {
        fs.mkdirSync(dirname, {
            recursive: true
        });
    }
    const content = generateCacheLifeTypes(cacheLifeConfig);
    fs.writeFileSync(filePath, content);
}

//# sourceMappingURL=cache-life-type-utils.js.map
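generateCacheLifeTypes() above is driven by the cacheLife profile map from the resolved Next.js config. A minimal config sketch (the profile name and numbers are examples; depending on the Next.js version this map may sit at the top level or under experimental):

// next.config.js
module.exports = {
    cacheLife: {
        // Emits a `cacheLife('blog')` overload whose JSDoc describes
        // these three timespans via formatTimespanWithSeconds().
        blog: {
            stale: 3600, // 1 hour
            revalidate: 86400, // 1 day
            expire: 604800 // 1 week
        }
    }
};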
+1
File diff suppressed because one or more lines are too long
+28
@@ -0,0 +1,28 @@
import escapePathDelimiters from '../../../shared/lib/router/utils/escape-path-delimiters';
import { DecodeError } from '../../../shared/lib/utils';
/**
 * We only encode path delimiters for path segments from
 * getStaticPaths, so we need to attempt decoding the URL
 * to match against and only escape the path delimiters.
 * This allows non-ASCII values to be handled, e.g.
 * Japanese characters.
 */ function decodePathParams(pathname) {
    // TODO: investigate adding this handling for non-SSG
    // pages so non-ASCII names also work there.
    return pathname.split('/').map((seg)=>{
        try {
            seg = escapePathDelimiters(decodeURIComponent(seg), true);
        } catch (_) {
            // An improperly encoded URL was provided
            throw Object.defineProperty(new DecodeError('Failed to decode path param(s).'), "__NEXT_ERROR_CODE", {
                value: "E539",
                enumerable: false,
                configurable: true
            });
        }
        return seg;
    }).join('/');
}
export { decodePathParams };

//# sourceMappingURL=decode-path-params.js.map
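A usage sketch of decodePathParams above (the decoded output is illustrative):

import { decodePathParams } from 'next/dist/server/lib/router-utils/decode-path-params';

// Percent-encoded non-ASCII segments are decoded; path delimiters stay escaped.
decodePathParams('/posts/%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF'); // '/posts/こんにちは'
// A malformed escape sequence throws DecodeError, which the router server
// above maps to a 400 response.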
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/router-utils/decode-path-params.ts"],"sourcesContent":["import escapePathDelimiters from '../../../shared/lib/router/utils/escape-path-delimiters'\nimport { DecodeError } from '../../../shared/lib/utils'\n\n/**\n * We only encode path delimiters for path segments from\n * getStaticPaths so we need to attempt decoding the URL\n * to match against and only escape the path delimiters\n * this allows non-ascii values to be handled e.g.\n * Japanese characters.\n * */\nfunction decodePathParams(pathname: string): string {\n // TODO: investigate adding this handling for non-SSG\n // pages so non-ascii names also work there.\n return pathname\n .split('/')\n .map((seg) => {\n try {\n seg = escapePathDelimiters(decodeURIComponent(seg), true)\n } catch (_) {\n // An improperly encoded URL was provided\n throw new DecodeError('Failed to decode path param(s).')\n }\n return seg\n })\n .join('/')\n}\n\nexport { decodePathParams }\n"],"names":["escapePathDelimiters","DecodeError","decodePathParams","pathname","split","map","seg","decodeURIComponent","_","join"],"mappings":"AAAA,OAAOA,0BAA0B,0DAAyD;AAC1F,SAASC,WAAW,QAAQ,4BAA2B;AAEvD;;;;;;GAMG,GACH,SAASC,iBAAiBC,QAAgB;IACxC,qDAAqD;IACrD,4CAA4C;IAC5C,OAAOA,SACJC,KAAK,CAAC,KACNC,GAAG,CAAC,CAACC;QACJ,IAAI;YACFA,MAAMN,qBAAqBO,mBAAmBD,MAAM;QACtD,EAAE,OAAOE,GAAG;YACV,yCAAyC;YACzC,MAAM,qBAAkD,CAAlD,IAAIP,YAAY,oCAAhB,qBAAA;uBAAA;4BAAA;8BAAA;YAAiD;QACzD;QACA,OAAOK;IACT,GACCG,IAAI,CAAC;AACV;AAEA,SAASP,gBAAgB,GAAE","ignoreList":[0]}
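A quick sketch of decodePathParams behavior (illustrative inputs):

import { decodePathParams } from './decode-path-params';

// Non-ASCII segments decode cleanly; only path delimiters are re-escaped:
decodePathParams('/blog/%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF');
// -> '/blog/こんにちは'

// A malformed escape sequence surfaces as a DecodeError with code E539:
decodePathParams('/blog/%E0%A4%A');
// -> throws DecodeError: Failed to decode path param(s).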
+523
@@ -0,0 +1,523 @@
import path from 'path';
import fs from 'fs/promises';
import * as Log from '../../../build/output/log';
import setupDebug from 'next/dist/compiled/debug';
import { LRUCache } from '../lru-cache';
import loadCustomRoutes from '../../../lib/load-custom-routes';
import { modifyRouteRegex } from '../../../lib/redirect-status';
import { FileType, fileExists } from '../../../lib/file-exists';
import { recursiveReadDir } from '../../../lib/recursive-readdir';
import { isDynamicRoute } from '../../../shared/lib/router/utils';
import { escapeStringRegexp } from '../../../shared/lib/escape-regexp';
import { getPathMatch } from '../../../shared/lib/router/utils/path-match';
import { getNamedRouteRegex, getRouteRegex } from '../../../shared/lib/router/utils/route-regex';
import { getRouteMatcher } from '../../../shared/lib/router/utils/route-matcher';
import { pathHasPrefix } from '../../../shared/lib/router/utils/path-has-prefix';
import { normalizeLocalePath } from '../../../shared/lib/i18n/normalize-locale-path';
import { removePathPrefix } from '../../../shared/lib/router/utils/remove-path-prefix';
import { getMiddlewareRouteMatcher } from '../../../shared/lib/router/utils/middleware-route-matcher';
import { APP_PATH_ROUTES_MANIFEST, BUILD_ID_FILE, FUNCTIONS_CONFIG_MANIFEST, MIDDLEWARE_MANIFEST, PAGES_MANIFEST, PRERENDER_MANIFEST, ROUTES_MANIFEST } from '../../../shared/lib/constants';
import { normalizePathSep } from '../../../shared/lib/page-path/normalize-path-sep';
import { normalizeMetadataRoute } from '../../../lib/metadata/get-metadata-route';
import { RSCPathnameNormalizer } from '../../normalizers/request/rsc';
import { encodeURIPath } from '../../../shared/lib/encode-uri-path';
import { isMetadataRouteFile } from '../../../lib/metadata/is-metadata-route';
const debug = setupDebug('next:router-server:filesystem');
export const buildCustomRoute = (type, item, basePath, caseSensitive)=>{
    const restrictedRedirectPaths = [
        '/_next'
    ].map((p)=>basePath ? `${basePath}${p}` : p);
    let builtRegex = '';
    const match = getPathMatch(item.source, {
        strict: true,
        removeUnnamedParams: true,
        regexModifier: (regex)=>{
            if (!item.internal) {
                regex = modifyRouteRegex(regex, type === 'redirect' ? restrictedRedirectPaths : undefined);
            }
            builtRegex = regex;
            return builtRegex;
        },
        sensitive: caseSensitive
    });
    return {
        ...item,
        regex: builtRegex,
        ...type === 'rewrite' ? {
            check: true
        } : {},
        match
    };
};
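// Illustrative note (not part of the generated file): for a redirect entry,
// buildCustomRoute returns the original item plus the compiled regex string
// and a matcher, e.g. with hypothetical values:
//
//   const route = buildCustomRoute(
//     'redirect',
//     { source: '/old/:slug', destination: '/new/:slug', permanent: true },
//     undefined, // basePath
//     false      // caseSensitive
//   );
//   route.match('/old/hello'); // -> { slug: 'hello' }
//   route.match('/elsewhere'); // -> false
//   // 'rewrite' items additionally get check: true spread into the result.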
export async function setupFsCheck(opts) {
    const getItemsLru = !opts.dev ? new LRUCache(1024 * 1024, function length(value) {
        if (!value) {
            // Null entries (negative cache) still need a non-zero size for LRU eviction
            return 1;
        }
        return (value.fsPath || '').length + value.itemPath.length + value.type.length;
    }) : undefined;
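    // Illustrative note (not part of the generated file): the LRU budget is
    // measured in characters rather than entries, so 1024 * 1024 is roughly a
    // million cached path characters. Negative lookups are cached as null and
    // sized as 1 by the length function above so they remain evictable:
    //
    //   getItemsLru?.set('/missing', null); // occupies size 1
    //   getItemsLru?.set('/about', { type: 'pageFile', itemPath: '/about' });
    //   // occupies '/about'.length + 'pageFile'.length (fsPath absent)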
    // routes that have _next/data endpoints (SSG/SSP)
    const nextDataRoutes = new Set();
    const publicFolderItems = new Set();
    const nextStaticFolderItems = new Set();
    const legacyStaticFolderItems = new Set();
    const appFiles = new Set();
    const pageFiles = new Set();
    // Map normalized path to the file path. This is essential
    // for parallel and group routes as their original path
    // cannot be restored from the request path.
    // Example:
    // [normalized-path] -> [file-path]
    // /icon-<hash>.png -> .../app/@parallel/icon.png
    // /icon-<hash>.png -> .../app/(group)/icon.png
    // /icon.png -> .../app/icon.png
    const staticMetadataFiles = new Map();
    let dynamicRoutes = [];
    let middlewareMatcher = ()=>false;
    const distDir = path.join(opts.dir, opts.config.distDir);
    const publicFolderPath = path.join(opts.dir, 'public');
    const nextStaticFolderPath = path.join(distDir, 'static');
    const legacyStaticFolderPath = path.join(opts.dir, 'static');
    let customRoutes = {
        redirects: [],
        rewrites: {
            beforeFiles: [],
            afterFiles: [],
            fallback: []
        },
        onMatchHeaders: [],
        headers: []
    };
    let buildId = 'development';
    let previewProps;
    if (!opts.dev) {
        var _middlewareManifest_middleware_, _middlewareManifest_middleware;
        const buildIdPath = path.join(opts.dir, opts.config.distDir, BUILD_ID_FILE);
        try {
            buildId = await fs.readFile(buildIdPath, 'utf8');
        } catch (err) {
            if (err.code !== 'ENOENT') throw err;
            throw Object.defineProperty(new Error(`Could not find a production build in the '${opts.config.distDir}' directory. Try building your app with 'next build' before starting the production server. https://nextjs.org/docs/messages/production-start-no-build-id`), "__NEXT_ERROR_CODE", {
                value: "E427",
                enumerable: false,
                configurable: true
            });
        }
        try {
            for (const file of (await recursiveReadDir(publicFolderPath))){
                // Ensure filename is encoded and normalized.
                publicFolderItems.add(encodeURIPath(normalizePathSep(file)));
            }
        } catch (err) {
            if (err.code !== 'ENOENT') {
                throw err;
            }
        }
        try {
            for (const file of (await recursiveReadDir(legacyStaticFolderPath))){
                // Ensure filename is encoded and normalized.
                legacyStaticFolderItems.add(encodeURIPath(normalizePathSep(file)));
            }
            Log.warn(`The static directory has been deprecated in favor of the public directory. https://nextjs.org/docs/messages/static-dir-deprecated`);
        } catch (err) {
            if (err.code !== 'ENOENT') {
                throw err;
            }
        }
        try {
            for (const file of (await recursiveReadDir(nextStaticFolderPath))){
                // Ensure filename is encoded and normalized.
                nextStaticFolderItems.add(path.posix.join('/_next/static', encodeURIPath(normalizePathSep(file))));
            }
        } catch (err) {
            if (opts.config.output !== 'standalone') throw err;
        }
        const routesManifestPath = path.join(distDir, ROUTES_MANIFEST);
        const prerenderManifestPath = path.join(distDir, PRERENDER_MANIFEST);
        const middlewareManifestPath = path.join(distDir, 'server', MIDDLEWARE_MANIFEST);
        const functionsConfigManifestPath = path.join(distDir, 'server', FUNCTIONS_CONFIG_MANIFEST);
        const pagesManifestPath = path.join(distDir, 'server', PAGES_MANIFEST);
        const appRoutesManifestPath = path.join(distDir, APP_PATH_ROUTES_MANIFEST);
        const routesManifest = JSON.parse(await fs.readFile(routesManifestPath, 'utf8'));
        previewProps = JSON.parse(await fs.readFile(prerenderManifestPath, 'utf8')).preview;
        const middlewareManifest = JSON.parse(await fs.readFile(middlewareManifestPath, 'utf8').catch(()=>'{}'));
        const functionsConfigManifest = JSON.parse(await fs.readFile(functionsConfigManifestPath, 'utf8').catch(()=>'{}'));
        const pagesManifest = JSON.parse(await fs.readFile(pagesManifestPath, 'utf8'));
        const appRoutesManifest = JSON.parse(await fs.readFile(appRoutesManifestPath, 'utf8').catch(()=>'{}'));
        for (const key of Object.keys(pagesManifest)){
            // ensure the non-locale version is in the set
            if (opts.config.i18n) {
                pageFiles.add(normalizeLocalePath(key, opts.config.i18n.locales).pathname);
            } else {
                pageFiles.add(key);
            }
        }
        for (const key of Object.keys(appRoutesManifest)){
            appFiles.add(appRoutesManifest[key]);
        }
        const escapedBuildId = escapeStringRegexp(buildId);
        for (const route of routesManifest.dataRoutes){
            if (isDynamicRoute(route.page)) {
                const routeRegex = getNamedRouteRegex(route.page, {
                    prefixRouteKeys: true
                });
                dynamicRoutes.push({
                    ...route,
                    regex: routeRegex.re.toString(),
                    namedRegex: routeRegex.namedRegex,
                    routeKeys: routeRegex.routeKeys,
                    match: getRouteMatcher({
                        // TODO: fix this in the manifest itself, must also be fixed in
                        // upstream builder that relies on this
                        re: opts.config.i18n ? new RegExp(route.dataRouteRegex.replace(`/${escapedBuildId}/`, `/${escapedBuildId}/(?<nextLocale>[^/]+?)/`)) : new RegExp(route.dataRouteRegex),
                        groups: routeRegex.groups
                    })
                });
            }
            nextDataRoutes.add(route.page);
        }
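        // Illustrative note (not part of the generated file): with i18n
        // enabled, the replace above turns a data route regex such as
        //   ^/_next/data/<buildId>/blog/([^/]+?)\.json$
        // into
        //   ^/_next/data/<buildId>/(?<nextLocale>[^/]+?)/blog/([^/]+?)\.json$
        // so /_next/data/<buildId>/fr/blog/hello.json matches with
        // nextLocale = 'fr' (hypothetical route and build id).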
        for (const route of routesManifest.dynamicRoutes){
            // If a route is marked as skipInternalRouting, it's not for the internal
            // router, and instead has been added to support external routers.
            if (route.skipInternalRouting) {
                continue;
            }
            dynamicRoutes.push({
                ...route,
                match: getRouteMatcher(getRouteRegex(route.page))
            });
        }
        if ((_middlewareManifest_middleware = middlewareManifest.middleware) == null ? void 0 : (_middlewareManifest_middleware_ = _middlewareManifest_middleware['/']) == null ? void 0 : _middlewareManifest_middleware_.matchers) {
            var _middlewareManifest_middleware_1, _middlewareManifest_middleware1;
            middlewareMatcher = getMiddlewareRouteMatcher((_middlewareManifest_middleware1 = middlewareManifest.middleware) == null ? void 0 : (_middlewareManifest_middleware_1 = _middlewareManifest_middleware1['/']) == null ? void 0 : _middlewareManifest_middleware_1.matchers);
        } else if (functionsConfigManifest == null ? void 0 : functionsConfigManifest.functions['/_middleware']) {
            middlewareMatcher = getMiddlewareRouteMatcher(functionsConfigManifest.functions['/_middleware'].matchers ?? [
                {
                    regexp: '.*',
                    originalSource: '/:path*'
                }
            ]);
        }
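        // Illustrative note (not part of the generated file): when the
        // functions-config manifest lists middleware without explicit matchers,
        // the fallback entry { regexp: '.*', originalSource: '/:path*' } above
        // makes middlewareMatcher return true for every request path.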
        customRoutes = {
            redirects: routesManifest.redirects,
            rewrites: routesManifest.rewrites ? Array.isArray(routesManifest.rewrites) ? {
                beforeFiles: [],
                afterFiles: routesManifest.rewrites,
                fallback: []
            } : routesManifest.rewrites : {
                beforeFiles: [],
                afterFiles: [],
                fallback: []
            },
            headers: routesManifest.headers,
            onMatchHeaders: routesManifest.onMatchHeaders
        };
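        // Illustrative note (not part of the generated file): older manifests
        // store rewrites as a bare array, which the normalization above maps
        // into the three-phase shape, e.g.
        //   [{ source: '/a', destination: '/b' }]
        // becomes
        //   { beforeFiles: [], afterFiles: [{ source: '/a', destination: '/b' }], fallback: [] }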
    } else {
        // dev handling
        customRoutes = await loadCustomRoutes(opts.config);
        previewProps = {
            previewModeId: require('crypto').randomBytes(16).toString('hex'),
            previewModeSigningKey: require('crypto').randomBytes(32).toString('hex'),
            previewModeEncryptionKey: require('crypto').randomBytes(32).toString('hex')
        };
    }
    const headers = customRoutes.headers.map((item)=>buildCustomRoute('header', item, opts.config.basePath, opts.config.experimental.caseSensitiveRoutes));
    const onMatchHeaders = customRoutes.onMatchHeaders.map((item)=>buildCustomRoute('header', item, opts.config.basePath, opts.config.experimental.caseSensitiveRoutes));
    const redirects = customRoutes.redirects.map((item)=>buildCustomRoute('redirect', item, opts.config.basePath, opts.config.experimental.caseSensitiveRoutes));
    const rewrites = {
        beforeFiles: customRoutes.rewrites.beforeFiles.map((item)=>buildCustomRoute('before_files_rewrite', item)),
        afterFiles: customRoutes.rewrites.afterFiles.map((item)=>buildCustomRoute('rewrite', item, opts.config.basePath, opts.config.experimental.caseSensitiveRoutes)),
        fallback: customRoutes.rewrites.fallback.map((item)=>buildCustomRoute('rewrite', item, opts.config.basePath, opts.config.experimental.caseSensitiveRoutes))
    };
    const { i18n } = opts.config;
    const handleLocale = (pathname, locales)=>{
        let locale;
        if (i18n) {
            const i18nResult = normalizeLocalePath(pathname, locales || i18n.locales);
            pathname = i18nResult.pathname;
            locale = i18nResult.detectedLocale;
        }
        return {
            locale,
            pathname
        };
    };
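    // Illustrative note (not part of the generated file), assuming
    // i18n.locales = ['en', 'fr'] (hypothetical config):
    //   handleLocale('/fr/about'); // -> { locale: 'fr', pathname: '/about' }
    //   handleLocale('/about');    // -> { locale: undefined, pathname: '/about' }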
    debug('nextDataRoutes', nextDataRoutes);
    debug('dynamicRoutes', dynamicRoutes);
    debug('customRoutes', customRoutes);
    debug('publicFolderItems', publicFolderItems);
    debug('nextStaticFolderItems', nextStaticFolderItems);
    debug('pageFiles', pageFiles);
    debug('appFiles', appFiles);
    let ensureFn;
    const normalizers = {
        // Because we can't know if the app directory is enabled or not at this
        // stage, we assume that it is.
        rsc: new RSCPathnameNormalizer()
    };
    return {
        headers,
        onMatchHeaders,
        rewrites,
        redirects,
        buildId,
        handleLocale,
        appFiles,
        pageFiles,
        staticMetadataFiles,
        dynamicRoutes,
        nextDataRoutes,
        exportPathMapRoutes: undefined,
        devVirtualFsItems: new Set(),
        previewProps,
        middlewareMatcher: middlewareMatcher,
        ensureCallback (fn) {
            ensureFn = fn;
        },
        async getItem (itemPath) {
            const originalItemPath = itemPath;
            const itemKey = originalItemPath;
            const lruResult = getItemsLru == null ? void 0 : getItemsLru.get(itemKey);
            if (lruResult) {
                return lruResult;
            }
            const { basePath } = opts.config;
            const hasBasePath = pathHasPrefix(itemPath, basePath);
            // Return null if path doesn't start with basePath
            if (basePath && !hasBasePath) {
                return null;
            }
            // Remove basePath if it exists.
            if (basePath && hasBasePath) {
                itemPath = removePathPrefix(itemPath, basePath) || '/';
            }
            // Simulate minimal mode requests by normalizing RSC and postponed
            // requests.
            if (opts.minimalMode) {
                if (normalizers.rsc.match(itemPath)) {
                    itemPath = normalizers.rsc.normalize(itemPath, true);
                }
            }
            if (itemPath !== '/' && itemPath.endsWith('/')) {
                itemPath = itemPath.substring(0, itemPath.length - 1);
            }
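            // Illustrative note (not part of the generated file), assuming a
            // hypothetical basePath of '/docs':
            //   '/docs/guide/' -> '/guide/' (basePath stripped) -> '/guide'
            //   '/guide'       -> null above, since the basePath is missing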
            let decodedItemPath = itemPath;
            try {
                decodedItemPath = decodeURIComponent(itemPath);
            } catch {}
            if (itemPath === '/_next/image') {
                return {
                    itemPath,
                    type: 'nextImage'
                };
            }
            if (opts.dev && isMetadataRouteFile(itemPath, [], false)) {
                const fsPath = staticMetadataFiles.get(itemPath);
                if (fsPath) {
                    return {
                        // "nextStaticFolder" sets Cache-Control "no-store" on dev.
                        type: 'nextStaticFolder',
                        fsPath,
                        itemPath: fsPath
                    };
                }
            }
            const itemsToCheck = [
                [
                    this.devVirtualFsItems,
                    'devVirtualFsItem'
                ],
                [
                    nextStaticFolderItems,
                    'nextStaticFolder'
                ],
                [
                    legacyStaticFolderItems,
                    'legacyStaticFolder'
                ],
                [
                    publicFolderItems,
                    'publicFolder'
                ],
                [
                    appFiles,
                    'appFile'
                ],
                [
                    pageFiles,
                    'pageFile'
                ]
            ];
            for (let [items, type] of itemsToCheck){
                let locale;
                let curItemPath = itemPath;
                let curDecodedItemPath = decodedItemPath;
                const isDynamicOutput = type === 'pageFile' || type === 'appFile';
                if (i18n) {
                    var _i18n_domains;
                    const localeResult = handleLocale(itemPath, // legacy behavior allows visiting static assets under
                    // default locale but no other locale
                    isDynamicOutput ? undefined : [
                        i18n == null ? void 0 : i18n.defaultLocale,
                        // default locales from domains need to be matched too
                        ...((_i18n_domains = i18n.domains) == null ? void 0 : _i18n_domains.map((item)=>item.defaultLocale)) || []
                    ]);
                    if (localeResult.pathname !== curItemPath) {
                        curItemPath = localeResult.pathname;
                        locale = localeResult.locale;
                        try {
                            curDecodedItemPath = decodeURIComponent(curItemPath);
                        } catch {}
                    }
                }
                if (type === 'legacyStaticFolder') {
                    if (!pathHasPrefix(curItemPath, '/static')) {
                        continue;
                    }
                    curItemPath = curItemPath.substring('/static'.length);
                    try {
                        curDecodedItemPath = decodeURIComponent(curItemPath);
                    } catch {}
                }
                if (type === 'nextStaticFolder' && !pathHasPrefix(curItemPath, '/_next/static')) {
                    continue;
                }
                const nextDataPrefix = `/_next/data/${buildId}/`;
                if (type === 'pageFile' && curItemPath.startsWith(nextDataPrefix) && curItemPath.endsWith('.json')) {
                    items = nextDataRoutes;
                    // remove _next/data/<build-id> prefix
                    curItemPath = curItemPath.substring(nextDataPrefix.length - 1);
                    // remove .json postfix
                    curItemPath = curItemPath.substring(0, curItemPath.length - '.json'.length);
                    const curLocaleResult = handleLocale(curItemPath);
                    curItemPath = curLocaleResult.pathname === '/index' ? '/' : curLocaleResult.pathname;
                    locale = curLocaleResult.locale;
                    try {
                        curDecodedItemPath = decodeURIComponent(curItemPath);
                    } catch {}
                }
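                // Illustrative note (not part of the generated file), with a
                // hypothetical buildId of 'abc123':
                //   '/_next/data/abc123/blog/hello.json' -> '/blog/hello'
                //   '/_next/data/abc123/index.json'      -> '/'
                // after which the lookup switches to the nextDataRoutes set.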
                let matchedItem = items.has(curItemPath);
                // check decoded variant as well
                if (!matchedItem && !opts.dev) {
                    matchedItem = items.has(curDecodedItemPath);
                    if (matchedItem) curItemPath = curDecodedItemPath;
                    else {
                        // x-ref: https://github.com/vercel/next.js/issues/54008
                        // There are cases where URLs get decoded before the request reaches us,
                        // so we should support both encoded and decoded forms. e.g. nginx could
                        // decode proxied URLs, and the below should be treated as the same:
                        // decoded version: `/_next/static/chunks/pages/blog/[slug]-d4858831b91b69f6.js`
                        // encoded version: `/_next/static/chunks/pages/blog/%5Bslug%5D-d4858831b91b69f6.js`
                        try {
                            // encode the special characters in the path and look it up again to determine if the path exists.
                            const encodedCurItemPath = encodeURIPath(curItemPath);
                            matchedItem = items.has(encodedCurItemPath);
                        } catch {}
                    }
                }
                if (matchedItem || opts.dev) {
                    let fsPath;
                    let itemsRoot;
                    switch(type){
                        case 'nextStaticFolder':
                            {
                                itemsRoot = nextStaticFolderPath;
                                curItemPath = curItemPath.substring('/_next/static'.length);
                                break;
                            }
                        case 'legacyStaticFolder':
                            {
                                itemsRoot = legacyStaticFolderPath;
                                break;
                            }
                        case 'publicFolder':
                            {
                                itemsRoot = publicFolderPath;
                                break;
                            }
                        case 'appFile':
                        case 'pageFile':
                        case 'nextImage':
                        case 'devVirtualFsItem':
                            {
                                break;
                            }
                        default:
                            {
                                // compiled exhaustiveness check: `type` should be unreachable here
                                ;
                                type;
                            }
                    }
                    if (itemsRoot && curItemPath) {
                        fsPath = path.posix.join(itemsRoot, curItemPath);
                    }
                    // dynamically check fs in development so we don't
                    // have to wait on the watcher
                    if (!matchedItem && opts.dev) {
                        const isStaticAsset = [
                            'nextStaticFolder',
                            'publicFolder',
                            'legacyStaticFolder'
                        ].includes(type);
                        if (isStaticAsset && itemsRoot) {
                            let found = fsPath && await fileExists(fsPath, FileType.File);
                            if (!found) {
                                try {
                                    // In dev, we ensure encoded paths match
                                    // decoded paths on the filesystem so check
                                    // that variation as well
                                    const tempItemPath = decodeURIComponent(curItemPath);
                                    fsPath = path.posix.join(itemsRoot, tempItemPath);
                                    found = await fileExists(fsPath, FileType.File);
                                } catch {}
                                if (!found) {
                                    continue;
                                }
                            }
                        } else if (type === 'pageFile' || type === 'appFile') {
                            const isAppFile = type === 'appFile';
                            // Attempt to ensure the page/app file is compiled and ready
                            if (ensureFn) {
                                const ensureItemPath = isAppFile ? normalizeMetadataRoute(curItemPath) : curItemPath;
                                try {
                                    await ensureFn({
                                        type,
                                        itemPath: ensureItemPath
                                    });
                                } catch (error) {
                                    continue;
                                }
                            }
                        } else {
                            continue;
                        }
                    }
                    // i18n locales aren't matched for app dir
                    if (type === 'appFile' && locale && locale !== (i18n == null ? void 0 : i18n.defaultLocale)) {
                        continue;
                    }
                    const itemResult = {
                        type,
                        fsPath,
                        locale,
                        itemsRoot,
                        itemPath: curItemPath
                    };
                    getItemsLru == null ? void 0 : getItemsLru.set(itemKey, itemResult);
                    return itemResult;
                }
            }
            getItemsLru == null ? void 0 : getItemsLru.set(itemKey, null);
            return null;
        },
        getDynamicRoutes () {
            // this should include data routes
            return this.dynamicRoutes;
        },
        getMiddlewareMatchers () {
            return this.middlewareMatcher;
        }
    };
}

//# sourceMappingURL=filesystem.js.map
+1
File diff suppressed because one or more lines are too long
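For orientation, a minimal sketch of how the filesystem checker above is consumed. The opts object and nextConfig are illustrative assumptions; the resolution order follows the itemsToCheck list in the code:

import { setupFsCheck } from './filesystem';

const fsChecker = await setupFsCheck({
    dev: false,
    dir: process.cwd(),
    minimalMode: false,
    config: nextConfig // a loaded Next.js config object (assumed available)
});

// Static assets, app files, and page files all resolve through one call;
// hits are cached in the LRU and misses are negatively cached as null.
const item = await fsChecker.getItem('/_next/static/css/app.css');
// -> { type: 'nextStaticFolder', fsPath: '<distDir>/static/css/app.css', itemPath: '/css/app.css', ... }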
Generated
Vendored
+58
@@ -0,0 +1,58 @@
import path from 'node:path';
import isError from '../../../lib/is-error';
import { INSTRUMENTATION_HOOK_FILENAME } from '../../../lib/constants';
import { interopDefault } from '../../../lib/interop-default';
import { afterRegistration as extendInstrumentationAfterRegistration } from './instrumentation-node-extensions';
let cachedInstrumentationModule;
export async function getInstrumentationModule(projectDir, distDir) {
    if (cachedInstrumentationModule) {
        return cachedInstrumentationModule;
    }
    try {
        cachedInstrumentationModule = interopDefault(await require(path.join(projectDir, distDir, 'server', `${INSTRUMENTATION_HOOK_FILENAME}.js`)));
        return cachedInstrumentationModule;
    } catch (err) {
        if (isError(err) && err.code !== 'ENOENT' && err.code !== 'MODULE_NOT_FOUND' && err.code !== 'ERR_MODULE_NOT_FOUND') {
            throw err;
        }
    }
}
let instrumentationModulePromise = null;
async function registerInstrumentation(projectDir, distDir) {
    // Ensure registerInstrumentation is not called in production build
    if (process.env.NEXT_PHASE === 'phase-production-build') {
        return;
    }
    if (!instrumentationModulePromise) {
        instrumentationModulePromise = getInstrumentationModule(projectDir, distDir);
    }
    const instrumentation = await instrumentationModulePromise;
    if (instrumentation == null ? void 0 : instrumentation.register) {
        try {
            await instrumentation.register();
            extendInstrumentationAfterRegistration();
        } catch (err) {
            err.message = `An error occurred while loading instrumentation hook: ${err.message}`;
            throw err;
        }
    }
}
export async function instrumentationOnRequestError(projectDir, distDir, ...args) {
    const instrumentation = await getInstrumentationModule(projectDir, distDir);
    try {
        var _instrumentation_onRequestError;
        await (instrumentation == null ? void 0 : (_instrumentation_onRequestError = instrumentation.onRequestError) == null ? void 0 : _instrumentation_onRequestError.call(instrumentation, ...args));
    } catch (err) {
        // Log the soft error and continue, since the original error has already been thrown
        console.error('Error in instrumentation.onRequestError:', err);
    }
}
let registerInstrumentationPromise = null;
export function ensureInstrumentationRegistered(projectDir, distDir) {
    if (!registerInstrumentationPromise) {
        registerInstrumentationPromise = registerInstrumentation(projectDir, distDir);
    }
    return registerInstrumentationPromise;
}

//# sourceMappingURL=instrumentation-globals.external.js.map
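A short usage sketch (call sites and the projectDir/distDir/error variables are illustrative; the helpers are the exports above):

import {
    ensureInstrumentationRegistered,
    instrumentationOnRequestError
} from './instrumentation-globals.external';

// Safe to call from multiple request paths: the promise is memoized, so the
// user's register() hook runs at most once per process.
await ensureInstrumentationRegistered(projectDir, distDir);

// Forward a request failure; onRequestError failures are logged and swallowed
// so the original error still propagates to the caller.
await instrumentationOnRequestError(projectDir, distDir, error, errorRequest, errorContext);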
Generated
Vendored
+1
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../../src/server/lib/router-utils/instrumentation-globals.external.ts"],"sourcesContent":["import path from 'node:path'\nimport isError from '../../../lib/is-error'\nimport { INSTRUMENTATION_HOOK_FILENAME } from '../../../lib/constants'\nimport type {\n InstrumentationModule,\n InstrumentationOnRequestError,\n} from '../../instrumentation/types'\nimport { interopDefault } from '../../../lib/interop-default'\nimport { afterRegistration as extendInstrumentationAfterRegistration } from './instrumentation-node-extensions'\n\nlet cachedInstrumentationModule: InstrumentationModule\n\nexport async function getInstrumentationModule(\n projectDir: string,\n distDir: string\n): Promise<InstrumentationModule | undefined> {\n if (cachedInstrumentationModule) {\n return cachedInstrumentationModule\n }\n\n try {\n cachedInstrumentationModule = interopDefault(\n await require(\n path.join(\n projectDir,\n distDir,\n 'server',\n `${INSTRUMENTATION_HOOK_FILENAME}.js`\n )\n )\n )\n return cachedInstrumentationModule\n } catch (err: unknown) {\n if (\n isError(err) &&\n err.code !== 'ENOENT' &&\n err.code !== 'MODULE_NOT_FOUND' &&\n err.code !== 'ERR_MODULE_NOT_FOUND'\n ) {\n throw err\n }\n }\n}\n\nlet instrumentationModulePromise: Promise<any> | null = null\n\nasync function registerInstrumentation(projectDir: string, distDir: string) {\n // Ensure registerInstrumentation is not called in production build\n if (process.env.NEXT_PHASE === 'phase-production-build') {\n return\n }\n if (!instrumentationModulePromise) {\n instrumentationModulePromise = getInstrumentationModule(projectDir, distDir)\n }\n const instrumentation = await instrumentationModulePromise\n if (instrumentation?.register) {\n try {\n await instrumentation.register()\n extendInstrumentationAfterRegistration()\n } catch (err: any) {\n err.message = `An error occurred while loading instrumentation hook: ${err.message}`\n throw err\n }\n }\n}\n\nexport async function instrumentationOnRequestError(\n projectDir: string,\n distDir: string,\n ...args: Parameters<InstrumentationOnRequestError>\n) {\n const instrumentation = await getInstrumentationModule(projectDir, distDir)\n try {\n await instrumentation?.onRequestError?.(...args)\n } catch (err) {\n // Log the soft error and continue, since the original error has already been thrown\n console.error('Error in instrumentation.onRequestError:', err)\n }\n}\n\nlet registerInstrumentationPromise: Promise<void> | null = null\nexport function ensureInstrumentationRegistered(\n projectDir: string,\n distDir: string\n) {\n if (!registerInstrumentationPromise) {\n registerInstrumentationPromise = registerInstrumentation(\n projectDir,\n distDir\n )\n }\n return 
registerInstrumentationPromise\n}\n"],"names":["path","isError","INSTRUMENTATION_HOOK_FILENAME","interopDefault","afterRegistration","extendInstrumentationAfterRegistration","cachedInstrumentationModule","getInstrumentationModule","projectDir","distDir","require","join","err","code","instrumentationModulePromise","registerInstrumentation","process","env","NEXT_PHASE","instrumentation","register","message","instrumentationOnRequestError","args","onRequestError","console","error","registerInstrumentationPromise","ensureInstrumentationRegistered"],"mappings":"AAAA,OAAOA,UAAU,YAAW;AAC5B,OAAOC,aAAa,wBAAuB;AAC3C,SAASC,6BAA6B,QAAQ,yBAAwB;AAKtE,SAASC,cAAc,QAAQ,+BAA8B;AAC7D,SAASC,qBAAqBC,sCAAsC,QAAQ,oCAAmC;AAE/G,IAAIC;AAEJ,OAAO,eAAeC,yBACpBC,UAAkB,EAClBC,OAAe;IAEf,IAAIH,6BAA6B;QAC/B,OAAOA;IACT;IAEA,IAAI;QACFA,8BAA8BH,eAC5B,MAAMO,QACJV,KAAKW,IAAI,CACPH,YACAC,SACA,UACA,GAAGP,8BAA8B,GAAG,CAAC;QAI3C,OAAOI;IACT,EAAE,OAAOM,KAAc;QACrB,IACEX,QAAQW,QACRA,IAAIC,IAAI,KAAK,YACbD,IAAIC,IAAI,KAAK,sBACbD,IAAIC,IAAI,KAAK,wBACb;YACA,MAAMD;QACR;IACF;AACF;AAEA,IAAIE,+BAAoD;AAExD,eAAeC,wBAAwBP,UAAkB,EAAEC,OAAe;IACxE,mEAAmE;IACnE,IAAIO,QAAQC,GAAG,CAACC,UAAU,KAAK,0BAA0B;QACvD;IACF;IACA,IAAI,CAACJ,8BAA8B;QACjCA,+BAA+BP,yBAAyBC,YAAYC;IACtE;IACA,MAAMU,kBAAkB,MAAML;IAC9B,IAAIK,mCAAAA,gBAAiBC,QAAQ,EAAE;QAC7B,IAAI;YACF,MAAMD,gBAAgBC,QAAQ;YAC9Bf;QACF,EAAE,OAAOO,KAAU;YACjBA,IAAIS,OAAO,GAAG,CAAC,sDAAsD,EAAET,IAAIS,OAAO,EAAE;YACpF,MAAMT;QACR;IACF;AACF;AAEA,OAAO,eAAeU,8BACpBd,UAAkB,EAClBC,OAAe,EACf,GAAGc,IAA+C;IAElD,MAAMJ,kBAAkB,MAAMZ,yBAAyBC,YAAYC;IACnE,IAAI;YACIU;QAAN,OAAMA,oCAAAA,kCAAAA,gBAAiBK,cAAc,qBAA/BL,qCAAAA,oBAAqCI;IAC7C,EAAE,OAAOX,KAAK;QACZ,oFAAoF;QACpFa,QAAQC,KAAK,CAAC,4CAA4Cd;IAC5D;AACF;AAEA,IAAIe,iCAAuD;AAC3D,OAAO,SAASC,gCACdpB,UAAkB,EAClBC,OAAe;IAEf,IAAI,CAACkB,gCAAgC;QACnCA,iCAAiCZ,wBAC/BP,YACAC;IAEJ;IACA,OAAOkB;AACT","ignoreList":[0]}
Some files were not shown because too many files have changed in this diff