.
This commit is contained in:
+71
@@ -0,0 +1,71 @@
|
||||
import * as Log from '../../../build/output/log';
|
||||
import { flushAllTraces } from '../../../trace';
|
||||
import { traceMemoryUsage } from '../../../lib/memory/trace';
|
||||
// 1 millisecond = 1,000,000 nanoseconds. (Renamed: the previous name,
// MILLISECONDS_IN_NANOSECOND, stated the ratio backwards.)
const NANOSECONDS_PER_MILLISECOND = BigInt(1000000);
/**
 * Converts a duration in milliseconds to nanoseconds as a `BigInt`.
 *
 * Fractional milliseconds are floored before conversion, so sub-millisecond
 * precision is intentionally dropped.
 *
 * @param ms duration in milliseconds (may be fractional)
 * @returns the duration in whole nanoseconds
 */ export function msToNs(ms) {
    return BigInt(Math.floor(ms)) * NANOSECONDS_PER_MILLISECOND;
}
|
||||
/**
 * Subscribes to compilation events for `project` and prints them using the
 * `Log` library.
 *
 * When `parentSpan` is provided, `TraceEvent` compilation events are recorded
 * as trace spans in the `.next/trace` file.
 *
 * Returns a promise that resolves when the subscription ends. Abort the
 * `signal` to close the underlying async iterator and settle the promise
 * promptly. The iterator also closes automatically when the Rust side
 * drops the subscription (e.g. after project shutdown).
 *
 * @param project - object exposing `compilationEventsSubscribe(eventTypes)`,
 *   which returns an async iterator of events
 * @param options.eventTypes - forwarded to the subscription; presumably a
 *   filter of event type names — confirm against the napi binding
 * @param options.signal - optional AbortSignal used to end the subscription
 * @param options.parentSpan - optional trace span; enables TraceEvent recording
 */ export function backgroundLogCompilationEvents(project, { eventTypes, signal, parentSpan } = {}) {
    const iterator = project.compilationEventsSubscribe(eventTypes);
    // Close the iterator as soon as the signal fires so the for-await loop
    // exits without waiting for the next compilation event.
    // (`iterator.return?.()` is the standard way to end an async iterator early.)
    signal?.addEventListener('abort', ()=>iterator.return?.(undefined), {
        once: true
    });
    const promise = async function() {
        for await (const event of iterator){
            // Record TraceEvent compilation events as trace spans in .next/trace.
            if (parentSpan && event.typeName === 'TraceEvent' && event.eventJson) {
                try {
                    const data = JSON.parse(event.eventJson);
                    // `attributes` is expected to be an array of [key, value]
                    // pairs (fed to Object.fromEntries); absent -> empty object.
                    parentSpan.manualTraceChild(data.name, msToNs(data.startTimeMs), msToNs(data.endTimeMs), Object.fromEntries(data.attributes ?? []));
                    traceMemoryUsage(data.name, parentSpan);
                    // We flush after each event to make sure it makes it to disk. These events are rare and
                    // tend to happen at the very end of a build so to make sure they are logged we need to
                    // flush.
                    // NOTE: in a `next build` environment where we are reporting events to the parent thread, this is a no-op.
                    await flushAllTraces();
                } catch {}
                // NOTE(review): malformed eventJson is silently dropped by the
                // empty catch above — deliberate best-effort tracing.
                continue; // don't log these events, they just go to the trace file
            }
            // Map event severity onto the matching Log level. FATAL has no
            // dedicated level, so it is reported via Log.error as well.
            switch(event.severity){
                case 'EVENT':
                    Log.event(event.message);
                    break;
                case 'TRACE':
                    Log.trace(event.message);
                    break;
                case 'INFO':
                    Log.info(event.message);
                    break;
                case 'WARNING':
                    Log.warn(event.message);
                    break;
                case 'ERROR':
                    Log.error(event.message);
                    break;
                case 'FATAL':
                    Log.error(event.message);
                    break;
                default:
                    // Unknown severities are intentionally ignored.
                    break;
            }
        }
    }();
    // Prevent unhandled rejection if the subscription errors after the project shuts down.
    promise.catch(()=>{});
    return promise;
}
|
||||
|
||||
//# sourceMappingURL=compilation-events.js.map
|
||||
+1
File diff suppressed because one or more lines are too long
+21
@@ -0,0 +1,21 @@
|
||||
/**
 * `app` -> app dir
 * `pages` -> pages dir
 * `root` -> middleware / instrumentation
 * `assets` -> assets
 */ /**
 * Get a key that's unique across all entrypoints.
 *
 * The key is the JSON serialization of `{ type, side, page }`, so equal
 * component triples always produce equal keys.
 */ export function getEntryKey(type, side, page) {
    const components = { type, side, page };
    return JSON.stringify(components);
}
|
||||
/**
 * Split an `EntryKey` up into its components.
 *
 * Inverse of `getEntryKey`: an entry key is just serialized JSON, so parsing
 * it recovers the `{ type, side, page }` object.
 */ export function splitEntryKey(key) {
    const components = JSON.parse(key);
    return components;
}
|
||||
|
||||
//# sourceMappingURL=entry-key.js.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../../src/shared/lib/turbopack/entry-key.ts"],"sourcesContent":["/**\n * `app` -> app dir\n * `pages` -> pages dir\n * `root` -> middleware / instrumentation\n * `assets` -> assets\n */\nexport type EntryKeyType = 'app' | 'pages' | 'root' | 'assets'\nexport type EntryKeySide = 'client' | 'server'\n\n// custom type to make sure you can't accidentally use a \"generic\" string\nexport type EntryKey =\n `{\"type\":\"${EntryKeyType}\",\"side\":\"${EntryKeyType}\",\"page\":\"${string}\"}`\n\n/**\n * Get a key that's unique across all entrypoints.\n */\nexport function getEntryKey(\n type: EntryKeyType,\n side: EntryKeySide,\n page: string\n): EntryKey {\n return JSON.stringify({ type, side, page }) as EntryKey\n}\n\n/**\n * Split an `EntryKey` up into its components.\n */\nexport function splitEntryKey(key: EntryKey): {\n type: EntryKeyType\n side: EntryKeySide\n page: string\n} {\n return JSON.parse(key)\n}\n"],"names":["getEntryKey","type","side","page","JSON","stringify","splitEntryKey","key","parse"],"mappings":"AAAA;;;;;CAKC,GAQD;;CAEC,GACD,OAAO,SAASA,YACdC,IAAkB,EAClBC,IAAkB,EAClBC,IAAY;IAEZ,OAAOC,KAAKC,SAAS,CAAC;QAAEJ;QAAMC;QAAMC;IAAK;AAC3C;AAEA;;CAEC,GACD,OAAO,SAASG,cAAcC,GAAa;IAKzC,OAAOH,KAAKI,KAAK,CAACD;AACpB","ignoreList":[0]}
|
||||
+40
@@ -0,0 +1,40 @@
|
||||
import { eventErrorThrown } from '../../../telemetry/events';
|
||||
import { traceGlobals } from '../../../trace/shared';
|
||||
/**
 * An error caused by a bug in Turbopack, and not the user's code (e.g. a Rust panic). These should
 * be written to a log file and details should not be shown to the user.
 *
 * These are constructed in Turbopack by calling `throwTurbopackInternalError`.
 */ export class TurbopackInternalError extends Error {
    constructor({ message, anonymizedLocation }){
        super(message);
        this.name = 'TurbopackInternalError';
        // Manually set this as this isn't statically determinable
        this.__NEXT_ERROR_CODE = 'TurbopackInternalError';
        this.location = anonymizedLocation;
    }
}
|
||||
/**
 * A helper used by the napi Rust entrypoints to construct and throw a `TurbopackInternalError`.
 *
 * When called, this will emit a telemetry event.
 *
 * @param conversionError non-null when napi failed to convert `opts`
 * @param opts options forwarded to the `TurbopackInternalError` constructor
 */ export function throwTurbopackInternalError(conversionError, opts) {
    if (conversionError != null) {
        // Somehow napi failed to convert `opts` to a JS object??? Just give up and throw that instead.
        const napiError = new Error('NAPI type conversion error in throwTurbopackInternalError', {
            cause: conversionError
        });
        // Attach the error code as a non-enumerable property, matching
        // Next.js's tagged-error convention.
        Object.defineProperty(napiError, "__NEXT_ERROR_CODE", {
            value: "E723",
            enumerable: false,
            configurable: true
        });
        throw napiError;
    }
    const err = new TurbopackInternalError(opts);
    const telemetry = traceGlobals.get('telemetry');
    if (telemetry) {
        telemetry.record(eventErrorThrown(err, opts.anonymizedLocation));
    } else {
        console.error('Expected `telemetry` to be set in globals');
    }
    throw err;
}
|
||||
|
||||
//# sourceMappingURL=internal-error.js.map
|
||||
+1
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":["../../../../../src/shared/lib/turbopack/internal-error.ts"],"sourcesContent":["import type { TurbopackInternalErrorOpts } from '../../../build/swc/generated-native'\nimport { eventErrorThrown } from '../../../telemetry/events'\nimport { traceGlobals } from '../../../trace/shared'\n\n/**\n * An error caused by a bug in Turbopack, and not the user's code (e.g. a Rust panic). These should\n * be written to a log file and details should not be shown to the user.\n *\n * These are constructed in Turbopack by calling `throwTurbopackInternalError`.\n */\nexport class TurbopackInternalError extends Error {\n name = 'TurbopackInternalError'\n location: string | undefined\n\n // Manually set this as this isn't statically determinable\n __NEXT_ERROR_CODE = 'TurbopackInternalError'\n\n constructor({ message, anonymizedLocation }: TurbopackInternalErrorOpts) {\n super(message)\n this.location = anonymizedLocation\n }\n}\n\n/**\n * A helper used by the napi Rust entrypoints to construct and throw a `TurbopackInternalError`.\n *\n * When called, this will emit a telemetry event.\n */\nexport function throwTurbopackInternalError(\n conversionError: Error | null,\n opts: TurbopackInternalErrorOpts\n): never {\n if (conversionError != null) {\n // Somehow napi failed to convert `opts` to a JS object??? 
Just give up and throw that instead.\n throw new Error(\n 'NAPI type conversion error in throwTurbopackInternalError',\n {\n cause: conversionError,\n }\n )\n }\n const err = new TurbopackInternalError(opts)\n const telemetry = traceGlobals.get('telemetry')\n if (telemetry) {\n telemetry.record(eventErrorThrown(err, opts.anonymizedLocation))\n } else {\n console.error('Expected `telemetry` to be set in globals')\n }\n throw err\n}\n"],"names":["eventErrorThrown","traceGlobals","TurbopackInternalError","Error","constructor","message","anonymizedLocation","name","__NEXT_ERROR_CODE","location","throwTurbopackInternalError","conversionError","opts","cause","err","telemetry","get","record","console","error"],"mappings":"AACA,SAASA,gBAAgB,QAAQ,4BAA2B;AAC5D,SAASC,YAAY,QAAQ,wBAAuB;AAEpD;;;;;CAKC,GACD,OAAO,MAAMC,+BAA+BC;IAO1CC,YAAY,EAAEC,OAAO,EAAEC,kBAAkB,EAA8B,CAAE;QACvE,KAAK,CAACD,eAPRE,OAAO,0BAGP,0DAA0D;aAC1DC,oBAAoB;QAIlB,IAAI,CAACC,QAAQ,GAAGH;IAClB;AACF;AAEA;;;;CAIC,GACD,OAAO,SAASI,4BACdC,eAA6B,EAC7BC,IAAgC;IAEhC,IAAID,mBAAmB,MAAM;QAC3B,+FAA+F;QAC/F,MAAM,qBAKL,CALK,IAAIR,MACR,6DACA;YACEU,OAAOF;QACT,IAJI,qBAAA;mBAAA;wBAAA;0BAAA;QAKN;IACF;IACA,MAAMG,MAAM,IAAIZ,uBAAuBU;IACvC,MAAMG,YAAYd,aAAae,GAAG,CAAC;IACnC,IAAID,WAAW;QACbA,UAAUE,MAAM,CAACjB,iBAAiBc,KAAKF,KAAKN,kBAAkB;IAChE,OAAO;QACLY,QAAQC,KAAK,CAAC;IAChB;IACA,MAAML;AACR","ignoreList":[0]}
|
||||
+488
@@ -0,0 +1,488 @@
|
||||
import { APP_PATHS_MANIFEST, BUILD_MANIFEST, CLIENT_STATIC_FILES_PATH, INTERCEPTION_ROUTE_REWRITE_MANIFEST, MIDDLEWARE_BUILD_MANIFEST, MIDDLEWARE_MANIFEST, NEXT_FONT_MANIFEST, PAGES_MANIFEST, SERVER_REFERENCE_MANIFEST, SUBRESOURCE_INTEGRITY_MANIFEST, TURBOPACK_CLIENT_BUILD_MANIFEST, TURBOPACK_CLIENT_MIDDLEWARE_MANIFEST } from '../constants';
|
||||
import { join, posix } from 'path';
|
||||
import { readFileSync } from 'fs';
|
||||
import { deleteCache } from '../../../server/dev/require-cache';
|
||||
import { writeFileAtomic } from '../../../lib/fs/write-atomic';
|
||||
import getAssetPathFromRoute from '../router/utils/get-asset-path-from-route';
|
||||
import { getEntryKey } from './entry-key';
|
||||
import { getSortedRoutes } from '../router/utils';
|
||||
import { existsSync } from 'fs';
|
||||
import { addMetadataIdToRoute, addRouteSuffix, removeRouteSuffix } from '../../../server/dev/turbopack-utils';
|
||||
import { tryToParsePath } from '../../../lib/try-to-parse-path';
|
||||
import { safePathToRegexp } from '../router/utils/route-match-utils';
|
||||
import { normalizeRewritesForBuildManifest, srcEmptySsgManifest, processRoute, createEdgeRuntimeManifest } from '../../../build/webpack/plugins/build-manifest-plugin-utils';
|
||||
// Resolves the on-disk path of a per-entrypoint manifest file.
//
// Base layout: `<distDir>/server/<type>/<route-derived path>/<name>`, where
// middleware/instrumentation skip the route segment, `app` uses the page path
// verbatim, and `pages` routes go through getAssetPathFromRoute.
//
// On the first (non-recursive) call, if the file does not exist, fall back to
// metadata-route variants of the path (sitemap.xml / [__metadata_id__]).
// Recursive calls pass `firstCall = false` to prevent infinite fallback loops.
const getManifestPath = (page, distDir, name, type, firstCall)=>{
    let manifestPath = posix.join(distDir, `server`, type, type === 'middleware' || type === 'instrumentation' ? '' : type === 'app' ? page : getAssetPathFromRoute(page), name);
    if (firstCall) {
        // NOTE(review): the `.` in `(.xml)?` is an unescaped regex dot, so it
        // matches any character — presumably intended as a literal dot.
        const isSitemapRoute = /[\\/]sitemap(.xml)?\/route$/.test(page);
        // Check the ambiguity of /sitemap and /sitemap.xml
        if (isSitemapRoute && !existsSync(manifestPath)) {
            manifestPath = getManifestPath(page.replace(/\/sitemap\/route$/, '/sitemap.xml/route'), distDir, name, type, false);
        }
        // existsSync is faster than using the async version
        if (!existsSync(manifestPath) && page.endsWith('/route')) {
            // TODO: Improve implementation of metadata routes, currently it requires this extra check for the variants of the files that can be written.
            let basePage = removeRouteSuffix(page);
            // For sitemap.xml routes with generateSitemaps, the manifest is at
            // /sitemap/[__metadata_id__]/route (without .xml), because the route
            // handler serves at /sitemap/[id] not /sitemap.xml/[id]
            if (basePage.endsWith('/sitemap.xml')) {
                basePage = basePage.slice(0, -'.xml'.length);
            }
            let metadataPage = addRouteSuffix(addMetadataIdToRoute(basePage));
            manifestPath = getManifestPath(metadataPage, distDir, name, type, false);
        }
    }
    // May point at a non-existent file if every fallback probe failed; the
    // caller's readFileSync will then throw.
    return manifestPath;
};
|
||||
/**
 * Reads the raw contents of the per-entrypoint ("partial") manifest file named
 * `name` for the given page, resolving metadata-route path variants via
 * `getManifestPath`.
 */ function readPartialManifestContent(distDir, name, pageName, type = 'pages') {
    const resolvedPath = getManifestPath(pageName, distDir, name, type, true);
    return readFileSync(posix.join(resolvedPath), 'utf-8');
}
|
||||
/// Helper class that stores a map of manifests and tracks if they have changed
/// since the last time they were written to disk. This is used to avoid
/// unnecessary writes to disk.
class ManifestsMap {
    // Raw JSON strings per key — compared by string equality so an identical
    // reload is detected as "no change" without deep comparison.
    rawMap = new Map();
    // Parsed manifest objects, keyed identically to `rawMap`.
    map = new Map();
    // Serialized snapshot of the extra invalidation key from the previous
    // `takeChanged` call.
    extraInvalidationKey = undefined;
    // Starts dirty so the very first `takeChanged` reports a change.
    changed = true;
    /** Stores `value` (a JSON string) under `key`; no-op if it is unchanged. */
    set(key, value) {
        const previous = this.rawMap.get(key);
        if (previous === value) {
            // Identical serialized content — nothing to update.
            return;
        }
        this.changed = true;
        this.rawMap.set(key, value);
        this.map.set(key, JSON.parse(value));
    }
    /** Removes `key`, marking the map dirty only if the key was present. */
    delete(key) {
        if (!this.map.has(key)) {
            return;
        }
        this.changed = true;
        this.rawMap.delete(key);
        this.map.delete(key);
    }
    /** Returns the parsed manifest stored under `key`, if any. */
    get(key) {
        return this.map.get(key);
    }
    /**
     * Returns whether anything changed since the previous call and resets the
     * dirty flag. When `extraInvalidationKey` is given, a difference in its
     * JSON serialization also counts as a change.
     */
    takeChanged(extraInvalidationKey) {
        let wasChanged = this.changed;
        if (extraInvalidationKey !== undefined) {
            const serialized = JSON.stringify(extraInvalidationKey);
            if (serialized !== this.extraInvalidationKey) {
                this.extraInvalidationKey = serialized;
                wasChanged = true;
            }
        }
        this.changed = false;
        return wasChanged;
    }
    /** Iterates the parsed manifest objects. */
    values() {
        return this.map.values();
    }
}
|
||||
/**
 * Loads the per-entrypoint ("partial") manifests that Turbopack emits under
 * `distDir/server/...`, merges them per manifest kind, and writes the merged
 * results back to disk in the layout the rest of Next.js expects.
 *
 * Each manifest kind is held in a `ManifestsMap`, whose change tracking lets
 * the `write*` methods skip disk writes when nothing changed.
 */ export class TurbopackManifestLoader {
    constructor({ distDir, buildId, encryptionKey, dev, sriEnabled }){
        this.actionManifests = new ManifestsMap();
        this.appPathsManifests = new ManifestsMap();
        this.buildManifests = new ManifestsMap();
        this.clientBuildManifests = new ManifestsMap();
        this.fontManifests = new ManifestsMap();
        this.middlewareManifests = new ManifestsMap();
        this.pagesManifests = new ManifestsMap();
        this.sriManifests = new ManifestsMap();
        /// interceptionRewrites that have been written to disk
        /// This is used to avoid unnecessary writes if the rewrites haven't changed
        this.cachedInterceptionRewrites = undefined;
        // Paths whose require-cache entries must be purged after the next
        // batch of writes (flushed in writeManifests).
        this.pendingCacheDeletes = [];
        this.distDir = distDir;
        this.buildId = buildId;
        this.encryptionKey = encryptionKey;
        this.dev = dev;
        this.sriEnabled = sriEnabled;
    }
    /**
     * Removes the entry `key` from every per-entry manifest map.
     * NOTE(review): `sriManifests` is not cleared here — confirm whether SRI
     * entries are intentionally kept for deleted entrypoints.
     */
    delete(key) {
        this.actionManifests.delete(key);
        this.appPathsManifests.delete(key);
        this.buildManifests.delete(key);
        this.clientBuildManifests.delete(key);
        this.fontManifests.delete(key);
        this.middlewareManifests.delete(key);
        this.pagesManifests.delete(key);
    }
    /** Loads the server-reference (server actions) manifest for an app page. */
    loadActionManifest(pageName) {
        this.actionManifests.set(getEntryKey('app', 'server', pageName), readPartialManifestContent(this.distDir, `${SERVER_REFERENCE_MANIFEST}.json`, pageName, 'app'));
    }
    /**
     * Merges per-entry action manifests into one `{ node, edge, encryptionKey }`
     * manifest, unioning worker maps per action id and sorting worker keys for
     * deterministic output.
     */
    mergeActionManifests(manifests) {
        const manifest = {
            node: {},
            edge: {},
            encryptionKey: this.encryptionKey
        };
        // Merge `other` into `actionEntries`: later manifests win for
        // filename/exportedName, workers are accumulated.
        function mergeActionIds(actionEntries, other) {
            for(const key in other){
                const action = actionEntries[key] ??= {
                    workers: {}
                };
                action.filename = other[key].filename;
                action.exportedName = other[key].exportedName;
                Object.assign(action.workers, other[key].workers);
            }
        }
        for (const m of manifests){
            mergeActionIds(manifest.node, m.node);
            mergeActionIds(manifest.edge, m.edge);
        }
        for(const key in manifest.node){
            const entry = manifest.node[key];
            entry.workers = sortObjectByKey(entry.workers);
        }
        for(const key in manifest.edge){
            const entry = manifest.edge[key];
            entry.workers = sortObjectByKey(entry.workers);
        }
        return manifest;
    }
    /** Writes the merged action manifest (.json and self-assigning .js) if changed. */
    writeActionManifest() {
        if (!this.actionManifests.takeChanged()) {
            return;
        }
        const actionManifest = this.mergeActionManifests(this.actionManifests.values());
        const actionManifestJsonPath = join(this.distDir, 'server', `${SERVER_REFERENCE_MANIFEST}.json`);
        const actionManifestJsPath = join(this.distDir, 'server', `${SERVER_REFERENCE_MANIFEST}.js`);
        const json = JSON.stringify(actionManifest, null, 2);
        this.pendingCacheDeletes.push(actionManifestJsonPath);
        this.pendingCacheDeletes.push(actionManifestJsPath);
        writeFileAtomic(actionManifestJsonPath, json);
        // Double-stringify on purpose: embeds the JSON text as a JS string literal.
        writeFileAtomic(actionManifestJsPath, `self.__RSC_SERVER_MANIFEST=${JSON.stringify(json)}`);
    }
    /** Loads the app-paths manifest for an app page. */
    loadAppPathsManifest(pageName) {
        this.appPathsManifests.set(getEntryKey('app', 'server', pageName), readPartialManifestContent(this.distDir, APP_PATHS_MANIFEST, pageName, 'app'));
    }
    /** Writes the merged app-paths manifest if changed (shares the shallow pages merge). */
    writeAppPathsManifest() {
        if (!this.appPathsManifests.takeChanged()) {
            return;
        }
        const appPathsManifest = this.mergePagesManifests(this.appPathsManifests.values());
        const appPathsManifestPath = join(this.distDir, 'server', APP_PATHS_MANIFEST);
        this.pendingCacheDeletes.push(appPathsManifestPath);
        writeFileAtomic(appPathsManifestPath, JSON.stringify(appPathsManifest, null, 2));
    }
    /** Writes the merged subresource-integrity manifest if SRI is enabled and changed. */
    writeSriManifest() {
        if (!this.sriEnabled || !this.sriManifests.takeChanged()) {
            return;
        }
        const sriManifest = this.mergeSriManifests(this.sriManifests.values());
        const pathJson = join(this.distDir, 'server', `${SUBRESOURCE_INTEGRITY_MANIFEST}.json`);
        const pathJs = join(this.distDir, 'server', `${SUBRESOURCE_INTEGRITY_MANIFEST}.js`);
        this.pendingCacheDeletes.push(pathJson);
        this.pendingCacheDeletes.push(pathJs);
        writeFileAtomic(pathJson, JSON.stringify(sriManifest, null, 2));
        writeFileAtomic(pathJs, `self.__SUBRESOURCE_INTEGRITY_MANIFEST=${JSON.stringify(JSON.stringify(sriManifest))}`);
    }
    /** Loads the build manifest for a page. */
    loadBuildManifest(pageName, type = 'pages') {
        this.buildManifests.set(getEntryKey(type, 'server', pageName), readPartialManifestContent(this.distDir, BUILD_MANIFEST, pageName, type));
    }
    /** Loads the Turbopack client build manifest for a page. */
    loadClientBuildManifest(pageName, type = 'pages') {
        this.clientBuildManifests.set(getEntryKey(type, 'server', pageName), readPartialManifestContent(this.distDir, TURBOPACK_CLIENT_BUILD_MANIFEST, pageName, type));
    }
    /** Loads the SRI manifest for a page; no-op unless SRI is enabled. */
    loadSriManifest(pageName, type = 'pages') {
        if (!this.sriEnabled) return;
        this.sriManifests.set(getEntryKey(type, 'client', pageName), readPartialManifestContent(this.distDir, `${SUBRESOURCE_INTEGRITY_MANIFEST}.json`, pageName, type));
    }
    /**
     * Merges per-entry build manifests. `rootMainFiles`/`polyfillFiles` are
     * taken wholesale from any manifest that has them (assumed identical
     * across entries); pages are unioned and key-sorted.
     */
    mergeBuildManifests(manifests, lowPriorityFiles) {
        const manifest = {
            pages: {
                '/_app': []
            },
            // Something in next.js depends on these to exist even for app dir rendering
            devFiles: [],
            polyfillFiles: [],
            lowPriorityFiles,
            rootMainFiles: []
        };
        for (const m of manifests){
            Object.assign(manifest.pages, m.pages);
            if (m.rootMainFiles.length) manifest.rootMainFiles = m.rootMainFiles;
            // polyfillFiles should always be the same, so we can overwrite instead of actually merging
            if (m.polyfillFiles.length) manifest.polyfillFiles = m.polyfillFiles;
        }
        manifest.pages = sortObjectByKey(manifest.pages);
        return manifest;
    }
    /** Merges client build manifests on top of rewrites and the sorted page list. */
    mergeClientBuildManifests(manifests, rewrites, sortedPageKeys) {
        const manifest = {
            __rewrites: rewrites,
            sortedPages: sortedPageKeys
        };
        for (const m of manifests){
            Object.assign(manifest, m);
        }
        return sortObjectByKey(manifest);
    }
    /**
     * Writes the interception-route rewrite manifest, skipping the write when
     * the serialized rewrites are unchanged since the last call.
     */
    writeInterceptionRouteRewriteManifest(devRewrites, productionRewrites) {
        // Production rewrites are pre-processed; dev rewrites are processed here.
        const rewrites = productionRewrites ?? {
            ...devRewrites,
            beforeFiles: (devRewrites?.beforeFiles ?? []).map(processRoute),
            afterFiles: (devRewrites?.afterFiles ?? []).map(processRoute),
            fallback: (devRewrites?.fallback ?? []).map(processRoute)
        };
        // Lazy require keeps the helper off the critical startup path.
        const interceptionRewrites = JSON.stringify(rewrites.beforeFiles.filter(require('../../../lib/is-interception-route-rewrite').isInterceptionRouteRewrite));
        if (this.cachedInterceptionRewrites === interceptionRewrites) {
            return;
        }
        this.cachedInterceptionRewrites = interceptionRewrites;
        const interceptionRewriteManifestPath = join(this.distDir, 'server', `${INTERCEPTION_ROUTE_REWRITE_MANIFEST}.js`);
        this.pendingCacheDeletes.push(interceptionRewriteManifestPath);
        // interceptionRewrites is already JSON; stringify again to embed as a JS string literal.
        writeFileAtomic(interceptionRewriteManifestPath, `self.__INTERCEPTION_ROUTE_REWRITE_MANIFEST=${JSON.stringify(interceptionRewrites)};`);
    }
    /**
     * Writes the merged build manifest, its edge-runtime variant, and a
     * fallback manifest (built from _app/_error only) if changed.
     */
    writeBuildManifest(lowPriorityFiles) {
        if (!this.buildManifests.takeChanged()) {
            return;
        }
        const buildManifest = this.mergeBuildManifests(this.buildManifests.values(), lowPriorityFiles);
        const buildManifestPath = join(this.distDir, BUILD_MANIFEST);
        const middlewareBuildManifestPath = join(this.distDir, 'server', `${MIDDLEWARE_BUILD_MANIFEST}.js`);
        this.pendingCacheDeletes.push(buildManifestPath);
        this.pendingCacheDeletes.push(middlewareBuildManifestPath);
        writeFileAtomic(buildManifestPath, JSON.stringify(buildManifest, null, 2));
        writeFileAtomic(middlewareBuildManifestPath, createEdgeRuntimeManifest(buildManifest));
        // Write fallback build manifest
        const fallbackBuildManifest = this.mergeBuildManifests([
            this.buildManifests.get(getEntryKey('pages', 'server', '_app')),
            this.buildManifests.get(getEntryKey('pages', 'server', '_error'))
        ].filter(Boolean), lowPriorityFiles);
        const fallbackBuildManifestPath = join(this.distDir, `fallback-${BUILD_MANIFEST}`);
        this.pendingCacheDeletes.push(fallbackBuildManifestPath);
        writeFileAtomic(fallbackBuildManifestPath, JSON.stringify(fallbackBuildManifest, null, 2));
    }
    /**
     * Writes `_buildManifest.js` and a placeholder `_ssgManifest.js` for the
     * client, returning both dist-relative paths. In dev, skips the write when
     * neither the manifests nor the rewrites/page list changed.
     */
    writeClientBuildManifest(entrypoints, devRewrites, productionRewrites) {
        const rewrites = normalizeRewritesForBuildManifest(productionRewrites ?? {
            ...devRewrites,
            beforeFiles: (devRewrites?.beforeFiles ?? []).map(processRoute),
            afterFiles: (devRewrites?.afterFiles ?? []).map(processRoute),
            fallback: (devRewrites?.fallback ?? []).map(processRoute)
        });
        const pagesKeys = [
            ...entrypoints.page.keys()
        ];
        if (entrypoints.global.app) {
            pagesKeys.push('/_app');
        }
        if (entrypoints.global.error) {
            pagesKeys.push('/_error');
        }
        const sortedPageKeys = getSortedRoutes(pagesKeys);
        let buildManifestPath = posix.join(CLIENT_STATIC_FILES_PATH, this.buildId, '_buildManifest.js');
        let ssgManifestPath = posix.join(CLIENT_STATIC_FILES_PATH, this.buildId, '_ssgManifest.js');
        // rewrites/sortedPageKeys act as an extra invalidation key: a change in
        // either forces a rewrite even if no manifest content changed.
        if (this.dev && !this.clientBuildManifests.takeChanged({
            rewrites,
            sortedPageKeys
        })) {
            return [
                buildManifestPath,
                ssgManifestPath
            ];
        }
        const clientBuildManifest = this.mergeClientBuildManifests(this.clientBuildManifests.values(), rewrites, sortedPageKeys);
        const clientBuildManifestJs = `self.__BUILD_MANIFEST = ${JSON.stringify(clientBuildManifest, null, 2)};self.__BUILD_MANIFEST_CB && self.__BUILD_MANIFEST_CB()`;
        writeFileAtomic(join(this.distDir, buildManifestPath), clientBuildManifestJs);
        // This is just an empty placeholder, the actual manifest is written after prerendering in
        // packages/next/src/build/index.ts
        writeFileAtomic(join(this.distDir, ssgManifestPath), srcEmptySsgManifest);
        return [
            buildManifestPath,
            ssgManifestPath
        ];
    }
    /** Loads the next/font manifest for a page. */
    loadFontManifest(pageName, type = 'pages') {
        this.fontManifests.set(getEntryKey(type, 'server', pageName), readPartialManifestContent(this.distDir, `${NEXT_FONT_MANIFEST}.json`, pageName, type));
    }
    /** Merges font manifests; size-adjust flags are OR-ed across entries. */
    mergeFontManifests(manifests) {
        const manifest = {
            app: {},
            appUsingSizeAdjust: false,
            pages: {},
            pagesUsingSizeAdjust: false
        };
        for (const m of manifests){
            Object.assign(manifest.app, m.app);
            Object.assign(manifest.pages, m.pages);
            manifest.appUsingSizeAdjust = manifest.appUsingSizeAdjust || m.appUsingSizeAdjust;
            manifest.pagesUsingSizeAdjust = manifest.pagesUsingSizeAdjust || m.pagesUsingSizeAdjust;
        }
        manifest.app = sortObjectByKey(manifest.app);
        manifest.pages = sortObjectByKey(manifest.pages);
        return manifest;
    }
    /**
     * Writes the merged next/font manifest (.json and .js) if changed.
     * NOTE(review): declared async but contains no await; callers do not
     * await it either — confirm whether writeFileAtomic is synchronous here.
     */
    async writeNextFontManifest() {
        if (!this.fontManifests.takeChanged()) {
            return;
        }
        const fontManifest = this.mergeFontManifests(this.fontManifests.values());
        const json = JSON.stringify(fontManifest, null, 2);
        const fontManifestJsonPath = join(this.distDir, 'server', `${NEXT_FONT_MANIFEST}.json`);
        const fontManifestJsPath = join(this.distDir, 'server', `${NEXT_FONT_MANIFEST}.js`);
        this.pendingCacheDeletes.push(fontManifestJsonPath);
        this.pendingCacheDeletes.push(fontManifestJsPath);
        writeFileAtomic(fontManifestJsonPath, json);
        writeFileAtomic(fontManifestJsPath, `self.__NEXT_FONT_MANIFEST=${JSON.stringify(json)}`);
    }
    /**
     * @returns If the manifest was written or not
     */ loadMiddlewareManifest(pageName, type) {
        const middlewareManifestPath = getManifestPath(pageName, this.distDir, MIDDLEWARE_MANIFEST, type, true);
        // middlewareManifest is actually "edge manifest" and not all routes are edge runtime. If it is not written we skip it.
        if (!existsSync(middlewareManifestPath)) {
            return false;
        }
        this.middlewareManifests.set(getEntryKey(type === 'middleware' || type === 'instrumentation' ? 'root' : type, 'server', pageName), readPartialManifestContent(this.distDir, MIDDLEWARE_MANIFEST, pageName, type));
        return true;
    }
    /** Returns the parsed middleware manifest stored under `key`, if any. */
    getMiddlewareManifest(key) {
        return this.middlewareManifests.get(key);
    }
    /** Removes the middleware manifest stored under `key`. */
    deleteMiddlewareManifest(key) {
        return this.middlewareManifests.delete(key);
    }
    /**
     * Merges middleware manifests: unions functions/middleware, prepends any
     * instrumentation files to every function's file list, and fills in
     * missing matcher regexps from their original sources.
     */
    mergeMiddlewareManifests(manifests) {
        const manifest = {
            version: 3,
            middleware: {},
            sortedMiddleware: [],
            functions: {}
        };
        let instrumentation = undefined;
        for (const m of manifests){
            Object.assign(manifest.functions, m.functions);
            Object.assign(manifest.middleware, m.middleware);
            if (m.instrumentation) {
                // Last manifest providing instrumentation wins.
                instrumentation = m.instrumentation;
            }
        }
        manifest.functions = sortObjectByKey(manifest.functions);
        manifest.middleware = sortObjectByKey(manifest.middleware);
        // Copies a function definition with instrumentation files prepended.
        const updateFunctionDefinition = (fun)=>{
            return {
                ...fun,
                files: [
                    ...instrumentation?.files ?? [],
                    ...fun.files
                ]
            };
        };
        for (const key of Object.keys(manifest.middleware)){
            const value = manifest.middleware[key];
            manifest.middleware[key] = updateFunctionDefinition(value);
        }
        for (const key of Object.keys(manifest.functions)){
            const value = manifest.functions[key];
            manifest.functions[key] = updateFunctionDefinition(value);
        }
        for (const fun of Object.values(manifest.functions).concat(Object.values(manifest.middleware))){
            for (const matcher of fun.matchers){
                if (!matcher.regexp) {
                    matcher.regexp = safePathToRegexp(matcher.originalSource, [], {
                        delimiter: '/',
                        sensitive: false,
                        strict: true
                    }).source.replaceAll('\\/', '/');
                }
            }
        }
        manifest.sortedMiddleware = Object.keys(manifest.middleware);
        return manifest;
    }
    /**
     * Writes the server middleware manifest and the client matcher script,
     * returning the client manifest's dist-relative path. In dev, skips the
     * write when nothing changed.
     */
    writeMiddlewareManifest() {
        let clientMiddlewareManifestPath = posix.join(CLIENT_STATIC_FILES_PATH, this.buildId, TURBOPACK_CLIENT_MIDDLEWARE_MANIFEST);
        if (this.dev && !this.middlewareManifests.takeChanged()) {
            return {
                clientMiddlewareManifestPath
            };
        }
        const middlewareManifest = this.mergeMiddlewareManifests(this.middlewareManifests.values());
        // Server middleware manifest
        // Normalize regexes as it uses path-to-regexp
        for(const key in middlewareManifest.middleware){
            middlewareManifest.middleware[key].matchers.forEach((matcher)=>{
                if (!matcher.regexp.startsWith('^')) {
                    const parsedPage = tryToParsePath(matcher.regexp);
                    if (parsedPage.error || !parsedPage.regexStr) {
                        throw Object.defineProperty(new Error(`Invalid source: ${matcher.regexp}`), "__NEXT_ERROR_CODE", {
                            value: "E442",
                            enumerable: false,
                            configurable: true
                        });
                    }
                    matcher.regexp = parsedPage.regexStr;
                }
            });
        }
        const middlewareManifestPath = join(this.distDir, 'server', MIDDLEWARE_MANIFEST);
        this.pendingCacheDeletes.push(middlewareManifestPath);
        writeFileAtomic(middlewareManifestPath, JSON.stringify(middlewareManifest, null, 2));
        // Client middleware manifest This is only used in dev though, packages/next/src/build/index.ts
        // writes the mainfest again for builds.
        const matchers = middlewareManifest?.middleware['/']?.matchers || [];
        const clientMiddlewareManifestJs = `self.__MIDDLEWARE_MATCHERS = ${JSON.stringify(matchers, null, 2)};self.__MIDDLEWARE_MATCHERS_CB && self.__MIDDLEWARE_MATCHERS_CB()`;
        this.pendingCacheDeletes.push(clientMiddlewareManifestPath);
        writeFileAtomic(join(this.distDir, clientMiddlewareManifestPath), clientMiddlewareManifestJs);
        return {
            clientMiddlewareManifestPath
        };
    }
    /** Loads the pages manifest for a pages-router page. */
    loadPagesManifest(pageName) {
        this.pagesManifests.set(getEntryKey('pages', 'server', pageName), readPartialManifestContent(this.distDir, PAGES_MANIFEST, pageName));
    }
    /** Shallow-merges flat page->file manifests and sorts the keys. */
    mergePagesManifests(manifests) {
        const manifest = {};
        for (const m of manifests){
            Object.assign(manifest, m);
        }
        return sortObjectByKey(manifest);
    }
    /** Shallow-merges flat SRI manifests and sorts the keys. */
    mergeSriManifests(manifests) {
        const manifest = {};
        for (const m of manifests){
            Object.assign(manifest, m);
        }
        return sortObjectByKey(manifest);
    }
    /** Writes the merged pages manifest if changed. */
    writePagesManifest() {
        if (!this.pagesManifests.takeChanged()) {
            return;
        }
        const pagesManifest = this.mergePagesManifests(this.pagesManifests.values());
        const pagesManifestPath = join(this.distDir, 'server', PAGES_MANIFEST);
        this.pendingCacheDeletes.push(pagesManifestPath);
        writeFileAtomic(pagesManifestPath, JSON.stringify(pagesManifest, null, 2));
    }
    /**
     * Writes every manifest kind that changed, then flushes queued
     * require-cache deletions in one pass.
     */
    writeManifests({ devRewrites, productionRewrites, entrypoints }) {
        this.writeActionManifest();
        this.writeAppPathsManifest();
        const lowPriorityFiles = this.writeClientBuildManifest(entrypoints, devRewrites, productionRewrites);
        const { clientMiddlewareManifestPath } = this.writeMiddlewareManifest();
        this.writeBuildManifest([
            ...lowPriorityFiles,
            clientMiddlewareManifestPath
        ]);
        this.writeInterceptionRouteRewriteManifest(devRewrites, productionRewrites);
        // NOTE(review): writeNextFontManifest is async but not awaited here.
        this.writeNextFontManifest();
        this.writePagesManifest();
        this.writeSriManifest();
        // Flush all queued cache deletions in a single require.cache scan
        if (this.pendingCacheDeletes.length > 0) {
            deleteCache(this.pendingCacheDeletes);
            this.pendingCacheDeletes = [];
        }
    }
}
|
||||
// Rebuild an object with its keys inserted in ascending sort order so that
// serialized output (e.g. JSON.stringify) is deterministic across runs.
function sortObjectByKey(obj) {
    const sorted = {};
    for (const key of Object.keys(obj).sort()) {
        sorted[key] = obj[key];
    }
    return sorted;
}
|
||||
|
||||
//# sourceMappingURL=manifest-loader.js.map
|
||||
+1
File diff suppressed because one or more lines are too long
+226
@@ -0,0 +1,226 @@
|
||||
import { bold, green, magenta, red } from '../../../lib/picocolors';
|
||||
import { deobfuscateText } from '../magic-identifier';
|
||||
import * as Log from '../../../build/output/log';
|
||||
const VERBOSE_ISSUES = !!process.env.NEXT_TURBOPACK_VERBOSE_ISSUES;
|
||||
/**
 * An error generated from emitted Turbopack issues. This can include build
 * errors caused by issues with user code.
 */ export class ModuleBuildError extends Error {
    constructor(...args){
        super(...args);
        // Override the default "Error" name so stack traces and logs identify
        // the failure as a module build problem.
        this.name = 'ModuleBuildError';
    }
}
|
||||
/**
 * Thin stopgap workaround layer to mimic existing wellknown-errors-plugin in webpack's build
 * to emit certain type of errors into cli.
 */ export function isWellKnownError(issue) {
    const formattedTitle = renderStyledStringToErrorAnsi(issue.title);
    // TODO: add more well known errors
    return (
        formattedTitle.includes('Module not found') ||
        formattedTitle.includes('Unknown module type')
    );
}
|
||||
// De-duplication key for an issue: severity and file path plus the
// JSON-serialized styled-string title/description, so that visually identical
// issues collapse to a single entry.
export function getIssueKey(issue) {
    const titleJson = JSON.stringify(issue.title);
    const descriptionJson = JSON.stringify(issue.description);
    return `${issue.severity}-${issue.filePath}-${titleJson}-${descriptionJson}`;
}
|
||||
// Replaces the issue map for `key` with the issues from the latest compilation
// `result`. Errors/fatals are either collected and thrown as a single
// ModuleBuildError (when `throwIssue`) or, for well-known errors, logged to
// the CLI (when `logErrors`). Warnings are recorded but never thrown.
export function processIssues(currentEntryIssues, key, result, throwIssue, logErrors) {
    const issuesForEntry = new Map();
    currentEntryIssues.set(key, issuesForEntry);
    const formattedErrors = new Set();
    for (const issue of result.issues) {
        const { severity } = issue;
        if (severity !== 'error' && severity !== 'fatal' && severity !== 'warning') {
            continue;
        }
        issuesForEntry.set(getIssueKey(issue), issue);
        if (severity === 'warning') continue;
        if (throwIssue) {
            formattedErrors.add(formatIssue(issue));
        } else if (logErrors && isWellKnownError(issue)) {
            Log.error(formatIssue(issue));
        }
    }
    if (formattedErrors.size && throwIssue) {
        // Attach the Next.js error code as a non-enumerable property.
        throw Object.defineProperty(
            new ModuleBuildError([...formattedErrors].join('\n\n')),
            "__NEXT_ERROR_CODE",
            { value: "E394", enumerable: false, configurable: true }
        );
    }
}
|
||||
// Normalize a Turbopack file path for display: strip the [project] root
// marker, collapse '/./' segments, and drop the Windows extended-path prefix.
function formatFilePath(filePath) {
    const withoutRoot = filePath.replace('[project]/', './');
    const collapsed = withoutRoot.replaceAll('/./', '/');
    return collapsed.replace('\\\\?\\', '');
}
|
||||
/**
 * Renders a Turbopack issue into a single human-readable message string.
 *
 * The message is assembled in order: location + title, code frame,
 * description (with a special-cased hint for the missing `sass` package),
 * verbose detail, additional generated-code sources, import trace(s), and a
 * documentation link. Callers receive it as plain text with ANSI styling
 * already applied by renderStyledStringToErrorAnsi.
 */ export function formatIssue(issue) {
    const { filePath, title, description, detail, source, importTraces } = issue;
    let { documentationLink } = issue;
    // Indent continuation lines of the title so it reads as one entry.
    const formattedTitle = renderStyledStringToErrorAnsi(title).replace(/\n/g, '\n ');
    // TODO: Use error codes to identify these
    // TODO: Generalize adapting Turbopack errors to Next.js errors
    if (formattedTitle.includes('Module not found')) {
        // For compatibility with webpack
        // TODO: include columns in webpack errors.
        documentationLink = 'https://nextjs.org/docs/messages/module-not-found';
    }
    const formattedFilePath = formatFilePath(filePath);
    let message = '';
    if (source?.range) {
        const { start } = source.range;
        // Turbopack positions are zero-based; display as one-based line:column.
        message = `${formattedFilePath}:${start.line + 1}:${start.column + 1}\n${formattedTitle}`;
    } else if (formattedFilePath) {
        message = `${formattedFilePath}\n${formattedTitle}`;
    } else {
        message = formattedTitle;
    }
    message += '\n';
    if (issue.codeFrame) {
        message += issue.codeFrame.trimEnd() + '\n\n';
    }
    if (description) {
        // Special-case the missing `sass` dependency with install instructions.
        if (description.type === 'text' && description.value.includes(`Cannot find module 'sass'`)) {
            message += "To use Next.js' built-in Sass support, you first need to install `sass`.\n";
            message += 'Run `npm i sass` or `yarn add sass` inside your workspace.\n';
            message += '\nLearn more: https://nextjs.org/docs/messages/install-sass\n';
        } else {
            message += renderStyledStringToErrorAnsi(description) + '\n\n';
        }
    }
    // TODO: make it easier to enable this for debugging
    if (VERBOSE_ISSUES && detail) {
        message += renderStyledStringToErrorAnsi(detail) + '\n\n';
    }
    // Render additional sources (e.g., generated code from a loader)
    for (const additional of issue.additionalSources ?? []){
        if (additional.codeFrame) {
            const additionalFilePath = formatFilePath(additional.source.source.filePath);
            const loc = additional.source.range ? `:${additional.source.range.start.line + 1}:${additional.source.range.start.column + 1}` : '';
            message += `${additional.description}:\n${additionalFilePath}${loc}\n${additional.codeFrame.trimEnd()}\n\n`;
        }
    }
    if (importTraces?.length) {
        // This is the same logic as in turbopack/crates/turbopack-cli-utils/src/issue.rs
        // We end up with multiple traces when the file with the error is reachable from multiple
        // different entry points (e.g. ssr, client)
        message += `Import trace${importTraces.length > 1 ? 's' : ''}:\n`;
        // When each trace starts from a distinct layer, the layer name alone is
        // enough to label it; otherwise fall back to numeric labels below.
        const everyTraceHasADistinctRootLayer = new Set(importTraces.map(leafLayerName).filter((l)=>l != null)).size === importTraces.length;
        for(let i = 0; i < importTraces.length; i++){
            const trace = importTraces[i];
            const layer = leafLayerName(trace);
            let traceIndent = ' ';
            // If this is true, layer must be present
            if (everyTraceHasADistinctRootLayer) {
                message += ` ${layer}:\n`;
            } else {
                if (importTraces.length > 1) {
                    // Otherwise use simple 1 based indices to disambiguate
                    message += ` #${i + 1}`;
                    if (layer) {
                        message += ` [${layer}]`;
                    }
                    message += ':\n';
                } else if (layer) {
                    message += ` [${layer}]:\n`;
                } else {
                    // If there is a single trace and no layer name just don't indent it.
                    traceIndent = ' ';
                }
            }
            // Per-item layer suffixes are only printed when the trace mixes layers.
            message += formatIssueTrace(trace, traceIndent, !identicalLayers(trace));
        }
    }
    if (documentationLink) {
        message += documentationLink + '\n\n';
    }
    return message;
}
|
||||
/** Returns the first present layer name in the trace */ function leafLayerName(items) {
    const firstWithLayer = items.find((item) => item.layer != null);
    // `find` yields undefined when no item carries a layer, matching the
    // "no layer present" result.
    return firstWithLayer?.layer;
}
|
||||
/**
 * Returns whether or not all items share the same layer.
 * If a layer is absent we ignore it in this analysis
 */ function identicalLayers(items) {
    const anchor = items.findIndex((t) => t.layer != null);
    // All layers absent: trivially identical.
    if (anchor === -1) return true;
    const expected = items[anchor].layer;
    // NOTE(review): after the first present layer, an absent (null/undefined)
    // layer also counts as a mismatch — strict inequality against the non-null
    // anchor value fails. This mirrors the original loop's behavior.
    return items.slice(anchor + 1).every((t) => t.layer === expected);
}
|
||||
// Renders one trace item per line: indent, filesystem prefix, path, and —
// when `printLayers` is set — the item's layer in brackets. The result always
// ends with a blank line separating it from the next trace.
function formatIssueTrace(items, indent, printLayers) {
    const lines = items.map((item) => {
        // './' for the project filesystem keeps output consistent with webpack.
        const fsPrefix = item.fsName !== 'project' ? `[${item.fsName}]/` : './';
        const layerSuffix = printLayers && item.layer ? ` [${item.layer}]` : '';
        return `${indent}${fsPrefix}${item.path}${layerSuffix}`;
    });
    return lines.join('\n') + '\n\n';
}
|
||||
// A warning is "relevant" (worth surfacing) unless it originates from
// node_modules or from Next.js' own monorepo sources.
export function isRelevantWarning(issue) {
    if (issue.severity !== 'warning') return false;
    return !isNodeModulesIssue(issue);
}
|
||||
// Whether a warning originates from dependency code the user can't act on:
// node_modules, or Next.js' own sources when running inside the monorepo.
function isNodeModulesIssue(issue) {
    if (issue.severity !== 'warning') return false;
    if (issue.stage === 'config') {
        // Override for the externalize issue
        // `Package foo (serverExternalPackages or default list) can't be external`
        if (renderStyledStringToErrorAnsi(issue.title).includes("can't be external")) {
            return false;
        }
    }
    const inNodeModules = issue.filePath.match(/^(?:.*[\\/])?node_modules(?:[\\/].*)?$/) !== null;
    // Ignore Next.js itself when running next directly in the monorepo where it is not inside
    // node_modules anyway.
    // TODO(mischnic) prevent matches when this is published to npm
    const inNextMonorepoSources = issue.filePath.startsWith('[project]/packages/next/');
    return inNodeModules || inNextMonorepoSources;
}
|
||||
// Recursively renders a Turbopack StyledString tree to an ANSI-colored plain
// string. 'line' children are joined inline; 'stack' children are joined with
// newlines. Unknown node types throw with a Next.js error code attached.
export function renderStyledStringToErrorAnsi(string) {
    // Use shared deobfuscate function and apply magenta color to identifiers
    const deobfuscate = (raw) => {
        const plain = deobfuscateText(raw);
        // Color any {...} wrapped identifiers with magenta
        return plain.replace(/\{([^}]+)\}/g, (match) => magenta(match));
    };
    switch (string.type) {
        case 'text':
            return deobfuscate(string.value);
        case 'strong':
            return bold(red(deobfuscate(string.value)));
        case 'code':
            return green(deobfuscate(string.value));
        case 'line':
            return string.value.map(renderStyledStringToErrorAnsi).join('');
        case 'stack':
            return string.value.map(renderStyledStringToErrorAnsi).join('\n');
        default:
            // NOTE(review): the second Error argument is passed where an
            // options bag (`{ cause }`) is expected — preserved as-is.
            throw Object.defineProperty(new Error('Unknown StyledString type', string), "__NEXT_ERROR_CODE", {
                value: "E138",
                enumerable: false,
                configurable: true
            });
    }
}
|
||||
// Reads the experimental dev filesystem-cache flag from the Next.js config.
// Preserves the original `|| false` semantics: any falsy flag value (missing,
// undefined, 0, '') collapses to `false`; a truthy value is returned as-is.
export function isFileSystemCacheEnabledForDev(config) {
    const flag = config.experimental?.turbopackFileSystemCacheForDev;
    return flag ? flag : false;
}
|
||||
|
||||
//# sourceMappingURL=utils.js.map
|
||||
+1
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user