This commit is contained in:
Kismet Hasanaj
2026-05-02 20:07:02 +02:00
parent ce8672e283
commit 34dc9aec52
9428 changed files with 1733330 additions and 0 deletions
+82
View File
@@ -0,0 +1,82 @@
import { arrayBufferToString, stringToUint8Array } from '../app-render/encryption-utils';
import { DYNAMIC_EXPIRE } from '../use-cache/constants';
/**
 * Deserializes previously serialized cache entries back into a
 * UseCacheCacheStore (a Map of cache keys to resolved entry promises).
 * @param entries - Iterable of [key, serialized entry] pairs to restore
 * @returns A new UseCacheCacheStore containing the parsed entries
 */ export function parseUseCacheCacheStore(entries) {
    const store = new Map();
    for (const [key, serialized] of entries){
        const { entry, hasExplicitRevalidate, hasExplicitExpire, readRootParamNames } = serialized;
        // The value was persisted as base64; decode it back into raw bytes.
        const decodedBytes = stringToUint8Array(atob(entry.value));
        // Re-expose the bytes as a single-chunk ReadableStream, matching the
        // shape the live cache expects.
        const valueStream = new ReadableStream({
            start (controller) {
                controller.enqueue(decodedBytes);
                controller.close();
            }
        });
        store.set(key, Promise.resolve({
            entry: {
                value: valueStream,
                tags: entry.tags,
                stale: entry.stale,
                timestamp: entry.timestamp,
                expire: entry.expire,
                revalidate: entry.revalidate
            },
            hasExplicitRevalidate,
            hasExplicitExpire,
            readRootParamNames: readRootParamNames ? new Set(readRootParamNames) : undefined
        }));
    }
    return store;
}
/**
 * Serializes UseCacheCacheStore entries into an array of key-value pairs.
 * Entries that fail to serialize (or are omitted for cache components)
 * resolve to null and should be filtered out by the caller.
 * @param entries - Iterable of [key, pending entry] pairs to stringify
 * @param isCacheComponentsEnabled - Whether dynamic entries should be omitted
 * @returns A promise resolving to an array of serialized pairs (or nulls)
 */ export async function serializeUseCacheCacheStore(entries, isCacheComponentsEnabled) {
    const tasks = Array.from(entries, ([key, pendingValue])=>{
        return pendingValue.then(async ({ entry, hasExplicitRevalidate, hasExplicitExpire, readRootParamNames })=>{
            if (isCacheComponentsEnabled && (entry.revalidate === 0 || entry.expire < DYNAMIC_EXPIRE)) {
                // The entry was omitted from the prerender result, and subsequently
                // does not need to be included in the serialized RDC.
                return null;
            }
            // Tee the stream so the live entry keeps a readable copy while we
            // consume the other branch for serialization.
            const [forSerialization, replacement] = entry.value.tee();
            entry.value = replacement;
            // We want to encode the value as a string, but we aren't sure if the
            // value is a stream of UTF-8 bytes or not, so let's just encode it
            // as a string using base64.
            let binaryString = '';
            for await (const chunk of forSerialization){
                binaryString += arrayBufferToString(chunk);
            }
            const serializedEntry = {
                // Encode the value as a base64 string.
                value: btoa(binaryString),
                tags: entry.tags,
                stale: entry.stale,
                timestamp: entry.timestamp,
                expire: entry.expire,
                revalidate: entry.revalidate
            };
            return [
                key,
                {
                    entry: serializedEntry,
                    hasExplicitRevalidate,
                    hasExplicitExpire,
                    readRootParamNames: readRootParamNames ? [
                        ...readRootParamNames
                    ] : undefined
                }
            ];
        }).catch(()=>{
            // Any failed cache writes should be ignored as to not discard the
            // entire cache.
            return null;
        });
    });
    return Promise.all(tasks);
}
//# sourceMappingURL=cache-store.js.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,112 @@
import { InvariantError } from '../../shared/lib/invariant-error';
import { serializeUseCacheCacheStore, parseUseCacheCacheStore } from './cache-store';
/**
 * Serializes a resume data cache into a JSON string for storage or
 * transmission. Handles 'use cache' values, fetch responses, and encrypted
 * bound args for inline server functions.
 *
 * @param resumeDataCache - The immutable cache to serialize
 * @param isCacheComponentsEnabled - Forwarded to the cache-store serializer
 * @returns A Promise that resolves to the serialized cache as a JSON string, or
 * 'null' if empty
 */ export async function stringifyResumeDataCache(resumeDataCache, isCacheComponentsEnabled) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        // Node-only code path (uses node:zlib below).
        const invariant = new InvariantError('`stringifyResumeDataCache` should not be called in edge runtime.');
        Object.defineProperty(invariant, "__NEXT_ERROR_CODE", {
            value: "E602",
            enumerable: false,
            configurable: true
        });
        throw invariant;
    }
    // An entirely empty cache serializes to the sentinel string 'null'.
    if (resumeDataCache.fetch.size === 0 && resumeDataCache.cache.size === 0) {
        return 'null';
    }
    const json = {
        store: {
            fetch: Object.fromEntries(resumeDataCache.fetch.entries()),
            // Entries omitted during serialization come back as null; drop them.
            cache: Object.fromEntries((await serializeUseCacheCacheStore(resumeDataCache.cache.entries(), isCacheComponentsEnabled)).filter((entry)=>entry !== null)),
            encryptedBoundArgs: Object.fromEntries(resumeDataCache.encryptedBoundArgs.entries())
        }
    };
    // Compress the JSON string using zlib. As the data we already want to
    // decompress is in memory, we use the synchronous deflateSync function.
    const { deflateSync } = require('node:zlib');
    return deflateSync(JSON.stringify(json)).toString('base64');
}
/**
 * Creates a new mutable resume data cache for pre-rendering.
 * When no source cache is given, initializes fresh Map instances for the
 * 'use cache', fetch, and bound-args caches — used at the start of
 * pre-rendering to begin collecting cached values. When a source cache is
 * given, shallow-copies each of its maps into new Map instances.
 *
 * @param source - Optional existing cache to copy entries from
 * @returns A new PrerenderResumeDataCache instance
 */ export function createPrerenderResumeDataCache(source) {
    if (!source) {
        return {
            cache: new Map(),
            fetch: new Map(),
            encryptedBoundArgs: new Map(),
            decryptedBoundArgs: new Map()
        };
    }
    return {
        cache: new Map(source.cache),
        fetch: new Map(source.fetch),
        encryptedBoundArgs: new Map(source.encryptedBoundArgs),
        decryptedBoundArgs: new Map(source.decryptedBoundArgs)
    };
}
/**
 * Produces a render-ready resume data cache from either an existing cache
 * object (returned as-is) or its persisted string form (decompressed from
 * base64+deflate and re-parsed).
 *
 * @param resumeDataCacheOrPersistedCache - A live cache object, the sentinel
 * string 'null', or a compressed serialized cache string
 * @param maxPostponedStateSizeBytes - Optional compressed-size limit used to
 * derive the decompression cap
 * @returns A RenderResumeDataCache instance
 */ export function createRenderResumeDataCache(resumeDataCacheOrPersistedCache, maxPostponedStateSizeBytes) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        // Node-only code path (uses node:zlib below).
        const invariant = new InvariantError('`createRenderResumeDataCache` should not be called in edge runtime.');
        Object.defineProperty(invariant, "__NEXT_ERROR_CODE", {
            value: "E556",
            enumerable: false,
            configurable: true
        });
        throw invariant;
    }
    if (typeof resumeDataCacheOrPersistedCache !== 'string') {
        // If the cache is already a prerender or render cache, we can return it
        // directly. For the former, we're just performing a type change.
        return resumeDataCacheOrPersistedCache;
    }
    if (resumeDataCacheOrPersistedCache === 'null') {
        // Sentinel for an empty serialized cache.
        return {
            cache: new Map(),
            fetch: new Map(),
            encryptedBoundArgs: new Map(),
            decryptedBoundArgs: new Map()
        };
    }
    // This should be a compressed string. Let's decompress it using zlib.
    // As the data we already want to decompress is in memory, we use the
    // synchronous inflateSync function.
    const { inflateSync } = require('node:zlib');
    // Limit decompressed size to prevent zipbomb attacks. This is 5x the
    // configured maxPostponedStateSize, allowing reasonable compression
    // ratios while preventing extreme decompression bombs.
    // Default is 500MB (5x the default 100MB compressed limit).
    const maxDecompressedSize = maxPostponedStateSizeBytes ? maxPostponedStateSizeBytes * 5 : 500 * 1024 * 1024;
    let json;
    try {
        const inflated = inflateSync(Buffer.from(resumeDataCacheOrPersistedCache, 'base64'), {
            maxOutputLength: maxDecompressedSize
        });
        json = JSON.parse(inflated.toString('utf-8'));
    } catch (err) {
        // Surface the size-cap violation with a dedicated error; rethrow
        // anything else unchanged.
        if (err instanceof RangeError && err.code === 'ERR_BUFFER_TOO_LARGE') {
            const sizeError = new Error(`Decompressed resume data cache exceeded ${maxDecompressedSize} byte limit`);
            Object.defineProperty(sizeError, "__NEXT_ERROR_CODE", {
                value: "E976",
                enumerable: false,
                configurable: true
            });
            throw sizeError;
        }
        throw err;
    }
    return {
        cache: parseUseCacheCacheStore(Object.entries(json.store.cache)),
        fetch: new Map(Object.entries(json.store.fetch)),
        encryptedBoundArgs: new Map(Object.entries(json.store.encryptedBoundArgs)),
        decryptedBoundArgs: new Map()
    };
}
//# sourceMappingURL=resume-data-cache.js.map
File diff suppressed because one or more lines are too long