.
This commit is contained in:
+53
@@ -0,0 +1,53 @@
|
||||
import type { CachedFetchValue } from '../response-cache/types';
|
||||
import type { CollectedCacheResult } from '../use-cache/use-cache-wrapper';
|
||||
/**
 * A generic cache store type that provides a subset of Map functionality.
 * Only read access (`get`, `entries`, `keys`, `size`, iteration) and `set`
 * are exposed; destructive operations such as `delete` and `clear` are
 * intentionally excluded from the type.
 */
type CacheStore<T> = Pick<Map<string, T>, 'entries' | 'keys' | 'size' | 'get' | 'set' | typeof Symbol.iterator>;
|
||||
/**
 * A cache store specifically for fetch cache values, keyed by cache key,
 * with `CachedFetchValue` entries from the response cache as values.
 */
export type FetchCacheStore = CacheStore<CachedFetchValue>;
|
||||
/**
 * A cache store for encrypted bound args of inline server functions.
 * Values are opaque encrypted strings.
 */
export type EncryptedBoundArgsCacheStore = CacheStore<string>;
|
||||
/**
 * An in-memory-only cache store for decrypted bound args of inline server
 * functions. Unlike the encrypted store, this one is never persisted to the
 * resume store.
 */
export type DecryptedBoundArgsCacheStore = CacheStore<string>;
|
||||
/**
 * Serialized format for "use cache" entries
 */
export interface UseCacheCacheStoreSerialized {
    entry: {
        // Base64-encoded bytes of the cached value stream.
        value: string;
        // Cache tags associated with the entry.
        tags: string[];
        // Timing metadata for the entry. NOTE(review): units (seconds vs.
        // milliseconds) are not evident from this file; confirm against the
        // producer of these entries.
        stale: number;
        timestamp: number;
        expire: number;
        revalidate: number;
    };
    // Whether an explicit revalidate/expire was configured, when known.
    hasExplicitRevalidate: boolean | undefined;
    hasExplicitExpire: boolean | undefined;
    // Root param names read by the cached function; stored as an array here
    // because the in-memory Set form is not JSON-serializable.
    readRootParamNames: string[] | undefined;
}
|
||||
/**
 * A cache store specifically for "use cache" values that stores promises of
 * collected cache results (entry + metadata). Storing promises lets entries
 * be registered in the store before their results have settled.
 */
export type UseCacheCacheStore = CacheStore<Promise<CollectedCacheResult>>;
|
||||
/**
 * Parses serialized cache entries into a UseCacheCacheStore. Each entry's
 * base64 value is decoded back into a byte stream, and results are stored
 * as already-resolved promises.
 * @param entries - The serialized entries to parse
 * @returns A new UseCacheCacheStore containing the parsed entries
 */
export declare function parseUseCacheCacheStore(entries: Iterable<[string, UseCacheCacheStoreSerialized]>): UseCacheCacheStore;
|
||||
/**
 * Serializes UseCacheCacheStore entries into an array of key-value pairs.
 * Entries that fail to serialize, or that are omitted because they are
 * dynamic (when cache components are enabled), appear as `null` in the
 * resulting array.
 * @param entries - The store entries to stringify
 * @param isCacheComponentsEnabled - When true, dynamic entries are dropped
 * @returns A promise that resolves to an array of key-value pairs with serialized values
 */
export declare function serializeUseCacheCacheStore(entries: IterableIterator<[string, Promise<CollectedCacheResult>]>, isCacheComponentsEnabled: boolean): Promise<Array<[string, UseCacheCacheStoreSerialized] | null>>;
|
||||
export {};
|
||||
+96
@@ -0,0 +1,96 @@
|
||||
"use strict";
// Mark this transpiled file as an ES-module-compatible CommonJS module.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Dead code (`0 && ...`) that never runs; it appears to be a static-analysis
// hint so tools (e.g. cjs-module-lexer) can detect the named exports of this
// generated module. NOTE(review): compiler output — do not edit by hand.
0 && (module.exports = {
    parseUseCacheCacheStore: null,
    serializeUseCacheCacheStore: null
});
|
||||
/**
 * Installs every entry of `all` on `target` as an enumerable getter, so each
 * export is resolved lazily on first property access.
 */
function _export(target, all) {
    for (const name in all) {
        Object.defineProperty(target, name, {
            enumerable: true,
            get: all[name]
        });
    }
}
|
||||
// Register this module's public exports as lazy getters over the function
// declarations below.
_export(exports, {
    parseUseCacheCacheStore: function() {
        return parseUseCacheCacheStore;
    },
    serializeUseCacheCacheStore: function() {
        return serializeUseCacheCacheStore;
    }
});
|
||||
const _encryptionutils = require("../app-render/encryption-utils");
|
||||
const _constants = require("../use-cache/constants");
|
||||
/**
 * Parses serialized "use cache" entries into an in-memory cache store.
 *
 * Each serialized entry's base64 `value` is decoded back into a single-chunk
 * `ReadableStream` of bytes, and the whole cached result is wrapped in an
 * already-resolved promise to match the shape produced during rendering.
 *
 * @param entries - Iterable of `[key, serializedEntry]` pairs
 * @returns A new Map-based store containing the parsed entries
 */
function parseUseCacheCacheStore(entries) {
    const store = new Map();
    for (const [key, serialized] of entries){
        const { entry, hasExplicitRevalidate, hasExplicitExpire, readRootParamNames } = serialized;
        const valueStream = new ReadableStream({
            start (controller) {
                // Decode the base64 string back into bytes, emit them as a
                // single chunk, and close the stream.
                controller.enqueue((0, _encryptionutils.stringToUint8Array)(atob(entry.value)));
                controller.close();
            }
        });
        store.set(key, Promise.resolve({
            entry: {
                value: valueStream,
                tags: entry.tags,
                stale: entry.stale,
                timestamp: entry.timestamp,
                expire: entry.expire,
                revalidate: entry.revalidate
            },
            hasExplicitRevalidate,
            hasExplicitExpire,
            // The serialized form stores root param names as an array; the
            // in-memory form uses a Set.
            readRootParamNames: readRootParamNames ? new Set(readRootParamNames) : undefined
        }));
    }
    return store;
}
|
||||
/**
 * Serializes "use cache" store entries into JSON-safe `[key, value]` pairs.
 *
 * Each pending entry promise is awaited; its `ReadableStream` value is teed
 * so one branch can be consumed for serialization while the other replaces
 * the original (a stream can only be read once). Stream bytes are base64
 * encoded because they are not guaranteed to be valid UTF-8.
 *
 * @param entries - The store entries to stringify
 * @param isCacheComponentsEnabled - When true, dynamic entries (revalidate 0
 * or expire below DYNAMIC_EXPIRE) are dropped from the output
 * @returns A promise resolving to serialized pairs; omitted or failed
 * entries are represented as `null`
 */
async function serializeUseCacheCacheStore(entries, isCacheComponentsEnabled) {
    return Promise.all(Array.from(entries).map(([key, value])=>{
        return value.then(async ({ entry, hasExplicitRevalidate, hasExplicitExpire, readRootParamNames })=>{
            if (isCacheComponentsEnabled && (entry.revalidate === 0 || entry.expire < _constants.DYNAMIC_EXPIRE)) {
                // The entry was omitted from the prerender result, and subsequently
                // does not need to be included in the serialized RDC.
                return null;
            }
            // Tee the stream: consume `left` for serialization, keep `right`
            // in the store so the entry remains readable afterwards.
            const [left, right] = entry.value.tee();
            entry.value = right;
            let binaryString = '';
            // We want to encode the value as a string, but we aren't sure if the
            // value is a stream of UTF-8 bytes or not, so let's just encode it
            // as a string using base64.
            for await (const chunk of left){
                binaryString += (0, _encryptionutils.arrayBufferToString)(chunk);
            }
            return [
                key,
                {
                    entry: {
                        // Encode the value as a base64 string.
                        value: btoa(binaryString),
                        tags: entry.tags,
                        stale: entry.stale,
                        timestamp: entry.timestamp,
                        expire: entry.expire,
                        revalidate: entry.revalidate
                    },
                    hasExplicitRevalidate,
                    hasExplicitExpire,
                    // Sets are not JSON-serializable; persist as an array.
                    readRootParamNames: readRootParamNames ? [
                        ...readRootParamNames
                    ] : undefined
                }
            ];
        }).catch(()=>{
            // Any failed cache writes should be ignored as to not discard the
            // entire cache.
            return null;
        });
    }));
}
|
||||
|
||||
//# sourceMappingURL=cache-store.js.map
|
||||
+1
File diff suppressed because one or more lines are too long
+92
@@ -0,0 +1,92 @@
|
||||
import { type UseCacheCacheStore, type FetchCacheStore, type EncryptedBoundArgsCacheStore, type DecryptedBoundArgsCacheStore } from './cache-store';
|
||||
/**
 * An immutable version of the resume data cache used during rendering.
 * This cache is read-only and cannot be modified once created.
 *
 * Note: immutability is enforced at the type level only (by omitting 'set'
 * from each store's type); the underlying runtime objects are plain Maps.
 */
export interface RenderResumeDataCache {
    /**
     * A read-only Map store for values cached by the 'use cache' React hook.
     * The 'set' operation is omitted to enforce immutability.
     */
    readonly cache: Omit<UseCacheCacheStore, 'set'>;
    /**
     * A read-only Map store for cached fetch responses.
     * The 'set' operation is omitted to enforce immutability.
     */
    readonly fetch: Omit<FetchCacheStore, 'set'>;
    /**
     * A read-only Map store for encrypted bound args of inline server functions.
     * The 'set' operation is omitted to enforce immutability.
     */
    readonly encryptedBoundArgs: Omit<EncryptedBoundArgsCacheStore, 'set'>;
    /**
     * A read-only Map store for decrypted bound args of inline server functions.
     * This is only intended for in-memory usage during pre-rendering, and must
     * not be persisted in the resume store. The 'set' operation is omitted to
     * enforce immutability.
     */
    readonly decryptedBoundArgs: Omit<DecryptedBoundArgsCacheStore, 'set'>;
}
|
||||
/**
 * A mutable version of the resume data cache used during pre-rendering.
 * This cache allows both reading and writing of cached values. The
 * properties themselves are readonly (the stores cannot be swapped out),
 * but each store supports 'set'.
 */
export interface PrerenderResumeDataCache {
    /**
     * A mutable Map store for values cached by the 'use cache' React hook.
     * Supports both 'get' and 'set' operations to build the cache during
     * pre-rendering.
     */
    readonly cache: UseCacheCacheStore;
    /**
     * A mutable Map store for cached fetch responses.
     * Supports both 'get' and 'set' operations to build the cache during
     * pre-rendering.
     */
    readonly fetch: FetchCacheStore;
    /**
     * A mutable Map store for encrypted bound args of inline server functions.
     * Supports both 'get' and 'set' operations to build the cache during
     * pre-rendering.
     */
    readonly encryptedBoundArgs: EncryptedBoundArgsCacheStore;
    /**
     * A mutable Map store for decrypted bound args of inline server functions.
     * This is only intended for in-memory usage during pre-rendering, and must
     * not be persisted in the resume store. Supports both 'get' and 'set'
     * operations to build the cache during pre-rendering.
     */
    readonly decryptedBoundArgs: DecryptedBoundArgsCacheStore;
}
|
||||
/**
 * Serializes a resume data cache for storage or transmission. Handles
 * 'use cache' values, fetch responses, and encrypted bound args for inline
 * server functions; decrypted bound args are deliberately not persisted.
 * The result is a deflate-compressed, base64-encoded string (not raw JSON),
 * or the literal string 'null' when the cache is empty. Node.js runtime
 * only.
 *
 * @param resumeDataCache - The immutable cache to serialize
 * @param isCacheComponentsEnabled - Affects which 'use cache' entries are
 * retained in the serialized output
 * @returns A Promise that resolves to the serialized cache string, or
 * 'null' if empty
 */
export declare function stringifyResumeDataCache(resumeDataCache: RenderResumeDataCache | PrerenderResumeDataCache, isCacheComponentsEnabled: boolean): Promise<string>;
|
||||
/**
 * Creates a new mutable resume data cache for pre-rendering.
 * Initializes fresh Map instances for the 'use cache', fetch, and
 * encrypted/decrypted bound args caches. Used at the start of pre-rendering
 * to begin collecting cached values.
 *
 * @param source - An optional existing cache whose entries are shallow-copied
 * into the new stores
 * @returns A new empty PrerenderResumeDataCache instance
 */
export declare function createPrerenderResumeDataCache(source?: PrerenderResumeDataCache | RenderResumeDataCache): PrerenderResumeDataCache;
|
||||
/**
 * Creates an immutable render resume data cache from either:
 * 1. An existing prerender or render cache instance (returned as-is with a
 *    narrowed, read-only type)
 * 2. A serialized cache string (decompressed and parsed)
 *
 * @param renderResumeDataCache - A RenderResumeDataCache instance to be used directly
 * @param prerenderResumeDataCache - A PrerenderResumeDataCache instance to convert to immutable
 * @param persistedCache - A serialized cache string to parse
 * @param maxPostponedStateSizeBytes - The max compressed size limit in bytes (used to calculate 5x decompression limit)
 * @returns An immutable RenderResumeDataCache instance
 */
export declare function createRenderResumeDataCache(renderResumeDataCache: RenderResumeDataCache): RenderResumeDataCache;
export declare function createRenderResumeDataCache(prerenderResumeDataCache: PrerenderResumeDataCache): RenderResumeDataCache;
export declare function createRenderResumeDataCache(persistedCache: string, maxPostponedStateSizeBytes: number | undefined): RenderResumeDataCache;
|
||||
+124
@@ -0,0 +1,124 @@
|
||||
"use strict";
// Mark this transpiled file as an ES-module-compatible CommonJS module.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Dead code (`0 && ...`) that never runs; it appears to be a static-analysis
// hint so tools (e.g. cjs-module-lexer) can detect the named exports of this
// generated module. NOTE(review): compiler output — do not edit by hand.
0 && (module.exports = {
    createPrerenderResumeDataCache: null,
    createRenderResumeDataCache: null,
    stringifyResumeDataCache: null
});
|
||||
/**
 * Installs every entry of `all` on `target` as an enumerable getter, so each
 * export is resolved lazily on first property access.
 */
function _export(target, all) {
    for (const name in all) {
        Object.defineProperty(target, name, {
            enumerable: true,
            get: all[name]
        });
    }
}
|
||||
// Register this module's public exports as lazy getters over the function
// declarations below.
_export(exports, {
    createPrerenderResumeDataCache: function() {
        return createPrerenderResumeDataCache;
    },
    createRenderResumeDataCache: function() {
        return createRenderResumeDataCache;
    },
    stringifyResumeDataCache: function() {
        return stringifyResumeDataCache;
    }
});
|
||||
const _invarianterror = require("../../shared/lib/invariant-error");
|
||||
const _cachestore = require("./cache-store");
|
||||
/**
 * Serializes a resume data cache into a deflate-compressed, base64-encoded
 * string, or the literal string 'null' when there is nothing to persist.
 * Decrypted bound args are deliberately excluded (in-memory only).
 *
 * @param resumeDataCache - The cache to serialize
 * @param isCacheComponentsEnabled - Forwarded to the "use cache" serializer;
 * controls whether dynamic entries are dropped
 * @returns The serialized cache string
 */
async function stringifyResumeDataCache(resumeDataCache, isCacheComponentsEnabled) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        // zlib is unavailable in the edge runtime, so calling this there is a
        // programming error.
        throw Object.defineProperty(new _invarianterror.InvariantError('`stringifyResumeDataCache` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
            value: "E602",
            enumerable: false,
            configurable: true
        });
    }
    // NOTE(review): only `fetch` and `cache` are consulted for emptiness; a
    // store holding only encrypted bound args serializes to 'null' — confirm
    // this is intentional.
    if (resumeDataCache.fetch.size === 0 && resumeDataCache.cache.size === 0) {
        return 'null';
    }
    const fetchEntries = Object.fromEntries(resumeDataCache.fetch.entries());
    // Serialized "use cache" entries may contain `null` placeholders for
    // omitted or failed entries; drop those before persisting.
    const serializedCacheEntries = await (0, _cachestore.serializeUseCacheCacheStore)(resumeDataCache.cache.entries(), isCacheComponentsEnabled);
    const cacheEntries = Object.fromEntries(serializedCacheEntries.filter((pair)=>pair !== null));
    const encryptedBoundArgsEntries = Object.fromEntries(resumeDataCache.encryptedBoundArgs.entries());
    const json = {
        store: {
            fetch: fetchEntries,
            cache: cacheEntries,
            encryptedBoundArgs: encryptedBoundArgsEntries
        }
    };
    // Compress the JSON string using zlib. The payload is already fully in
    // memory, so the synchronous deflateSync function is used.
    const { deflateSync } = require('node:zlib');
    return deflateSync(JSON.stringify(json)).toString('base64');
}
|
||||
/**
 * Creates a new mutable resume data cache for pre-rendering.
 *
 * When a source cache is given, each of its four stores is shallow-copied
 * into a fresh Map so later writes don't affect the source; otherwise all
 * stores start out empty.
 *
 * @param source - Optional existing cache to copy entries from
 * @returns A new PrerenderResumeDataCache instance
 */
function createPrerenderResumeDataCache(source) {
    // `new Map(undefined)` yields an empty Map, so the copy and fresh-cache
    // cases collapse into a single construction path.
    return {
        cache: new Map(source?.cache),
        fetch: new Map(source?.fetch),
        encryptedBoundArgs: new Map(source?.encryptedBoundArgs),
        decryptedBoundArgs: new Map(source?.decryptedBoundArgs)
    };
}
|
||||
/**
 * Creates an immutable render resume data cache, either by returning an
 * existing cache instance directly, or by decompressing and parsing a
 * persisted cache string. Node.js runtime only.
 *
 * @param resumeDataCacheOrPersistedCache - An existing cache instance, or a
 * serialized cache string ('null' or deflate-compressed base64)
 * @param maxPostponedStateSizeBytes - Optional compressed-size limit used to
 * derive the 5x decompression cap
 * @returns An immutable RenderResumeDataCache instance
 */
function createRenderResumeDataCache(resumeDataCacheOrPersistedCache, maxPostponedStateSizeBytes) {
    if (process.env.NEXT_RUNTIME === 'edge') {
        // zlib is unavailable in the edge runtime, so calling this there is a
        // programming error.
        throw Object.defineProperty(new _invarianterror.InvariantError('`createRenderResumeDataCache` should not be called in edge runtime.'), "__NEXT_ERROR_CODE", {
            value: "E556",
            enumerable: false,
            configurable: true
        });
    } else {
        if (typeof resumeDataCacheOrPersistedCache !== 'string') {
            // If the cache is already a prerender or render cache, we can return it
            // directly. For the former, we're just performing a type change.
            return resumeDataCacheOrPersistedCache;
        }
        // 'null' is the sentinel produced when an empty cache was serialized.
        if (resumeDataCacheOrPersistedCache === 'null') {
            return {
                cache: new Map(),
                fetch: new Map(),
                encryptedBoundArgs: new Map(),
                decryptedBoundArgs: new Map()
            };
        }
        // This should be a compressed string. Let's decompress it using zlib.
        // As the data we already want to decompress is in memory, we use the
        // synchronous inflateSync function.
        const { inflateSync } = require('node:zlib');
        // Limit decompressed size to prevent zipbomb attacks. This is 5x the
        // configured maxPostponedStateSize, allowing reasonable compression
        // ratios while preventing extreme decompression bombs.
        // Default is 500MB (5x the default 100MB compressed limit).
        const maxDecompressedSize = maxPostponedStateSizeBytes ? maxPostponedStateSizeBytes * 5 : 500 * 1024 * 1024;
        let json;
        try {
            json = JSON.parse(inflateSync(Buffer.from(resumeDataCacheOrPersistedCache, 'base64'), {
                maxOutputLength: maxDecompressedSize
            }).toString('utf-8'));
        } catch (err) {
            // Node's zlib signals the maxOutputLength overflow with a
            // RangeError carrying code ERR_BUFFER_TOO_LARGE; surface that as
            // a descriptive error and rethrow anything else.
            if (err instanceof RangeError && err.code === 'ERR_BUFFER_TOO_LARGE') {
                throw Object.defineProperty(new Error(`Decompressed resume data cache exceeded ${maxDecompressedSize} byte limit`), "__NEXT_ERROR_CODE", {
                    value: "E976",
                    enumerable: false,
                    configurable: true
                });
            }
            throw err;
        }
        return {
            cache: (0, _cachestore.parseUseCacheCacheStore)(Object.entries(json.store.cache)),
            fetch: new Map(Object.entries(json.store.fetch)),
            encryptedBoundArgs: new Map(Object.entries(json.store.encryptedBoundArgs)),
            // Decrypted bound args are never persisted; always start empty.
            decryptedBoundArgs: new Map()
        };
    }
}
|
||||
|
||||
//# sourceMappingURL=resume-data-cache.js.map
|
||||
+1
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user