.
This commit is contained in:
+289
@@ -0,0 +1,289 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "unstable_cache", {
|
||||
enumerable: true,
|
||||
get: function() {
|
||||
return unstable_cache;
|
||||
}
|
||||
});
|
||||
const _constants = require("../../../lib/constants");
|
||||
const _patchfetch = require("../../lib/patch-fetch");
|
||||
const _workasyncstorageexternal = require("../../app-render/work-async-storage.external");
|
||||
const _workunitasyncstorageexternal = require("../../app-render/work-unit-async-storage.external");
|
||||
const _responsecache = require("../../response-cache");
|
||||
// Module-level fetch-index counter used when no workStore is available
// (e.g. Pages Router or a call outside of a render). Plays the role of
// workStore.nextFetchId in that fallback path; see the else-branch of
// cachedCb below where it is incremented.
let noStoreFetchIdx = 0;
||||
/**
 * Persists a freshly computed `unstable_cache` result into the incremental
 * cache, modeled as a synthetic FETCH cache entry (the result is JSON
 * serialized into the entry body).
 *
 * @param {unknown} result - value returned by the cached callback; must be JSON-serializable
 * @param {object} incrementalCache - cache implementation exposing `set(key, entry, ctx)`
 * @param {string} cacheKey - precomputed cache key for this invocation
 * @param {string[]} tags - cache tags to associate with the entry
 * @param {number|false|undefined} revalidate - TTL in seconds; non-numeric means "cache as long as possible"
 * @param {number} fetchIdx - sequential fetch index for logging/devtools
 * @param {string} fetchUrl - synthetic URL identifying this invocation
 * @returns {Promise<void>}
 */
async function cacheNewResult(result, incrementalCache, cacheKey, tags, revalidate, fetchIdx, fetchUrl) {
    // A non-numeric revalidate (false/undefined) is stored with the maximum
    // TTL rather than being omitted.
    const effectiveRevalidate = typeof revalidate === 'number' ? revalidate : _constants.CACHE_ONE_YEAR_SECONDS;
    const entry = {
        kind: _responsecache.CachedRouteKind.FETCH,
        data: {
            headers: {},
            // TODO: handle non-JSON values?
            body: JSON.stringify(result),
            status: 200,
            url: ''
        },
        revalidate: effectiveRevalidate
    };
    await incrementalCache.set(cacheKey, entry, {
        fetchCache: true,
        tags,
        fetchIdx,
        fetchUrl
    });
}
|
||||
/**
 * Wraps an async callback so its result is stored in / served from the
 * Next.js incremental cache, keyed by the callback source text, the
 * provided `keyParts`, and the JSON-serialized call arguments.
 *
 * @param {Function} cb - async function whose result should be cached; its
 *   `toString()` is part of the cache key, so the source text matters.
 * @param {string[]} [keyParts] - extra fixed key segments joined into the key.
 * @param {{ revalidate?: number|false, tags?: string[] }} [options]
 * @returns {Function} a callback with the same signature as `cb` that
 *   consults the incremental cache before invoking `cb`.
 * @throws if `options.revalidate === 0` or no incremental cache is available
 *   at call time.
 */
function unstable_cache(cb, keyParts, options = {}) {
    // revalidate: 0 is explicitly rejected — callers must use `false`
    // (no revalidation) or a positive number of seconds.
    if (options.revalidate === 0) {
        throw Object.defineProperty(new Error(`Invariant revalidate: 0 can not be passed to unstable_cache(), must be "false" or "> 0" ${cb.toString()}`), "__NEXT_ERROR_CODE", {
            value: "E57",
            enumerable: false,
            configurable: true
        });
    }
    // Validate the tags provided are valid
    const tags = options.tags ? (0, _patchfetch.validateTags)(options.tags, `unstable_cache ${cb.toString()}`) : [];
    // Validate the revalidate options
    (0, _patchfetch.validateRevalidate)(options.revalidate, `unstable_cache ${cb.name || cb.toString()}`);
    // Stash the fixed part of the key at construction time. The invocation key will combine
    // the fixed key with the arguments when actually called
    // @TODO if cb.toString() is long we should hash it
    // @TODO come up with a collision-free way to combine keyParts
    // @TODO consider validating the keyParts are all strings. TS can't provide runtime guarantees
    // and the error produced by accidentally using something that cannot be safely coerced is likely
    // hard to debug
    // NOTE: when keyParts is not an array this embeds the string "false" into
    // the key (boolean coerced by the template literal).
    const fixedKey = `${cb.toString()}-${Array.isArray(keyParts) && keyParts.join(',')}`;
    const cachedCb = async (...args)=>{
        const workStore = _workasyncstorageexternal.workAsyncStorage.getStore();
        const workUnitStore = _workunitasyncstorageexternal.workUnitAsyncStorage.getStore();
        // We must be able to find the incremental cache otherwise we throw
        const maybeIncrementalCache = (workStore == null ? void 0 : workStore.incrementalCache) || globalThis.__incrementalCache;
        if (!maybeIncrementalCache) {
            throw Object.defineProperty(new Error(`Invariant: incrementalCache missing in unstable_cache ${cb.toString()}`), "__NEXT_ERROR_CODE", {
                value: "E469",
                enumerable: false,
                configurable: true
            });
        }
        const incrementalCache = maybeIncrementalCache;
        const cacheSignal = workUnitStore ? (0, _workunitasyncstorageexternal.getCacheSignal)(workUnitStore) : null;
        // beginRead/endRead must stay balanced — endRead happens in the
        // `finally` at the bottom of this function.
        if (cacheSignal) {
            cacheSignal.beginRead();
        }
        try {
            // If there's no request store, we aren't in a request (or we're not in
            // app router) and if there's no static generation store, we aren't in app
            // router. Default to an empty pathname and search params when there's no
            // request store or static generation store available.
            const fetchUrlPrefix = workStore && workUnitStore ? getFetchUrlPrefix(workStore, workUnitStore) : '';
            // Construct the complete cache key for this function invocation
            // @TODO stringify is likely not safe here. We will coerce undefined to null which will make
            // the keyspace smaller than the execution space
            const invocationKey = `${fixedKey}-${JSON.stringify(args)}`;
            const cacheKey = await incrementalCache.generateCacheKey(invocationKey);
            // $urlWithPath,$sortedQueryStringKeys,$hashOfEveryThingElse
            const fetchUrl = `unstable_cache ${fetchUrlPrefix} ${cb.name ? ` ${cb.name}` : cacheKey}`;
            const fetchIdx = (workStore ? workStore.nextFetchId : noStoreFetchIdx) ?? 1;
            const implicitTags = workUnitStore == null ? void 0 : workUnitStore.implicitTags;
            // The store the callback runs under — marks nested work as being
            // inside an unstable_cache scope (nested fetches/caches detect this).
            const innerCacheStore = {
                type: 'unstable-cache',
                phase: 'render',
                implicitTags,
                draftMode: workUnitStore && workStore && (0, _workunitasyncstorageexternal.getDraftModeProviderForCacheScope)(workStore, workUnitStore),
                rootParams: undefined
            };
            if (workStore) {
                workStore.nextFetchId = fetchIdx + 1;
                // We are in an App Router context. We try to return the cached entry if it exists and is valid
                // If the entry is fresh we return it. If the entry is stale we return it but revalidate the entry in
                // the background. If the entry is missing or invalid we generate a new entry and return it.
                let isNestedUnstableCache = false;
                if (workUnitStore) {
                    switch(workUnitStore.type){
                        case 'cache':
                        case 'private-cache':
                        case 'prerender':
                        case 'prerender-runtime':
                        case 'prerender-ppr':
                        case 'prerender-legacy':
                            // We update the store's revalidate property if the option.revalidate is a higher precedence
                            // options.revalidate === undefined doesn't affect timing.
                            // options.revalidate === false doesn't shrink timing. it stays at the maximum.
                            if (typeof options.revalidate === 'number') {
                                if (workUnitStore.revalidate < options.revalidate) {
                                // The store is already revalidating on a shorter time interval, leave it alone
                                } else {
                                    workUnitStore.revalidate = options.revalidate;
                                }
                            }
                            // We need to accumulate the tags for this invocation within the store
                            const collectedTags = workUnitStore.tags;
                            if (collectedTags === null) {
                                workUnitStore.tags = tags.slice();
                            } else {
                                for (const tag of tags){
                                    // @TODO refactor tags to be a set to avoid this O(n) lookup
                                    if (!collectedTags.includes(tag)) {
                                        collectedTags.push(tag);
                                    }
                                }
                            }
                            break;
                        case 'unstable-cache':
                            isNestedUnstableCache = true;
                            break;
                        case 'prerender-client':
                        case 'validation-client':
                        case 'request':
                        case 'generate-static-params':
                            break;
                        default:
                            // Exhaustiveness check residue from TypeScript
                            // compilation — intentionally a no-op expression.
                            workUnitStore;
                    }
                }
                if (// when we are nested inside of other unstable_cache's
                // we should bypass cache similar to fetches
                !isNestedUnstableCache && workStore.fetchCache !== 'force-no-store' && !workStore.isOnDemandRevalidate && !incrementalCache.isOnDemandRevalidate && !workStore.isDraftMode) {
                    // We attempt to get the current cache entry from the incremental cache.
                    const cacheEntry = await incrementalCache.get(cacheKey, {
                        kind: _responsecache.IncrementalCacheKind.FETCH,
                        revalidate: options.revalidate,
                        tags,
                        softTags: implicitTags == null ? void 0 : implicitTags.tags,
                        fetchIdx,
                        fetchUrl
                    });
                    if (cacheEntry && cacheEntry.value) {
                        // The entry exists and has a value
                        if (cacheEntry.value.kind !== _responsecache.CachedRouteKind.FETCH) {
                            // The entry is invalid and we need a special warning
                            // @TODO why do we warn this way? Should this just be an error? How are these errors surfaced
                            // so bugs can be reported
                            // @TODO the invocation key can have sensitive data in it. we should not log this entire object
                            console.error(`Invariant invalid cacheEntry returned for ${invocationKey}`);
                        // will fall through to generating a new cache entry below
                        } else {
                            // We have a valid cache entry so we will be returning it. We also check to see if we need
                            // to background revalidate it by checking if it is stale.
                            const cachedResponse = cacheEntry.value.data.body !== undefined ? JSON.parse(cacheEntry.value.data.body) : undefined;
                            if (cacheEntry.isStale) {
                                if (!workStore.pendingRevalidates) {
                                    workStore.pendingRevalidates = {};
                                }
                                // Check if there's already a pending revalidation to avoid duplicate work
                                if (!workStore.pendingRevalidates[invocationKey]) {
                                    // Create the revalidation promise
                                    const revalidationPromise = _workunitasyncstorageexternal.workUnitAsyncStorage.run(innerCacheStore, cb, ...args).then(async (result)=>{
                                        await cacheNewResult(result, incrementalCache, cacheKey, tags, options.revalidate, fetchIdx, fetchUrl);
                                        return result;
                                    }).catch((err)=>{
                                        // @TODO This error handling seems wrong. We swallow the error?
                                        console.error(`revalidating cache with key: ${invocationKey}`, err);
                                        // Return the stale value on error for foreground revalidation
                                        return cachedResponse;
                                    });
                                    // Attach the empty catch here so we don't get a "unhandled promise
                                    // rejection" warning. (Behavior is matched with patch-fetch)
                                    if (workStore.isStaticGeneration) {
                                        revalidationPromise.catch(()=>{});
                                    }
                                    workStore.pendingRevalidates[invocationKey] = revalidationPromise;
                                }
                                // Check if we need to do foreground revalidation
                                if (workStore.isStaticGeneration) {
                                    // When the page is revalidating and the cache entry is stale,
                                    // we need to wait for fresh data (blocking revalidate)
                                    return workStore.pendingRevalidates[invocationKey];
                                }
                            // Otherwise, we're doing background revalidation - return stale immediately
                            }
                            // We had a valid cache entry so we return it here
                            return cachedResponse;
                        }
                    }
                }
                // If we got this far then we had an invalid cache entry and need to generate a new one
                const result = await _workunitasyncstorageexternal.workUnitAsyncStorage.run(innerCacheStore, cb, ...args);
                if (!workStore.isDraftMode) {
                    if (!workStore.pendingRevalidates) {
                        workStore.pendingRevalidates = {};
                    }
                    // We need to push the cache result promise to pending
                    // revalidates otherwise it won't be awaited and is just
                    // dangling
                    workStore.pendingRevalidates[invocationKey] = cacheNewResult(result, incrementalCache, cacheKey, tags, options.revalidate, fetchIdx, fetchUrl);
                }
                return result;
            } else {
                noStoreFetchIdx += 1;
                // We are in Pages Router or were called outside of a render. We don't have a store
                // so we just call the callback directly when it needs to run.
                // If the entry is fresh we return it. If the entry is stale we return it but revalidate the entry in
                // the background. If the entry is missing or invalid we generate a new entry and return it.
                if (!incrementalCache.isOnDemandRevalidate) {
                    // We aren't doing an on demand revalidation so we check use the cache if valid
                    const cacheEntry = await incrementalCache.get(cacheKey, {
                        kind: _responsecache.IncrementalCacheKind.FETCH,
                        revalidate: options.revalidate,
                        tags,
                        fetchIdx,
                        fetchUrl,
                        softTags: implicitTags == null ? void 0 : implicitTags.tags
                    });
                    if (cacheEntry && cacheEntry.value) {
                        // The entry exists and has a value
                        if (cacheEntry.value.kind !== _responsecache.CachedRouteKind.FETCH) {
                            // The entry is invalid and we need a special warning
                            // @TODO why do we warn this way? Should this just be an error? How are these errors surfaced
                            // so bugs can be reported
                            console.error(`Invariant invalid cacheEntry returned for ${invocationKey}`);
                        // will fall through to generating a new cache entry below
                        } else if (!cacheEntry.isStale) {
                            // We have a valid cache entry and it is fresh so we return it
                            return cacheEntry.value.data.body !== undefined ? JSON.parse(cacheEntry.value.data.body) : undefined;
                        }
                    }
                }
                // If we got this far then we had an invalid cache entry and need to generate a new one
                const result = await _workunitasyncstorageexternal.workUnitAsyncStorage.run(innerCacheStore, cb, ...args);
                // we need to wait setting the new cache result here as
                // we don't have pending revalidates on workStore to
                // push to and we can't have a dangling promise
                await cacheNewResult(result, incrementalCache, cacheKey, tags, options.revalidate, fetchIdx, fetchUrl);
                return result;
            }
        } finally{
            if (cacheSignal) {
                cacheSignal.endRead();
            }
        }
    };
    // TODO: once AsyncLocalStorage.run() returns the correct types this override will no longer be necessary
    return cachedCb;
}
|
||||
/**
 * Derives a stable, human-readable prefix for the synthetic fetch URL that
 * identifies an `unstable_cache` invocation in logs/devtools.
 *
 * - For a live request, returns the pathname plus a query string whose keys
 *   are sorted so the prefix does not depend on parameter order.
 * - For prerender/cache scopes, returns the route of the current work store.
 * - For any other (unexpected) type, returns the store itself — residue of a
 *   TypeScript exhaustiveness check in the original source.
 *
 * @param {object} workStore - work store carrying the current `route`
 * @param {object} workUnitStore - work-unit store with a `type` discriminant
 * @returns {string|object} URL-ish prefix string (or the store on fallthrough)
 */
function getFetchUrlPrefix(workStore, workUnitStore) {
    switch(workUnitStore.type){
        case 'request': {
            const { pathname, search } = workUnitStore.url;
            const params = new URLSearchParams(search);
            // Collect every key occurrence (duplicates included, matching
            // URLSearchParams iteration) and sort lexicographically.
            const keys = [];
            for (const key of params.keys()){
                keys.push(key);
            }
            keys.sort((a, b)=>a.localeCompare(b));
            const query = keys.map((key)=>`${key}=${params.get(key)}`).join('&');
            return query.length ? `${pathname}?${query}` : `${pathname}`;
        }
        case 'prerender':
        case 'prerender-client':
        case 'validation-client':
        case 'prerender-runtime':
        case 'prerender-ppr':
        case 'prerender-legacy':
        case 'cache':
        case 'private-cache':
        case 'unstable-cache':
        case 'generate-static-params':
            return workStore.route;
        default:
            // Exhaustiveness fallthrough from the TypeScript source.
            return workUnitStore;
    }
}
|
||||
|
||||
//# sourceMappingURL=unstable-cache.js.map
|
||||
Reference in New Issue
Block a user