From 58233e78e356fd003fbdf2a52f13c802c737203d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E7=B3=96=E9=A5=BC?=
Date: Fri, 19 Apr 2024 19:23:31 +0800
Subject: [PATCH] fix: put cache error

---
 .changeset/pretty-pumas-cheer.md |  5 ++++
 src/cache.ts                     | 18 ++++++------
 src/fetch.test.ts                |  8 +++---
 src/fetch.ts                     | 47 ++++++++++++++++++--------------
 4 files changed, 44 insertions(+), 34 deletions(-)
 create mode 100644 .changeset/pretty-pumas-cheer.md

diff --git a/.changeset/pretty-pumas-cheer.md b/.changeset/pretty-pumas-cheer.md
new file mode 100644
index 0000000..225c444
--- /dev/null
+++ b/.changeset/pretty-pumas-cheer.md
@@ -0,0 +1,5 @@
+---
+"@web-widget/shared-cache": patch
+---
+
+Fixed an issue where errors were not logged correctly after a cache update failure.
diff --git a/src/cache.ts b/src/cache.ts
index 22104ee..442020d 100644
--- a/src/cache.ts
+++ b/src/cache.ts
@@ -29,7 +29,9 @@ export class SharedCache implements Cache {
     }
 
     const resolveOptions = {
-      waitUntil() {},
+      async waitUntil(promise: Promise<any>) {
+        await promise.catch(console.error);
+      },
       ...options,
     };
 
@@ -138,11 +140,9 @@ export class SharedCache implements Cache {
         // Well actually, in this case it's fine to return the stale response.
         // But we'll update the cache in the background.
         this.#waitUntil(
-          this.#revalidate(request, resolveCacheItem, cacheKey, fetch).then(
-            () => {}
-          )
+          this.#revalidate(request, resolveCacheItem, cacheKey, fetch)
         );
-        this.#setCacheStatus(response.headers, STALE);
+        this.#setCacheStatus(response, STALE);
       } else {
         // NOTE: This will take effect when caching TTL is not working.
        await deleteCacheItem(request, this.#storage, cacheKey);
@@ -152,10 +152,10 @@
           cacheKey,
           fetch
         );
-        this.#setCacheStatus(response.headers, EXPIRED);
+        this.#setCacheStatus(response, EXPIRED);
       }
     } else {
-      this.#setCacheStatus(response.headers, HIT);
+      this.#setCacheStatus(response, HIT);
     }

     return response;
@@ -273,8 +273,8 @@ export class SharedCache implements Cache {
     });
   }

-  #setCacheStatus(headers: Headers, status: SharedCacheStatus) {
-    headers.set(CACHE_STATUS_HEADERS_NAME, status);
+  #setCacheStatus(response: Response, status: SharedCacheStatus) {
+    response.headers.set(CACHE_STATUS_HEADERS_NAME, status);
   }
 }

diff --git a/src/fetch.test.ts b/src/fetch.test.ts
index b1f3b68..3c88f37 100644
--- a/src/fetch.test.ts
+++ b/src/fetch.test.ts
@@ -1,5 +1,5 @@
 import { LRUCache } from 'lru-cache';
-import { CacheItem, KVStorage } from './types';
+import { KVStorage } from './types';
 import { createSharedCacheFetch } from './fetch';
 import { SharedCache } from './cache';
 import { BYPASS, DYNAMIC, HIT, MISS, STALE } from './constants';
@@ -7,13 +7,13 @@ import { BYPASS, DYNAMIC, HIT, MISS, STALE } from './constants';
 const TEST_URL = 'http://localhost/';

 const createCacheStore = (): KVStorage => {
-  const store = new LRUCache<string, CacheItem>({ max: 1024 });
+  const store = new LRUCache<string, any>({ max: 1024 });

   return {
     async get(cacheKey: string) {
-      return store.get(cacheKey) as CacheItem | undefined;
+      return store.get(cacheKey);
     },
-    async set(cacheKey: string, value: CacheItem, ttl?: number) {
+    async set(cacheKey: string, value: any, ttl?: number) {
       store.set(cacheKey, value, { ttl });
     },
     async delete(cacheKey: string) {
diff --git a/src/fetch.ts b/src/fetch.ts
index b19f7a6..990ff63 100644
--- a/src/fetch.ts
+++ b/src/fetch.ts
@@ -39,23 +39,23 @@ export function createSharedCacheFetch(
     const request = new Request(input, init);
     const requestCache = getRequestCacheMode(request, init?.cache);
     const sharedCache = init?.sharedCache;
+    const interceptor = createInterceptor(fetcher, sharedCache);

     if (requestCache === 'no-store') {
-      const fetchedResponse = await fetcher(request);
-      setCacheControlAndVary(fetchedResponse, sharedCache);
-      setCacheStatus(fetchedResponse.headers, BYPASS);
+      const fetchedResponse = await interceptor(input, init);
+      setCacheStatus(fetchedResponse, BYPASS);
       return fetchedResponse;
     }

     const cachedResponse = await cache.match(request, {
       ...sharedCache,
-      _fetch: fetcher,
+      _fetch: interceptor,
       forceCache:
         requestCache === 'force-cache' || requestCache === 'only-if-cached',
     });

     if (cachedResponse) {
-      setCacheStatus(cachedResponse.headers, HIT);
+      setCacheStatus(cachedResponse, HIT);
       return cachedResponse;
     }

@@ -63,22 +63,21 @@ export function createSharedCacheFetch(
       throw TypeError('Failed to fetch.');
     }

-    const fetchedResponse = await fetcher(request);
-    setCacheControlAndVary(fetchedResponse, sharedCache);
+    const fetchedResponse = await interceptor(request);

     const cacheControl = fetchedResponse.headers.get('cache-control');
     if (cacheControl) {
       if (bypassCache(cacheControl)) {
-        setCacheStatus(fetchedResponse.headers, BYPASS);
+        setCacheStatus(fetchedResponse, BYPASS);
       } else {
         const ok = await cache.put(request, fetchedResponse, sharedCache).then(
           () => true,
           () => false
         );
-        setCacheStatus(fetchedResponse.headers, ok ? MISS : DYNAMIC);
+        setCacheStatus(fetchedResponse, ok ? MISS : DYNAMIC);
       }
     } else {
-      setCacheStatus(fetchedResponse.headers, DYNAMIC);
+      setCacheStatus(fetchedResponse, DYNAMIC);
     }

     return fetchedResponse;
@@ -87,24 +86,30 @@

 export const sharedCacheFetch = createSharedCacheFetch();

-function setCacheStatus(headers: Headers, status: SharedCacheStatus) {
+function setCacheStatus(response: Response, status: SharedCacheStatus) {
+  const headers = response.headers;
   if (!headers.has(CACHE_STATUS_HEADERS_NAME)) {
     headers.set(CACHE_STATUS_HEADERS_NAME, status);
   }
 }

-function setCacheControlAndVary(
-  response: Response,
+function createInterceptor(
+  fetcher: typeof fetch,
   sharedCache?: SharedCacheRequestInitProperties
-) {
-  if (response.ok) {
-    if (sharedCache?.cacheControlOverride) {
-      cacheControl(response.headers, sharedCache.cacheControlOverride);
-    }
-    if (sharedCache?.varyOverride) {
-      vary(response.headers, sharedCache.varyOverride);
+): typeof fetch {
+  return async function fetch(...args) {
+    const response = await fetcher(...args);
+    const headers = response.headers;
+    if (response.ok) {
+      if (sharedCache?.cacheControlOverride) {
+        cacheControl(headers, sharedCache.cacheControlOverride);
+      }
+      if (sharedCache?.varyOverride) {
+        vary(headers, sharedCache.varyOverride);
+      }
     }
-  }
+    return response;
+  };
 }

 function bypassCache(cacheControl: string) {
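
Below is a minimal usage sketch of the patched behaviour, modelled on the test setup in src/fetch.test.ts. It is not part of the patch: the SharedCache constructor arguments, the default fetcher of createSharedCacheFetch, and the exact shape of the sharedCache request options (cacheControlOverride / varyOverride as header strings) are assumptions inferred from the diff above.

// usage-sketch.ts (illustrative only, not part of this patch)
import { LRUCache } from 'lru-cache';
import { createSharedCacheFetch } from './fetch';
import { SharedCache } from './cache';
import { KVStorage } from './types';

// In-memory KVStorage backed by lru-cache, mirroring createCacheStore()
// from src/fetch.test.ts.
const store = new LRUCache<string, any>({ max: 1024 });
const storage: KVStorage = {
  async get(cacheKey: string) {
    return store.get(cacheKey);
  },
  async set(cacheKey: string, value: any, ttl?: number) {
    store.set(cacheKey, value, { ttl });
  },
  async delete(cacheKey: string) {
    return store.delete(cacheKey);
  },
};

// With this patch, the default `waitUntil` awaits the background
// revalidation promise and logs rejections via console.error instead of
// dropping them. (Constructor shape assumed: storage plus optional options.)
const cache = new SharedCache(storage);
const sharedFetch = createSharedCacheFetch(cache);

async function main() {
  // `cacheControlOverride` / `varyOverride` are now applied by the
  // interceptor before `cache.put`, so the stored response carries them.
  const response = await sharedFetch('http://localhost/', {
    sharedCache: {
      cacheControlOverride: 's-maxage=60',
      varyOverride: 'accept-language',
    },
  });
  console.log(response.status);
}

main().catch(console.error);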