Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: put cache error #11

Merged
merged 1 commit into from
Apr 19, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/pretty-pumas-cheer.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"@web-widget/shared-cache": patch
---

Fixed the problem where errors were not logged correctly after updating the cache.
18 changes: 9 additions & 9 deletions src/cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,9 @@ export class SharedCache implements Cache {
}

const resolveOptions = {
waitUntil() {},
async waitUntil(promise: Promise<any>) {
await promise.catch(console.error);
},
...options,
};

Expand Down Expand Up @@ -138,11 +140,9 @@ export class SharedCache implements Cache {
// Well actually, in this case it's fine to return the stale response.
// But we'll update the cache in the background.
this.#waitUntil(
this.#revalidate(request, resolveCacheItem, cacheKey, fetch).then(
() => {}
)
this.#revalidate(request, resolveCacheItem, cacheKey, fetch)
);
this.#setCacheStatus(response.headers, STALE);
this.#setCacheStatus(response, STALE);
} else {
// NOTE: This will take effect when caching TTL is not working.
await deleteCacheItem(request, this.#storage, cacheKey);
Expand All @@ -152,10 +152,10 @@ export class SharedCache implements Cache {
cacheKey,
fetch
);
this.#setCacheStatus(response.headers, EXPIRED);
this.#setCacheStatus(response, EXPIRED);
}
} else {
this.#setCacheStatus(response.headers, HIT);
this.#setCacheStatus(response, HIT);
}

return response;
Expand Down Expand Up @@ -273,8 +273,8 @@ export class SharedCache implements Cache {
});
}

#setCacheStatus(headers: Headers, status: SharedCacheStatus) {
headers.set(CACHE_STATUS_HEADERS_NAME, status);
#setCacheStatus(response: Response, status: SharedCacheStatus) {
response.headers.set(CACHE_STATUS_HEADERS_NAME, status);
}
}

Expand Down
8 changes: 4 additions & 4 deletions src/fetch.test.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,19 @@
import { LRUCache } from 'lru-cache';
import { CacheItem, KVStorage } from './types';
import { KVStorage } from './types';
import { createSharedCacheFetch } from './fetch';
import { SharedCache } from './cache';
import { BYPASS, DYNAMIC, HIT, MISS, STALE } from './constants';

const TEST_URL = 'http://localhost/';

const createCacheStore = (): KVStorage => {
const store = new LRUCache<string, CacheItem>({ max: 1024 });
const store = new LRUCache<string, any>({ max: 1024 });

return {
async get(cacheKey: string) {
return store.get(cacheKey) as CacheItem | undefined;
return store.get(cacheKey);
},
async set(cacheKey: string, value: CacheItem, ttl?: number) {
async set(cacheKey: string, value: any, ttl?: number) {
store.set(cacheKey, value, { ttl });
},
async delete(cacheKey: string) {
Expand Down
47 changes: 26 additions & 21 deletions src/fetch.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,46 +39,45 @@ export function createSharedCacheFetch(
const request = new Request(input, init);
const requestCache = getRequestCacheMode(request, init?.cache);
const sharedCache = init?.sharedCache;
const interceptor = createInterceptor(fetcher, sharedCache);

if (requestCache === 'no-store') {
const fetchedResponse = await fetcher(request);
setCacheControlAndVary(fetchedResponse, sharedCache);
setCacheStatus(fetchedResponse.headers, BYPASS);
const fetchedResponse = await interceptor(input, init);
setCacheStatus(fetchedResponse, BYPASS);
return fetchedResponse;
}

const cachedResponse = await cache.match(request, {
...sharedCache,
_fetch: fetcher,
_fetch: interceptor,
forceCache:
requestCache === 'force-cache' || requestCache === 'only-if-cached',
});

if (cachedResponse) {
setCacheStatus(cachedResponse.headers, HIT);
setCacheStatus(cachedResponse, HIT);
return cachedResponse;
}

if (requestCache === 'only-if-cached') {
throw TypeError('Failed to fetch.');
}

const fetchedResponse = await fetcher(request);
setCacheControlAndVary(fetchedResponse, sharedCache);
const fetchedResponse = await interceptor(request);
const cacheControl = fetchedResponse.headers.get('cache-control');

if (cacheControl) {
if (bypassCache(cacheControl)) {
setCacheStatus(fetchedResponse.headers, BYPASS);
setCacheStatus(fetchedResponse, BYPASS);
} else {
const ok = await cache.put(request, fetchedResponse, sharedCache).then(
() => true,
() => false
);
setCacheStatus(fetchedResponse.headers, ok ? MISS : DYNAMIC);
setCacheStatus(fetchedResponse, ok ? MISS : DYNAMIC);
}
} else {
setCacheStatus(fetchedResponse.headers, DYNAMIC);
setCacheStatus(fetchedResponse, DYNAMIC);
}

return fetchedResponse;
Expand All @@ -87,24 +86,30 @@ export function createSharedCacheFetch(

export const sharedCacheFetch = createSharedCacheFetch();

function setCacheStatus(headers: Headers, status: SharedCacheStatus) {
function setCacheStatus(response: Response, status: SharedCacheStatus) {
const headers = response.headers;
if (!headers.has(CACHE_STATUS_HEADERS_NAME)) {
headers.set(CACHE_STATUS_HEADERS_NAME, status);
}
}

function setCacheControlAndVary(
response: Response,
function createInterceptor(
fetcher: typeof fetch,
sharedCache?: SharedCacheRequestInitProperties
) {
if (response.ok) {
if (sharedCache?.cacheControlOverride) {
cacheControl(response.headers, sharedCache.cacheControlOverride);
}
if (sharedCache?.varyOverride) {
vary(response.headers, sharedCache.varyOverride);
): typeof fetch {
return async function fetch(...args) {
const response = await fetcher(...args);
const headers = response.headers;
if (response.ok) {
if (sharedCache?.cacheControlOverride) {
cacheControl(headers, sharedCache.cacheControlOverride);
}
if (sharedCache?.varyOverride) {
vary(headers, sharedCache.varyOverride);
}
}
}
return response;
};
}

function bypassCache(cacheControl: string) {
Expand Down