All files / src/lib/api cache.ts

100% Statements 27/27
100% Branches 16/16
100% Functions 5/5
100% Lines 26/26

Press n or j to go to the next uncovered block, b, p or k for the previous block.

1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83                  3x     3x                           188x 71x     117x 117x   117x 1x     116x 115x     115x 3x 3x 305x       115x         2x         2x 2x 3x 2x 2x     2x         12x 12x 12x         10x          
// Lightweight in-memory cache for API responses during SSR
// Each Cloudflare Worker isolate gets its own cache that persists across requests
// TTL-based expiration — an expired entry is refetched (and replaced) on the next access
 
// A single cached value paired with the time it was stored.
interface CacheEntry<T> {
	// The cached payload; type is erased to `unknown` in the shared Map below.
	data: T;
	// Insertion time in epoch milliseconds; compared against Date.now() for TTL checks.
	timestamp: number;
}

// Module-level store shared by every function in this file.
// Lives for the lifetime of the Worker isolate (one Map per isolate).
const cache = new Map<string, CacheEntry<unknown>>();

// Default TTL: 5 minutes (300 seconds)
const DEFAULT_TTL_MS = 5 * 60 * 1000;
 
/**
 * Fetch-through cache: return the value stored under `key` while it is still
 * fresh; otherwise invoke `fetcher`, remember its result, and return it.
 *
 * Backed by the module-scoped Map, so results persist across requests handled
 * by the same Worker isolate (~30s-5min lifecycle).
 *
 * @param key     Cache key chosen by the caller.
 * @param fetcher Produces a fresh value on a miss or an expired entry.
 * @param ttlMs   Freshness window in milliseconds (defaults to 5 minutes).
 */
export async function cachedFetch<T>(
	key: string,
	fetcher: () => Promise<T>,
	ttlMs: number = DEFAULT_TTL_MS,
): Promise<T> {
	// Under vitest the cache is bypassed so mocked fetchers always run,
	// unless a test opts back in via __test_cache_enabled__.
	if (typeof globalThis !== "undefined") {
		const g = globalThis as Record<string, unknown>;
		if (g.__vitest_worker__ && !g.__test_cache_enabled__) {
			return fetcher();
		}
	}

	const now = Date.now();
	const hit = cache.get(key) as CacheEntry<T> | undefined;
	if (hit !== undefined && now - hit.timestamp < ttlMs) {
		return hit.data;
	}

	// Miss or expired: fetch fresh and overwrite. The timestamp is taken before
	// the fetch, so a slow fetcher slightly shortens the effective TTL.
	const fresh = await fetcher();
	cache.set(key, { data: fresh, timestamp: now });

	// Bound memory: once the map grows past 100 entries, drop anything that has
	// been expired for at least one extra TTL window (relative to this call's ttlMs).
	if (cache.size > 100) {
		const expiredBefore = now - 2 * ttlMs;
		for (const [staleKey, staleEntry] of cache) {
			if (staleEntry.timestamp < expiredBefore) {
				cache.delete(staleKey);
			}
		}
	}

	return fresh;
}
 
/** Remove a single entry; returns true when the key was actually present. */
export function invalidateCache(key: string): boolean {
	// Map#delete already reports whether an entry was removed.
	return cache.delete(key);
}
 
/**
 * Clear all cache entries whose key starts with `prefix`
 * (e.g. "project:" to drop all project-related entries).
 *
 * @param prefix Leading string the key must begin with.
 * @returns Number of entries removed.
 */
export function invalidateCacheByPrefix(prefix: string): number {
	let removed = 0;
	for (const key of cache.keys()) {
		// startsWith, not includes: this is *prefix* invalidation — a key that
		// merely contains the string somewhere in the middle must survive.
		if (key.startsWith(prefix)) {
			cache.delete(key); // safe: Map iterators tolerate deletion mid-iteration
			removed++;
		}
	}
	return removed;
}
 
/** Empty the cache entirely; returns how many entries were removed. */
export function clearAllCache(): number {
	const removed = cache.size;
	cache.clear();
	return removed;
}
 
/** Snapshot of the cache for debugging: entry count plus every key. */
export function getCacheStats(): { size: number; keys: string[] } {
	const keys = [...cache.keys()];
	// keys.length is necessarily equal to cache.size at this point.
	return { size: keys.length, keys };
}