Skip to content

Commit 6212e6c

Browse files
committed
Add initial implementation for a SerialMultiCache
1 parent b615f5f commit 6212e6c

File tree

2 files changed

+155
-0
lines changed

2 files changed

+155
-0
lines changed
Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
import {
2+
TestableKeyValueCache,
3+
KeyValueCacheSetOptions,
4+
} from './KeyValueCache';
5+
6+
/**
7+
* The SerialMultiCache class's purpose is to support a number of cache implementations
8+
* with fallback behavior. The order of the provided `caches` argument is important - it
9+
* determines the order in which the multicache will look, with the zero index being first
10+
* and so on. A good application of this multicache might be, for example, an
11+
* `InMemoryCache` followed by a `RedisCache`. Expected behavior of the multicache:
12+
* 1. `get()` will look to the first cache, then second, etc. until it finds a result.
13+
* Upon a cache hit, all previous caches that missed will be populated with the
14+
* result.
15+
* 2. `set()` will populate all caches with the provided value.
16+
* 3. `delete()` will clear the entry from all caches.
17+
* 4. `flush()` will call `flush()` on each cache if the function is implemented.
18+
* 5. `close()` will call `close()` on each cache if the function is implemented.
19+
*/
20+
export class SerialMultiCache<T = string> implements TestableKeyValueCache<T> {
21+
constructor(private caches: TestableKeyValueCache<T>[]) {}
22+
23+
async get(key: string) {
24+
let foundIndex = 0;
25+
let result: T | undefined;
26+
for (const cache of this.caches) {
27+
result = await cache.get(key);
28+
// cache hit, we don't need to look any further in our array of caches
29+
if (typeof result !== 'undefined') break;
30+
foundIndex++;
31+
}
32+
33+
// Populate the cache misses with the result
34+
if (result) {
35+
for (let i = 0; i < foundIndex; i++) {
36+
this.caches[i].set(key, result);
37+
}
38+
}
39+
40+
return result;
41+
}
42+
43+
async set(key: string, value: T, options?: KeyValueCacheSetOptions) {
44+
for (const cache of this.caches) {
45+
cache.set(key, value, options);
46+
}
47+
}
48+
49+
async delete(key: string) {
50+
for (const cache of this.caches) {
51+
cache.delete(key);
52+
}
53+
}
54+
55+
async flush() {
56+
for (const cache of this.caches) {
57+
cache.flush?.();
58+
}
59+
}
60+
61+
async close() {
62+
for (const cache of this.caches) {
63+
cache.close?.();
64+
}
65+
}
66+
}
Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
import { SerialMultiCache } from '../SerialMultiCache';
2+
import { KeyValueCache } from '../KeyValueCache';
3+
4+
function getMockCache(
5+
cache: Record<string, number> = {},
6+
): KeyValueCache<number> {
7+
const get = jest.fn(async (key: string) => cache[key]);
8+
const set = jest.fn(async (key: string, value: number) => {
9+
cache[key] = value;
10+
});
11+
const del = jest.fn(async (key: string) => delete cache[key]);
12+
13+
return { get, set, delete: del };
14+
}
15+
16+
// Shared fixtures, rebuilt before every test so cases stay isolated.
let multiCache: SerialMultiCache<number>;
let tier0: KeyValueCache<number>;
let tier1: KeyValueCache<number>;
let tier2: KeyValueCache<number>;

beforeEach(() => {
  // Each tier is pre-seeded with exactly one distinct entry, so a test can
  // force a hit at a specific tier by choosing the matching key.
  tier0 = getMockCache({ zero: 0 });
  tier1 = getMockCache({ one: 1 });
  tier2 = getMockCache({ two: 2 });

  multiCache = new SerialMultiCache([tier0, tier1, tier2]);
});
28+
29+
describe('SerialMultiCache', () => {
30+
describe('get', () => {
31+
it("returns cached value on first cache hit, doesn't look further (hit tier0)", async () => {
32+
const result = await multiCache.get('zero');
33+
34+
expect(result).toEqual(0);
35+
expect(tier0.get).toHaveBeenCalled();
36+
expect(tier1.get).not.toHaveBeenCalled();
37+
});
38+
39+
it("returns cached value on first cache hit, doesn't look further (hit tier1)", async () => {
40+
const result = await multiCache.get('one');
41+
42+
// expect a call up the chain until a hit
43+
expect(tier0.get).toHaveBeenCalled();
44+
expect(tier1.get).toHaveBeenCalled();
45+
expect(tier2.get).not.toHaveBeenCalled();
46+
47+
expect(result).toEqual(1);
48+
49+
// expect lower tiers to have been populated with the result
50+
expect(tier0.set).toHaveBeenCalledWith('one', 1);
51+
expect(await tier0.get('one')).toEqual(1);
52+
});
53+
54+
it('returns cached value from tier2 cache and populates tier0 and tier1', async () => {
55+
const result = await multiCache.get('two');
56+
57+
// expect a call up the chain until a hit
58+
expect(tier0.get).toHaveBeenCalled();
59+
expect(tier1.get).toHaveBeenCalled();
60+
expect(tier2.get).toHaveBeenCalled();
61+
62+
expect(result).toEqual(2);
63+
64+
// expect lower tiers to have been populated with the result
65+
expect(tier0.set).toHaveBeenCalledWith('two', 2);
66+
expect(tier1.set).toHaveBeenCalledWith('two', 2);
67+
expect(await tier0.get('two')).toEqual(2);
68+
expect(await tier1.get('two')).toEqual(2);
69+
});
70+
});
71+
72+
it('sets a cached value on all tiers', async () => {
73+
multiCache.set('three', 3);
74+
75+
expect(await tier0.get('three')).toEqual(3);
76+
expect(await tier1.get('three')).toEqual(3);
77+
expect(await tier2.get('three')).toEqual(3);
78+
});
79+
80+
it('deletes a cached value from all tiers', async () => {
81+
// populate all caches with the 'two' entry
82+
const result = await multiCache.get('two');
83+
expect(result).toEqual(2);
84+
85+
multiCache.delete('two');
86+
87+
expect(await multiCache.get('two')).toBeUndefined();
88+
});
89+
});

0 commit comments

Comments
 (0)