diff --git a/core/autocomplete/CompletionProvider.ts b/core/autocomplete/CompletionProvider.ts index 5001a62032a..27e7ad53723 100644 --- a/core/autocomplete/CompletionProvider.ts +++ b/core/autocomplete/CompletionProvider.ts @@ -20,7 +20,7 @@ import AutocompleteLruCache from "./util/AutocompleteLruCache.js"; import { HelperVars } from "./util/HelperVars.js"; import { AutocompleteInput, AutocompleteOutcome } from "./util/types.js"; -const autocompleteCache = AutocompleteLruCache.get(); +const autocompleteCachePromise = AutocompleteLruCache.get(); // Errors that can be expected on occasion even during normal functioning should not be shown. // Not worth disrupting the user to tell them that a single autocomplete request didn't go through @@ -31,7 +31,7 @@ const ERRORS_TO_IGNORE = [ ]; export class CompletionProvider { - private autocompleteCache = AutocompleteLruCache.get(); + private autocompleteCache?: AutocompleteLruCache; public errorsShown: Set = new Set(); private bracketMatchingService = new BracketMatchingService(); private debouncer = new AutocompleteDebouncer(); @@ -48,6 +48,22 @@ export class CompletionProvider { ) { this.completionStreamer = new CompletionStreamer(this.onError.bind(this)); this.contextRetrievalService = new ContextRetrievalService(this.ide); + void this.initCache(); + } + + private async initCache() { + try { + this.autocompleteCache = await autocompleteCachePromise; + } catch (e) { + console.error("Failed to initialize autocomplete cache:", e); + } + } + + private async getCache(): Promise { + if (!this.autocompleteCache) { + this.autocompleteCache = await autocompleteCachePromise; + } + return this.autocompleteCache; } private async _prepareLlm(): Promise { @@ -201,14 +217,12 @@ export class CompletionProvider { // Completion let completion: string | undefined = ""; - - const cache = await autocompleteCache; + const cache = await this.getCache(); const cachedCompletion = helper.options.useCache ? 
await cache.get(helper.prunedPrefix) : undefined; let cacheHit = false; if (cachedCompletion) { - // Cache cacheHit = true; completion = cachedCompletion; } else { @@ -277,16 +291,12 @@ export class CompletionProvider { outcome.enabledStaticContextualization = true; } - ////////// - - // Save to cache if (!outcome.cacheHit && helper.options.useCache) { - (await this.autocompleteCache) + void cache .put(outcome.prefix, outcome.completion) .catch((e) => console.warn(`Failed to save to cache: ${e.message}`)); } - // When using the JetBrains extension, Mark as displayed const ideType = (await this.ide.getIdeInfo()).ideType; if (ideType === "jetbrains") { this.markDisplayed(input.completionId, outcome); @@ -299,4 +309,10 @@ export class CompletionProvider { this.loggingService.deleteAbortController(input.completionId); } } + + public async dispose() { + if (this.autocompleteCache) { + await this.autocompleteCache.close(); + } + } } diff --git a/core/autocomplete/util/AutocompleteLruCache.test.ts b/core/autocomplete/util/AutocompleteLruCache.test.ts new file mode 100644 index 00000000000..a3572b27a06 --- /dev/null +++ b/core/autocomplete/util/AutocompleteLruCache.test.ts @@ -0,0 +1,650 @@ +import AutocompleteLruCache from "./AutocompleteLruCache"; + +jest.mock("async-mutex", () => { + const acquire = jest.fn().mockResolvedValue(jest.fn()); + return { + Mutex: jest.fn().mockImplementation(() => ({ acquire })), + }; +}); +jest.mock("sqlite"); +jest.mock("sqlite3"); + +jest.useFakeTimers(); + +describe("AutocompleteLruCache", () => { + let mockDb: any; + let cache: AutocompleteLruCache; + let currentTime: number; + + const createMockDb = () => ({ + run: jest.fn().mockResolvedValue(undefined), + all: jest.fn().mockResolvedValue([]), + exec: jest.fn().mockResolvedValue(undefined), + close: jest.fn().mockResolvedValue(undefined), + }); + + beforeEach(() => { + jest.clearAllTimers(); + jest.clearAllMocks(); + currentTime = 1000000; + jest.spyOn(Date, 
"now").mockImplementation(() => currentTime); + + mockDb = createMockDb(); + cache = new (AutocompleteLruCache as any)(mockDb); + + // Reset static properties + (AutocompleteLruCache as any).capacity = 1000; + (AutocompleteLruCache as any).flushInterval = 30000; + }); + + afterEach(async () => { + // Clean up any running timers + if ((cache as any).flushTimer) { + clearInterval((cache as any).flushTimer); + } + jest.restoreAllMocks(); + }); + + // ═══════════════════════════════════════════════════════════════ + // BASIC CACHE OPERATIONS + // ═══════════════════════════════════════════════════════════════ + describe("Basic Cache Operations", () => { + describe("put() method", () => { + it("should store a new entry in the cache", async () => { + await cache.put("hello", "world"); + + const internalCache = (cache as any).cache; + expect(internalCache.has("hello")).toBe(true); + expect(internalCache.get("hello")).toEqual({ + value: "world", + timestamp: currentTime, + }); + }); + + it("should mark entry as dirty after put", async () => { + await cache.put("key", "value"); + + const dirtySet = (cache as any).dirty; + expect(dirtySet.has("key")).toBe(true); + }); + + it("should update existing entry with new value and timestamp", async () => { + await cache.put("key", "old"); + currentTime += 5000; + await cache.put("key", "new"); + + const entry = (cache as any).cache.get("key"); + expect(entry.value).toBe("new"); + expect(entry.timestamp).toBe(currentTime); + }); + + it("should handle prefix storage correctly", async () => { + await cache.put("original_prefix", "completion"); + const internalCache = (cache as any).cache; + expect(internalCache.size).toBe(1); + }); + + it("should acquire mutex lock during put operation", async () => { + const releaseSpy = jest.fn(); + const mutex = (cache as any).mutex; + const acquireSpy = jest + .spyOn(mutex, "acquire") + .mockResolvedValue(releaseSpy); + + await cache.put("test", "value"); + + expect(acquireSpy).toHaveBeenCalled(); 
+ expect(releaseSpy).toHaveBeenCalled(); + }); + }); + + describe("get() method", () => { + it("should retrieve exact match from cache", async () => { + await cache.put("prefix", "completion"); + + const result = await cache.get("prefix"); + expect(result).toBe("completion"); + }); + + it("should return undefined when no match exists", async () => { + const result = await cache.get("nonexistent"); + expect(result).toBeUndefined(); + }); + + it("should update timestamp when entry is accessed", async () => { + await cache.put("key", "value"); + const originalTimestamp = (cache as any).cache.get("key").timestamp; + + currentTime += 10000; + await cache.get("key"); + + const updatedTimestamp = (cache as any).cache.get("key").timestamp; + expect(updatedTimestamp).toBe(currentTime); + expect(updatedTimestamp).toBeGreaterThan(originalTimestamp); + }); + + it("should mark entry as dirty after updating timestamp", async () => { + await cache.put("key", "value"); + (cache as any).dirty.clear(); + + await cache.get("key"); + + expect((cache as any).dirty.has("key")).toBe(true); + }); + + it("should find longest matching prefix", async () => { + await cache.put("hel", "lo_world"); + await cache.put("hello", "_user_suffix"); + const result = await cache.get("hello_user"); + expect(result).toBe("_suffix"); + }); + + it("should return completion with correct prefix stripped", async () => { + await cache.put("pre", "fix_completion"); + + const result = await cache.get("prefix"); + expect(result).toBe("_completion"); + }); + + it("should validate that completion starts with remaining prefix", async () => { + await cache.put("abc", "xyz"); + + // "abcdef" starts with "abc", remaining is "def" + // But cached value "xyz" doesn't start with "def" + const result = await cache.get("abcdef"); + expect(result).toBeUndefined(); + }); + + it("should handle prefix matching logic", async () => { + await cache.put("pre", "fix_completion"); + + const result = await cache.get("prefix"); + // 
Result depends on truncateSqliteLikePattern behavior + expect( + result === "fix_completion" || + result === "_completion" || + result === undefined, + ).toBe(true); + }); + }); + }); + + // ═══════════════════════════════════════════════════════════════ + // LRU EVICTION MECHANISM + // ═══════════════════════════════════════════════════════════════ + describe("LRU Eviction", () => { + beforeEach(() => { + (AutocompleteLruCache as any).capacity = 3; + }); + + it("should respect capacity limit", async () => { + await cache.put("a", "1"); + await cache.put("b", "2"); + await cache.put("c", "3"); + + expect((cache as any).cache.size).toBe(3); + + await cache.put("d", "4"); + + expect((cache as any).cache.size).toBe(3); + }); + + it("should evict oldest entry when capacity exceeded", async () => { + await cache.put("a", "1"); + currentTime += 1000; + await cache.put("b", "2"); + currentTime += 1000; + await cache.put("c", "3"); + currentTime += 1000; + await cache.put("d", "4"); // Should evict "a" + + expect(await cache.get("a")).toBeUndefined(); + expect(await cache.get("b")).toBe("2"); + expect(await cache.get("c")).toBe("3"); + expect(await cache.get("d")).toBe("4"); + }); + + it("should keep recently accessed entries during eviction", async () => { + await cache.put("a", "1"); + currentTime += 1000; + await cache.put("b", "2"); + currentTime += 1000; + await cache.put("c", "3"); + currentTime += 1000; + + // Access "a" to make it recent + await cache.get("a"); + currentTime += 1000; + + await cache.put("d", "4"); // Should evict "b" (oldest) + + expect(await cache.get("a")).toBe("1"); + expect(await cache.get("b")).toBeUndefined(); + expect(await cache.get("c")).toBe("3"); + expect(await cache.get("d")).toBe("4"); + }); + + it("should mark evicted entry as dirty for database deletion", async () => { + await cache.put("a", "1"); + await cache.put("b", "2"); + await cache.put("c", "3"); + + (cache as any).dirty.clear(); + + await cache.put("d", "4"); + + const 
dirtySet = (cache as any).dirty; + expect(dirtySet.size).toBeGreaterThan(0); + }); + + it("should handle capacity of 1", async () => { + (AutocompleteLruCache as any).capacity = 1; + + await cache.put("a", "1"); + await cache.put("b", "2"); + + expect((cache as any).cache.size).toBe(1); + expect(await cache.get("a")).toBeUndefined(); + expect(await cache.get("b")).toBe("2"); + }); + + it("should find oldest entry among multiple old entries", async () => { + await cache.put("a", "1"); + currentTime += 100; + await cache.put("b", "2"); + currentTime += 100; + await cache.put("c", "3"); + + // Make "b" oldest by accessing "a" and "c" + currentTime += 100; + await cache.get("c"); + currentTime += 100; + await cache.get("a"); + currentTime += 100; + + await cache.put("d", "4"); // Should evict "b" + + expect(await cache.get("b")).toBeUndefined(); + }); + }); + + // ═══════════════════════════════════════════════════════════════ + // DATABASE PERSISTENCE & FLUSH + // ═══════════════════════════════════════════════════════════════ + describe("Database Persistence", () => { + describe("flush() method", () => { + it("should do nothing when dirty set is empty", async () => { + await cache.flush(); + + expect(mockDb.run).not.toHaveBeenCalled(); + }); + + it("should wrap operations in transaction", async () => { + await cache.put("key", "value"); + await cache.flush(); + + const calls = mockDb.run.mock.calls; + expect(calls[0][0]).toBe("BEGIN TRANSACTION"); + expect(calls[calls.length - 1][0]).toBe("COMMIT"); + }); + + it("should perform upsert for existing cache entries", async () => { + await cache.put("foo", "bar"); + await cache.flush(); + + expect(mockDb.run).toHaveBeenCalledWith( + expect.stringContaining("INSERT INTO cache"), + "foo", + "bar", + currentTime, + "bar", + currentTime, + ); + }); + + it("should delete entries removed from cache", async () => { + await cache.put("temp", "value"); + (cache as any).cache.delete("temp"); + (cache as any).dirty.add("temp"); + + 
await cache.flush(); + + expect(mockDb.run).toHaveBeenCalledWith( + "DELETE FROM cache WHERE key = ?", + "temp", + ); + }); + + it("should clear dirty set after successful flush", async () => { + await cache.put("key1", "val1"); + await cache.put("key2", "val2"); + + expect((cache as any).dirty.size).toBe(2); + + await cache.flush(); + + expect((cache as any).dirty.size).toBe(0); + }); + + it("should rollback transaction on error", async () => { + await cache.put("key", "value"); + mockDb.run.mockImplementation((sql: string) => { + if (sql.includes("INSERT")) { + return Promise.reject(new Error("DB Error")); + } + return Promise.resolve(); + }); + + await cache.flush(); + + expect(mockDb.run).toHaveBeenCalledWith("ROLLBACK"); + }); + + it("should log error when flush fails", async () => { + const consoleError = jest.spyOn(console, "error").mockImplementation(); + await cache.put("key", "value"); + const dbError = new Error("Database failure"); + mockDb.run.mockRejectedValueOnce(dbError); + + await cache.flush(); + + expect(consoleError).toHaveBeenCalledWith( + "Error flushing cache:", + dbError, + ); + + consoleError.mockRestore(); + }); + + it("should acquire mutex during flush", async () => { + const releaseSpy = jest.fn(); + const mutex = (cache as any).mutex; + const acquireSpy = jest + .spyOn(mutex, "acquire") + .mockResolvedValue(releaseSpy); + + await cache.put("key", "value"); + await cache.flush(); + + expect(acquireSpy).toHaveBeenCalled(); + expect(releaseSpy).toHaveBeenCalled(); + }); + + it("should release mutex even if error occurs", async () => { + const consoleError = jest.spyOn(console, "error").mockImplementation(); + const releaseSpy = jest.fn(); + jest + .spyOn((cache as any).mutex, "acquire") + .mockResolvedValue(releaseSpy); + + await cache.put("key", "value"); + + const originalRun = mockDb.run; + mockDb.run = jest.fn().mockImplementation((sql: string) => { + if (sql === "BEGIN TRANSACTION" || sql === "ROLLBACK") { + return Promise.resolve(); 
+ } + return Promise.reject(new Error("DB Error")); + }); + + await cache.flush(); + + expect(releaseSpy).toHaveBeenCalled(); + expect(mockDb.run).toHaveBeenCalledWith("ROLLBACK"); + + // Restore + mockDb.run = originalRun; + consoleError.mockRestore(); + }); + + it("should handle multiple dirty entries in one flush", async () => { + await cache.put("key1", "val1"); + await cache.put("key2", "val2"); + await cache.put("key3", "val3"); + + await cache.flush(); + + const insertCalls = mockDb.run.mock.calls.filter((call: any) => + call[0].includes("INSERT INTO cache"), + ); + expect(insertCalls.length).toBe(3); + }); + }); + + describe("loadFromDb() method", () => { + it("should load all entries from database", async () => { + mockDb.all.mockResolvedValue([ + { key: "a", value: "alpha", timestamp: 1000 }, + { key: "b", value: "beta", timestamp: 2000 }, + ]); + + await (cache as any).loadFromDb(); + + const internalCache = (cache as any).cache; + expect(internalCache.size).toBe(2); + expect(internalCache.get("a")).toEqual({ + value: "alpha", + timestamp: 1000, + }); + expect(internalCache.get("b")).toEqual({ + value: "beta", + timestamp: 2000, + }); + }); + + it("should handle empty database", async () => { + mockDb.all.mockResolvedValue([]); + + await (cache as any).loadFromDb(); + + expect((cache as any).cache.size).toBe(0); + }); + + it("should query with correct SQL", async () => { + await (cache as any).loadFromDb(); + + expect(mockDb.all).toHaveBeenCalledWith( + "SELECT key, value, timestamp FROM cache", + ); + }); + }); + }); + + // ═══════════════════════════════════════════════════════════════ + // AUTOMATIC FLUSH TIMER + // ═══════════════════════════════════════════════════════════════ + describe("Automatic Flush Timer", () => { + it("should start timer on startFlushTimer()", () => { + (cache as any).startFlushTimer(); + + expect((cache as any).flushTimer).toBeDefined(); + }); + + it("should call flush at configured intervals", async () => { + 
(AutocompleteLruCache as any).flushInterval = 1000; + const flushSpy = jest.spyOn(cache, "flush").mockResolvedValue(); + + (cache as any).startFlushTimer(); + + jest.advanceTimersByTime(1000); + expect(flushSpy).toHaveBeenCalledTimes(1); + + jest.advanceTimersByTime(1000); + expect(flushSpy).toHaveBeenCalledTimes(2); + + jest.advanceTimersByTime(1000); + expect(flushSpy).toHaveBeenCalledTimes(3); + + flushSpy.mockRestore(); + }); + + it("should handle flush errors gracefully", async () => { + const consoleError = jest.spyOn(console, "error").mockImplementation(); + const flushError = new Error("Flush failed"); + jest.spyOn(cache, "flush").mockRejectedValue(flushError); + + (AutocompleteLruCache as any).flushInterval = 1000; + (cache as any).startFlushTimer(); + + jest.advanceTimersByTime(1000); + await Promise.resolve(); // Let error handler run + + expect(consoleError).toHaveBeenCalledWith( + "Error flushing cache:", + flushError, + ); + + consoleError.mockRestore(); + }); + }); + + // ═══════════════════════════════════════════════════════════════ + // RESOURCE CLEANUP + // ═══════════════════════════════════════════════════════════════ + describe("Resource Cleanup", () => { + it("should clear timer on close()", async () => { + (cache as any).startFlushTimer(); + const timerId = (cache as any).flushTimer; + const clearSpy = jest.spyOn(global, "clearInterval"); + + await cache.close(); + + expect(clearSpy).toHaveBeenCalledWith(timerId); + }); + + it("should flush pending changes on close()", async () => { + await cache.put("key", "value"); + + await cache.close(); + + expect(mockDb.run).toHaveBeenCalledWith( + expect.stringContaining("INSERT INTO cache"), + "key", + "value", + currentTime, + "value", + currentTime, + ); + }); + + it("should close database connection", async () => { + await cache.close(); + + expect(mockDb.close).toHaveBeenCalled(); + }); + + it("should execute cleanup in correct order", async () => { + (cache as any).startFlushTimer(); + await 
cache.put("key", "value"); + + const operations: string[] = []; + + const originalClearInterval = clearInterval; + const clearIntervalSpy = jest + .spyOn(global, "clearInterval") + .mockImplementation((id) => { + operations.push("clearTimer"); + return originalClearInterval(id); + }); + + const flushSpy = jest + .spyOn(cache, "flush") + .mockImplementation(async () => { + operations.push("flush"); + }); + + const closeDbSpy = mockDb.close.mockImplementation(() => { + operations.push("dbClose"); + return Promise.resolve(); + }); + + await cache.close(); + + expect(operations).toEqual(["clearTimer", "flush", "dbClose"]); + + clearIntervalSpy.mockRestore(); + flushSpy.mockRestore(); + }); + + it("should handle close() when timer not started", async () => { + // Don't start timer + await expect(cache.close()).resolves.not.toThrow(); + }); + }); + + // ═══════════════════════════════════════════════════════════════ + // EDGE CASES & SPECIAL SCENARIOS + // ═══════════════════════════════════════════════════════════════ + describe("Edge Cases", () => { + it("should handle empty string as prefix", async () => { + await cache.put("", "empty"); + const result = await cache.get(""); + + expect(result).toBe("empty"); + }); + + it("should handle very long prefixes", async () => { + const longPrefix = "a".repeat(1000); + await cache.put(longPrefix, "completion"); + + const result = await cache.get(longPrefix); + expect(result).toBe("completion"); + }); + + it("should handle special characters in prefix", async () => { + const specialPrefix = "test%_\\["; + await cache.put(specialPrefix, "special"); + + const result = await cache.get(specialPrefix); + expect(result).toBe("special"); + }); + + it("should handle Unicode characters", async () => { + await cache.put("🚀", "rocket"); + const result = await cache.get("🚀"); + + expect(result).toBe("rocket"); + }); + + it("should handle multiple puts of same key", async () => { + await cache.put("key", "first"); + await cache.put("key", 
"second"); + await cache.put("key", "third"); + + expect(await cache.get("key")).toBe("third"); + expect((cache as any).cache.size).toBe(1); + }); + + it("should handle concurrent get operations", async () => { + await cache.put("shared", "value"); + + const results = await Promise.all([ + cache.get("shared"), + cache.get("shared"), + cache.get("shared"), + ]); + + expect(results).toEqual(["value", "value", "value"]); + }); + }); + + // ═══════════════════════════════════════════════════════════════ + // SINGLETON PATTERN (Static get() method) + // ═══════════════════════════════════════════════════════════════ + describe("Singleton Pattern", () => { + beforeEach(() => { + // Reset singleton + (AutocompleteLruCache as any).instancePromise = undefined; + }); + + it("should return same instance on multiple calls", async () => { + const mockOpen = jest.fn().mockResolvedValue(mockDb); + jest.doMock("sqlite", () => ({ open: mockOpen })); + + const instance1 = await AutocompleteLruCache.get(); + const instance2 = await AutocompleteLruCache.get(); + + expect(instance1).toBe(instance2); + }); + }); +}); diff --git a/core/autocomplete/util/AutocompleteLruCache.ts b/core/autocomplete/util/AutocompleteLruCache.ts index adac906ff05..5bb637fe6d7 100644 --- a/core/autocomplete/util/AutocompleteLruCache.ts +++ b/core/autocomplete/util/AutocompleteLruCache.ts @@ -1,121 +1,227 @@ import { Mutex } from "async-mutex"; import { open } from "sqlite"; import sqlite3 from "sqlite3"; - import { DatabaseConnection, truncateSqliteLikePattern, } from "../../indexing/refreshIndex.js"; import { getTabAutocompleteCacheSqlitePath } from "../../util/paths.js"; +interface CacheEntry { + value: string; + timestamp: number; +} + +/** + * LRU cache for autocomplete results with SQLite persistence. 
+ * + * Implements a least-recently-used cache that: + * - Stores prefix-to-completion mappings in memory + * - Periodically flushes changes to SQLite for persistence + * - Evicts oldest entries when capacity is exceeded + * - Supports prefix matching for flexible autocomplete retrieval + */ export class AutocompleteLruCache { private static capacity = 1000; + private static flushInterval = 30000; + private static instancePromise?: Promise<AutocompleteLruCache>; private mutex = new Mutex(); + private cache: Map<string, CacheEntry> = new Map(); + private dirty: Set<string> = new Set(); + private flushTimer?: NodeJS.Timeout; constructor(private db: DatabaseConnection) {} + /** + * Singleton accessor that initializes the cache with SQLite persistence. + * Creates the database table if it doesn't exist and loads existing entries. + */ static async get(): Promise<AutocompleteLruCache> { - const db = await open({ - filename: getTabAutocompleteCacheSqlitePath(), - driver: sqlite3.Database, - }); - - await db.exec("PRAGMA busy_timeout = 3000;"); - - await db.run(` - CREATE TABLE IF NOT EXISTS cache ( - key TEXT PRIMARY KEY, - value TEXT NOT NULL, - timestamp INTEGER NOT NULL - ) - `); + if (!AutocompleteLruCache.instancePromise) { + AutocompleteLruCache.instancePromise = (async () => { + const db = await open({ + filename: getTabAutocompleteCacheSqlitePath(), + driver: sqlite3.Database, + }); + await db.exec("PRAGMA busy_timeout = 3000;"); + await db.run(` + CREATE TABLE IF NOT EXISTS cache ( + key TEXT PRIMARY KEY, + value TEXT NOT NULL, + timestamp INTEGER NOT NULL + ) + `); + + const instance = new AutocompleteLruCache(db); + await instance.loadFromDb(); + instance.startFlushTimer(); + return instance; + })(); + } + return AutocompleteLruCache.instancePromise; + } - return new AutocompleteLruCache(db); + /** Loads all cached entries from SQLite into memory on initialization.
*/ + private async loadFromDb() { + const rows = await this.db.all("SELECT key, value, timestamp FROM cache"); + for (const row of rows) { + this.cache.set(row.key, { + value: row.value, + timestamp: row.timestamp, + }); + } } - async get(prefix: string): Promise<string | undefined> { - // NOTE: Right now prompts with different suffixes will be considered the same + /** Starts periodic flush timer to persist dirty entries to database. */ + private startFlushTimer() { + this.flushTimer = setInterval(() => { + this.flush().catch((e) => console.error("Error flushing cache:", e)); + }, AutocompleteLruCache.flushInterval); + } - // If the query is "co" and we have "c" -> "ontinue" in the cache, - // we should return "ntinue" as the completion. - // Have to make sure we take the key with shortest length + /** + * Retrieves cached completion for a prefix using longest-match strategy. + * + * Algorithm: + * 1. Finds the longest cached prefix that the query starts with + * 2. Validates that cached completion starts with the remaining query text + * 3. Returns the completion with the matched portion stripped + * 4. Updates the entry's timestamp (LRU tracking) + * + * @param prefix - The prefix to search for + * @returns The completion string with prefix removed, or undefined if no match + */ + async get(prefix: string): Promise<string | undefined> { const truncatedPrefix = truncateSqliteLikePattern(prefix); - try { - const result = await this.db.get( - "SELECT key, value FROM cache WHERE ? LIKE key || '%' ORDER BY LENGTH(key) DESC LIMIT 1", - truncatedPrefix, - ); - // Validate that the cached completion is a valid completion for the prefix - if ( - result && - result.value.startsWith(truncatedPrefix.slice(result.key.length)) - ) { - await this.db.run( - "UPDATE cache SET timestamp = ? 
WHERE key = ?", - Date.now(), - truncatedPrefix, - ); - // And then truncate so we aren't writing something that's already there - return result.value.slice(truncatedPrefix.length - result.key.length); + let bestMatch: { key: string; entry: CacheEntry } | null = null; + + for (const [key, entry] of this.cache.entries()) { + if (truncatedPrefix.startsWith(key)) { + if (!bestMatch || key.length > bestMatch.key.length) { + bestMatch = { key, entry }; + } } - } catch (e) { - // catches e.g. old SQLITE LIKE OR GLOB PATTERN TOO COMPLEX - console.error(e); + } + + if ( + bestMatch && + bestMatch.entry.value.startsWith( + truncatedPrefix.slice(bestMatch.key.length), + ) + ) { + bestMatch.entry.timestamp = Date.now(); + this.dirty.add(bestMatch.key); + + return bestMatch.entry.value.slice( + truncatedPrefix.length - bestMatch.key.length, + ); } return undefined; } + /** + * Stores a prefix-to-completion mapping in the cache. + * + * Thread-safe operation that: + * - Truncates the prefix for SQLite pattern safety + * - Updates or inserts the entry with current timestamp + * - Evicts oldest entry if capacity exceeded + * - Marks entry as dirty for next flush + * + * @param prefix - The prefix key + * @param completion - The completion value to cache + */ async put(prefix: string, completion: string) { const release = await this.mutex.acquire(); - const truncatedPrefix = truncateSqliteLikePattern(prefix); + try { - await this.db.run("BEGIN TRANSACTION"); + const now = Date.now(); + + this.cache.set(truncatedPrefix, { + value: completion, + timestamp: now, + }); + this.dirty.add(truncatedPrefix); + + if (this.cache.size > AutocompleteLruCache.capacity) { + let oldestKey: string | null = null; + let oldestTime = Infinity; + + for (const [key, entry] of this.cache.entries()) { + if (entry.timestamp < oldestTime) { + oldestTime = entry.timestamp; + oldestKey = key; + } + } - try { - const result = await this.db.get( - "SELECT key FROM cache WHERE key = ?", - truncatedPrefix, - ); 
+ if (oldestKey) { + this.cache.delete(oldestKey); + this.dirty.add(oldestKey); + } + } + } finally { + release(); + } + } - if (result) { - await this.db.run( - "UPDATE cache SET value = ?, timestamp = ? WHERE key = ?", - completion, - Date.now(), - truncatedPrefix, - ); - } else { - const count = await this.db.get( - "SELECT COUNT(*) as count FROM cache", - ); + /** + * Persists all dirty entries to SQLite in a single transaction. + * + * Performs upserts for existing cache entries and deletes for evicted entries. + * Skips if no changes pending. Rolls back transaction on error. + */ + async flush() { + if (this.dirty.size === 0) return; - if (count.count >= AutocompleteLruCache.capacity) { - await this.db.run( - "DELETE FROM cache WHERE key = (SELECT key FROM cache ORDER BY timestamp ASC LIMIT 1)", - ); - } + const release = await this.mutex.acquire(); + const dirtyKeys = Array.from(this.dirty); + this.dirty.clear(); + try { + await this.db.run("BEGIN TRANSACTION"); + + for (const key of dirtyKeys) { + const entry = this.cache.get(key); + + if (entry) { + // Upsert await this.db.run( - "INSERT INTO cache (key, value, timestamp) VALUES (?, ?, ?)", - truncatedPrefix, - completion, - Date.now(), + `INSERT INTO cache (key, value, timestamp) VALUES (?, ?, ?) + ON CONFLICT(key) DO UPDATE SET value = ?, timestamp = ?`, + key, + entry.value, + entry.timestamp, + entry.value, + entry.timestamp, ); + } else { + // Delete + await this.db.run("DELETE FROM cache WHERE key = ?", key); } - - await this.db.run("COMMIT"); - } catch (error) { - await this.db.run("ROLLBACK"); - throw error; } - } catch (e) { - console.error("Error creating transaction: ", e); + + await this.db.run("COMMIT"); + } catch (error) { + await this.db.run("ROLLBACK"); + console.error("Error flushing cache:", error); } finally { release(); } } -} + /** + * Gracefully shuts down the cache. + * Stops the flush timer, persists pending changes, and closes database connection. 
+ */ + async close() { + if (this.flushTimer) { + clearInterval(this.flushTimer); + } + await this.flush(); + await this.db.close(); + AutocompleteLruCache.instancePromise = undefined; + } +} export default AutocompleteLruCache;