diff --git a/.env.local.sample b/.env.local.sample index fb3277abb..cb3f20880 100644 --- a/.env.local.sample +++ b/.env.local.sample @@ -23,6 +23,8 @@ REDIS_PORT=6379 REDIS_PASSWORD= REDIS_MAX_AGE=3600000 REDIS_PREFIX=scix_ +REDIS_CACHE_TTL=300 +REDIS_CACHE_MAX_SIZE=5242880 # SEARCH NEXT_PUBLIC_SEARCH_API_TIMEOUT_MS=30000 diff --git a/package.json b/package.json index 1bd68ff95..d1c252351 100644 --- a/package.json +++ b/package.json @@ -171,6 +171,7 @@ "iron-session": "^6.3.1", "lint-staged": "^11.2.6", "msw": "^1.2.3", + "node-mocks-http": "^1.17.2", "prettier": "^2.3.0", "react-dom": "^18.3.1", "react-is": "^17.0.2", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index fc600051c..97f397dcd 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -399,6 +399,9 @@ importers: msw: specifier: ^1.2.3 version: 1.3.5(typescript@5.9.3) + node-mocks-http: + specifier: ^1.17.2 + version: 1.17.2(@types/express@5.0.3)(@types/node@20.19.9) prettier: specifier: ^2.3.0 version: 2.8.8 @@ -2819,6 +2822,10 @@ packages: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} + accepts@1.3.8: + resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} + engines: {node: '>= 0.6'} + accepts@2.0.0: resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} engines: {node: '>= 0.6'} @@ -3256,6 +3263,10 @@ packages: confbox@0.1.8: resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} + content-disposition@0.5.4: + resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==} + engines: {node: '>= 0.6'} + content-disposition@1.0.1: resolution: {integrity: sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==} engines: 
{node: '>=18'} @@ -3603,6 +3614,10 @@ packages: resolution: {integrity: sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==} engines: {node: '>=0.10'} + depd@1.1.2: + resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==} + engines: {node: '>= 0.6'} + depd@2.0.0: resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} engines: {node: '>= 0.8'} @@ -4114,6 +4129,10 @@ packages: framesync@6.1.2: resolution: {integrity: sha512-jBTqhX6KaQVDyus8muwZbBeGGP0XgujBRbQ7gM7BRdS3CadCZIHiawyzYLnafYcvZIh5j8WE7cxZKFn7dXhu9g==} + fresh@0.5.2: + resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} + engines: {node: '>= 0.6'} + fresh@2.0.0: resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==} engines: {node: '>= 0.8'} @@ -4819,6 +4838,10 @@ packages: resolution: {integrity: sha512-+LfG9Fik+OuI8SLwsiR02IVdjcnRCy5MufYLi0C3TdMT56L/pjB0alMVGgoWJF8pN9Rc7FESycZB9BMNWIid5w==} deprecated: Version 4 replaces this package with the scoped package @mathjax/src + media-typer@0.3.0: + resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==} + engines: {node: '>= 0.6'} + media-typer@1.1.0: resolution: {integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==} engines: {node: '>= 0.8'} @@ -4829,6 +4852,9 @@ packages: mensch@0.3.4: resolution: {integrity: sha512-IAeFvcOnV9V0Yk+bFhYR07O3yNina9ANIN5MoXBKYJ/RLYPurd2d0yw14MDhpr9/momp0WofT1bPUh3hkzdi/g==} + merge-descriptors@1.0.3: + resolution: {integrity: sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==} + merge-descriptors@2.0.0: resolution: {integrity: 
sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==} engines: {node: '>=18'} @@ -4840,6 +4866,10 @@ packages: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} + methods@1.1.2: + resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} + engines: {node: '>= 0.6'} + mhchemparser@4.2.1: resolution: {integrity: sha512-kYmyrCirqJf3zZ9t/0wGgRZ4/ZJw//VwaRVGA75C4nhE60vtnIzhl9J9ndkX/h6hxSN7pjg/cE0VxbnNM+bnDQ==} @@ -4863,6 +4893,11 @@ packages: resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==} engines: {node: '>=18'} + mime@1.6.0: + resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} + engines: {node: '>=4'} + hasBin: true + mime@2.6.0: resolution: {integrity: sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==} engines: {node: '>=4.0.0'} @@ -4936,6 +4971,10 @@ packages: natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + negotiator@0.6.3: + resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} + engines: {node: '>= 0.6'} + negotiator@1.0.0: resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==} engines: {node: '>= 0.6'} @@ -4977,6 +5016,18 @@ packages: encoding: optional: true + node-mocks-http@1.17.2: + resolution: {integrity: sha512-HVxSnjNzE9NzoWMx9T9z4MLqwMpLwVvA0oVZ+L+gXskYXEJ6tFn3Kx4LargoB6ie7ZlCLplv7QbWO6N+MysWGA==} + engines: {node: '>=14'} + peerDependencies: + '@types/express': ^4.17.21 || ^5.0.0 + '@types/node': '*' + peerDependenciesMeta: + '@types/express': 
+ optional: true + '@types/node': + optional: true + node-releases@2.0.27: resolution: {integrity: sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==} @@ -6045,6 +6096,10 @@ packages: resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} engines: {node: '>=12.20'} + type-is@1.6.18: + resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} + engines: {node: '>= 0.6'} + type-is@2.0.1: resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==} engines: {node: '>= 0.6'} @@ -9144,6 +9199,11 @@ snapshots: dependencies: event-target-shim: 5.0.1 + accepts@1.3.8: + dependencies: + mime-types: 2.1.35 + negotiator: 0.6.3 + accepts@2.0.0: dependencies: mime-types: 3.0.2 @@ -9622,6 +9682,10 @@ snapshots: confbox@0.1.8: {} + content-disposition@0.5.4: + dependencies: + safe-buffer: 5.2.1 + content-disposition@1.0.1: {} content-type@1.0.5: {} @@ -10001,6 +10065,8 @@ snapshots: denque@2.1.0: {} + depd@1.1.2: {} + depd@2.0.0: {} detect-libc@2.1.2: @@ -10720,6 +10786,8 @@ snapshots: dependencies: tslib: 2.4.0 + fresh@0.5.2: {} + fresh@2.0.0: {} fs.realpath@1.0.0: {} @@ -11513,18 +11581,24 @@ snapshots: mj-context-menu: 0.6.1 speech-rule-engine: 4.1.2 + media-typer@0.3.0: {} + media-typer@1.1.0: {} memoize-one@6.0.0: {} mensch@0.3.4: {} + merge-descriptors@1.0.3: {} + merge-descriptors@2.0.0: {} merge-stream@2.0.0: {} merge2@1.4.1: {} + methods@1.1.2: {} + mhchemparser@4.2.1: {} micromatch@4.0.8: @@ -11544,6 +11618,8 @@ snapshots: dependencies: mime-db: 1.54.0 + mime@1.6.0: {} + mime@2.6.0: {} mimic-fn@2.1.0: {} @@ -11614,6 +11690,8 @@ snapshots: natural-compare@1.4.0: {} + negotiator@0.6.3: {} + negotiator@1.0.0: {} neo-async@2.6.2: {} @@ -11650,6 +11728,22 @@ snapshots: dependencies: whatwg-url: 5.0.0 + 
node-mocks-http@1.17.2(@types/express@5.0.3)(@types/node@20.19.9): + dependencies: + accepts: 1.3.8 + content-disposition: 0.5.4 + depd: 1.1.2 + fresh: 0.5.2 + merge-descriptors: 1.0.3 + methods: 1.1.2 + mime: 1.6.0 + parseurl: 1.3.3 + range-parser: 1.2.1 + type-is: 1.6.18 + optionalDependencies: + '@types/express': 5.0.3 + '@types/node': 20.19.9 + node-releases@2.0.27: {} normalize-path@3.0.0: {} @@ -12862,6 +12956,11 @@ snapshots: type-fest@2.19.0: {} + type-is@1.6.18: + dependencies: + media-typer: 0.3.0 + mime-types: 2.1.35 + type-is@2.0.1: dependencies: content-type: 1.0.5 diff --git a/src/api/resolver/resolver.ts b/src/api/resolver/resolver.ts index f7c4a6e95..069395d0b 100644 --- a/src/api/resolver/resolver.ts +++ b/src/api/resolver/resolver.ts @@ -1,3 +1,5 @@ +import axios from 'axios'; +import { isBrowser } from '@/utils/common/guards'; import api, { ApiRequestConfig } from '@/api/api'; import { ApiTargets } from '@/api/models'; import { ADSQuery } from '@/api/types'; @@ -21,16 +23,26 @@ export const useResolverQuery: ADSQuery status === 200 || status === 404; + export const fetchLinks: QueryFunction = async ({ meta }) => { const { params } = meta as { params: IADSApiResolverParams }; + const resolverPath = `${ApiTargets.RESOLVER}/${params.bibcode}/${params.link_type}`; + + if (isBrowser()) { + const { data } = await axios.get(`/api/proxy${resolverPath}`, { + withCredentials: true, + validateStatus: acceptOkOrNotFound, + }); + return data; + } const config: ApiRequestConfig = { method: 'GET', - url: `${ApiTargets.RESOLVER}/${params.bibcode}/${params.link_type}`, - validateStatus: (status) => status === 200 || status === 404, + url: resolverPath, + validateStatus: acceptOkOrNotFound, }; const { data } = await api.request(config); - return data; }; diff --git a/src/api/search/search.ts b/src/api/search/search.ts index cc7bfea06..8ed21d6eb 100644 --- a/src/api/search/search.ts +++ b/src/api/search/search.ts @@ -8,6 +8,7 @@ import { useInfiniteQuery, 
useMutation, useQuery, + useQueryClient, UseQueryOptions, } from '@tanstack/react-query'; import { @@ -33,7 +34,7 @@ import { resolveObjectQuery, resolveObjectQuerySSR } from '@/api/objects/objects import { GetServerSidePropsContext } from 'next'; import { defaultRequestConfig } from '../config'; import { APP_DEFAULTS, pickTracingHeaders } from '@/config'; -import { isString } from '@/utils/common/guards'; +import { isBrowser, isString } from '@/utils/common/guards'; import { IADSApiSearchParams, IADSApiSearchResponse, IBigQueryMutationParams, IDocsEntity } from '@/api/search/types'; import { ADSMutation, ADSQuery, InfiniteADSQuery } from '@/api/types'; import api, { ApiRequestConfig } from '@/api/api'; @@ -41,6 +42,7 @@ import { ApiTargets } from '@/api/models'; import { logger } from '@/logger'; import { normalizeFields } from '@/api/search/utils'; import { trackUserFlow, PERF_SPANS } from '@/lib/performance'; +import { flattenParams } from '@/lib/proxy-cache'; type PostTransformer = (data: IADSApiSearchResponse) => IADSApiSearchResponse; @@ -94,9 +96,12 @@ export const searchKeys = { bigquery: () => [SEARCH_API_KEYS.bigquery] as const, }; -// default params to omit to keep cache entries more concise +// Omit params that don't affect query results from the React Query cache key. +// - fl: field list doesn't change result set +// - p: page number alias, start/rows already present +// - d: discipline URL bookmark param, boostType already captures its effect const omitParams = (query: IADSApiSearchParams) => - omit(['fl', 'p'], query) as IADSApiSearchParams; + omit(['fl', 'p', 'd'], query) as IADSApiSearchParams; /** * Generic search hook. 
@@ -106,8 +111,11 @@ export function useSearch( params: IADSApiSearchParams, options?: Omit, 'queryKey' | 'queryFn'>, ) { + const queryClient = useQueryClient(); + // omit fields from queryKey const cleanParams = omitParams(getSearchParams(params)); + const queryKey = searchKeys.primary(cleanParams); // If options.select is provided, use it; otherwise use default const select = @@ -116,12 +124,19 @@ export function useSearch( : (responseSelector as (d: IADSApiSearchResponse) => TData); return useQuery({ - queryKey: searchKeys.primary(cleanParams), - queryHash: JSON.stringify(searchKeys.primary(cleanParams)), + queryKey, + queryHash: JSON.stringify(queryKey), queryFn: fetchSearch, meta: { params }, select, retry: (failCount, error) => failCount < 1 && axios.isAxiosError(error) && error.response?.status !== 400, + onSuccess: () => { + // Don't cache partial results — invalidate so the next render refetches + const raw = queryClient.getQueryData(queryKey); + if (raw?.responseHeader?.partialResults === true) { + void queryClient.invalidateQueries(queryKey); + } + }, ...(options as Omit, 'queryKey' | 'queryFn' | 'select'>), }); } @@ -360,7 +375,7 @@ export const useGetSearchFacet: SearchADSQuery { @@ -462,23 +477,13 @@ export const fetchBigQuerySearch: MutationFunction< return data.response; }; -/** - * Fetches search results from the API based on provided search parameters. - * - * @function - * @param {Object} options - The function options. - * @param {Object} options.meta - Metadata for the search query. - * @param {Object} options.meta.params - The search parameters to be used in the query. - * - * @returns {Promise} - A promise that resolves to the search response data. 
- */ export const fetchSearch: QueryFunction = async ({ meta }) => { const { params, postTransformers } = meta as { params: IADSApiSearchParams; postTransformers?: Array; }; - const finalParams = { ...params }; + const { d: _, ...finalParams } = params; if (isString(params.q) && params.q.includes('object:')) { const { query } = await resolveObjectQuery({ query: params.q }); finalParams.q = query; @@ -487,14 +492,29 @@ export const fetchSearch: QueryFunction = async ({ meta } // normalize fields in the query finalParams.q = normalizeFields(finalParams.q); - const config: ApiRequestConfig = { - method: 'GET', - url: ApiTargets.SEARCH, - params: finalParams, - }; - // Wrap API request in performance span const data = await trackUserFlow(PERF_SPANS.SEARCH_QUERY_REQUEST, async () => { + // Client-side: route through cache proxy + if (isBrowser()) { + const response = await axios.get('/api/proxy/search/query', { + params: flattenParams(finalParams as Record), + withCredentials: true, + }); + if (process.env.NODE_ENV !== 'production') { + const xCache = (response.headers['x-cache'] as string)?.toLowerCase(); + if (xCache) { + window.dispatchEvent(new CustomEvent('cache-status', { detail: xCache })); + } + } + return response.data; + } + + // Server-side: direct to upstream via Api class + const config: ApiRequestConfig = { + method: 'GET', + url: ApiTargets.SEARCH, + params: finalParams, + }; const response = await api.request(config); return response.data; }); diff --git a/src/lib/proxy-cache.test.ts b/src/lib/proxy-cache.test.ts new file mode 100644 index 000000000..52285e952 --- /dev/null +++ b/src/lib/proxy-cache.test.ts @@ -0,0 +1,97 @@ +import { describe, expect, it } from 'vitest'; + +import { buildCacheKey, flattenParams, isAllowedPath } from './proxy-cache'; + +describe('buildCacheKey', () => { + it('builds key with sorted params', () => { + const key = buildCacheKey('GET', '/search/query', { + q: 'black holes', + fl: 'title', + rows: '10', + }); + + 
expect(key).toBe('scix_cache:GET:/search/query?fl=title&q=black%20holes&rows=10'); + }); + + it('sorts params deterministically regardless of input order', () => { + const params = { z: 'last', a: 'first', m: 'mid' }; + const key = buildCacheKey('POST', '/resolver', params); + expect(key).toBe('scix_cache:POST:/resolver?a=first&m=mid&z=last'); + }); + + it('omits query string when params are empty', () => { + const key = buildCacheKey('GET', '/search/query', {}); + expect(key).toBe('scix_cache:GET:/search/query'); + }); + + it('handles special characters deterministically', () => { + const key = buildCacheKey('PATCH', '/resolver/2024ApJ...123A', { + 'weird key': 'value/value', + 'another key': 'emoji 😀', + }); + + expect(key).toBe( + 'scix_cache:PATCH:/resolver/2024ApJ...123A?another%20key=emoji%20%F0%9F%98%80&weird%20key=value%2Fvalue', + ); + }); +}); + +describe('flattenParams', () => { + it('joins arrays into comma-separated strings', () => { + const result = flattenParams({ fl: ['bibcode', 'title', 'author'], q: 'black holes' }); + expect(result).toEqual({ fl: 'bibcode,title,author', q: 'black holes' }); + }); + + it('passes scalar strings through unchanged', () => { + const result = flattenParams({ q: '*:*', rows: '10', start: '0' }); + expect(result).toEqual({ q: '*:*', rows: '10', start: '0' }); + }); + + it('drops null and undefined values', () => { + const result = flattenParams({ q: 'test', fl: undefined, sort: undefined }); + expect(result).toEqual({ q: 'test' }); + }); + + it('handles empty arrays as empty strings', () => { + const result = flattenParams({ fl: [] }); + expect(result).toEqual({ fl: '' }); + }); + + it('handles single-element arrays', () => { + const result = flattenParams({ fl: ['bibcode'] }); + expect(result).toEqual({ fl: 'bibcode' }); + }); + + it('returns empty object for empty input', () => { + expect(flattenParams({})).toEqual({}); + }); +}); + +describe('isAllowedPath', () => { + it('allows /search/query', () => { + 
expect(isAllowedPath('/search/query')).toBe(true); + }); + + it('allows resolver paths with resources', () => { + expect(isAllowedPath('/resolver/2024ApJ...123A/esources')).toBe(true); + }); + + it('allows resolver bibcode only', () => { + expect(isAllowedPath('/resolver/2024ApJ...123A')).toBe(true); + }); + + it('rejects non-allowlisted paths', () => { + expect(isAllowedPath('/accounts/bootstrap')).toBe(false); + expect(isAllowedPath('/biblib/libraries')).toBe(false); + expect(isAllowedPath('/vault/user-data')).toBe(false); + expect(isAllowedPath('/orcid/preferences')).toBe(false); + }); + + it('rejects empty path', () => { + expect(isAllowedPath('')).toBe(false); + }); + + it('rejects path traversal attempts', () => { + expect(isAllowedPath('/search/query/../accounts/bootstrap')).toBe(false); + }); +}); diff --git a/src/lib/proxy-cache.ts b/src/lib/proxy-cache.ts new file mode 100644 index 000000000..25a194b8a --- /dev/null +++ b/src/lib/proxy-cache.ts @@ -0,0 +1,46 @@ +const CACHE_PREFIX = `${process.env.REDIS_PREFIX ?? 'scix_'}cache`; + +export const CACHE_TTL = parseInt(process.env.REDIS_CACHE_TTL ?? '300', 10) || 300; +export const CACHE_MAX_SIZE = parseInt(process.env.REDIS_CACHE_MAX_SIZE ?? '5242880', 10) || 5242880; + +export function flattenParams(params: Record): Record { + const out: Record = {}; + for (const [key, value] of Object.entries(params)) { + if (value == null) { + continue; + } + out[key] = Array.isArray(value) ? 
value.join(',') : String(value); + } + return out; +} + +export function buildCacheKey(method: string, path: string, params: Record): string { + const upperMethod = method.toUpperCase(); + const sortedKeys = Object.keys(params).sort((a, b) => a.localeCompare(b)); + + const query = sortedKeys + .map((key) => { + const encodedKey = encodeURIComponent(key); + const encodedValue = encodeURIComponent(params[key]); + return `${encodedKey}=${encodedValue}`; + }) + .join('&'); + + const baseKey = `${CACHE_PREFIX}:${upperMethod}:${path}`; + return query ? `${baseKey}?${query}` : baseKey; +} + +const ALLOWED_PATTERNS = [/^\/search\/query$/, /^\/resolver\/.+$/]; + +export function isAllowedPath(path: string): boolean { + if (!path) { + return false; + } + + const hasTraversal = path.split('/').some((segment) => segment === '..'); + if (hasTraversal) { + return false; + } + + return ALLOWED_PATTERNS.some((pattern) => pattern.test(path)); +} diff --git a/src/lib/redis.test.ts b/src/lib/redis.test.ts new file mode 100644 index 000000000..cd0973106 --- /dev/null +++ b/src/lib/redis.test.ts @@ -0,0 +1,81 @@ +import { EventEmitter } from 'node:events'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; + +vi.mock('ioredis', () => { + class MockRedis extends EventEmitter { + status = 'ready'; + + constructor() { + super(); + setImmediate(() => { + this.emit('ready'); + }); + } + + async ping(): Promise { + return 'PONG'; + } + + async quit(): Promise { + return 'OK'; + } + } + + return { + __esModule: true, + default: MockRedis, + }; +}); + +const waitForReady = () => new Promise((resolve) => setImmediate(resolve)); + +describe('redis singleton', () => { + beforeEach(() => { + vi.resetModules(); + process.env.REDIS_HOST = 'localhost'; + process.env.REDIS_PORT = '6380'; + process.env.REDIS_PASSWORD = 'secret'; + }); + + afterEach(() => { + delete process.env.REDIS_HOST; + delete process.env.REDIS_PORT; + delete process.env.REDIS_PASSWORD; + }); + + it('returns 
a redis client instance', async () => { + const { getRedisClient } = await import('./redis'); + const client = getRedisClient(); + + expect(client).not.toBeNull(); + expect(typeof client?.ping).toBe('function'); + }); + + it('returns the same instance on subsequent calls', async () => { + const { getRedisClient } = await import('./redis'); + + const first = getRedisClient(); + const second = getRedisClient(); + + expect(first).toBe(second); + }); + + it('reports availability as true when client exists', async () => { + const { getRedisClient, isRedisAvailable } = await import('./redis'); + + getRedisClient(); + await waitForReady(); + + expect(isRedisAvailable()).toBe(true); + }); + + it('returns null and unavailable when REDIS_HOST is missing', async () => { + delete process.env.REDIS_HOST; + + const { getRedisClient, isRedisAvailable } = await import('./redis'); + const client = getRedisClient(); + + expect(client).toBeNull(); + expect(isRedisAvailable()).toBe(false); + }); +}); diff --git a/src/lib/redis.ts b/src/lib/redis.ts new file mode 100644 index 000000000..6167adbdf --- /dev/null +++ b/src/lib/redis.ts @@ -0,0 +1,64 @@ +import Redis, { RedisOptions } from 'ioredis'; +import { logger } from '@/logger'; + +let redisClient: Redis | null = null; +let redisAvailable = false; + +const redisLogger = logger.child({ msgPrefix: '[redis] ' }); + +const buildRedisOptions = (): RedisOptions => { + const portEnv = process.env.REDIS_PORT || '6379'; + const parsedPort = Number(portEnv); + const port = Number.isNaN(parsedPort) ? 
6379 : parsedPort; + + return { + host: process.env.REDIS_HOST, + port, + password: process.env.REDIS_PASSWORD, + maxRetriesPerRequest: 1, + commandTimeout: 100, + enableReadyCheck: true, + lazyConnect: false, + retryStrategy: (times) => { + if (times >= 3) { + return null; + } + + return Math.min(times * 200, 2000); + }, + }; +}; + +export const getRedisClient = (): Redis | null => { + if (redisClient) { + return redisClient; + } + + if (!process.env.REDIS_HOST) { + redisAvailable = false; + redisLogger.warn('REDIS_HOST is not set; Redis client disabled'); + return null; + } + + redisClient = new Redis(buildRedisOptions()); + + redisClient.on('ready', () => { + redisAvailable = true; + redisLogger.info('Redis connection ready'); + }); + + redisClient.on('error', (err) => { + redisAvailable = false; + const message = err instanceof Error ? err.message : String(err); + redisLogger.warn(message); + }); + + redisClient.on('close', () => { + redisAvailable = false; + redisLogger.info('Redis connection closed'); + }); + + return redisClient; +}; + +export const isRedisAvailable = (): boolean => redisAvailable; diff --git a/src/lib/serverside/__tests__/absCanonicalization.test.ts b/src/lib/serverside/__tests__/absCanonicalization.test.ts index 545575595..41fe9aa6f 100644 --- a/src/lib/serverside/__tests__/absCanonicalization.test.ts +++ b/src/lib/serverside/__tests__/absCanonicalization.test.ts @@ -9,6 +9,11 @@ vi.mock('../bootstrap', () => ({ bootstrap: vi.fn(), })); +vi.mock('@/lib/redis', () => ({ + getRedisClient: vi.fn(() => null), + isRedisAvailable: vi.fn(() => false), +})); + vi.mock('@/ssr-utils', () => ({ composeNextGSSP: (fn: (ctx: GetServerSidePropsContext) => Promise) => @@ -78,11 +83,11 @@ beforeEach(() => { describe('createAbsGetServerSideProps', () => { it('redirects to canonical bibcode with encoding and preserves query', async () => { + const body = { response: { docs: [{ bibcode: 'canonical&/bib' }] } }; fetchMock.mockResolvedValue({ ok: true, - json: 
async () => ({ - response: { docs: [{ bibcode: 'canonical&/bib' }] }, - }), + status: 200, + text: async () => JSON.stringify(body), }); const ctx = buildCtx({ @@ -101,11 +106,11 @@ describe('createAbsGetServerSideProps', () => { }); it('redirects for other views', async () => { + const body = { response: { docs: [{ bibcode: 'BIBCODE' }] } }; fetchMock.mockResolvedValue({ ok: true, - json: async () => ({ - response: { docs: [{ bibcode: 'BIBCODE' }] }, - }), + status: 200, + text: async () => JSON.stringify(body), }); const ctx = buildCtx({ @@ -125,11 +130,11 @@ describe('createAbsGetServerSideProps', () => { it('returns props when identifier is already canonical', async () => { const bibcode = 'MATCHING'; + const body = { response: { docs: [{ bibcode }] } }; fetchMock.mockResolvedValue({ ok: true, - json: async () => ({ - response: { docs: [{ bibcode }] }, - }), + status: 200, + text: async () => JSON.stringify(body), }); const ctx = buildCtx({ @@ -183,11 +188,11 @@ describe('createAbsGetServerSideProps', () => { }); it('does not redirect when no docs are returned', async () => { + const body = { response: { docs: [] } }; fetchMock.mockResolvedValue({ ok: true, - json: async () => ({ - response: { docs: [] }, - }), + status: 200, + text: async () => JSON.stringify(body), }); const ctx = buildCtx({ diff --git a/src/lib/serverside/absCanonicalization.ts b/src/lib/serverside/absCanonicalization.ts index 4200400db..d89b15bb9 100644 --- a/src/lib/serverside/absCanonicalization.ts +++ b/src/lib/serverside/absCanonicalization.ts @@ -12,6 +12,8 @@ import { logger } from '@/logger'; import { composeNextGSSP } from '@/ssr-utils'; import { isAuthenticated } from '@/api/api'; import { ErrorSeverity, ErrorSource, handleError } from '@/lib/errorHandler'; +import { getRedisClient, isRedisAvailable } from '@/lib/redis'; +import { buildCacheKey, CACHE_MAX_SIZE, CACHE_TTL, flattenParams } from '@/lib/proxy-cache'; const log = logger.child({ module: 'abs-canonical' }, { msgPrefix: 
'[abs-canonical] ' }); @@ -21,6 +23,7 @@ type AbsProps = { isAuthenticated?: boolean; pageError?: string; statusCode?: number; + cacheStatus?: 'hit' | 'miss'; }; type IncomingGSSPResult = GetServerSidePropsResult; @@ -107,8 +110,64 @@ const absCanonicalize = (viewPathResolver: ViewPathResolver): IncomingGSSP => { const queryClient = new QueryClient(); + // Shared handler for building the response from parsed search data + const buildResult = (data: IADSApiSearchResponse, cacheStatus: 'hit' | 'miss'): IncomingGSSPResult => { + queryClient.setQueryData(searchKeys.abstract(requestedId), data); + ctx.res.setHeader('Cache-Control', 's-maxage=60, stale-while-revalidate=300'); + ctx.res.setHeader('X-Cache', cacheStatus.toUpperCase()); + const initialDoc = data?.response?.docs?.[0] ?? null; + const canonicalIdentifier = initialDoc?.bibcode; + if (canonicalIdentifier && canonicalIdentifier !== requestedId) { + log.info({ requestedId, canonicalIdentifier, viewPath }, 'Redirecting to canonical'); + const requestUrl = new URL(ctx.req.url ?? ctx.resolvedUrl, 'http://adsabs.local'); + return { + redirect: { + destination: buildRedirect({ + canonicalIdentifier, + viewPath, + search: requestUrl.search, + }), + statusCode: 302, + }, + }; + } + return { + props: { + dehydratedState: dehydrate(queryClient), + initialDoc, + isAuthenticated: isAuthenticated(bootstrapResult.token), + cacheStatus, + }, + }; + }; + try { const tracingHeaders = pickTracingHeaders(ctx.req.headers); + + // Cache lookup + const redis = getRedisClient(); + const { fl: _fl, ...cacheParams } = params; + const cacheKey = + redis && isRedisAvailable() + ? 
buildCacheKey( + 'GET', + ApiTargets.SEARCH, + flattenParams(cacheParams as Record), + ) + : null; + + if (redis && cacheKey) { + try { + const cached = await redis.hgetall(cacheKey); + if (cached?.body) { + log.info({ requestedId, viewPath, cache: 'hit' }, 'Abstract cache hit'); + return buildResult(JSON.parse(cached.body) as IADSApiSearchResponse, 'hit'); + } + } catch (cacheErr) { + log.warn({ err: (cacheErr as Error).message, requestedId }, 'Abstract cache read failed, falling through'); + } + } + const response = await fetch(url, { headers: { Authorization: `Bearer ${bootstrapResult.token.access_token}`, @@ -138,31 +197,28 @@ const absCanonicalize = (viewPathResolver: ViewPathResolver): IncomingGSSP => { }; } - const data = (await response.json()) as IADSApiSearchResponse; - queryClient.setQueryData(searchKeys.abstract(requestedId), data); - ctx.res.setHeader('Cache-Control', 's-maxage=60, stale-while-revalidate=300'); + const body = await response.text(); + const data = JSON.parse(body) as IADSApiSearchResponse; + log.info({ requestedId, viewPath, cache: 'miss' }, 'Abstract cache miss'); - const initialDoc = data?.response?.docs?.[0] ?? null; - const canonicalIdentifier = initialDoc?.bibcode; + // Skip caching partial results (Solr sets this when results are incomplete) + const isPartial = data?.responseHeader?.partialResults === true; - if (canonicalIdentifier && canonicalIdentifier !== requestedId) { - log.info({ requestedId, canonicalIdentifier, viewPath }, 'Redirecting to canonical identifier'); - const requestUrl = new URL(ctx.req.url ?? 
ctx.resolvedUrl, 'http://adsabs.local'); - return { - redirect: { - destination: buildRedirect({ canonicalIdentifier, viewPath, search: requestUrl.search }), - statusCode: 302, - }, - }; + // Cache successful, complete responses within size limit + if (redis && cacheKey && isRedisAvailable() && body.length <= CACHE_MAX_SIZE && !isPartial) { + const redisPipeline = redis.multi(); + redisPipeline.hset(cacheKey, { + body, + contentType: 'application/json', + statusCode: String(response.status), + }); + redisPipeline.expire(cacheKey, CACHE_TTL); + redisPipeline.exec().catch((writeErr: Error) => { + log.warn({ err: writeErr.message, requestedId }, 'Abstract cache write failed'); + }); } - return { - props: { - dehydratedState: dehydrate(queryClient), - initialDoc, - isAuthenticated: isAuthenticated(bootstrapResult.token), - }, - }; + return buildResult(data, 'miss'); } catch (error) { handleError(error, { source: ErrorSource.SERVER, diff --git a/src/pages/_app.tsx b/src/pages/_app.tsx index 75e41ac95..76090879f 100644 --- a/src/pages/_app.tsx +++ b/src/pages/_app.tsx @@ -4,7 +4,7 @@ import { AppProps, NextWebVitalsMetric } from 'next/app'; import dynamic from 'next/dynamic'; import { useRouter } from 'next/router'; import 'nprogress/nprogress.css'; -import { memo, ReactElement, useEffect, useMemo } from 'react'; +import { FC, memo, ReactElement, useCallback, useEffect, useMemo, useState } from 'react'; import { DehydratedState, useQuery, useQueryClient } from '@tanstack/react-query'; import { IronSession } from 'iron-session'; import axios from 'axios'; @@ -45,6 +45,50 @@ export type AppPageProps = { [key: string]: unknown; }; +const CacheStatusBadge: FC<{ status?: string }> = ({ status: ssrStatus }) => { + const [clientStatus, setClientStatus] = useState(null); + + const handleCacheEvent = useCallback((e: Event) => { + const detail = (e as CustomEvent).detail; + setClientStatus(detail); + }, []); + + useEffect(() => { + if (process.env.NODE_ENV === 'production') { + 
return; + } + window.addEventListener('cache-status', handleCacheEvent); + return () => window.removeEventListener('cache-status', handleCacheEvent); + }, [handleCacheEvent]); + + const status = clientStatus ?? ssrStatus; + if (!status || process.env.NODE_ENV === 'production') { + return null; + } + const isHit = status === 'hit'; + return ( +
+ cache: {status} +
+ ); +}; + const NectarApp = memo(({ Component, pageProps }: AppProps): ReactElement => { logger.debug('App', { props: pageProps as unknown }); const router = useRouter(); @@ -65,6 +109,9 @@ const NectarApp = memo(({ Component, pageProps }: AppProps): ReactElement => { + {process.env.NODE_ENV !== 'production' && ( + + )} diff --git a/src/pages/api/proxy/[...path].ts b/src/pages/api/proxy/[...path].ts new file mode 100644 index 000000000..56fb1e137 --- /dev/null +++ b/src/pages/api/proxy/[...path].ts @@ -0,0 +1,149 @@ +import { NextApiRequest, NextApiResponse } from 'next'; +import { getIronSession } from 'iron-session'; +import axios from 'axios'; +import { sessionConfig } from '@/config'; +import { rateLimit } from '@/rateLimit'; +import { getRedisClient, isRedisAvailable } from '@/lib/redis'; +import { buildCacheKey, CACHE_MAX_SIZE, CACHE_TTL, flattenParams, isAllowedPath } from '@/lib/proxy-cache'; +import { defaultRequestConfig } from '@/api/config'; +import { logger } from '@/logger'; + +const log = logger.child({}, { msgPrefix: '[proxy] ' }); + +const getClientIp = (req: NextApiRequest): string => + ( + (req.headers['x-original-forwarded-for'] as string) || + (req.headers['x-forwarded-for'] as string) || + (req.headers['x-real-ip'] as string) || + req.socket.remoteAddress || + '' + ) + .split(',')[0] + .trim() || 'unknown'; + +const isValidOrigin = (req: NextApiRequest): boolean => { + if (process.env.NODE_ENV === 'development') { + return true; + } + const origin = (req.headers['origin'] as string) || (req.headers['referer'] as string) || ''; + const host = req.headers['host'] || ''; + if (!host) { + return false; + } + try { + const originHost = new URL(origin).host; + return originHost === host; + } catch { + return false; + } +}; + +export default async function handler(req: NextApiRequest, res: NextApiResponse) { + const startTime = Date.now(); + + if (req.method !== 'GET') { + return res.status(405).json({ error: 'Method not allowed' }); + } + + const 
pathSegments = req.query.path;
  // Catch-all route param: must be a non-empty array of segments.
  if (!Array.isArray(pathSegments) || pathSegments.length === 0) {
    return res.status(400).json({ error: 'Invalid path' });
  }
  const upstreamPath = `/${pathSegments.join('/')}`;

  // Allow-list check runs before any auth work. Unknown (and traversal)
  // paths report 404 rather than 403 so the proxy does not reveal which
  // upstream paths exist.
  if (!isAllowedPath(upstreamPath)) {
    return res.status(404).json({ error: 'Not found' });
  }

  // Require an authenticated iron-session token before forwarding.
  const session = await getIronSession(req, res, sessionConfig);
  if (!session.token?.access_token) {
    return res.status(401).json({ error: 'Unauthorized' });
  }

  // Same-origin enforcement (skipped in development inside isValidOrigin).
  if (!isValidOrigin(req)) {
    log.warn({ path: upstreamPath }, 'Origin check failed');
    return res.status(403).json({ error: 'Forbidden' });
  }

  // Per-client-IP rate limit; IP is derived from forwarding headers with a
  // socket-address fallback.
  const ip = getClientIp(req);
  if (!rateLimit(ip)) {
    log.warn({ ip, path: upstreamPath }, 'Rate limit exceeded');
    return res.status(429).json({ error: 'Too many requests' });
  }

  // Drop the catch-all `path` key; every remaining query param is forwarded.
  // NOTE(review): `Record` appears to have lost its type arguments in
  // extraction — confirm the original annotation against the repo.
  const { path: _, ...queryParams } = req.query;
  const params = flattenParams(queryParams as Record);
  const cacheKey = buildCacheKey('GET', upstreamPath, params);

  // Cache lookup — best effort: any Redis error logs a warning and falls
  // through to the upstream request instead of failing the response.
  const redis = getRedisClient();
  if (redis && isRedisAvailable()) {
    try {
      const cached = await redis.hgetall(cacheKey);
      if (cached && cached.body) {
        const duration = Date.now() - startTime;
        log.info({ path: upstreamPath, duration, cache: 'hit' }, 'Cache hit');
        res.setHeader('X-Cache', 'HIT');
        res.setHeader('Content-Type', cached.contentType || 'application/json');
        return res.status(parseInt(cached.statusCode || '200', 10)).send(cached.body);
      }
    } catch (err) {
      log.warn({ err: (err as Error).message, path: upstreamPath }, 'Cache read failed, falling through');
    }
  }

  // Forward to upstream
  try {
    const upstreamUrl = `${defaultRequestConfig.baseURL}${upstreamPath}`;
    const upstreamResponse = await axios.get(upstreamUrl, {
      params,
      headers: {
        Authorization: `Bearer ${session.token.access_token}`,
        'Content-Type': 'application/json',
      },
      timeout: defaultRequestConfig.timeout as number,
      // Never throw on HTTP status: upstream 4xx/5xx codes are passed
      // through to the client verbatim below.
      validateStatus: () => true,
    });

    const statusCode = upstreamResponse.status;
    const contentType = (upstreamResponse.headers['content-type'] as string) || 'application/json';
    // Normalize to a string body so size can be checked and cached as-is.
    const body =
      typeof upstreamResponse.data === 'string' ? upstreamResponse.data : JSON.stringify(upstreamResponse.data);

    const duration = Date.now() - startTime;
    log.info({ path: upstreamPath, duration, statusCode, cache: 'miss' }, 'Cache miss');

    // Skip caching partial results (Solr sets this when results are incomplete)
    const isPartial =
      typeof upstreamResponse.data === 'object' && upstreamResponse.data?.responseHeader?.partialResults === true;

    // Cache successful, complete responses within size limit
    if (
      redis &&
      isRedisAvailable() &&
      statusCode >= 200 &&
      statusCode < 300 &&
      body.length <= CACHE_MAX_SIZE &&
      !isPartial
    ) {
      const pipeline = redis.multi();
      pipeline.hset(cacheKey, {
        body,
        contentType,
        statusCode: String(statusCode),
      });
      pipeline.expire(cacheKey, CACHE_TTL);
      // Fire-and-forget: the client response is not delayed by the cache
      // write; failures are logged, never surfaced.
      pipeline.exec().catch((writeErr) => {
        log.warn({ err: (writeErr as Error).message, path: upstreamPath }, 'Cache write failed');
      });
    }

    res.setHeader('X-Cache', 'MISS');
    res.setHeader('Content-Type', contentType);
    return res.status(statusCode).send(body);
  } catch (err) {
    // Network/timeout level failure (HTTP errors never reach here because of
    // validateStatus above) — surface as 502 Bad Gateway.
    const duration = Date.now() - startTime;
    log.error({ err: (err as Error).message, path: upstreamPath, duration }, 'Upstream request failed');
    return res.status(502).json({ error: 'Upstream request failed' });
  }
}
diff --git a/src/pages/api/proxy/proxy.test.ts b/src/pages/api/proxy/proxy.test.ts
new file mode 100644
index 000000000..b3fdce241
--- /dev/null
+++ b/src/pages/api/proxy/proxy.test.ts
@@ -0,0 +1,222 @@
import { beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
import type { NextApiRequest, NextApiResponse } from 'next';
import { createMocks } from 'node-mocks-http';
import axios from 'axios';
import { getIronSession } from 'iron-session';
import { rateLimit } from '@/rateLimit';
import {
getRedisClient, isRedisAvailable } from '@/lib/redis';

// --- Module mocks -----------------------------------------------------------
// vitest hoists vi.mock calls above the imports, so the handler module
// (imported lazily in beforeAll) only ever sees these fakes.

// Redis defaults to "unavailable" so most tests exercise the cache-miss path.
vi.mock('@/lib/redis', () => ({
  __esModule: true,
  getRedisClient: vi.fn(() => null),
  isRedisAvailable: vi.fn(() => false),
}));

// Session is stubbed per-test; beforeEach installs an authenticated default.
vi.mock('iron-session', () => ({
  __esModule: true,
  getIronSession: vi.fn(),
}));

// Rate limiter passes by default; individual tests flip it to false.
vi.mock('@/rateLimit', () => ({
  __esModule: true,
  rateLimit: vi.fn(() => true),
}));

// axios.get resolves with a minimal successful Solr-like payload by default.
vi.mock('axios', () => {
  const get = vi.fn(() =>
    Promise.resolve({
      status: 200,
      headers: { 'content-type': 'application/json' },
      data: { response: { numFound: 1, docs: [] } },
    }),
  );
  return {
    __esModule: true,
    default: { get },
  };
});

// Silence the proxy's child logger.
vi.mock('@/logger', () => ({
  __esModule: true,
  logger: {
    child: () => ({
      info: vi.fn(),
      warn: vi.fn(),
      error: vi.fn(),
      debug: vi.fn(),
    }),
  },
}));

// Options accepted by executeHandler.
// NOTE(review): `Record` (here and on ReturnType/createMocks below) appears to
// have lost its generic type arguments in extraction — confirm against the
// repo before relying on these annotations.
type ProxyTestOptions = {
  method?: string;
  path?: string[];
  query?: Record;
  headers?: Record;
};

// A fully-authenticated session; tests override via mockedGetIronSession.
const defaultSession = {
  token: {
    access_token: 'test-token',
    anonymous: false,
    expires_at: '2099-01-01',
    username: 'test-user',
  },
  isAuthenticated: true,
};

const mockedGetIronSession = vi.mocked(getIronSession);
const mockedRateLimit = vi.mocked(rateLimit);
const mockedGetRedisClient = vi.mocked(getRedisClient);
const mockedIsRedisAvailable = vi.mocked(isRedisAvailable);
const mockedAxiosGet = vi.mocked(axios.get);
type HandlerModule = typeof import('./[...path]');
// Populated in beforeAll once env vars are in place.
let handler: HandlerModule['default'];

// Builds a mocked req/res pair, guarantees req.socket.remoteAddress exists
// (the handler reads it as an IP fallback), and invokes the handler.
const executeHandler = async (options: ProxyTestOptions = {}) => {
  const { method = 'GET', path = ['search', 'query'], query = {}, headers = {} } = options;

  const { req, res } = createMocks({
    method,
    headers: {
      host: 'example.com',
      origin: 'https://example.com',
      ...headers,
    },
    query: {
      ...query,
      path,
    },
  });

  // node-mocks-http may not populate req.socket at all.
  if (!req.socket) {
    (req as unknown as { socket: { remoteAddress: string } }).socket = {
      remoteAddress: '127.0.0.1',
    };
  } else {
    (req.socket as unknown as { remoteAddress: string }).remoteAddress = '127.0.0.1';
  }

  await handler!(req, res);
  return { req, res };
};

beforeAll(async () => {
  // Set the upstream host before importing so module-level config reads it.
  process.env.API_HOST_SERVER = 'https://upstream.example.com';
  handler = (await import('./[...path]')).default;
});

beforeEach(() => {
  // Reset call history, then re-install the happy-path defaults.
  vi.clearAllMocks();
  mockedGetIronSession.mockResolvedValue({ ...defaultSession });
  mockedRateLimit.mockReturnValue(true);
  mockedGetRedisClient.mockReturnValue(null);
  mockedIsRedisAvailable.mockReturnValue(false);
  mockedAxiosGet.mockResolvedValue({
    status: 200,
    headers: { 'content-type': 'application/json' },
    data: { response: { numFound: 1, docs: [] } },
  });
});

describe('proxy API handler', () => {
  it('rejects non-GET methods with 405', async () => {
    const { res } = await executeHandler({ method: 'POST' });
    expect(res._getStatusCode()).toBe(405);
    expect(res._getJSONData()).toEqual({ error: 'Method not allowed' });
  });

  it('rejects disallowed proxy paths with 404', async () => {
    const { res } = await executeHandler({ path: ['not', 'allowed'] });
    expect(res._getStatusCode()).toBe(404);
    expect(res._getJSONData()).toEqual({ error: 'Not found' });
  });

  it('rejects requests without a session token with 401', async () => {
    mockedGetIronSession.mockResolvedValueOnce({ token: null });
    const { res } = await executeHandler();
    expect(res._getStatusCode()).toBe(401);
    expect(res._getJSONData()).toEqual({ error: 'Unauthorized' });
  });

  it('forwards allowed GET requests and returns upstream response with cache miss headers', async () => {
    const { res } = await executeHandler({ query: { q: 'mars' } });

    // NOTE(review): the handler requests `${defaultRequestConfig.baseURL}${path}`;
    // this assertion only holds if baseURL resolves to '' / undefined-free in
    // this environment — confirm.
    expect(mockedAxiosGet).toHaveBeenCalledWith(
      '/search/query',
      expect.objectContaining({
        params: expect.objectContaining({ q: 'mars' }),
        headers: expect.objectContaining({
          Authorization: 'Bearer test-token',
        }),
      }),
    );

    expect(res._getStatusCode()).toBe(200);
    expect(JSON.parse(res._getData())).toEqual({
      response: { numFound: 1, docs: [] },
    });
    expect(res.getHeader('X-Cache')).toBe('MISS');
    expect(res.getHeader('Content-Type')).toBe('application/json');
  });

  it('rejects rate-limited requests with 429', async () => {
    mockedRateLimit.mockReturnValueOnce(false);
    const { res } = await executeHandler();
    expect(res._getStatusCode()).toBe(429);
    expect(res._getJSONData()).toEqual({ error: 'Too many requests' });
  });

  it('returns cached response when Redis has the entry', async () => {
    const cachedBody = JSON.stringify({ response: { numFound: 5 } });
    const mockRedis = {
      hgetall: vi.fn().mockResolvedValue({
        body: cachedBody,
        contentType: 'application/json',
        statusCode: '201',
      }),
    };
    // mockReturnValueOnce: only the cache-read check sees Redis as available,
    // which is sufficient because a hit returns before the write path runs.
    mockedGetRedisClient.mockReturnValueOnce(mockRedis as unknown as ReturnType);
    mockedIsRedisAvailable.mockReturnValueOnce(true);

    const { res } = await executeHandler();

    // Cached status/headers are replayed and the upstream is never contacted.
    expect(res._getStatusCode()).toBe(201);
    expect(res.getHeader('X-Cache')).toBe('HIT');
    expect(res._getData()).toBe(cachedBody);
    expect(mockedAxiosGet).not.toHaveBeenCalled();
  });

  it('does not cache partial results from Solr', async () => {
    const mockRedis = {
      hgetall: vi.fn().mockResolvedValue(null),
      multi: vi.fn(),
    };
    mockedGetRedisClient.mockReturnValue(mockRedis as unknown as ReturnType);
    mockedIsRedisAvailable.mockReturnValue(true);

    // Upstream flags the response as partial via responseHeader.partialResults.
    mockedAxiosGet.mockResolvedValueOnce({
      status: 200,
      headers: { 'content-type': 'application/json' },
      data: {
        responseHeader: { status: 0, QTime: 100, partialResults: true },
        response: { numFound: 50000, docs: [{ bibcode: '2024Test' }] },
      },
    });

    const { res } = await executeHandler({ query: { q: 'galaxies' } });

    // Response is served normally, but no cache-write pipeline is started.
    expect(res._getStatusCode()).toBe(200);
    expect(res.getHeader('X-Cache')).toBe('MISS');
    expect(mockRedis.multi).not.toHaveBeenCalled();
  });

  it('rejects path traversal attempts with 404', async () => {
    const { res } = await executeHandler({
      path: ['resolver', '..', 'secret'],
    });

    expect(res._getStatusCode()).toBe(404);
    expect(res._getJSONData()).toEqual({ error: 'Not found' });
  });
});