From f328d7baee557e03d69702028d6627de50e7ced8 Mon Sep 17 00:00:00 2001
From: paulo-ocean
Date: Fri, 15 Nov 2024 11:27:46 +0000
Subject: [PATCH 1/8] try debug test

---
 src/test/integration/logs.test.ts | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/test/integration/logs.test.ts b/src/test/integration/logs.test.ts
index f216d8a0c..2427b5850 100644
--- a/src/test/integration/logs.test.ts
+++ b/src/test/integration/logs.test.ts
@@ -451,6 +451,7 @@ describe('LogDatabase retrieveMultipleLogs with pagination', () => {
       undefined,
       1 // Page 1
     )
+    console.log('LOGS_PAGE 1: ', logsPage1)
     const logsPage2 = await database.logs.retrieveMultipleLogs(
       new Date(Date.now() - 10000), // 10 seconds ago
       new Date(), // now
@@ -459,8 +460,10 @@ describe('LogDatabase retrieveMultipleLogs with pagination', () => {
       undefined,
       2 // Page 2
     )
+    console.log('LOGS_PAGE 2: ', logsPage1)
     // make sure we have enough logs for 2 pages
     const logsCount = await database.logs.getLogsCount()
+    console.log('LOGS_COUNT: ', logsCount)
     // Ensure that the logs on page 2 are different from those on page 1 if logsPage2 is not empty
     if (logsCount > LOGS_PER_PAGE && logsPage2.length > 0) {
       expect(logsPage1[0].id).to.not.equal(logsPage2[0].id)

From 5d4687b61c1b66188052dbeed60ad03b13544012 Mon Sep 17 00:00:00 2001
From: paulo-ocean
Date: Fri, 15 Nov 2024 12:13:20 +0000
Subject: [PATCH 2/8] try run only log tests on typesense

---
 package.json                      |  2 +-
 src/test/integration/logs.test.ts | 11 +++++++++--
 2 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/package.json b/package.json
index d09f7d0aa..468f2b617 100644
--- a/package.json
+++ b/package.json
@@ -32,7 +32,7 @@
     "mocha-light": "mocha --node-env=test --config .mocharc.json --exclude \"./dist/test/integration/compute.test.js\"",
     "test": "npm run lint && npm run test:unit:cover && npm run test:integration:cover",
     "test:unit": "npm run build-tests && npm run mocha \"./dist/test/unit/**/*.test.js\"",
-    "test:integration": "npm run build-tests && npm run mocha \"./dist/test/integration/**/*.test.js\"",
+    "test:integration": "npm run build-tests && npm run mocha \"./dist/test/integration/logs.test.js\"",
     "test:indexer": "npm run build-tests && npm run mocha \"./dist/test/integration/indexer.test.js\"",
     "test:integration:light": "npm run build-tests && npm run mocha-light \"./dist/test/integration/**/*.test.js\"",
     "test:unit:cover": "nyc --report-dir coverage/unit npm run test:unit",
diff --git a/src/test/integration/logs.test.ts b/src/test/integration/logs.test.ts
index 2427b5850..982223d2d 100644
--- a/src/test/integration/logs.test.ts
+++ b/src/test/integration/logs.test.ts
@@ -7,7 +7,7 @@ import {
   configureCustomDBTransport,
   getCustomLoggerForModule
 } from '../../utils/logging/Logger.js'
-import { ENVIRONMENT_VARIABLES } from '../../utils/constants.js'
+import { DB_TYPES, ENVIRONMENT_VARIABLES } from '../../utils/constants.js'
 import {
   buildEnvOverrideConfig,
   OverrideEnvConfig,
@@ -34,7 +34,14 @@ describe('LogDatabase CRUD', () => {
   before(async () => {
     previousConfiguration = await setupEnvironment(
       TEST_ENV_CONFIG_FILE,
-      buildEnvOverrideConfig([ENVIRONMENT_VARIABLES.LOG_DB], ['true'])
+      buildEnvOverrideConfig(
+        [
+          ENVIRONMENT_VARIABLES.LOG_DB,
+          ENVIRONMENT_VARIABLES.DB_TYPE,
+          ENVIRONMENT_VARIABLES.DB_URL
+        ],
+        ['true', DB_TYPES.TYPESENSE, 'http://localhost:8108/?apiKey=xyz']
+      )
     )
     const { dbConfig } = await getConfiguration(true)
     database = await new Database(dbConfig)

From f200678ab0552a7d7fe6981a7c95f22d952be460 Mon Sep 17 00:00:00 2001
From: paulo-ocean
Date: Fri, 15 Nov 2024 12:33:17 +0000
Subject: [PATCH 3/8] debug test query

---
 src/components/database/TypenseDatabase.ts | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/src/components/database/TypenseDatabase.ts b/src/components/database/TypenseDatabase.ts
index 6a67fc281..ebe8bd965 100644
--- a/src/components/database/TypenseDatabase.ts
+++ b/src/components/database/TypenseDatabase.ts
@@ -825,6 +825,22 @@ export class TypesenseLogDatabase extends AbstractLogDatabase {
         )
       }
 
+      console.log('PAGE: ', page || 1)
+
+      const searchParametersTest = {
+        q: '*',
+        query_by: 'message,level,meta',
+        filter_by: filterConditions,
+        sort_by: 'timestamp:desc'
+      }
+      const resultTest = await this.provider
+        .collections(this.schema.name)
+        .documents()
+        .search(searchParametersTest)
+
+      // Map and return the search hits as log entries
+      const out = resultTest.hits.map((hit) => hit.document)
+      console.log('Without page filtering. ', out)
       // Define search parameters
       const searchParameters = {
         q: '*',

From a6f5d291b869b7d18e6e534a17a3a2b916b60e27 Mon Sep 17 00:00:00 2001
From: paulo-ocean
Date: Fri, 15 Nov 2024 12:48:29 +0000
Subject: [PATCH 4/8] more debug

---
 src/test/integration/logs.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/test/integration/logs.test.ts b/src/test/integration/logs.test.ts
index 982223d2d..f3501cf28 100644
--- a/src/test/integration/logs.test.ts
+++ b/src/test/integration/logs.test.ts
@@ -467,7 +467,7 @@ describe('LogDatabase retrieveMultipleLogs with pagination', () => {
       undefined,
       2 // Page 2
     )
-    console.log('LOGS_PAGE 2: ', logsPage1)
+    console.log('LOGS_PAGE 2: ', logsPage2)
     // make sure we have enough logs for 2 pages
     const logsCount = await database.logs.getLogsCount()
     console.log('LOGS_COUNT: ', logsCount)

From 525c196ccb08b8db1358064491d9f8c2efb25b77 Mon Sep 17 00:00:00 2001
From: paulo-ocean
Date: Fri, 15 Nov 2024 14:49:09 +0000
Subject: [PATCH 5/8] refactor multipleLogs to be consistent & avoid an ambiguous response

---
 src/components/database/BaseDatabase.ts    |  2 +-
 .../database/ElasticSearchDatabase.ts      | 17 +++------
 src/components/database/TypenseDatabase.ts |  6 +--
 src/components/httpRoutes/logs.ts          |  2 +-
 src/test/integration/logs.test.ts          | 38 +++++++++----------
 5 files changed, 30 insertions(+), 35 deletions(-)

diff --git a/src/components/database/BaseDatabase.ts b/src/components/database/BaseDatabase.ts
index 637b82aec..f02153415 100644
--- a/src/components/database/BaseDatabase.ts
+++ b/src/components/database/BaseDatabase.ts
@@ -63,7 +63,7 @@ export abstract class AbstractLogDatabase {
     moduleName?: string,
     level?: string,
     page?: number
-  ): Promise[] | null>
+  ): Promise[]>
 
   abstract delete(logId: string): Promise
   abstract deleteOldLogs(): Promise
diff --git a/src/components/database/ElasticSearchDatabase.ts b/src/components/database/ElasticSearchDatabase.ts
index 87bf8d807..cf332b951 100644
--- a/src/components/database/ElasticSearchDatabase.ts
+++ b/src/components/database/ElasticSearchDatabase.ts
@@ -849,7 +849,7 @@ export class ElasticsearchLogDatabase extends AbstractLogDatabase {
     moduleName?: string,
     level?: string,
     page?: number
-  ): Promise[] | null> {
+  ): Promise[]> {
     try {
       const filterConditions: any = {
         bool: {
@@ -888,10 +888,6 @@ export class ElasticsearchLogDatabase extends AbstractLogDatabase {
         from
       })
 
-      console.log('logs results:', result)
-      console.log('logs results hits:', result.hits)
-      console.log('logs results hits hits:', result.hits.hits)
-
       return result.hits.hits.map((hit: any) => {
         return normalizeDocumentId(hit._source, hit._id)
       })
@@ -903,7 +899,7 @@ export class ElasticsearchLogDatabase extends AbstractLogDatabase {
         GENERIC_EMOJIS.EMOJI_CROSS_MARK,
         LOG_LEVELS_STR.LEVEL_ERROR
       )
-      return null
+      return []
     }
   }
 
@@ -943,13 +939,12 @@
     try {
      const oldLogs = await this.retrieveMultipleLogs(new Date(0), deleteBeforeTime, 200)
 
-      if (oldLogs) {
-        for (const log of oldLogs) {
-          if (log.id) {
-            await this.delete(log.id)
-          }
+      for (const log of oldLogs) {
+        if (log.id) {
+          await this.delete(log.id)
         }
       }
+      return oldLogs ? oldLogs.length : 0
     } catch (error) {
       DATABASE_LOGGER.logMessageWithEmoji(
diff --git a/src/components/database/TypenseDatabase.ts b/src/components/database/TypenseDatabase.ts
index ebe8bd965..d441b3719 100644
--- a/src/components/database/TypenseDatabase.ts
+++ b/src/components/database/TypenseDatabase.ts
@@ -805,7 +805,7 @@ export class TypesenseLogDatabase extends AbstractLogDatabase {
     moduleName?: string,
     level?: string,
     page?: number
-  ): Promise[] | null> {
+  ): Promise[]> {
     try {
       let filterConditions = `timestamp:>=${startTime.getTime()} && timestamp:<${endTime.getTime()}`
       if (moduleName) {
@@ -867,7 +867,7 @@ export class TypesenseLogDatabase extends AbstractLogDatabase {
         GENERIC_EMOJIS.EMOJI_CROSS_MARK,
         LOG_LEVELS_STR.LEVEL_ERROR
       )
-      return null
+      return []
     }
   }
 
@@ -918,7 +918,7 @@ export class TypesenseLogDatabase extends AbstractLogDatabase {
           }
         }
       }
-      return oldLogs ? oldLogs.length : 0
+      return oldLogs.length
     } catch (error) {
       DATABASE_LOGGER.logMessageWithEmoji(
         `Error when deleting old log entries: ${error.message}`,
diff --git a/src/components/httpRoutes/logs.ts b/src/components/httpRoutes/logs.ts
index ba0631b03..7826ce452 100644
--- a/src/components/httpRoutes/logs.ts
+++ b/src/components/httpRoutes/logs.ts
@@ -56,7 +56,7 @@ logRoutes.post('/logs', express.json(), validateRequest, async (req, res) => {
       .getDatabase()
       .logs.retrieveMultipleLogs(startTime, endTime, maxLogs, moduleName, level, page)
 
-    if (logs) {
+    if (logs.length > 0) {
       res.json(logs)
     } else {
       res.status(404).send('No logs found')
diff --git a/src/test/integration/logs.test.ts b/src/test/integration/logs.test.ts
index f3501cf28..6d90626af 100644
--- a/src/test/integration/logs.test.ts
+++ b/src/test/integration/logs.test.ts
@@ -94,11 +94,11 @@ describe('LogDatabase CRUD', () => {
 
     if (logs.length > 0) {
       logs = logs.filter((log) => log.message === newLogEntry.message)
-      expect(logs?.length).to.equal(1)
-      expect(Number(logs?.[0].id)).to.greaterThan(Number(logId))
-      expect(logs?.[0].level).to.equal(newLogEntry.level)
-      expect(logs?.[0].message).to.equal(newLogEntry.message)
-      expect(logs?.[0].moduleName).to.equal('HTTP')
+      expect(logs.length).to.equal(1)
+      expect(Number(logs[0].id)).to.greaterThan(Number(logId))
+      expect(logs[0].level).to.equal(newLogEntry.level)
+      expect(logs[0].message).to.equal(newLogEntry.message)
+      expect(logs[0].moduleName).to.equal('HTTP')
     }
   })
 
@@ -125,11 +125,11 @@ describe('LogDatabase CRUD', () => {
 
     logs = logs.filter((log) => log.message === newLogEntry.message)
     if (logs.length > 0) {
-      expect(logs?.length).to.equal(1)
-      expect(Number(logs?.[0].id)).to.greaterThan(Number(logId))
-      expect(logs?.[0].level).to.equal(newLogEntry.level)
-      expect(logs?.[0].message).to.equal(newLogEntry.message)
-      expect(logs?.[0].moduleName).to.equal('HTTP')
+      expect(logs.length).to.equal(1)
+      expect(Number(logs[0].id)).to.greaterThan(Number(logId))
+      expect(logs[0].level).to.equal(newLogEntry.level)
+      expect(logs[0].message).to.equal(newLogEntry.message)
+      expect(logs[0].moduleName).to.equal('HTTP')
     }
   })
 
@@ -160,11 +160,11 @@ describe('LogDatabase CRUD', () => {
 
     logs = logs.filter((log) => log.message.includes(newLogEntry.message))
     if (logs.length > 0) {
-      expect(logs?.length).to.equal(1)
-      expect(Number(logs?.[0].id)).to.greaterThan(Number(logId))
-      expect(logs?.[0].level).to.equal(newLogEntry.level)
-      assert(logs?.[0].message)
-      expect(logs?.[0].moduleName).to.equal('HTTP')
+      expect(logs.length).to.equal(1)
+      expect(Number(logs[0].id)).to.greaterThan(Number(logId))
+      expect(logs[0].level).to.equal(newLogEntry.level)
+      assert(logs[0].message)
+      expect(logs[0].moduleName).to.equal('HTTP')
     }
   })
 
@@ -300,13 +300,13 @@ describe('LogDatabase retrieveMultipleLogs with specific parameters', () => {
 
   it('should return an empty array for negative maxLogs', async () => {
     const logs = await database.logs.retrieveMultipleLogs(startTime, endTime, -1)
-    assert.isNull(logs, 'Expected logs to be null')
+    assert.isEmpty(logs, 'Expected logs to be empty')
   })
 
   it('should retrieve a maximum of one log when maxLogs is set to 1', async () => {
     const logs = await database.logs.retrieveMultipleLogs(startTime, endTime, 1)
     // check if the length of logs is 1 or less
-    expect(logs?.length).to.be.at.most(1)
+    expect(logs.length).to.be.at.most(1)
   })
 
   it('should retrieve no logs when maxLogs is set to 0', async () => {
@@ -390,14 +390,14 @@ describe('LogDatabase deleteOldLogs', () => {
       let logs = await database.logs.retrieveMultipleLogs(startTime, endTime, 100)
 
       // Check that the old log is not present, but the recent one is
-      const oldLogPresent = logs?.some((log) => log.message === oldLogEntry.message)
+      const oldLogPresent = logs.some((log) => log.message === oldLogEntry.message)
       assert(oldLogPresent === false, 'Old logs are still present')
 
       // since we have many logs going to DB by default, we need to re-frame the timestamp to grab it
       startTime = new Date(recentLogEntry.timestamp - 1000)
       endTime = new Date(recentLogEntry.timestamp + 1000)
       logs = await database.logs.retrieveMultipleLogs(startTime, endTime, 100)
-      const recentLogPresent = logs?.some((log) => log.message === recentLogEntry.message)
+      const recentLogPresent = logs.some((log) => log.message === recentLogEntry.message)
       assert(recentLogPresent === true, 'Recent logs are not present')
     } else
       assert(

From 17029ca85f7a06649244458ce77b54b9e9267531 Mon Sep 17 00:00:00 2001
From: paulo-ocean
Date: Fri, 15 Nov 2024 15:22:50 +0000
Subject: [PATCH 6/8] remove debug lines

---
 src/components/database/TypenseDatabase.ts | 16 ----------------
 src/test/integration/logs.test.ts          |  3 ---
 2 files changed, 19 deletions(-)

diff --git a/src/components/database/TypenseDatabase.ts b/src/components/database/TypenseDatabase.ts
index d441b3719..004826866 100644
--- a/src/components/database/TypenseDatabase.ts
+++ b/src/components/database/TypenseDatabase.ts
@@ -825,22 +825,6 @@ export class TypesenseLogDatabase extends AbstractLogDatabase {
         )
       }
 
-      console.log('PAGE: ', page || 1)
-
-      const searchParametersTest = {
-        q: '*',
-        query_by: 'message,level,meta',
-        filter_by: filterConditions,
-        sort_by: 'timestamp:desc'
-      }
-      const resultTest = await this.provider
-        .collections(this.schema.name)
-        .documents()
-        .search(searchParametersTest)
-
-      // Map and return the search hits as log entries
-      const out = resultTest.hits.map((hit) => hit.document)
-      console.log('Without page filtering. ', out)
       // Define search parameters
       const searchParameters = {
         q: '*',
diff --git a/src/test/integration/logs.test.ts b/src/test/integration/logs.test.ts
index 6d90626af..308f8f794 100644
--- a/src/test/integration/logs.test.ts
+++ b/src/test/integration/logs.test.ts
@@ -458,7 +458,6 @@ describe('LogDatabase retrieveMultipleLogs with pagination', () => {
       undefined,
       1 // Page 1
     )
-    console.log('LOGS_PAGE 1: ', logsPage1)
     const logsPage2 = await database.logs.retrieveMultipleLogs(
       new Date(Date.now() - 10000), // 10 seconds ago
       new Date(), // now
@@ -467,10 +466,8 @@ describe('LogDatabase retrieveMultipleLogs with pagination', () => {
       undefined,
       2 // Page 2
     )
-    console.log('LOGS_PAGE 2: ', logsPage2)
     // make sure we have enough logs for 2 pages
     const logsCount = await database.logs.getLogsCount()
-    console.log('LOGS_COUNT: ', logsCount)
     // Ensure that the logs on page 2 are different from those on page 1 if logsPage2 is not empty
     if (logsCount > LOGS_PER_PAGE && logsPage2.length > 0) {
       expect(logsPage1[0].id).to.not.equal(logsPage2[0].id)

From 489d050b9e7c99361a454fff7eb00c849a34d8fb Mon Sep 17 00:00:00 2001
From: paulo-ocean
Date: Fri, 15 Nov 2024 15:32:43 +0000
Subject: [PATCH 7/8] clean the rest

---
 src/test/integration/logs.test.ts | 11 ++---------
 1 file changed, 2 insertions(+), 9 deletions(-)

diff --git a/src/test/integration/logs.test.ts b/src/test/integration/logs.test.ts
index 308f8f794..c87feb8da 100644
--- a/src/test/integration/logs.test.ts
+++ b/src/test/integration/logs.test.ts
@@ -7,7 +7,7 @@ import {
   configureCustomDBTransport,
   getCustomLoggerForModule
 } from '../../utils/logging/Logger.js'
-import { DB_TYPES, ENVIRONMENT_VARIABLES } from '../../utils/constants.js'
+import { ENVIRONMENT_VARIABLES } from '../../utils/constants.js'
 import {
   buildEnvOverrideConfig,
   OverrideEnvConfig,
@@ -34,14 +34,7 @@ describe('LogDatabase CRUD', () => {
   before(async () => {
     previousConfiguration = await setupEnvironment(
       TEST_ENV_CONFIG_FILE,
-      buildEnvOverrideConfig(
-        [
-          ENVIRONMENT_VARIABLES.LOG_DB,
-          ENVIRONMENT_VARIABLES.DB_TYPE,
-          ENVIRONMENT_VARIABLES.DB_URL
-        ],
-        ['true', DB_TYPES.TYPESENSE, 'http://localhost:8108/?apiKey=xyz']
-      )
+      buildEnvOverrideConfig([ENVIRONMENT_VARIABLES.LOG_DB], ['true'])
     )
     const { dbConfig } = await getConfiguration(true)
     database = await new Database(dbConfig)

From 6c4f53f246895e82c3414eaff9be5fb39806a77b Mon Sep 17 00:00:00 2001
From: paulo-ocean
Date: Fri, 15 Nov 2024 15:42:57 +0000
Subject: [PATCH 8/8] restore package.json, all test suites again

---
 package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/package.json b/package.json
index 468f2b617..d09f7d0aa 100644
--- a/package.json
+++ b/package.json
@@ -32,7 +32,7 @@
     "mocha-light": "mocha --node-env=test --config .mocharc.json --exclude \"./dist/test/integration/compute.test.js\"",
     "test": "npm run lint && npm run test:unit:cover && npm run test:integration:cover",
     "test:unit": "npm run build-tests && npm run mocha \"./dist/test/unit/**/*.test.js\"",
-    "test:integration": "npm run build-tests && npm run mocha \"./dist/test/integration/logs.test.js\"",
+    "test:integration": "npm run build-tests && npm run mocha \"./dist/test/integration/**/*.test.js\"",
    "test:indexer": "npm run build-tests && npm run mocha \"./dist/test/integration/indexer.test.js\"",
     "test:integration:light": "npm run build-tests && npm run mocha-light \"./dist/test/integration/**/*.test.js\"",
     "test:unit:cover": "nyc --report-dir coverage/unit npm run test:unit",
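
Illustrative sketch (not part of the patches above): PATCH 5/8 makes retrieveMultipleLogs resolve to an empty array instead of null when nothing matches or an error is caught, which is why src/components/httpRoutes/logs.ts can switch to checking logs.length. The TypeScript sketch below shows that calling pattern under stated assumptions; the LogRecord alias, the LogsDatabase interface and the fetchRecentHttpLogs helper are illustrative names, not code from the repository.

// Sketch only: how a caller can rely on the refactored contract
// (an empty array means "no logs found"; no null guard is needed).
type LogRecord = Record<string, any>

interface LogsDatabase {
  retrieveMultipleLogs(
    startTime: Date,
    endTime: Date,
    maxLogs: number,
    moduleName?: string,
    level?: string,
    page?: number
  ): Promise<LogRecord[]>
}

// Hypothetical helper: fetch HTTP-module logs from the last 10 seconds.
async function fetchRecentHttpLogs(logsDb: LogsDatabase): Promise<LogRecord[]> {
  const logs = await logsDb.retrieveMultipleLogs(
    new Date(Date.now() - 10000),
    new Date(),
    100,
    'HTTP'
  )
  // Array methods work directly on the result, mirroring the
  // if (logs.length > 0) check introduced in the HTTP route.
  return logs.filter((log) => log.message !== undefined)
}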