diff --git a/Makefile b/Makefile
index 80632143..c61d2fd5 100644
--- a/Makefile
+++ b/Makefile
@@ -12,10 +12,10 @@ quick-start: config clean build serve-docs # Quick start target to setup, build
dependencies:: # Install dependencies needed to build and test the project @Pipeline
$(MAKE) -C src/cloudevents install
$(MAKE) -C src/eventcatalogasyncapiimporter install
+ $(MAKE) -C lambdas/mesh-acknowledge install
+ $(MAKE) -C utils/py-utils install
$(MAKE) -C lambdas/mesh-poll install
$(MAKE) -C lambdas/mesh-download install
- $(MAKE) -C utils/metric-publishers install
- $(MAKE) -C utils/event-publisher-py install
$(MAKE) -C utils/py-mock-mesh install
./scripts/set-github-token.sh
npm install --workspaces
@@ -44,10 +44,10 @@ clean:: # Clean-up project resources (main) @Operations
$(MAKE) -C src/cloudevents clean && \
$(MAKE) -C src/eventcatalogasyncapiimporter clean && \
$(MAKE) -C src/eventcatalogasyncapiimporter clean-output && \
+ $(MAKE) -C lambdas/mesh-acknowledge clean && \
+ $(MAKE) -C utils/py-utils clean && \
$(MAKE) -C lambdas/mesh-poll clean && \
$(MAKE) -C lambdas/mesh-download clean && \
- $(MAKE) -C utils/metric-publishers clean && \
- $(MAKE) -C utils/event-publisher-py clean && \
$(MAKE) -C utils/py-mock-mesh clean && \
$(MAKE) -C src/python-schema-generator clean && \
rm -f .version
diff --git a/infrastructure/terraform/components/dl/README.md b/infrastructure/terraform/components/dl/README.md
index f763d5c0..88998226 100644
--- a/infrastructure/terraform/components/dl/README.md
+++ b/infrastructure/terraform/components/dl/README.md
@@ -45,6 +45,7 @@ No requirements.
| [kms](#module\_kms) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-kms.zip | n/a |
| [lambda\_apim\_key\_generation](#module\_lambda\_apim\_key\_generation) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| [lambda\_lambda\_apim\_refresh\_token](#module\_lambda\_lambda\_apim\_refresh\_token) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
+| [mesh\_acknowledge](#module\_mesh\_acknowledge) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| [mesh\_download](#module\_mesh\_download) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| [mesh\_poll](#module\_mesh\_poll) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| [pdm\_mock](#module\_pdm\_mock) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
@@ -60,6 +61,7 @@ No requirements.
| [s3bucket\_static\_assets](#module\_s3bucket\_static\_assets) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| [sqs\_core\_notifier](#module\_sqs\_core\_notifier) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
| [sqs\_event\_publisher\_errors](#module\_sqs\_event\_publisher\_errors) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
+| [sqs\_mesh\_acknowledge](#module\_sqs\_mesh\_acknowledge) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-sqs.zip | n/a |
| [sqs\_mesh\_download](#module\_sqs\_mesh\_download) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
| [sqs\_pdm\_poll](#module\_sqs\_pdm\_poll) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
| [sqs\_pdm\_uploader](#module\_sqs\_pdm\_uploader) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
diff --git a/infrastructure/terraform/components/dl/cloudwatch_event_rule_mesh_inbox_message_downloaded.tf b/infrastructure/terraform/components/dl/cloudwatch_event_rule_mesh_inbox_message_downloaded.tf
index dc316333..ced71bca 100644
--- a/infrastructure/terraform/components/dl/cloudwatch_event_rule_mesh_inbox_message_downloaded.tf
+++ b/infrastructure/terraform/components/dl/cloudwatch_event_rule_mesh_inbox_message_downloaded.tf
@@ -25,3 +25,10 @@ resource "aws_cloudwatch_event_target" "pdm-uploader-target" {
target_id = "pdm-uploader-target"
event_bus_name = aws_cloudwatch_event_bus.main.name
}
+
+resource "aws_cloudwatch_event_target" "mesh-acknowledge-target" {
+ rule = aws_cloudwatch_event_rule.mesh_inbox_message_downloaded.name
+ arn = module.sqs_mesh_acknowledge.sqs_queue_arn
+ target_id = "mesh-acknowledge-target"
+ event_bus_name = aws_cloudwatch_event_bus.main.name
+}
diff --git a/infrastructure/terraform/components/dl/lambda_event_source_mapping_mesh_acknowledge_lambda.tf b/infrastructure/terraform/components/dl/lambda_event_source_mapping_mesh_acknowledge_lambda.tf
new file mode 100644
index 00000000..6bdd3e7f
--- /dev/null
+++ b/infrastructure/terraform/components/dl/lambda_event_source_mapping_mesh_acknowledge_lambda.tf
@@ -0,0 +1,10 @@
+resource "aws_lambda_event_source_mapping" "sqs_mesh_acknowledge_lambda" {
+ event_source_arn = module.sqs_mesh_acknowledge.sqs_queue_arn
+ function_name = module.mesh_acknowledge.function_name
+ batch_size = var.queue_batch_size
+ maximum_batching_window_in_seconds = var.queue_batch_window_seconds
+
+ function_response_types = [
+ "ReportBatchItemFailures"
+ ]
+}
diff --git a/infrastructure/terraform/components/dl/locals.tf b/infrastructure/terraform/components/dl/locals.tf
index 9298b97b..6953c5c1 100644
--- a/infrastructure/terraform/components/dl/locals.tf
+++ b/infrastructure/terraform/components/dl/locals.tf
@@ -5,7 +5,9 @@ locals {
apim_api_key_ssm_parameter_name = "/${var.component}/${var.environment}/apim/api_key"
apim_private_key_ssm_parameter_name = "/${var.component}/${var.environment}/apim/private_key"
apim_keystore_s3_bucket = "nhs-${var.aws_account_id}-${var.region}-${var.environment}-${var.component}-static-assets"
- ssm_mesh_prefix = "/${var.component}/${var.environment}/mesh"
+ ssm_prefix = "/${var.component}/${var.environment}"
+ ssm_mesh_prefix = "${local.ssm_prefix}/mesh"
+ ssm_senders_prefix = "${local.ssm_prefix}/senders"
mock_mesh_endpoint = "s3://${module.s3bucket_non_pii_data.bucket}/mock-mesh"
root_domain_name = "${var.environment}.${local.acct.route53_zone_names["digital-letters"]}"
root_domain_id = local.acct.route53_zone_ids["digital-letters"]
diff --git a/infrastructure/terraform/components/dl/module_lambda_mesh_acknowledge.tf b/infrastructure/terraform/components/dl/module_lambda_mesh_acknowledge.tf
new file mode 100644
index 00000000..80632f39
--- /dev/null
+++ b/infrastructure/terraform/components/dl/module_lambda_mesh_acknowledge.tf
@@ -0,0 +1,150 @@
+module "mesh_acknowledge" {
+ source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip"
+
+ function_name = "mesh-acknowledge"
+ description = "A lambda function for acknowledging MESH messages"
+ aws_account_id = var.aws_account_id
+ component = local.component
+ environment = var.environment
+ project = var.project
+ region = var.region
+ group = var.group
+
+ log_retention_in_days = var.log_retention_in_days
+ kms_key_arn = module.kms.key_arn
+
+ iam_policy_document = {
+ body = data.aws_iam_policy_document.mesh_acknowledge_lambda.json
+ }
+
+ function_s3_bucket = local.acct.s3_buckets["lambda_function_artefacts"]["id"]
+ function_code_base_path = local.aws_lambda_functions_dir_path
+ function_code_dir = "mesh-acknowledge/target/dist"
+ function_include_common = true
+ function_module_name = "mesh_acknowledge"
+ handler_function_name = "handler.handler"
+ runtime = "python3.14" # NOTE(review): confirm python3.14 is a GA Lambda runtime and matches mesh-poll/mesh-download
+ memory = 128
+ timeout = 5
+ log_level = var.log_level
+
+ force_lambda_code_deploy = var.force_lambda_code_deploy
+ enable_lambda_insights = false
+
+ log_destination_arn = local.log_destination_arn
+ log_subscription_role_arn = local.acct.log_subscription_role_arn
+
+ lambda_env_vars = {
+ DLQ_URL = module.sqs_mesh_acknowledge.sqs_dlq_url
+ ENVIRONMENT = var.environment
+ EVENT_PUBLISHER_DLQ_URL = module.sqs_event_publisher_errors.sqs_queue_url
+ EVENT_PUBLISHER_EVENT_BUS_ARN = aws_cloudwatch_event_bus.main.arn
+ MOCK_MESH_BUCKET = module.s3bucket_non_pii_data.bucket
+ SSM_MESH_PREFIX = local.ssm_mesh_prefix
+ SSM_SENDERS_PREFIX = local.ssm_senders_prefix
+ USE_MESH_MOCK = var.enable_mock_mesh ? "true" : "false"
+ }
+
+}
+
+data "aws_iam_policy_document" "mesh_acknowledge_lambda" {
+ statement {
+ sid = "KMSPermissions"
+ effect = "Allow"
+
+ actions = [
+ "kms:Decrypt",
+ "kms:GenerateDataKey",
+ ]
+
+ resources = [
+ module.kms.key_arn,
+ ]
+ }
+
+ statement {
+ sid = "SQSPermissions"
+ effect = "Allow"
+
+ actions = [
+ "sqs:ReceiveMessage",
+ "sqs:DeleteMessage",
+ "sqs:GetQueueAttributes",
+ ]
+
+ resources = [
+ module.sqs_mesh_acknowledge.sqs_queue_arn,
+ ]
+ }
+
+ statement {
+ sid = "SQSDLQPermissions"
+ effect = "Allow"
+
+ actions = [
+ "sqs:SendMessage",
+ ]
+
+ resources = [
+ module.sqs_mesh_acknowledge.sqs_dlq_arn,
+ ]
+ }
+
+ statement {
+ sid = "EventBridgePermissions"
+ effect = "Allow"
+
+ actions = [
+ "events:PutEvents",
+ ]
+
+ resources = [
+ aws_cloudwatch_event_bus.main.arn,
+ ]
+ }
+
+ statement {
+ sid = "DLQPermissions"
+ effect = "Allow"
+
+ actions = [
+ "sqs:SendMessage",
+ "sqs:SendMessageBatch",
+ ]
+
+ resources = [
+ module.sqs_event_publisher_errors.sqs_queue_arn,
+ ]
+ }
+
+ statement {
+ sid = "SSMPermissions"
+ effect = "Allow"
+
+ actions = [
+ "ssm:GetParameter",
+ "ssm:GetParametersByPath",
+ ]
+
+ resources = [
+ "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_mesh_prefix}/*", "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_senders_prefix}/*" # least privilege: ${local.ssm_prefix}/* would also match the APIM key parameters
+ ]
+ }
+
+ # Grant S3 PutObject permissions for the mock-mesh directory only when the mock is enabled
+ dynamic "statement" {
+ for_each = var.enable_mock_mesh ? [1] : []
+ content {
+ sid = "MockMeshPutObject"
+ effect = "Allow"
+
+ actions = [
+ "s3:PutObject",
+ ]
+
+ resources = [
+ "${module.s3bucket_non_pii_data.arn}/mock-mesh/*"
+ ]
+ }
+ }
+}
diff --git a/infrastructure/terraform/components/dl/module_lambda_mesh_download.tf b/infrastructure/terraform/components/dl/module_lambda_mesh_download.tf
index 4fd952bf..e660ae0c 100644
--- a/infrastructure/terraform/components/dl/module_lambda_mesh_download.tf
+++ b/infrastructure/terraform/components/dl/module_lambda_mesh_download.tf
@@ -37,15 +37,14 @@ module "mesh_download" {
log_subscription_role_arn = local.acct.log_subscription_role_arn
lambda_env_vars = {
- CERTIFICATE_EXPIRY_METRIC_NAME = "mesh-download-client-certificate-near-expiry"
- CERTIFICATE_EXPIRY_METRIC_NAMESPACE = "dl-mesh-download"
DOWNLOAD_METRIC_NAME = "mesh-download-successful-downloads"
DOWNLOAD_METRIC_NAMESPACE = "dl-mesh-download"
ENVIRONMENT = var.environment
EVENT_PUBLISHER_DLQ_URL = module.sqs_event_publisher_errors.sqs_queue_url
EVENT_PUBLISHER_EVENT_BUS_ARN = aws_cloudwatch_event_bus.main.arn
PII_BUCKET = module.s3bucket_pii_data.bucket
- SSM_PREFIX = "${local.ssm_mesh_prefix}"
+ SSM_MESH_PREFIX = local.ssm_mesh_prefix
+ SSM_SENDERS_PREFIX = local.ssm_senders_prefix
USE_MESH_MOCK = var.enable_mock_mesh ? "true" : "false"
}
@@ -172,7 +171,8 @@ data "aws_iam_policy_document" "mesh_download_lambda" {
]
resources = [
- "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_mesh_prefix}/*"
+ "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_mesh_prefix}/*",
+ "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_senders_prefix}/*"
]
}
}
diff --git a/infrastructure/terraform/components/dl/module_lambda_mesh_poll.tf b/infrastructure/terraform/components/dl/module_lambda_mesh_poll.tf
index 4fd2076f..9d526e3f 100644
--- a/infrastructure/terraform/components/dl/module_lambda_mesh_poll.tf
+++ b/infrastructure/terraform/components/dl/module_lambda_mesh_poll.tf
@@ -45,7 +45,8 @@ module "mesh_poll" {
MAXIMUM_RUNTIME_MILLISECONDS = "240000" # 4 minutes (Lambda has 5 min timeout)
POLLING_METRIC_NAME = "mesh-poll-successful-polls"
POLLING_METRIC_NAMESPACE = "dl-mesh-poll"
- SSM_PREFIX = "${local.ssm_mesh_prefix}"
+ SSM_MESH_PREFIX = local.ssm_mesh_prefix
+ SSM_SENDERS_PREFIX = local.ssm_senders_prefix
USE_MESH_MOCK = var.enable_mock_mesh ? "true" : "false"
}
@@ -144,7 +145,8 @@ data "aws_iam_policy_document" "mesh_poll_lambda" {
]
resources = [
- "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_mesh_prefix}/*"
+ "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_mesh_prefix}/*",
+ "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_senders_prefix}/*"
]
}
}
diff --git a/infrastructure/terraform/components/dl/module_lambda_pdm_uploader.tf b/infrastructure/terraform/components/dl/module_lambda_pdm_uploader.tf
index eeeec268..6bf6bb39 100644
--- a/infrastructure/terraform/components/dl/module_lambda_pdm_uploader.tf
+++ b/infrastructure/terraform/components/dl/module_lambda_pdm_uploader.tf
@@ -67,7 +67,8 @@ data "aws_iam_policy_document" "pdm_uploader_lambda" {
]
resources = [
- "${module.s3bucket_letters.arn}/*"
+ "${module.s3bucket_letters.arn}/*",
+ "${module.s3bucket_pii_data.arn}/*"
]
}
diff --git a/infrastructure/terraform/components/dl/module_sqs_mesh_acknowledge.tf b/infrastructure/terraform/components/dl/module_sqs_mesh_acknowledge.tf
new file mode 100644
index 00000000..67716352
--- /dev/null
+++ b/infrastructure/terraform/components/dl/module_sqs_mesh_acknowledge.tf
@@ -0,0 +1,38 @@
+module "sqs_mesh_acknowledge" {
+ source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-sqs.zip"
+
+ aws_account_id = var.aws_account_id
+ component = local.component
+ environment = var.environment
+ project = var.project
+ region = var.region
+ name = "mesh-acknowledge"
+
+ sqs_kms_key_arn = module.kms.key_arn
+
+ visibility_timeout_seconds = 60
+
+ create_dlq = true
+
+ sqs_policy_overload = data.aws_iam_policy_document.sqs_mesh_acknowledge.json
+}
+
+data "aws_iam_policy_document" "sqs_mesh_acknowledge" {
+ statement {
+ sid = "AllowEventBridgeToSendMessage" # NOTE(review): consider an aws:SourceArn condition scoped to the mesh_inbox_message_downloaded rule (confused-deputy hardening)
+ effect = "Allow"
+
+ principals {
+ type = "Service"
+ identifiers = ["events.amazonaws.com"]
+ }
+
+ actions = [
+ "sqs:SendMessage"
+ ]
+
+ resources = [
+ "arn:aws:sqs:${var.region}:${var.aws_account_id}:${local.csi}-mesh-acknowledge-queue"
+ ]
+ }
+}
diff --git a/infrastructure/terraform/components/dl/module_sqs_mesh_download.tf b/infrastructure/terraform/components/dl/module_sqs_mesh_download.tf
index f33e7881..509218ea 100644
--- a/infrastructure/terraform/components/dl/module_sqs_mesh_download.tf
+++ b/infrastructure/terraform/components/dl/module_sqs_mesh_download.tf
@@ -32,7 +32,7 @@ data "aws_iam_policy_document" "sqs_mesh_download" {
]
resources = [
- "arn:aws:sqs:${var.region}:${var.aws_account_id}:${var.project}-${var.environment}-${local.component}-mesh-download-queue"
+ "arn:aws:sqs:${var.region}:${var.aws_account_id}:${local.csi}-mesh-download-queue"
]
}
}
diff --git a/infrastructure/terraform/components/dl/pre.sh b/infrastructure/terraform/components/dl/pre.sh
index 4ef25bb3..fd613c06 100755
--- a/infrastructure/terraform/components/dl/pre.sh
+++ b/infrastructure/terraform/components/dl/pre.sh
@@ -23,5 +23,6 @@ npm run lambda-build --workspaces --if-present
# Build Python lambdas
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../.." && pwd)"
+make -C "$ROOT/lambdas/mesh-acknowledge" package
make -C "$ROOT/lambdas/mesh-poll" package
make -C "$ROOT/lambdas/mesh-download" package
diff --git a/infrastructure/terraform/components/dl/ssm_parameter_mesh_config.tf b/infrastructure/terraform/components/dl/ssm_parameter_mesh.tf
similarity index 97%
rename from infrastructure/terraform/components/dl/ssm_parameter_mesh_config.tf
rename to infrastructure/terraform/components/dl/ssm_parameter_mesh.tf
index 35045c18..5dc974b4 100644
--- a/infrastructure/terraform/components/dl/ssm_parameter_mesh_config.tf
+++ b/infrastructure/terraform/components/dl/ssm_parameter_mesh.tf
@@ -8,7 +8,7 @@ resource "aws_ssm_parameter" "mesh_config" {
value = var.enable_mock_mesh ? jsonencode({
mesh_endpoint = local.mock_mesh_endpoint
mesh_mailbox = "mock-mailbox"
- mesh_mailbox_password = "mock-password"
+ mesh_mailbox_password = "UNSET"
mesh_shared_key = "mock-shared-key"
}) : jsonencode({
mesh_endpoint = "UNSET"
diff --git a/lambdas/mesh-acknowledge/.gitignore b/lambdas/mesh-acknowledge/.gitignore
new file mode 100644
index 00000000..9f7550b1
--- /dev/null
+++ b/lambdas/mesh-acknowledge/.gitignore
@@ -0,0 +1,2 @@
+__pycache__
+.venv
diff --git a/lambdas/mesh-acknowledge/Makefile b/lambdas/mesh-acknowledge/Makefile
new file mode 100644
index 00000000..936f7c43
--- /dev/null
+++ b/lambdas/mesh-acknowledge/Makefile
@@ -0,0 +1,34 @@
+PACKAGE=mesh_acknowledge
+VERSION=0.1.0
+
+install:
+ pip install -r requirements.txt
+
+install-dev:
+ pip install -r requirements-dev.txt
+
+test:
+ cd ../.. && PYTHONPATH=lambdas/mesh-acknowledge:$$PYTHONPATH pytest lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/ -v
+
+coverage:
+ cd ../.. && PYTHONPATH=lambdas/mesh-acknowledge:$$PYTHONPATH pytest lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/ \
+ --cov=lambdas/mesh-acknowledge/mesh_acknowledge \
+ --cov-config=lambdas/mesh-acknowledge/pytest.ini \
+ --cov-report=html:lambdas/mesh-acknowledge/htmlcov \
+ --cov-report=term-missing \
+ --cov-report=xml:lambdas/mesh-acknowledge/coverage.xml \
+ --cov-branch
+
+lint:
+ pylint mesh_acknowledge
+
+format:
+ autopep8 -ri .
+
+package:
+ ../../utils/package_python_lambda.sh meshacknowledgelambda
+
+clean:
+ rm -rf target
+
+.PHONY: install install-dev test coverage lint format package clean
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/__init__.py b/lambdas/mesh-acknowledge/mesh_acknowledge/__init__.py
new file mode 100644
index 00000000..e0fc8b08
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/__init__.py
@@ -0,0 +1,9 @@
+"""
+MESH Acknowledge Lambda
+
+This lambda handles acknowledging received MESH files, by sending a message to the MESH inbox of
+their sender.
+"""
+
+__version__ = '0.1.0'
+from .handler import handler  # explicit export; avoids wildcard import (pylint W0401)
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/__init__.py b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/__init__.py
new file mode 100644
index 00000000..3be5b3aa
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/__init__.py
@@ -0,0 +1 @@
+# Test package init
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/fixtures.py b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/fixtures.py
new file mode 100644
index 00000000..c21eb130
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/fixtures.py
@@ -0,0 +1,40 @@
+"""Fixtures for tests"""
+from typing import Dict
+
+
+def create_downloaded_event_dict(event_id: str) -> Dict[str, str | int | Dict[str, str]]:
+ """Create a dictionary representing a MESHInboxMessageDownloaded event"""
+ return {
+ "id": event_id,
+ "specversion": "1.0",
+ "source": (
+ "/nhs/england/notify/production/primary/"
+ 'data-plane/digitalletters/mesh'
+ ),
+ "subject": (
+ 'customer/920fca11-596a-4eca-9c47-99f624614658/'
+ 'recipient/769acdd4-6a47-496f-999f-76a6fd2c3959'
+ ),
+ "type": (
+ 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1'
+ ),
+ "time": '2026-01-08T10:00:00Z',
+ "recordedtime": '2026-01-08T10:00:00Z',
+ "severitynumber": 2,
+ "severitytext": 'INFO',
+ "traceparent": '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ "datacontenttype": 'application/json',
+ "dataschema": (
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/'
+ 'digital-letters-mesh-inbox-message-downloaded-data.schema.json'
+ ),
+ "datacategory": "non-sensitive",
+ "dataclassification": "public",
+ "dataregulation": "GDPR",
+ "data": {
+ "meshMessageId": "MSG123456",
+ "messageUri": f"https://example.com/ttl/resource/{event_id}",
+ "messageReference": "REF123",
+ "senderId": "SENDER001",
+ }
+ }
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_acknowledger.py b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_acknowledger.py
new file mode 100644
index 00000000..71c47396
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_acknowledger.py
@@ -0,0 +1,109 @@
+"""
+Tests for MeshAcknowledger class
+"""
+import json
+from unittest.mock import Mock
+import pytest
+from mesh_acknowledge.acknowledger import (
+ MeshAcknowledger,
+ NOTIFY_ACK_WORKFLOW_ID,
+ ACK_SUBJECT
+)
+
+SENT_MESH_MESSAGE_ID = "MSG123456"
+
+
+@pytest.fixture(name='mock_mesh_client')
+def create_mock_mesh_client():
+ """Create a mock MeshClient for testing"""
+ client = Mock()
+ client.handshake = Mock()
+ client.send_message = Mock(return_value=SENT_MESH_MESSAGE_ID)
+ return client
+
+
+@pytest.fixture(name='mock_logger')
+def create_mock_logger():
+ """Create a mock logger for testing"""
+ logger = Mock()
+ logger.debug = Mock()
+ return logger
+
+
+@pytest.fixture(name='acknowledger')
+def create_acknowledger(mock_mesh_client, mock_logger):
+ """Create a MeshAcknowledger instance with mocked dependencies"""
+ return MeshAcknowledger(mock_mesh_client, mock_logger)
+
+
+class TestMeshAcknowledger:
+ """Test suite for MeshAcknowledger class"""
+
+ def test_init_performs_handshake(self, mock_mesh_client, mock_logger):
+ """Test that __init__ performs a MESH handshake"""
+ MeshAcknowledger(mock_mesh_client, mock_logger)
+
+ mock_mesh_client.handshake.assert_called_once()
+
+ def test_acknowledge_message_sends_correct_message(
+ self, acknowledger, mock_mesh_client
+ ):
+ """Test that acknowledge_message sends the correct message via MESH"""
+ mailbox_id = "MAILBOX001"
+ message_id = "MSG123456"
+ message_reference = "REF789"
+ sender_id = "SENDER001"
+
+ expected_body = json.dumps({
+ "meshMessageId": message_id,
+ "requestId": f"{sender_id}_{message_reference}"
+ }).encode()
+
+ acknowledger.acknowledge_message(
+ mailbox_id, message_id, message_reference, sender_id
+ )
+
+ mock_mesh_client.send_message.assert_called_once_with(
+ mailbox_id,
+ expected_body,
+ workflow_id=NOTIFY_ACK_WORKFLOW_ID,
+ local_id=message_reference,
+ subject=ACK_SUBJECT
+ )
+
+ def test_acknowledge_message_returns_ack_id(
+ self, acknowledger, mock_mesh_client
+ ):
+ """Test that acknowledge_message returns the acknowledgment ID"""
+ mailbox_id = "MAILBOX001"
+ message_id = "MSG123456"
+ message_reference = "REF789"
+ sender_id = "SENDER001"
+
+ expected_ack_id = "ACK_CUSTOM_ID"
+
+ mock_mesh_client.send_message.return_value = expected_ack_id
+
+ ack_message_id = acknowledger.acknowledge_message(
+ mailbox_id, message_id, message_reference, sender_id
+ )
+
+ assert ack_message_id == expected_ack_id
+
+ def test_acknowledge_message_raises_error_if_mesh_send_fails(
+ self, acknowledger, mock_mesh_client
+ ):
+ """Test that the MeshAcknowledger raises an error if MESH send_message fails"""
+ mailbox_id = "MAILBOX001"
+ message_id = "MSG123"
+ message_reference = "REF123"
+ sender_id = "SENDER001"
+ expected_exception_message = "MESH send failed"
+
+ mock_mesh_client.send_message.side_effect = Exception(
+ expected_exception_message)
+
+ with pytest.raises(Exception, match=expected_exception_message):
+ acknowledger.acknowledge_message(
+ mailbox_id, message_id, message_reference, sender_id
+ )
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_dlq.py b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_dlq.py
new file mode 100644
index 00000000..d56ad7ef
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_dlq.py
@@ -0,0 +1,89 @@
+"""
+Tests for Dlq class in mesh_acknowledge.dlq
+"""
+import json
+from unittest.mock import Mock
+
+import pytest
+from botocore.exceptions import ClientError
+
+from mesh_acknowledge.dlq import Dlq
+
+
+@pytest.fixture(name='mock_sqs_client')
+def create_mock_sqs_client():
+ """Create a mock SQS client for testing"""
+ client = Mock()
+ client.send_message = Mock(return_value={'MessageId': 'msg-12345'})
+ return client
+
+
+@pytest.fixture(name='mock_logger')
+def create_mock_logger():
+ """Create a mock logger for testing"""
+ logger = Mock()
+ logger.info = Mock()
+ logger.error = Mock()
+ return logger
+
+@pytest.fixture(name='dlq_url')
+def create_dlq_url():
+ """Create a DLQ URL for testing"""
+ return "https://sqs.us-east-1.amazonaws.com/123456789012/test-dlq"
+
+@pytest.fixture(name='dlq')
+def create_dlq(mock_sqs_client, mock_logger, dlq_url):
+ """Create a Dlq instance for testing"""
+ return Dlq(
+ sqs_client=mock_sqs_client,
+ dlq_url=dlq_url,
+ logger=mock_logger
+ )
+
+
+class TestSendToQueue:
+ """Tests for send_to_queue method"""
+
+ def test_sends_record_to_dlq_successfully(
+ self,
+ dlq,
+ mock_sqs_client,
+ dlq_url
+ ):
+ """Test that a record is sent to DLQ successfully"""
+ record = {
+ "id": "test-event-123",
+ "type": "test.event.v1",
+ "data": {"key": "value"}
+ }
+ reason = "Validation failed"
+
+ dlq.send_to_queue(record, reason)
+
+ mock_sqs_client.send_message.assert_called_once_with(
+ QueueUrl=dlq_url,
+ MessageBody=json.dumps(record),
+ MessageAttributes={
+ 'DlqReason': {
+ 'DataType': 'String',
+ 'StringValue': reason
+ }
+ }
+ )
+
+ def test_handles_sqs_client_error(
+ self,
+ dlq,
+ mock_sqs_client,
+ ):
+ """Test that ClientError from SQS is handled and re-raised"""
+ record = {"id": "test-event-123"}
+ reason = "Processing error"
+ error = ClientError(
+ {'Error': {'Code': 'InvalidParameterValue', 'Message': 'Invalid queue URL'}},
+ 'SendMessage'
+ )
+ mock_sqs_client.send_message.side_effect = error
+
+ with pytest.raises(ClientError):
+ dlq.send_to_queue(record, reason)
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_events.py b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_events.py
new file mode 100644
index 00000000..cbfa2de8
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_events.py
@@ -0,0 +1,230 @@
+"""
+Tests for event parsing and publishing in mesh_acknowledge.events
+"""
+import json
+from datetime import datetime, timezone
+from typing import Dict
+from uuid import uuid4
+
+from unittest.mock import Mock, patch
+import pytest
+from digital_letters_events import MESHInboxMessageAcknowledged, MESHInboxMessageDownloaded
+from mesh_acknowledge.events import (
+ parse_downloaded_event,
+ publish_acknowledged_event
+)
+
+from .fixtures import create_downloaded_event_dict
+
+
+@pytest.fixture(name='mock_logger')
+def create_mock_logger():
+ """Create a mock logger for testing"""
+ logger = Mock()
+ logger.info = Mock()
+ logger.error = Mock()
+ return logger
+
+
+@pytest.fixture(name='mock_event_publisher')
+def create_mock_event_publisher():
+ """Create a mock EventPublisher for testing"""
+ publisher = Mock()
+ publisher.send_events = Mock(return_value=[])
+ return publisher
+
+
+@pytest.fixture(name='event_id')
+def generate_event_id() -> str:
+ """Generate a unique event ID"""
+ return str(uuid4())
+
+
+@pytest.fixture(name='downloaded_event')
+def downloaded_event_fixture(event_id: str) -> MESHInboxMessageDownloaded:
+ """Create a MESHInboxMessageDownloaded event"""
+ return MESHInboxMessageDownloaded(**create_downloaded_event_dict(event_id))
+
+
+@pytest.fixture(name='valid_sqs_record')
+def create_valid_sqs_record(event_id: str) -> Dict[str, str | int]:
+ """Create a valid SQS record with MESHInboxMessageDownloaded event"""
+ return {
+ 'body': json.dumps({
+ 'detail': {
+ **create_downloaded_event_dict(event_id),
+ }
+ })
+ }
+
+
+@pytest.fixture(name='invalid_sqs_record')
+def create_invalid_sqs_record(event_id: str) -> Dict[str, str]:
+ """Create a valid SQS record with an invalid MESHInboxMessageDownloaded event"""
+ return {
+ 'body': json.dumps({
+ 'detail': {
+ 'id': event_id,
+ 'specversion': '1.0',
+ 'source': '/nhs/england/notify/production/primary/data-plane/digitalletters/mesh',
+ 'subject': (
+ 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/'
+ '769acdd4-6a47-496f-999f-76a6fd2c3959'
+ ),
+ 'type': 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
+ 'time': '2026-01-08T10:00:00Z',
+ 'recordedtime': '2026-01-08T10:00:00Z',
+ 'severitynumber': 2,
+ 'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ 'datacontenttype': 'application/json',
+ 'dataschema': (
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/'
+ 'digital-letters-mesh-inbox-message-downloaded-data.schema.json'
+ ),
+ 'data': {
+ 'meshMessageId': 'MSG123456',
+ 'messageUri': f"https://example.com/ttl/resource/{event_id}",
+ 'messageReference': 'REF123',
+ 'senderId': 'SENDER001',
+ 'extraField': 'INVALID' # Invalid extra field
+ }
+ }
+ })
+ }
+
+
+class TestParseDownloadedEvent:
+ """Test suite for parse_downloaded_event function"""
+
+ def test_parse_valid_event(
+ self, valid_sqs_record: Dict[str, str | int],
+ downloaded_event: MESHInboxMessageDownloaded,
+ mock_logger):
+ """Test parsing a valid SQS record"""
+ result = parse_downloaded_event(valid_sqs_record, mock_logger)
+
+ assert result == downloaded_event
+
+ def test_parse_event_with_missing_detail(self, mock_logger):
+ """Test parsing SQS record with missing 'detail' field"""
+ sqs_record = {'body': json.dumps({})}
+
+ with pytest.raises(ValueError):
+ parse_downloaded_event(sqs_record, mock_logger)
+
+ def test_parse_event_validation_error(
+ self, invalid_sqs_record: Dict[str, str | int],
+ mock_logger):
+ """Test handling validation errors from Pydantic model"""
+ with pytest.raises(ValueError, match="Error processing MESHInboxMessageDownloaded event"):
+ parse_downloaded_event(invalid_sqs_record, mock_logger)
+
+ def test_parse_event_json_decode_error(self, mock_logger):
+ """Test handling JSON decode errors"""
+ sqs_record = {'body': 'invalid json'}
+
+ with pytest.raises(ValueError, match="Error parsing SQS record"):
+ parse_downloaded_event(sqs_record, mock_logger)
+
+
+class TestPublishAcknowledgedEvent:
+ """Test suite for publish_acknowledged_event function"""
+
+ @patch('mesh_acknowledge.events.uuid4')
+ @patch('mesh_acknowledge.events.datetime')
+ def test_publish_success(
+ self,
+ mock_datetime,
+ mock_uuid,
+ mock_logger,
+ mock_event_publisher,
+ downloaded_event: MESHInboxMessageDownloaded
+ ):
+ """Test successful event publishing"""
+ new_event_id = str(uuid4())
+ mock_uuid.return_value = new_event_id
+ fixed_time = datetime(2026, 1, 8, 10, 30, 0, tzinfo=timezone.utc)
+ mock_datetime.now.return_value = fixed_time
+
+ mock_event_publisher.send_events.return_value = []
+
+ mesh_mailbox_id = 'MAILBOX001'
+ expected_ack_event = {
+ **downloaded_event.model_dump(exclude_none=True),
+ 'id': new_event_id,
+ 'time': fixed_time.isoformat(),
+ 'recordedtime': fixed_time.isoformat(),
+ 'type': 'uk.nhs.notify.digital.letters.mesh.inbox.message.acknowledged.v1',
+ 'dataschema': (
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/'
+ 'digital-letters-mesh-inbox-message-acknowledged-data.schema.json'
+ ),
+ 'data': {
+ 'messageReference': downloaded_event.data.messageReference,
+ 'senderId': downloaded_event.data.senderId,
+ 'meshMailboxId': mesh_mailbox_id,
+ }
+ }
+
+ publish_acknowledged_event(
+ mock_logger,
+ mock_event_publisher,
+ downloaded_event,
+ mesh_mailbox_id
+ )
+
+ # Verify event was sent
+ mock_event_publisher.send_events.assert_called_once_with(
+ [expected_ack_event], MESHInboxMessageAcknowledged)
+
+ @patch('mesh_acknowledge.events.uuid4')
+ @patch('mesh_acknowledge.events.datetime')
+ def test_publish_failure_raises_error(
+ self,
+ mock_datetime,
+ mock_uuid,
+ mock_logger,
+ mock_event_publisher,
+ downloaded_event
+ ):
+ """Test that publishing failures raise RuntimeError"""
+ mock_uuid.return_value = str(uuid4())
+ fixed_time = datetime(2026, 1, 8, 12, 0, 0, tzinfo=timezone.utc)
+ mock_datetime.now.return_value = fixed_time
+
+ failed_events = [{'error': 'send failed'}]
+ mock_event_publisher.send_events.return_value = failed_events
+
+ with pytest.raises(
+ RuntimeError, match="Failed to publish MESHInboxMessageAcknowledged event"):
+ publish_acknowledged_event(
+ mock_logger,
+ mock_event_publisher,
+ downloaded_event,
+ 'MAILBOX001'
+ )
+
+ @patch('mesh_acknowledge.events.uuid4')
+ @patch('mesh_acknowledge.events.datetime')
+ def test_publish_error_event_raises_error(
+ self,
+ mock_datetime,
+ mock_uuid,
+ mock_logger,
+ mock_event_publisher,
+ downloaded_event
+ ):
+ """Test that if the event publisher raises an error, an error is raised"""
+ mock_uuid.return_value = str(uuid4())
+ fixed_time = datetime(2026, 1, 8, 13, 0, 0, tzinfo=timezone.utc)
+ mock_datetime.now.return_value = fixed_time
+
+ mock_event_publisher.send_events.side_effect = Exception("Publisher error")
+
+ with pytest.raises(Exception, match="Publisher error"):
+ publish_acknowledged_event(
+ mock_logger,
+ mock_event_publisher,
+ downloaded_event,
+ 'MAILBOX001'
+ )
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_handler.py b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_handler.py
new file mode 100644
index 00000000..31d1f72c
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_handler.py
@@ -0,0 +1,295 @@
+"""
+Tests for Lambda handler in mesh_acknowledge.handler
+"""
+from unittest.mock import Mock, MagicMock, patch, call
+
+import pytest
+from dl_utils import log
+from mesh_acknowledge.handler import handler
+
+
+def setup_mocks(config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls):
+ """Setup common mocks for handler tests"""
+ config = Mock()
+ config.event_publisher_event_bus_arn = (
+ "arn:aws:eventbridge:eu-west-2:123456789012:event-bus/test"
+ )
+ config.event_publisher_dlq_url = "https://sqs.eu-west-2.amazonaws.com/123456789012/event-dlq"
+ config.dlq_url = "https://sqs.eu-west-2.amazonaws.com/123456789012/dlq"
+ config.mesh_client = Mock()
+
+ config_cm = MagicMock()
+ config_cm.__enter__.return_value = config
+ config_cls.return_value = config_cm
+
+ event_publisher = Mock()
+ event_publisher_cls.return_value = event_publisher
+
+ acknowledger = Mock()
+ acknowledger_cls.return_value = acknowledger
+
+ processor = Mock()
+ message_processor_cls.return_value = processor
+
+ sender_lookup = Mock()
+ sender_lookup_cls.return_value = sender_lookup
+
+ dlq = Mock()
+ dlq_cls.return_value = dlq
+
+ boto_client = Mock()
+ boto3_client_cls.return_value = boto_client
+
+ return (config_cm, config, event_publisher, acknowledger,
+ processor, sender_lookup, dlq, boto_client)
+
+
+class TestHandler:
+ """Test suite for Lambda handler"""
+
+ @patch("mesh_acknowledge.handler.client")
+ @patch("mesh_acknowledge.handler.Dlq")
+ @patch("mesh_acknowledge.handler.SenderLookup")
+ @patch("mesh_acknowledge.handler.MessageProcessor")
+ @patch("mesh_acknowledge.handler.MeshAcknowledger")
+ @patch("mesh_acknowledge.handler.EventPublisher")
+ @patch("mesh_acknowledge.handler.Config")
+ def test_handler_returns_batch_failures(
+ self,
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls,
+ ):
+ """Test that handler returns batch item failures from MessageProcessor."""
+ (
+ _config_cm,
+ _config,
+ _event_publisher,
+ _acknowledger,
+ processor,
+ _sender_lookup,
+ _dlq,
+ _boto_client
+ ) = setup_mocks(
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls
+ )
+
+ batch_failures = [{"itemIdentifier": "abc"}]
+ processor.process_message.return_value = batch_failures
+
+ message = {"Records": []}
+
+ result = handler(message, None)
+
+ assert result == {"batchItemFailures": batch_failures}
+
+ processor.process_message.assert_called_with(message)
+
+ @patch("mesh_acknowledge.handler.client")
+ @patch("mesh_acknowledge.handler.Dlq")
+ @patch("mesh_acknowledge.handler.SenderLookup")
+ @patch("mesh_acknowledge.handler.MessageProcessor")
+ @patch("mesh_acknowledge.handler.MeshAcknowledger")
+ @patch("mesh_acknowledge.handler.EventPublisher")
+ @patch("mesh_acknowledge.handler.Config")
+ def test_handler_passes_correct_parameters(
+ self,
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls,
+ ):
+ """Test that handler passes correct parameters to dependencies."""
+ (
+ _config_cm,
+ config,
+ event_publisher,
+ acknowledger,
+ _processor,
+ sender_lookup,
+ dlq,
+ boto_client
+ ) = setup_mocks(
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls
+ )
+
+ handler({"Records": []}, None)
+
+ event_publisher_cls.assert_called_once_with(
+ event_bus_arn=config.event_publisher_event_bus_arn,
+ dlq_url=config.event_publisher_dlq_url,
+ logger=log,
+ )
+ acknowledger_cls.assert_called_once_with(
+ logger=log,
+ mesh_client=config.mesh_client,
+ )
+ sender_lookup_cls.assert_called_once_with(
+ ssm=boto_client,
+ config=config,
+ logger=log,
+ )
+ dlq_cls.assert_called_once_with(
+ sqs_client=boto_client,
+ dlq_url=config.dlq_url,
+ logger=log,
+ )
+ message_processor_cls.assert_called_once_with(
+ acknowledger=acknowledger,
+ event_publisher=event_publisher,
+ sender_lookup=sender_lookup,
+ dlq=dlq,
+ logger=log,
+ )
+ assert boto3_client_cls.call_count == 2
+ boto3_client_cls.assert_has_calls([
+ call("ssm"),
+ call("sqs"),
+ ])
+
+ @patch("mesh_acknowledge.handler.client")
+ @patch("mesh_acknowledge.handler.Dlq")
+ @patch("mesh_acknowledge.handler.SenderLookup")
+ @patch("mesh_acknowledge.handler.MessageProcessor")
+ @patch("mesh_acknowledge.handler.MeshAcknowledger")
+ @patch("mesh_acknowledge.handler.EventPublisher")
+ @patch("mesh_acknowledge.handler.Config")
+ def test_handler_reraises_on_processing_error(
+ self,
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls,
+ ):
+ """Test that handler re-raises exceptions from MessageProcessor."""
+ (
+ _config_cm,
+ _config,
+ _event_publisher,
+ _acknowledger,
+ processor,
+ _sender_lookup,
+ _dlq,
+ _boto_client
+ ) = setup_mocks(
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls
+ )
+
+ processor.process_message.side_effect = RuntimeError("boom")
+
+ with pytest.raises(RuntimeError, match="boom"):
+ handler({"Records": []}, None)
+
+ @patch("mesh_acknowledge.handler.Config", side_effect=Exception("bad config"))
+ def test_handler_reraises_on_config_error(self, _config_cls):
+ """Test that handler re-raises exceptions from Config."""
+ with pytest.raises(Exception, match="bad config"):
+ handler({"Records": []}, None)
+
+ @patch("mesh_acknowledge.handler.client")
+ @patch("mesh_acknowledge.handler.Dlq")
+ @patch("mesh_acknowledge.handler.SenderLookup")
+ @patch("mesh_acknowledge.handler.MessageProcessor")
+ @patch("mesh_acknowledge.handler.MeshAcknowledger")
+ @patch("mesh_acknowledge.handler.EventPublisher")
+ @patch("mesh_acknowledge.handler.Config")
+ def test_handler_config_cleanup_on_success(
+ self,
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls):
+ """Test that Config context manager is cleaned up on success."""
+ setup_mocks(
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls
+ )
+
+ handler({"Records": []}, None)
+
+ config_cls.return_value.__exit__.assert_called_once()
+
+ @patch("mesh_acknowledge.handler.client")
+ @patch("mesh_acknowledge.handler.Dlq")
+ @patch("mesh_acknowledge.handler.SenderLookup")
+ @patch("mesh_acknowledge.handler.MessageProcessor")
+ @patch("mesh_acknowledge.handler.MeshAcknowledger")
+ @patch("mesh_acknowledge.handler.EventPublisher")
+ @patch("mesh_acknowledge.handler.Config")
+ def test_handler_config_cleanup_on_error(
+ self,
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls):
+ """Test that Config context manager is cleaned up on error."""
+ (
+ _config_cm,
+ _config,
+ _event_publisher,
+ _acknowledger,
+ processor,
+ _sender_lookup,
+ _dlq,
+ _boto_client
+ ) = setup_mocks(
+ config_cls,
+ event_publisher_cls,
+ acknowledger_cls,
+ message_processor_cls,
+ sender_lookup_cls,
+ dlq_cls,
+ boto3_client_cls
+ )
+
+ processor.process_message.side_effect = RuntimeError("boom")
+
+ with pytest.raises(RuntimeError):
+ handler({"Records": []}, None)
+
+ config_cls.return_value.__exit__.assert_called_once()
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_message_processor.py b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_message_processor.py
new file mode 100644
index 00000000..5bea73fd
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/__tests__/test_message_processor.py
@@ -0,0 +1,460 @@
+"""
+Tests for MessageProcessor class in mesh_acknowledge.message_processor
+"""
+from unittest.mock import Mock, patch
+from uuid import uuid4
+
+import pytest
+from digital_letters_events import MESHInboxMessageDownloaded
+from mesh_acknowledge.message_processor import MessageProcessor
+
+from .fixtures import create_downloaded_event_dict
+
+
+@pytest.fixture(name='mock_logger')
+def create_mock_logger():
+ """Create a mock logger for testing"""
+ logger = Mock()
+ logger.info = Mock()
+ logger.warn = Mock()
+ logger.error = Mock()
+ return logger
+
+
+@pytest.fixture(name='mock_acknowledger')
+def create_mock_acknowledger():
+ """Create a mock MeshAcknowledger for testing"""
+ acknowledger = Mock()
+ acknowledger.acknowledge_message = Mock(return_value="ACK123")
+ return acknowledger
+
+
+@pytest.fixture(name='mock_event_publisher')
+def create_mock_event_publisher():
+ """Create a mock EventPublisher for testing"""
+ publisher = Mock()
+ publisher.send_events = Mock(return_value=[])
+ return publisher
+
+
+@pytest.fixture(name='mock_sender_lookup')
+def create_mock_sender_lookup():
+ """Create a mock SenderLookup for testing"""
+ lookup = Mock()
+ lookup.get_mailbox_id = Mock(return_value="MAILBOX001")
+ return lookup
+
+@pytest.fixture(name='mock_dlq')
+def create_mock_dlq():
+ """Create a mock Dlq for testing"""
+ dlq = Mock()
+ dlq.send_to_queue = Mock()
+ return dlq
+
+@pytest.fixture(name='message_processor')
+def create_message_processor(
+ mock_acknowledger, mock_event_publisher,
+ mock_sender_lookup, mock_logger, mock_dlq):
+ """Create a MessageProcessor instance with mocked dependencies"""
+ return MessageProcessor(
+ mock_acknowledger,
+ mock_event_publisher,
+ mock_sender_lookup,
+ mock_dlq,
+ mock_logger
+ )
+
+
+@pytest.fixture(name='downloaded_event')
+def downloaded_event_fixture():
+ """Create a MESHInboxMessageDownloaded event"""
+ event_id = str(uuid4())
+ return MESHInboxMessageDownloaded(**create_downloaded_event_dict(event_id))
+
+
+@pytest.fixture(name='valid_sqs_message')
+def create_valid_sqs_message():
+ """Create a valid SQS message with one record"""
+ return {
+ 'Records': [
+ {
+ 'messageId': 'sqs-msg-123',
+ 'eventSource': 'aws:sqs',
+                'body': '{"detail": {}}',
+ }
+ ]
+ }
+
+
+class TestMessageProcessorProcessMessage:
+ """Test suite for MessageProcessor.process_message"""
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_success(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_acknowledger,
+ mock_sender_lookup,
+ mock_event_publisher,
+ mock_logger,
+ valid_sqs_message,
+ downloaded_event: MESHInboxMessageDownloaded
+ ):
+ """Test successful processing of a single SQS message"""
+ mesh_mailbox_id = "MAILBOX001"
+
+ mock_parse.return_value = downloaded_event
+ mock_sender_lookup.get_mailbox_id.return_value = mesh_mailbox_id
+ mock_acknowledger.acknowledge_message.return_value = "ACK123"
+
+ result = message_processor.process_message(valid_sqs_message)
+
+ assert result == []
+
+ mock_parse.assert_called_once_with(
+ valid_sqs_message['Records'][0],
+ mock_logger
+ )
+ mock_sender_lookup.get_mailbox_id.assert_called_once_with(
+ downloaded_event.data.senderId)
+ mock_acknowledger.acknowledge_message.assert_called_once_with(
+ mailbox_id=mesh_mailbox_id,
+ message_reference=downloaded_event.data.messageReference,
+ sender_id=downloaded_event.data.senderId,
+ message_id=downloaded_event.data.meshMessageId
+ )
+ mock_publish.assert_called_once_with(
+ logger=mock_logger,
+ event_publisher=mock_event_publisher,
+ incoming_event=downloaded_event,
+ mesh_mailbox_id=mesh_mailbox_id
+ )
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_multiple_records(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_sender_lookup,
+ mock_acknowledger,
+ downloaded_event
+ ):
+ """Test processing multiple SQS records"""
+ mock_parse.return_value = downloaded_event
+ mock_sender_lookup.get_mailbox_id.return_value = "MAILBOX001"
+ mock_acknowledger.acknowledge_message.return_value = "ACK123"
+
+ message = {
+ 'Records': [
+ {'messageId': 'msg-1', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'},
+ {'messageId': 'msg-2', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'},
+ {'messageId': 'msg-3', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'}
+ ]
+ }
+
+ result = message_processor.process_message(message)
+
+ assert result == []
+ assert mock_acknowledger.acknowledge_message.call_count == 3
+ assert mock_publish.call_count == 3
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_partial_failures(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_sender_lookup,
+ mock_acknowledger,
+ downloaded_event
+ ):
+ """Test processing with some successes and some failures"""
+ # First call succeeds, second fails, third succeeds
+ mock_parse.side_effect = [
+ downloaded_event,
+ ValueError("Parse failed"),
+ downloaded_event
+ ]
+ mock_sender_lookup.get_mailbox_id.return_value = "MAILBOX001"
+ mock_acknowledger.acknowledge_message.return_value = "ACK123"
+
+ message = {
+ 'Records': [
+ {'messageId': 'msg-1', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'},
+ {'messageId': 'msg-2', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'},
+ {'messageId': 'msg-3', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'}
+ ]
+ }
+
+ result = message_processor.process_message(message)
+
+ assert len(result) == 1
+ assert result[0] == {"itemIdentifier": "msg-2"}
+ assert mock_acknowledger.acknowledge_message.call_count == 2
+ assert mock_publish.call_count == 2
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_all_failures(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_acknowledger,
+ ):
+ """Test processing where all messages fail"""
+ mock_parse.side_effect = Exception("All fail")
+
+ message = {
+ 'Records': [
+ {
+ 'messageId': 'msg-1', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'
+ },
+ {
+ 'messageId': 'msg-2', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'
+ }
+ ]
+ }
+
+ result = message_processor.process_message(message)
+
+ assert len(result) == 2
+ assert result[0] == {"itemIdentifier": "msg-1"}
+ assert result[1] == {"itemIdentifier": "msg-2"}
+ mock_acknowledger.acknowledge_message.assert_not_called()
+ mock_publish.assert_not_called()
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ def test_process_message_empty_records(
+ self,
+ mock_publish,
+ mock_acknowledger,
+ message_processor,
+ ):
+ """Test processing message with no records"""
+ message = {'Records': []}
+
+ result = message_processor.process_message(message)
+
+ assert result == []
+ mock_acknowledger.acknowledge_message.assert_not_called()
+ mock_publish.assert_not_called()
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ def test_process_message_missing_records_key(
+ self,
+ mock_publish,
+ message_processor,
+ mock_acknowledger,
+ ):
+ """Test processing message without Records key"""
+ message = {}
+
+ result = message_processor.process_message(message)
+
+ assert result == []
+ mock_acknowledger.acknowledge_message.assert_not_called()
+ mock_publish.assert_not_called()
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_parse_error_returns_failure(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_acknowledger,
+ valid_sqs_message
+ ):
+ """Test that parse errors are caught and returned as batch failures"""
+ mock_parse.side_effect = ValueError("Parse failed")
+
+ result = message_processor.process_message(valid_sqs_message)
+
+ assert len(result) == 1
+ assert result[0] == {"itemIdentifier": "sqs-msg-123"}
+
+ mock_acknowledger.acknowledge_message.assert_not_called()
+ mock_publish.assert_not_called()
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_empty_sender_lookup_response_returns_failure(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_sender_lookup,
+ mock_acknowledger,
+ valid_sqs_message,
+ downloaded_event
+ ):
+ """Test that an empty sender lookup response causes a batch failure to be returned"""
+ mock_parse.return_value = downloaded_event
+ mock_sender_lookup.get_mailbox_id.return_value = None
+
+ result = message_processor.process_message(valid_sqs_message)
+
+ assert len(result) == 1
+ assert result[0] == {"itemIdentifier": "sqs-msg-123"}
+
+ mock_acknowledger.acknowledge_message.assert_not_called()
+ mock_publish.assert_not_called()
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_sender_lookup_error_returns_failure(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_sender_lookup,
+ mock_acknowledger,
+ valid_sqs_message,
+ downloaded_event
+ ):
+ """Test that sender lookup errors are caught and returned as batch failures"""
+ mock_parse.return_value = downloaded_event
+ mock_sender_lookup.get_mailbox_id.side_effect = Exception(
+ "Sender lookup error")
+
+ result = message_processor.process_message(valid_sqs_message)
+
+ assert len(result) == 1
+ assert result[0] == {"itemIdentifier": "sqs-msg-123"}
+
+ mock_acknowledger.acknowledge_message.assert_not_called()
+ mock_publish.assert_not_called()
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_acknowledge_error_returns_failure(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_sender_lookup,
+ mock_acknowledger,
+ valid_sqs_message,
+ downloaded_event
+ ):
+ """Test that acknowledge errors are caught and returned as batch failures"""
+ mock_parse.return_value = downloaded_event
+ mock_sender_lookup.get_mailbox_id.return_value = "MAILBOX001"
+ mock_acknowledger.acknowledge_message.side_effect = RuntimeError(
+ "ACK failed")
+
+ result = message_processor.process_message(valid_sqs_message)
+
+ assert len(result) == 1
+ assert result[0] == {"itemIdentifier": "sqs-msg-123"}
+
+ mock_publish.assert_not_called()
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_publish_error_sends_to_dlq(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_sender_lookup,
+ mock_acknowledger,
+ mock_dlq,
+ valid_sqs_message,
+ downloaded_event
+ ):
+ """
+ Test that publish errors are caught and the record is sent directly to the DLQ
+ """
+ mock_parse.return_value = downloaded_event
+ mock_sender_lookup.get_mailbox_id.return_value = "MAILBOX001"
+ mock_acknowledger.acknowledge_message.return_value = "ACK123"
+ mock_publish.side_effect = Exception("Publish failed")
+
+ result = message_processor.process_message(valid_sqs_message)
+
+ assert result == []
+ mock_dlq.send_to_queue.assert_called_once_with(
+ record=valid_sqs_message['Records'][0],
+ reason="Failed to publish acknowledged event"
+ )
+
+ @patch('mesh_acknowledge.message_processor.publish_acknowledged_event')
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_dlq_error_returns_failure(
+ self,
+ mock_parse,
+ mock_publish,
+ message_processor,
+ mock_sender_lookup,
+ mock_acknowledger,
+ mock_dlq,
+ valid_sqs_message,
+ downloaded_event
+ ):
+ """
+ Test that if publishing to the DLQ fails, the record is returned as a batch failure.
+ """
+ mock_parse.return_value = downloaded_event
+ mock_sender_lookup.get_mailbox_id.return_value = "MAILBOX001"
+ mock_acknowledger.acknowledge_message.return_value = "ACK123"
+ mock_publish.side_effect = Exception("Publish failed")
+ mock_dlq.send_to_queue.side_effect = Exception("DLQ send failed")
+
+ result = message_processor.process_message(valid_sqs_message)
+
+ assert len(result) == 1
+ assert result[0] == {"itemIdentifier": "sqs-msg-123"}
+
+ @patch('mesh_acknowledge.message_processor.parse_downloaded_event')
+ def test_process_message_logs_summary(
+ self,
+ mock_parse,
+ message_processor,
+ mock_sender_lookup,
+ mock_acknowledger,
+ mock_logger,
+ downloaded_event
+ ):
+ """Test that processing summary is logged correctly"""
+ mock_parse.side_effect = [
+ downloaded_event,
+ ValueError("Failed"),
+ downloaded_event
+ ]
+ mock_sender_lookup.get_mailbox_id.return_value = "MAILBOX001"
+ mock_acknowledger.acknowledge_message.return_value = "ACK123"
+
+ message = {
+ 'Records': [
+ {'messageId': 'msg-1', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'},
+ {'messageId': 'msg-2', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'},
+ {'messageId': 'msg-3', 'eventSource': 'aws:sqs',
+ 'body': '{"detail": {}}'}
+ ]
+ }
+
+ message_processor.process_message(message)
+
+ # Check final summary log
+ final_log_call = mock_logger.info.call_args_list[-1]
+ assert final_log_call[1]['retrieved'] == 3
+ assert final_log_call[1]['acknowledged'] == 2
+ assert final_log_call[1]['failed'] == 1
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/acknowledger.py b/lambdas/mesh-acknowledge/mesh_acknowledge/acknowledger.py
new file mode 100644
index 00000000..be12bbb3
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/acknowledger.py
@@ -0,0 +1,75 @@
+"""Module for acknowledging MESH messages."""
+
+import json
+from mesh_client import MeshClient
+
+NOTIFY_ACK_WORKFLOW_ID = "NHS_NOTIFY_SEND_REQUEST_ACK"
+ACK_SUBJECT = "202"
+
+
+class MeshAcknowledger:
+ """
+ Class responsible for acknowledging MESH messages.
+ """
+
+ def __init__(self, mesh_client: MeshClient, logger):
+ self.__log = logger
+ self.__mesh_client = mesh_client
+
+ self.__mesh_client.handshake()
+
+ def acknowledge_message(self,
+ mailbox_id: str,
+ message_id: str,
+ message_reference: str,
+ sender_id: str
+ ) -> str:
+ """
+ Acknowledge a MESH message given its ID.
+
+ Args:
+ mailbox_id (str): The ID of the mailbox to send the acknowledgment to.
+ message_id (str): The ID of the message to acknowledge.
+ message_reference (str): The reference of the message to acknowledge.
+ sender_id (str): ID of the original message's sender to acknowledge.
+
+ Returns:
+ str: The ID of the acknowledgment message sent.
+
+ Raises:
+ Exception: If the acknowledgment fails.
+ """
+
+ message_body = json.dumps({
+ "meshMessageId": message_id,
+ "requestId": f"{sender_id}_{message_reference}"
+ }).encode()
+
+ try:
+ ack_message_id = self.__mesh_client.send_message(
+ mailbox_id,
+ message_body,
+ workflow_id=NOTIFY_ACK_WORKFLOW_ID,
+ local_id=message_reference,
+ subject=ACK_SUBJECT
+ )
+ self.__log.info(
+ "Acknowledged MESH message",
+ mesh_mailbox_id=mailbox_id,
+ mesh_message_id=message_id,
+ mesh_message_reference=message_reference,
+ ack_message_id=ack_message_id
+ )
+
+ return ack_message_id
+
+ except Exception as e:
+ self.__log.error(
+ "Failed to acknowledge MESH message",
+ mesh_mailbox_id=mailbox_id,
+ mesh_message_id=message_id,
+ mesh_message_reference=message_reference,
+ error=str(e)
+ )
+
+ raise
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/config.py b/lambdas/mesh-acknowledge/mesh_acknowledge/config.py
new file mode 100644
index 00000000..c6fcef08
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/config.py
@@ -0,0 +1,23 @@
+"""
+Module for configuring MESH Acknowledger application
+"""
+from dl_utils import BaseMeshConfig
+
+_REQUIRED_ENV_VAR_MAP = {
+ "ssm_mesh_prefix": "SSM_MESH_PREFIX",
+ "ssm_senders_prefix": "SSM_SENDERS_PREFIX",
+ "environment": "ENVIRONMENT",
+ "event_publisher_event_bus_arn": "EVENT_PUBLISHER_EVENT_BUS_ARN",
+ "event_publisher_dlq_url": "EVENT_PUBLISHER_DLQ_URL",
+ "dlq_url": "DLQ_URL",
+}
+
+
+class Config(BaseMeshConfig):
+ """
+ Represents the configuration of the MESH Acknowledger application.
+
+ Inherits common MESH configuration from BaseMeshConfig.
+ """
+
+ _REQUIRED_ENV_VAR_MAP = _REQUIRED_ENV_VAR_MAP
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/dlq.py b/lambdas/mesh-acknowledge/mesh_acknowledge/dlq.py
new file mode 100644
index 00000000..7380ecfb
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/dlq.py
@@ -0,0 +1,46 @@
+"""Dead Letter Queue (DLQ) handler for sending failed records to SQS DLQ."""
+from typing import Any
+import json
+
+from botocore.exceptions import ClientError
+
+class Dlq:
+    """Dead Letter Queue (DLQ) handler for sending failed records to SQS DLQ.
+    """
+ def __init__(
+ self,
+ sqs_client: Any,
+ dlq_url: str,
+ logger,
+ ):
+ self.sqs_client = sqs_client
+ self.dlq_url = dlq_url
+ self.logger = logger
+
+ def send_to_queue(self, record: Any, reason: str) -> None:
+ """
+ Send a record to the DLQ.
+ """
+ try:
+ response = self.sqs_client.send_message(
+ QueueUrl=self.dlq_url,
+ MessageBody=json.dumps(record),
+ MessageAttributes={
+ 'DlqReason': {
+ 'DataType': 'String',
+ 'StringValue': reason
+ }
+ }
+ )
+ self.logger.info(
+ "Sent message to DLQ",
+ message_id=response.get('MessageId'),
+ dlq_url=self.dlq_url,
+ )
+ except ClientError as error:
+ self.logger.error(
+ "Failed to send record to DLQ",
+ error=str(error),
+ dlq_url=self.dlq_url,
+ )
+ raise
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/events.py b/lambdas/mesh-acknowledge/mesh_acknowledge/events.py
new file mode 100644
index 00000000..8bac47f4
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/events.py
@@ -0,0 +1,81 @@
+"""Event parsing and publishing for MESH acknowledge lambda."""
+
+import json
+from datetime import datetime, timezone
+from uuid import uuid4
+
+from digital_letters_events import MESHInboxMessageAcknowledged, MESHInboxMessageDownloaded
+from dl_utils import EventPublisher
+
+
+def parse_downloaded_event(sqs_record, logger) -> MESHInboxMessageDownloaded:
+ """
+ Parses and validates a MESHInboxMessageDownloaded event from an SQS record.
+ """
+ try:
+ message_body = json.loads(sqs_record['body'])
+ event_detail = message_body.get('detail', {})
+
+ try:
+ return MESHInboxMessageDownloaded(**event_detail)
+
+ except Exception as e:
+ logger.error(
+ "MESHInboxMessageDownloaded validation failed",
+ validation_errors=str(e),
+ event_detail=event_detail
+ )
+ raise ValueError(
+ "Error processing MESHInboxMessageDownloaded event") from e
+ except json.JSONDecodeError as e:
+ logger.error(
+ "Error parsing SQS record body as JSON",
+ body=sqs_record.get('body', ''),
+ error=str(e)
+ )
+ raise ValueError("Error parsing SQS record") from e
+
+
+def publish_acknowledged_event(
+ logger, event_publisher: EventPublisher, incoming_event: MESHInboxMessageDownloaded,
+ mesh_mailbox_id: str):
+ """
+ Publishes a MESHInboxMessageAcknowledged event.
+ """
+ now = datetime.now(timezone.utc).isoformat()
+
+ try:
+ acknowledged_event = {
+ **incoming_event.model_dump(exclude_none=True),
+ 'id': str(uuid4()),
+ 'time': now,
+ 'recordedtime': now,
+ 'type': 'uk.nhs.notify.digital.letters.mesh.inbox.message.acknowledged.v1',
+ 'dataschema': (
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/'
+ 'digital-letters-mesh-inbox-message-acknowledged-data.schema.json'
+ ),
+ 'data': {
+ 'messageReference': incoming_event.data.messageReference,
+ 'senderId': incoming_event.data.senderId,
+ 'meshMailboxId': mesh_mailbox_id,
+ }
+ }
+
+ failed = event_publisher.send_events([acknowledged_event], MESHInboxMessageAcknowledged)
+
+ if failed:
+ msg = f"Failed to publish MESHInboxMessageAcknowledged event: {failed}"
+ logger.error(msg, failed_count=len(failed))
+ raise RuntimeError(msg)
+
+ logger.info(
+ "Published MESHInboxMessageAcknowledged event",
+ sender_id=incoming_event.data.senderId,
+ mesh_mailbox_id=mesh_mailbox_id,
+ message_reference=incoming_event.data.messageReference
+ )
+ except Exception as e:
+ logger.error(
+ "Failed to publish MESHInboxMessageAcknowledged event", error=str(e))
+ raise
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/handler.py b/lambdas/mesh-acknowledge/mesh_acknowledge/handler.py
new file mode 100644
index 00000000..c3a367a5
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/handler.py
@@ -0,0 +1,54 @@
+"""lambda handler for Mesh Acknowledge application"""
+
+from typing import Dict, Any
+
+from boto3 import client
+from dl_utils import log, EventPublisher, SenderLookup
+from .acknowledger import MeshAcknowledger
+from .config import Config
+from .dlq import Dlq
+from .message_processor import MessageProcessor
+
+
+def handler(message: Dict[str, Any], _context: Any):
+ """
+ Lambda handler for Mesh Acknowledge application.
+
+ Processes events from SQS queue.
+ Returns batch item failures for partial batch failure handling.
+ """
+
+ try:
+ with Config() as config:
+ event_publisher = EventPublisher(
+ event_bus_arn=config.event_publisher_event_bus_arn,
+ dlq_url=config.event_publisher_dlq_url,
+ logger=log
+ )
+ acknowledger = MeshAcknowledger(
+ logger=log, mesh_client=config.mesh_client)
+ sender_lookup = SenderLookup(
+ ssm=client('ssm'),
+ config=config,
+ logger=log
+ )
+ dlq = Dlq(
+ sqs_client=client('sqs'),
+ dlq_url=config.dlq_url,
+ logger=log
+ )
+ message_processor = MessageProcessor(
+ acknowledger=acknowledger,
+ event_publisher=event_publisher,
+ sender_lookup=sender_lookup,
+ dlq=dlq,
+ logger=log
+ )
+
+ batch_item_failures = message_processor.process_message(message)
+
+ return {"batchItemFailures": batch_item_failures}
+
+ except Exception as exc:
+ log.error("Error in MESH Acknowledge handler", error=str(exc))
+ raise
diff --git a/lambdas/mesh-acknowledge/mesh_acknowledge/message_processor.py b/lambdas/mesh-acknowledge/mesh_acknowledge/message_processor.py
new file mode 100644
index 00000000..06639fe4
--- /dev/null
+++ b/lambdas/mesh-acknowledge/mesh_acknowledge/message_processor.py
@@ -0,0 +1,110 @@
+"""
+Processes SQS messages containing MESHInboxMessageDownloaded events
+and sends Mesh acknowledgements for each.
+"""
+from typing import Dict, Any, List
+from dl_utils import EventPublisher, SenderLookup
+from .acknowledger import MeshAcknowledger
+from .dlq import Dlq
+from .events import parse_downloaded_event, publish_acknowledged_event
+
+
+class MessageProcessor:
+ """Processes SQS messages and sends MESH acknowledgments."""
+
+ def __init__(
+ self, acknowledger: MeshAcknowledger,
+ event_publisher: EventPublisher,
+ sender_lookup: SenderLookup,
+ dlq: Dlq,
+ logger):
+ self.__acknowledger = acknowledger
+ self.__event_publisher = event_publisher
+ self.__sender_lookup = sender_lookup
+ self.__dlq = dlq
+ self.__log = logger
+
+ def process_message(self, message: Dict[str, Any]) -> List[Dict[str, str]]:
+ """
+ Processes a single SQS message.
+
+ Args:
+ message (Dict[str, Any]): The SQS message.
+ Returns:
+ List of batch item failures for failed messages.
+ """
+
+ self.__log.info("Received SQS message",
+ record_count=len(message.get('Records', [])))
+
+ batch_item_failures = []
+ processed = {
+ 'retrieved': 0,
+ 'acknowledged': 0,
+ 'failed': 0
+ }
+
+ for record in message.get('Records', []):
+ processed['retrieved'] += 1
+ message_id = record.get('messageId')
+
+ try:
+ validated_event = parse_downloaded_event(record, self.__log)
+
+ sender_id = validated_event.data.senderId
+ incoming_message_id = validated_event.data.meshMessageId
+
+ mesh_mailbox_id = self.__sender_lookup.get_mailbox_id(
+ sender_id)
+ self.__log.info("Looked up sender",
+ sender_id=sender_id, mesh_mailbox_id=mesh_mailbox_id)
+
+ if mesh_mailbox_id is None:
+ raise ValueError(
+ f"Unknown sender ID '{sender_id}' for message"
+ )
+
+ acknowledgement_message_id = self.__acknowledger.acknowledge_message(
+ mailbox_id=mesh_mailbox_id,
+ message_reference=validated_event.data.messageReference,
+ sender_id=sender_id,
+ message_id=incoming_message_id
+ )
+
+ try:
+ publish_acknowledged_event(
+ logger=self.__log,
+ event_publisher=self.__event_publisher,
+ incoming_event=validated_event,
+ mesh_mailbox_id=mesh_mailbox_id
+ )
+ except Exception:
+ # If publishing the acknowledged event fails, we've already sent
+ # the MESH acknowledgement, so we put the incoming record directly on
+ # to the DLQ rather than returning a batch item failure which would
+ # cause a retry.
+ self.__dlq.send_to_queue(
+ record=record,
+ reason="Failed to publish acknowledged event"
+ )
+
+ self.__log.info("Acknowledged message ID",
+ message_id=message_id,
+ incoming_message_id=incoming_message_id,
+ acknowledgement_message_id=acknowledgement_message_id)
+ processed['acknowledged'] += 1
+
+ except Exception as e:
+ processed['failed'] += 1
+ self.__log.error(
+ "Failed to process SQS message",
+ message_id=message_id,
+ error=str(e))
+ batch_item_failures.append({"itemIdentifier": message_id})
+
+ self.__log.info("Processed SQS message",
+ retrieved=processed['retrieved'],
+ acknowledged=processed['acknowledged'],
+ failed=processed['failed'])
+
+ return batch_item_failures
diff --git a/utils/metric-publishers/pytest.ini b/lambdas/mesh-acknowledge/pytest.ini
similarity index 61%
rename from utils/metric-publishers/pytest.ini
rename to lambdas/mesh-acknowledge/pytest.ini
index 20b86c5d..e19306a7 100644
--- a/utils/metric-publishers/pytest.ini
+++ b/lambdas/mesh-acknowledge/pytest.ini
@@ -1,19 +1,16 @@
[pytest]
-testpaths = tests
+testpaths = mesh_acknowledge/__tests__
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts = -v --tb=short
-env =
- AWS_ACCESS_KEY_ID=testing
- AWS_SECRET_ACCESS_KEY=testing
- AWS_DEFAULT_REGION=eu-west-2
[coverage:run]
relative_files = True
omit =
- */tests/*
+ */mesh_acknowledge/__tests__/*
*/test_*.py
*/__pycache__/*
*/venv/*
+ */.venv/*
*/env/*
diff --git a/lambdas/mesh-acknowledge/requirements-dev.txt b/lambdas/mesh-acknowledge/requirements-dev.txt
new file mode 100644
index 00000000..423ec693
--- /dev/null
+++ b/lambdas/mesh-acknowledge/requirements-dev.txt
@@ -0,0 +1,6 @@
+-r requirements.txt
+autopep8>=2.3.2
+pylint>=4.0.4
+pytest>=9.0.2
+pytest-cov>=7.0.0
+jake>=3.0.14
diff --git a/lambdas/mesh-acknowledge/requirements.txt b/lambdas/mesh-acknowledge/requirements.txt
new file mode 100644
index 00000000..68ecfdcb
--- /dev/null
+++ b/lambdas/mesh-acknowledge/requirements.txt
@@ -0,0 +1,8 @@
+mesh-client>=3.2.3
+boto3>=1.28.62
+pyopenssl>=24.2.1
+pydantic>=2.0.0
+structlog>=21.5.0
+-e ../../src/digital-letters-events
+-e ../../utils/py-mock-mesh
+-e ../../utils/py-utils
diff --git a/utils/metric-publishers/setup.py b/lambdas/mesh-acknowledge/setup.py
similarity index 77%
rename from utils/metric-publishers/setup.py
rename to lambdas/mesh-acknowledge/setup.py
index 391b6ad1..0bd365ca 100644
--- a/utils/metric-publishers/setup.py
+++ b/lambdas/mesh-acknowledge/setup.py
@@ -1,7 +1,7 @@
from setuptools import setup, find_packages
setup(
- name="metric-publishers",
+ name="mesh-acknowledge",
version="0.1.0",
packages=find_packages(),
)
diff --git a/lambdas/mesh-download/mesh_download/__tests__/test_processor.py b/lambdas/mesh-download/mesh_download/__tests__/test_processor.py
index 1901ee18..ecfc997b 100644
--- a/lambdas/mesh-download/mesh_download/__tests__/test_processor.py
+++ b/lambdas/mesh-download/mesh_download/__tests__/test_processor.py
@@ -8,6 +8,7 @@
from unittest.mock import Mock, patch
from datetime import datetime, timezone
from pydantic import ValidationError
+from mesh_download.errors import MeshMessageNotFound
def setup_mocks():
@@ -162,13 +163,19 @@ def test_process_sqs_message_success(self, mock_datetime):
assert published_event['subject'] == 'customer/00000000-0000-0000-0000-000000000000/recipient/00000000-0000-0000-0000-000000000000'
assert published_event['time'] == '2025-11-19T15:30:45+00:00'
assert 'id' in published_event
+ assert 'tracestate' not in published_event
+ assert 'partitionkey' not in published_event
+ assert 'sequence' not in published_event
+ assert 'dataclassification' not in published_event
+ assert 'dataregulation' not in published_event
+ assert 'datacategory' not in published_event
# Verify CloudEvent data payload
event_data = published_event['data']
assert event_data['senderId'] == 'TEST_SENDER'
assert event_data['messageReference'] == 'ref_001'
assert event_data['messageUri'] == 's3://test-pii-bucket/document-reference/SENDER_001_ref_001'
- assert set(event_data.keys()) == {'senderId', 'messageReference', 'messageUri'}
+ assert set(event_data.keys()) == {'senderId', 'messageReference', 'messageUri', 'meshMessageId'}
def test_process_sqs_message_validation_failure(self):
"""Malformed CloudEvents should be rejected by pydantic and not trigger downloads"""
@@ -238,9 +245,11 @@ def test_download_and_store_message_not_found(self):
config.mesh_client.retrieve_message.return_value = None
sqs_record = create_sqs_record()
- processor.process_sqs_message(sqs_record)
- config.mesh_client.retrieve_message.assert_called_once_with('test_message_123')
+ with pytest.raises(MeshMessageNotFound, match="MESH message with ID test_message_123 not found"):
+ processor.process_sqs_message(sqs_record)
+
+ config.mesh_client.retrieve_message.assert_called_once_with('test_message_123')
document_store.store_document.assert_not_called()
event_publisher.send_events.assert_not_called()
config.download_metric.record.assert_not_called()
diff --git a/lambdas/mesh-download/mesh_download/config.py b/lambdas/mesh-download/mesh_download/config.py
index ab4a400b..48dd1a10 100644
--- a/lambdas/mesh-download/mesh_download/config.py
+++ b/lambdas/mesh-download/mesh_download/config.py
@@ -1,15 +1,13 @@
"""
Module for configuring MESH Download application
"""
-from event_publisher import BaseMeshConfig, log
-from metric_publishers.metric_client import Metric
+from dl_utils import BaseMeshConfig, Metric, log
_REQUIRED_ENV_VAR_MAP = {
- "ssm_prefix": "SSM_PREFIX",
+ "ssm_senders_prefix": "SSM_SENDERS_PREFIX",
+ "ssm_mesh_prefix": "SSM_MESH_PREFIX",
"environment": "ENVIRONMENT",
- "certificate_expiry_metric_name": "CERTIFICATE_EXPIRY_METRIC_NAME",
- "certificate_expiry_metric_namespace": "CERTIFICATE_EXPIRY_METRIC_NAMESPACE",
"download_metric_name": "DOWNLOAD_METRIC_NAME",
"download_metric_namespace": "DOWNLOAD_METRIC_NAMESPACE",
"event_publisher_event_bus_arn": "EVENT_PUBLISHER_EVENT_BUS_ARN",
diff --git a/lambdas/mesh-download/mesh_download/errors.py b/lambdas/mesh-download/mesh_download/errors.py
index d8b1932c..9a9d2358 100644
--- a/lambdas/mesh-download/mesh_download/errors.py
+++ b/lambdas/mesh-download/mesh_download/errors.py
@@ -11,3 +11,8 @@ def format_exception(exception):
"""
return ''.join(traceback.format_exception(
type(exception), exception, exception.__traceback__))
+
+class MeshMessageNotFound(Exception):
+ """
+ Indicates an invalid MESH message could not be retrieved
+ """
diff --git a/lambdas/mesh-download/mesh_download/handler.py b/lambdas/mesh-download/mesh_download/handler.py
index de46bf59..56bbb736 100644
--- a/lambdas/mesh-download/mesh_download/handler.py
+++ b/lambdas/mesh-download/mesh_download/handler.py
@@ -1,7 +1,7 @@
"""lambda handler for mesh download"""
import json
-from event_publisher import EventPublisher
+from dl_utils import EventPublisher
from .config import Config, log
from .processor import MeshDownloadProcessor
diff --git a/lambdas/mesh-download/mesh_download/processor.py b/lambdas/mesh-download/mesh_download/processor.py
index 7ae51ea0..e51edced 100644
--- a/lambdas/mesh-download/mesh_download/processor.py
+++ b/lambdas/mesh-download/mesh_download/processor.py
@@ -3,7 +3,8 @@
from uuid import uuid4
from pydantic import ValidationError
-from event_publisher.models import MeshInboxMessageEvent, MeshDownloadMessageEvent
+from digital_letters_events import MESHInboxMessageDownloaded, MESHInboxMessageReceived
+from mesh_download.errors import MeshMessageNotFound
class MeshDownloadProcessor:
@@ -40,7 +41,7 @@ def _parse_and_validate_event(self, sqs_record):
event_detail = message_body.get('detail', {})
try:
- event = MeshInboxMessageEvent(**event_detail)
+ event = MESHInboxMessageReceived(**event_detail)
self.__log.debug("CloudEvent validation passed")
return event
except ValidationError as e:
@@ -57,7 +58,7 @@ def _handle_download(self, event, logger):
message = self.__mesh_client.retrieve_message(data.meshMessageId)
if not message:
logger.error("Message not found in MESH inbox")
- return
+ raise MeshMessageNotFound(f"MESH message with ID {data.meshMessageId} not found")
logger.info(
"Retrieved MESH message",
@@ -109,7 +110,7 @@ def _publish_downloaded_event(self, incoming_event, message_uri):
now = datetime.now(timezone.utc).isoformat()
cloud_event = {
- **incoming_event.model_dump(),
+ **incoming_event.model_dump(exclude_none=True),
'id': str(uuid4()),
'time': now,
'recordedtime': now,
@@ -122,16 +123,11 @@ def _publish_downloaded_event(self, incoming_event, message_uri):
'senderId': incoming_event.data.senderId,
'messageReference': incoming_event.data.messageReference,
'messageUri': message_uri,
+ 'meshMessageId': incoming_event.data.meshMessageId
}
}
- try:
- MeshDownloadMessageEvent(**cloud_event)
- except ValidationError as e:
- self.__log.error("Invalid MeshDownloadMessageEvent", error=str(e))
- raise
-
- failed = self.__event_publisher.send_events([cloud_event])
+ failed = self.__event_publisher.send_events([cloud_event], MESHInboxMessageDownloaded)
if failed:
msg = f"Failed to publish MESHInboxMessageDownloaded event: {failed}"
self.__log.error(msg, failed_count=len(failed))
diff --git a/lambdas/mesh-download/requirements-dev.txt b/lambdas/mesh-download/requirements-dev.txt
index 3d0bd76e..9399d2b8 100644
--- a/lambdas/mesh-download/requirements-dev.txt
+++ b/lambdas/mesh-download/requirements-dev.txt
@@ -1,5 +1,5 @@
-r requirements.txt
-autopep8>=2.0.2
-pylint>=2.17.4
-pytest>=7.0.1
-pytest-cov>=4.0.0
+autopep8>=2.3.2
+pylint>=4.0.4
+pytest>=9.0.2
+pytest-cov>=7.0.0
diff --git a/lambdas/mesh-download/requirements.txt b/lambdas/mesh-download/requirements.txt
index e817b2f5..b9af3fe0 100644
--- a/lambdas/mesh-download/requirements.txt
+++ b/lambdas/mesh-download/requirements.txt
@@ -8,6 +8,6 @@ urllib3>=1.26.19,<2.0.0
idna>=3.7
requests>=2.32.0
pyopenssl>=24.2.1
--e ../../utils/event-publisher-py
+-e ../../src/digital-letters-events
-e ../../utils/py-mock-mesh
--e ../../utils/metric-publishers
+-e ../../utils/py-utils
diff --git a/lambdas/mesh-poll/mesh_poll/__init__.py b/lambdas/mesh-poll/mesh_poll/__init__.py
index 21ae4dbf..0e76b0c2 100644
--- a/lambdas/mesh-poll/mesh_poll/__init__.py
+++ b/lambdas/mesh-poll/mesh_poll/__init__.py
@@ -8,5 +8,4 @@
from .config import *
from .handler import *
from .processor import *
-from .sender_lookup import *
from .errors import *
diff --git a/lambdas/mesh-poll/mesh_poll/config.py b/lambdas/mesh-poll/mesh_poll/config.py
index 4db5508a..28d5d8af 100644
--- a/lambdas/mesh-poll/mesh_poll/config.py
+++ b/lambdas/mesh-poll/mesh_poll/config.py
@@ -1,12 +1,12 @@
"""
Module for configuring Mesh Poll application
"""
-from event_publisher import BaseMeshConfig, log
-from metric_publishers.metric_client import Metric
+from dl_utils import BaseMeshConfig, Metric, log
_REQUIRED_ENV_VAR_MAP = {
- "ssm_prefix": "SSM_PREFIX",
+ "ssm_senders_prefix": "SSM_SENDERS_PREFIX",
+ "ssm_mesh_prefix": "SSM_MESH_PREFIX",
"maximum_runtime_milliseconds": "MAXIMUM_RUNTIME_MILLISECONDS",
"environment": "ENVIRONMENT",
"event_bus_arn": "EVENT_PUBLISHER_EVENT_BUS_ARN",
diff --git a/lambdas/mesh-poll/mesh_poll/handler.py b/lambdas/mesh-poll/mesh_poll/handler.py
index df449e86..61d6738e 100644
--- a/lambdas/mesh-poll/mesh_poll/handler.py
+++ b/lambdas/mesh-poll/mesh_poll/handler.py
@@ -1,7 +1,7 @@
"""lambda handler for mesh poll application"""
from boto3 import client
-from .sender_lookup import SenderLookup
+from dl_utils import SenderLookup
from .config import Config, log
from .processor import MeshMessageProcessor
diff --git a/lambdas/mesh-poll/mesh_poll/processor.py b/lambdas/mesh-poll/mesh_poll/processor.py
index 8d314d86..8a88c11f 100644
--- a/lambdas/mesh-poll/mesh_poll/processor.py
+++ b/lambdas/mesh-poll/mesh_poll/processor.py
@@ -5,7 +5,8 @@
from datetime import datetime, timezone
from uuid import uuid4
-from event_publisher import EventPublisher
+from dl_utils import EventPublisher
+from digital_letters_events import MESHInboxMessageReceived
from .errors import AuthorizationError, format_exception
@@ -135,7 +136,7 @@ def _publish_mesh_inbox_message_received_event(self, event_detail):
'id': str(uuid4()),
'specversion': '1.0',
'source': self.__cloud_event_source,
- 'subject': 'customer/00000000-0000-0000-0000-000000000000/recipient/00000000-0000-0000-0000-000000000000',
+ 'subject': f'customer/{event_detail["data"]["senderId"]}/recipient/{event_detail["data"]["messageReference"]}',
'type': 'uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1',
'time': now,
'recordedtime': now,
@@ -143,10 +144,11 @@ def _publish_mesh_inbox_message_received_event(self, event_detail):
'severitytext': 'INFO',
'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
'dataschema': 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-received-data.schema.json',
+ 'datacontenttype': 'application/json',
'data': event_detail.get('data', {}),
}
- failed_events = self.__event_publisher.send_events([cloud_event])
+ failed_events = self.__event_publisher.send_events([cloud_event], MESHInboxMessageReceived)
if failed_events:
error_msg = f"Failed to publish MESHInboxMessageReceived event: {failed_events}"
diff --git a/lambdas/mesh-poll/requirements-dev.txt b/lambdas/mesh-poll/requirements-dev.txt
index 1f257452..423ec693 100644
--- a/lambdas/mesh-poll/requirements-dev.txt
+++ b/lambdas/mesh-poll/requirements-dev.txt
@@ -1,6 +1,6 @@
-r requirements.txt
-autopep8>=2.0.2
-pylint>=2.17.4
-pytest>=7.0.1
-pytest-cov>=4.0.0
-jake>=3.0.1
+autopep8>=2.3.2
+pylint>=4.0.4
+pytest>=9.0.2
+pytest-cov>=7.0.0
+jake>=3.0.14
diff --git a/lambdas/mesh-poll/requirements.txt b/lambdas/mesh-poll/requirements.txt
index 8550d255..5e7f1345 100644
--- a/lambdas/mesh-poll/requirements.txt
+++ b/lambdas/mesh-poll/requirements.txt
@@ -8,6 +8,6 @@ idna>=3.7
requests>=2.32.0
pyopenssl>=24.2.1
pydantic>=2.0.0
--e ../../utils/metric-publishers
--e ../../utils/event-publisher-py
+-e ../../src/digital-letters-events
-e ../../utils/py-mock-mesh
+-e ../../utils/py-utils
diff --git a/lambdas/pdm-uploader-lambda/src/__tests__/apis/sqs-trigger-lambda.test.ts b/lambdas/pdm-uploader-lambda/src/__tests__/apis/sqs-trigger-lambda.test.ts
index d9cf76c6..0d8df784 100644
--- a/lambdas/pdm-uploader-lambda/src/__tests__/apis/sqs-trigger-lambda.test.ts
+++ b/lambdas/pdm-uploader-lambda/src/__tests__/apis/sqs-trigger-lambda.test.ts
@@ -3,6 +3,7 @@ import type { SQSEvent } from 'aws-lambda';
import type { EventPublisher, Logger } from 'utils';
import { createHandler } from 'apis/sqs-trigger-lambda';
import type { UploadToPdm } from 'app/upload-to-pdm';
+import { mockEvent } from '__tests__/data';
jest.mock('node:crypto');
@@ -13,29 +14,7 @@ const createValidSQSEvent = (overrides?: Partial): SQSEvent => ({
{
messageId: 'msg-1',
body: JSON.stringify({
- detail: {
- id: 'a449d419-e683-4ab4-9291-a0451b5cef8e',
- specversion: '1.0',
- source:
- '/nhs/england/notify/production/primary/data-plane/digitalletters/mesh',
- subject:
- 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
- type: 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
- time: '2025-01-01T00:00:00Z',
- recordedtime: '2025-01-01T00:00:00Z',
- severitynumber: 2,
- traceparent:
- '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
- datacontenttype: 'application/json',
- dataschema:
- 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
- severitytext: 'INFO',
- data: {
- messageReference: 'test-message-reference',
- senderId: 'test-sender-id',
- messageUri: 's3://bucket/key',
- },
- },
+ detail: mockEvent,
}),
receiptHandle: 'receipt-1',
attributes: {} as any,
diff --git a/lambdas/pdm-uploader-lambda/src/__tests__/app/upload-to-pdm.test.ts b/lambdas/pdm-uploader-lambda/src/__tests__/app/upload-to-pdm.test.ts
index 3adae4dd..a861570a 100644
--- a/lambdas/pdm-uploader-lambda/src/__tests__/app/upload-to-pdm.test.ts
+++ b/lambdas/pdm-uploader-lambda/src/__tests__/app/upload-to-pdm.test.ts
@@ -1,5 +1,5 @@
+import { mockEvent } from '__tests__/data';
import { UploadToPdm } from 'app/upload-to-pdm';
-import { MESHInboxMessageDownloaded } from 'digital-letters-events';
import { IPdmClient, Logger, getS3ObjectFromUri } from 'utils';
jest.mock('utils', () => ({
@@ -12,28 +12,6 @@ describe('UploadToPdm', () => {
let mockLogger: jest.Mocked;
let uploadToPdm: UploadToPdm;
- const mockEvent: MESHInboxMessageDownloaded = {
- id: 'test-event-id',
- specversion: '1.0',
- source: '/nhs/england/notify/production/primary/data-plane/digital-letters',
- subject:
- 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
- type: 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
- time: '2023-06-20T12:00:00Z',
- recordedtime: '2023-06-20T12:00:00.250Z',
- severitynumber: 2,
- traceparent: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
- datacontenttype: 'application/json',
- dataschema:
- 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
- severitytext: 'INFO',
- data: {
- messageReference: 'test-message-reference',
- senderId: 'test-sender-id',
- messageUri: 's3://bucket/key',
- },
- };
-
const mockFhirRequest = { resourceType: 'Bundle' };
const mockPdmResponse = {
id: 'test-resource-id',
@@ -87,7 +65,7 @@ describe('UploadToPdm', () => {
expect(getS3ObjectFromUri).toHaveBeenCalledWith('s3://bucket/key');
expect(mockPdmClient.createDocumentReference).toHaveBeenCalledWith(
mockFhirRequest,
- 'test-message-reference',
+ mockEvent.data.messageReference,
);
expect(result).toEqual({
outcome: 'sent',
@@ -96,8 +74,8 @@ describe('UploadToPdm', () => {
expect(mockLogger.info).toHaveBeenCalledWith(
expect.objectContaining({
description: 'Successfully sent request to PDM',
- eventId: 'test-event-id',
- messageReference: 'test-message-reference',
+ eventId: mockEvent.id,
+ messageReference: mockEvent.data.messageReference,
resourceId: 'test-resource-id',
}),
);
diff --git a/lambdas/pdm-uploader-lambda/src/__tests__/data.ts b/lambdas/pdm-uploader-lambda/src/__tests__/data.ts
new file mode 100644
index 00000000..bc8bef9d
--- /dev/null
+++ b/lambdas/pdm-uploader-lambda/src/__tests__/data.ts
@@ -0,0 +1,25 @@
+import { MESHInboxMessageDownloaded } from 'digital-letters-events';
+
+export const mockEvent: MESHInboxMessageDownloaded = {
+ id: 'a449d419-e683-4ab4-9291-a0451b5cef8e',
+ specversion: '1.0',
+ source:
+ '/nhs/england/notify/production/primary/data-plane/digitalletters/mesh',
+ subject:
+ 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
+ type: 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
+ time: '2023-06-20T12:00:00Z',
+ recordedtime: '2023-06-20T12:00:00.250Z',
+ severitynumber: 2,
+ traceparent: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ datacontenttype: 'application/json',
+ dataschema:
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
+ severitytext: 'INFO',
+ data: {
+ meshMessageId: '12345',
+ messageReference: 'test-message-reference',
+ senderId: 'test-sender-id',
+ messageUri: 's3://bucket/key',
+ },
+};
diff --git a/lambdas/ttl-create-lambda/src/__tests__/apis/sqs-trigger-lambda.test.ts b/lambdas/ttl-create-lambda/src/__tests__/apis/sqs-trigger-lambda.test.ts
index cc87b11b..e2320758 100644
--- a/lambdas/ttl-create-lambda/src/__tests__/apis/sqs-trigger-lambda.test.ts
+++ b/lambdas/ttl-create-lambda/src/__tests__/apis/sqs-trigger-lambda.test.ts
@@ -1,9 +1,7 @@
+import { messageDownloadedEvent } from '__tests__/data';
import { createHandler } from 'apis/sqs-trigger-lambda';
import type { SQSEvent } from 'aws-lambda';
-import {
- ItemEnqueued,
- MESHInboxMessageDownloaded,
-} from 'digital-letters-events';
+import { ItemEnqueued } from 'digital-letters-events';
import itemEnqueuedValidator from 'digital-letters-events/ItemEnqueued.js';
import { randomUUID } from 'node:crypto';
@@ -22,29 +20,6 @@ describe('createHandler', () => {
let logger: any;
let handler: any;
- const messageDownloadedEvent: MESHInboxMessageDownloaded = {
- id: '550e8400-e29b-41d4-a716-446655440001',
- specversion: '1.0',
- source:
- '/nhs/england/notify/production/primary/data-plane/digitalletters/mesh',
- subject:
- 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
- type: 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
- time: '2023-06-20T12:00:00Z',
- recordedtime: '2023-06-20T12:00:00.250Z',
- severitynumber: 2,
- traceparent: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
- datacontenttype: 'application/json',
- dataschema:
- 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
- severitytext: 'INFO',
- data: {
- messageUri: 'https://example.com/ttl/resource',
- messageReference: 'ref1',
- senderId: 'sender1',
- },
- };
-
const eventBusEvent = {
detail: messageDownloadedEvent,
};
@@ -59,6 +34,11 @@ describe('createHandler', () => {
recordedtime: '2023-06-20T12:00:00.250Z',
dataschema:
'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-queue-item-enqueued-data.schema.json',
+ data: {
+ messageReference: messageDownloadedEvent.data.messageReference,
+ senderId: messageDownloadedEvent.data.senderId,
+ messageUri: messageDownloadedEvent.data.messageUri,
+ },
};
beforeEach(() => {
diff --git a/lambdas/ttl-create-lambda/src/__tests__/app/create-ttl.test.ts b/lambdas/ttl-create-lambda/src/__tests__/app/create-ttl.test.ts
index 3998ddad..167aa7cf 100644
--- a/lambdas/ttl-create-lambda/src/__tests__/app/create-ttl.test.ts
+++ b/lambdas/ttl-create-lambda/src/__tests__/app/create-ttl.test.ts
@@ -1,32 +1,11 @@
+import { messageDownloadedEvent } from '__tests__/data';
import { CreateTtl } from 'app/create-ttl';
import { TtlRepository } from 'infra/ttl-repository';
-import { MESHInboxMessageDownloaded } from 'digital-letters-events';
describe('CreateTtl', () => {
let repo: jest.Mocked;
let logger: any;
let createTtl: CreateTtl;
- const item: MESHInboxMessageDownloaded = {
- id: '550e8400-e29b-41d4-a716-446655440001',
- specversion: '1.0',
- source: '/nhs/england/notify/production/primary/data-plane/digital-letters',
- subject:
- 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
- type: 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
- time: '2023-06-20T12:00:00Z',
- recordedtime: '2023-06-20T12:00:00.250Z',
- severitynumber: 2,
- traceparent: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
- datacontenttype: 'application/json',
- dataschema:
- 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
- severitytext: 'INFO',
- data: {
- messageReference: 'ref1',
- senderId: 'sender1',
- messageUri: 'https://example.com/ttl/resource',
- },
- };
beforeEach(() => {
repo = { insertTtlRecord: jest.fn() } as any;
@@ -37,17 +16,17 @@ describe('CreateTtl', () => {
it('returns sent when insert succeeds', async () => {
repo.insertTtlRecord.mockResolvedValue();
- const result = await createTtl.send(item);
+ const result = await createTtl.send(messageDownloadedEvent);
expect(result).toBe('sent');
- expect(repo.insertTtlRecord).toHaveBeenCalledWith(item);
+ expect(repo.insertTtlRecord).toHaveBeenCalledWith(messageDownloadedEvent);
});
it('returns failed and logs error when insert throws', async () => {
const error = new Error('fail');
repo.insertTtlRecord.mockRejectedValue(error);
- const result = await createTtl.send(item);
+ const result = await createTtl.send(messageDownloadedEvent);
expect(result).toBe('failed');
expect(logger.error).toHaveBeenCalledWith(
diff --git a/lambdas/ttl-create-lambda/src/__tests__/data.ts b/lambdas/ttl-create-lambda/src/__tests__/data.ts
new file mode 100644
index 00000000..b0c36fa1
--- /dev/null
+++ b/lambdas/ttl-create-lambda/src/__tests__/data.ts
@@ -0,0 +1,25 @@
+import { MESHInboxMessageDownloaded } from 'digital-letters-events';
+
+export const messageDownloadedEvent: MESHInboxMessageDownloaded = {
+ id: '550e8400-e29b-41d4-a716-446655440001',
+ specversion: '1.0',
+ source:
+ '/nhs/england/notify/production/primary/data-plane/digitalletters/mesh',
+ subject:
+ 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
+ type: 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
+ time: '2023-06-20T12:00:00Z',
+ recordedtime: '2023-06-20T12:00:00.250Z',
+ severitynumber: 2,
+ traceparent: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ datacontenttype: 'application/json',
+ dataschema:
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
+ severitytext: 'INFO',
+ data: {
+ meshMessageId: '12345',
+ senderId: 'sender1',
+ messageReference: 'ref1',
+ messageUri: 'https://example.com/ttl/resource',
+ },
+};
diff --git a/lambdas/ttl-create-lambda/src/__tests__/infra/ttl-repository.test.ts b/lambdas/ttl-create-lambda/src/__tests__/infra/ttl-repository.test.ts
index f47c10bc..08b5db3c 100644
--- a/lambdas/ttl-create-lambda/src/__tests__/infra/ttl-repository.test.ts
+++ b/lambdas/ttl-create-lambda/src/__tests__/infra/ttl-repository.test.ts
@@ -1,6 +1,6 @@
import { PutCommand } from '@aws-sdk/lib-dynamodb';
+import { messageDownloadedEvent } from '__tests__/data';
import { TtlRepository } from 'infra/ttl-repository';
-import { MESHInboxMessageDownloaded } from 'digital-letters-events';
jest.useFakeTimers();
@@ -16,27 +16,6 @@ describe('TtlRepository', () => {
let senderRepository: any;
let repo: TtlRepository;
const tableName = 'table';
- const item: MESHInboxMessageDownloaded = {
- id: '550e8400-e29b-41d4-a716-446655440001',
- specversion: '1.0',
- source: '/nhs/england/notify/production/primary/data-plane/digital-letters',
- subject:
- 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
- type: 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
- time: '2023-06-20T12:00:00Z',
- recordedtime: '2023-06-20T12:00:00.250Z',
- severitynumber: 2,
- traceparent: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
- datacontenttype: 'application/json',
- dataschema:
- 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
- severitytext: 'INFO',
- data: {
- messageReference: 'ref1',
- senderId: 'sender1',
- messageUri: 'https://example.com/ttl/resource',
- },
- };
beforeEach(() => {
logger = { info: jest.fn(), error: jest.fn() };
@@ -68,7 +47,7 @@ describe('TtlRepository', () => {
.split('T')[0];
const expectedDateOfExpiry = `${expectedTtlDate}#${expectedShard}`;
- await repo.insertTtlRecord(item);
+ await repo.insertTtlRecord(messageDownloadedEvent);
expect(logger.info).toHaveBeenCalledWith(
expect.objectContaining({
@@ -77,17 +56,17 @@ describe('TtlRepository', () => {
);
expect(senderRepository.getSender).toHaveBeenCalledWith({
- senderId: item.data.senderId,
+ senderId: messageDownloadedEvent.data.senderId,
});
const putCommand: PutCommand = dynamoClient.send.mock.calls[0][0];
expect(putCommand.input).toStrictEqual({
TableName: tableName,
Item: {
- PK: item.data.messageUri,
+ PK: messageDownloadedEvent.data.messageUri,
SK: 'TTL',
dateOfExpiry: expectedDateOfExpiry,
- event: item,
+ event: messageDownloadedEvent,
ttl: expectedTtlSeconds,
},
});
@@ -97,7 +76,9 @@ describe('TtlRepository', () => {
const error = new Error('fail');
dynamoClient.send.mockRejectedValue(error);
- await expect(repo.insertTtlRecord(item)).rejects.toThrow(error);
+ await expect(repo.insertTtlRecord(messageDownloadedEvent)).rejects.toThrow(
+ error,
+ );
expect(logger.error).toHaveBeenCalledWith(
expect.objectContaining({
@@ -114,7 +95,7 @@ describe('TtlRepository', () => {
return Promise.resolve({});
});
- await repo.insertTtlRecord(item);
+ await repo.insertTtlRecord(messageDownloadedEvent);
expect(gsiPk).toMatch(/^\d{4}-\d{2}-\d{2}#\d{1,2}$/);
});
@@ -122,12 +103,12 @@ describe('TtlRepository', () => {
it('throws and logs error when sender not found', async () => {
senderRepository.getSender.mockResolvedValue(null);
- await expect(repo.insertTtlRecord(item)).rejects.toThrow(
- `Sender not found for sender ID ${item.data.senderId}`,
+ await expect(repo.insertTtlRecord(messageDownloadedEvent)).rejects.toThrow(
+ `Sender not found for sender ID ${messageDownloadedEvent.data.senderId}`,
);
expect(logger.error).toHaveBeenCalledWith({
- description: `Sender not found for sender ID ${item.data.senderId}`,
+ description: `Sender not found for sender ID ${messageDownloadedEvent.data.senderId}`,
});
});
});
diff --git a/lambdas/ttl-handle-expiry-lambda/src/__tests__/apis/dynamodb-stream-handler.test.ts b/lambdas/ttl-handle-expiry-lambda/src/__tests__/apis/dynamodb-stream-handler.test.ts
index f69f1df6..7ef9c53c 100644
--- a/lambdas/ttl-handle-expiry-lambda/src/__tests__/apis/dynamodb-stream-handler.test.ts
+++ b/lambdas/ttl-handle-expiry-lambda/src/__tests__/apis/dynamodb-stream-handler.test.ts
@@ -54,6 +54,7 @@ const mockEvent: DynamoDBStreamEvent = {
severitytext: { S: 'INFO' },
data: {
M: {
+ meshMessageId: { S: '12345' },
messageUri: { S: 'https://example.com/ttl/resource' },
messageReference: { S: 'ref1' },
senderId: { S: 'sender1' },
diff --git a/lambdas/ttl-handle-expiry-lambda/src/apis/dynamodb-stream-handler.ts b/lambdas/ttl-handle-expiry-lambda/src/apis/dynamodb-stream-handler.ts
index d8f42d5e..a3ba0f7b 100644
--- a/lambdas/ttl-handle-expiry-lambda/src/apis/dynamodb-stream-handler.ts
+++ b/lambdas/ttl-handle-expiry-lambda/src/apis/dynamodb-stream-handler.ts
@@ -101,6 +101,11 @@ export const createHandler = ({
dataschema:
'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-queue-item-dequeued-data.schema.json',
source: itemEvent.source.replace(/\/mesh$/, '/queue'),
+ data: {
+ senderId: itemEvent.data.senderId,
+ messageReference: itemEvent.data.messageReference,
+ messageUri: itemEvent.data.messageUri,
+ },
},
],
itemDequeuedValidator,
diff --git a/project.code-workspace b/project.code-workspace
index b7407a2a..f89f5fce 100644
--- a/project.code-workspace
+++ b/project.code-workspace
@@ -81,8 +81,10 @@
"shellcheck.run": "onSave",
"jest.virtualFolders": [
{ "name": "key-generation", "rootPath": "lambdas/key-generation" },
+ { "name": "pdm-mock-lambda", "rootPath": "lambdas/pdm-mock-lambda" },
+ { "name": "pdm-uploader-lambda", "rootPath": "lambdas/pdm-uploader-lambda" },
{ "name": "refresh-apim-access-token", "rootPath": "lambdas/refresh-apim-access-token" },
{ "name": "python-schema-generator", "rootPath": "src/python-schema-generator" },
{ "name": "ttl-create-lambda", "rootPath": "lambdas/ttl-create-lambda/" },
{ "name": "ttl-handle-expiry-lambda", "rootPath": "lambdas/ttl-handle-expiry-lambda" },
{ "name": "ttl-poll-lambda", "rootPath": "lambdas/ttl-poll-lambda" },
@@ -112,6 +115,7 @@
"maattdd.gitless",
"mhutchie.git-graph",
"ms-azuretools.vscode-docker",
+ "ms-playwright.playwright",
"ms-vscode-remote.remote-containers",
"ms-vscode-remote.remote-wsl",
"ms-vscode.hexeditor",
diff --git a/scripts/config/sonar-scanner.properties b/scripts/config/sonar-scanner.properties
index ce9a8392..789b2989 100644
--- a/scripts/config/sonar-scanner.properties
+++ b/scripts/config/sonar-scanner.properties
@@ -21,7 +21,7 @@ sonar.coverage.exclusions=\
scripts/**/*.*, \
docs/**/*.*, \
utils/py-mock-mesh/**, \
- utils/event-publisher-py/event_publisher/mesh_config.py, \
+    lambdas/mesh-acknowledge/mesh_acknowledge/config.py, \
lambdas/mesh-download/mesh_download/config.py, \
lambdas/mesh-download/mesh_download/errors.py, \
lambdas/mesh-poll/mesh_poll/config.py, \
@@ -30,6 +30,6 @@ sonar.coverage.exclusions=\
src/eventcatalogasyncapiimporter/examples.py
# Coverage reports
-sonar.python.coverage.reportPaths=.coverage/coverage.xml,src/asyncapigenerator/coverage.xml,src/cloudeventjekylldocs/coverage.xml,src/eventcatalogasyncapiimporter/coverage.xml,utils/event-publisher-py/coverage.xml,utils/metric-publishers/coverage.xml,lambdas/mesh-poll/coverage.xml,lambdas/mesh-download/coverage.xml,src/python-schema-generator/coverage.xml
+sonar.python.coverage.reportPaths=.coverage/coverage.xml,src/asyncapigenerator/coverage.xml,src/cloudeventjekylldocs/coverage.xml,src/eventcatalogasyncapiimporter/coverage.xml,utils/py-utils/coverage.xml,lambdas/mesh-acknowledge/coverage.xml,src/python-schema-generator/coverage.xml,lambdas/mesh-poll/coverage.xml,lambdas/mesh-download/coverage.xml
sonar.javascript.lcov.reportPaths=lcov.info,src/cloudevents/coverage/lcov.info
sonar.typescript.lcov.reportPaths=lcov.info,src/cloudevents/coverage/lcov.info
diff --git a/scripts/tests/unit.sh b/scripts/tests/unit.sh
index b9035e69..90e5109d 100755
--- a/scripts/tests/unit.sh
+++ b/scripts/tests/unit.sh
@@ -19,6 +19,12 @@ cd "$(git rev-parse --show-toplevel)"
# run tests
+# TypeScript/JavaScript projects (npm workspace)
+# Note: src/cloudevents is included in workspaces, so it will be tested here
+npm ci
+npm run generate-dependencies
+npm run test:unit --workspaces
+
# Python projects - asyncapigenerator
echo "Setting up and running asyncapigenerator tests..."
make -C ./src/asyncapigenerator install-dev
@@ -34,15 +40,15 @@ echo "Setting up and running eventcatalogasyncapiimporter tests..."
make -C ./src/eventcatalogasyncapiimporter install-dev
make -C ./src/eventcatalogasyncapiimporter coverage # Run with coverage to generate coverage.xml for SonarCloud
-# Python utility packages - event-publisher-py
-echo "Setting up and running event-publisher-py tests..."
-make -C ./utils/event-publisher-py install-dev
-make -C ./utils/event-publisher-py coverage # Run with coverage to generate coverage.xml for SonarCloud
+# Python utility packages - py-utils
+echo "Setting up and running py-utils tests..."
+make -C ./utils/py-utils install-dev
+make -C ./utils/py-utils coverage # Run with coverage to generate coverage.xml for SonarCloud
-# Python utility packages - metric-publishers
-echo "Setting up and running metric-publishers tests..."
-make -C ./utils/metric-publishers install-dev
-make -C ./utils/metric-publishers coverage # Run with coverage to generate coverage.xml for SonarCloud
+# Python Lambda - mesh-acknowledge
+echo "Setting up and running mesh-acknowledge tests..."
+make -C ./lambdas/mesh-acknowledge install-dev
+make -C ./lambdas/mesh-acknowledge coverage # Run with coverage to generate coverage.xml for SonarCloud
# Python Lambda - mesh-poll
echo "Setting up and running mesh-poll tests..."
@@ -53,17 +59,12 @@ make -C ./lambdas/mesh-poll coverage # Run with coverage to generate coverage.x
echo "Setting up and running mesh-download tests..."
make -C ./lambdas/mesh-download install-dev
make -C ./lambdas/mesh-download coverage # Run with coverage to generate coverage.xml for SonarCloud
+
# Python projects - python-schema-generator
echo "Setting up and running python-schema-generator tests..."
make -C ./src/python-schema-generator install-dev
make -C ./src/python-schema-generator coverage # Run with coverage to generate coverage.xml for SonarCloud
-# TypeScript/JavaScript projects (npm workspace)
-# Note: src/cloudevents is included in workspaces, so it will be tested here
-npm ci
-npm run generate-dependencies
-npm run test:unit --workspaces
-
# merge coverage reports
mkdir -p .reports
TMPDIR="./.reports" ./node_modules/.bin/lcov-result-merger "**/.reports/unit/coverage/lcov.info" ".reports/lcov.info" --ignore "node_modules" --prepend-source-files --prepend-path-fix "../../.."
diff --git a/src/cloudevents/domains/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.yaml b/src/cloudevents/domains/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.yaml
index 30bc7a5e..c92e1514 100644
--- a/src/cloudevents/domains/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.yaml
+++ b/src/cloudevents/domains/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.yaml
@@ -4,13 +4,16 @@ description: Data payload of the MESHInboxMessageDownloaded event
type: object
additionalProperties: false
properties:
- messageReference:
- $ref: ../defs/requests.schema.yaml#/properties/messageReference
+ meshMessageId:
+ $ref: ../defs/mesh.schema.yaml#/properties/meshMessageId
senderId:
$ref: ../defs/requests.schema.yaml#/properties/senderId
+ messageReference:
+ $ref: ../defs/requests.schema.yaml#/properties/messageReference
messageUri:
$ref: ../defs/requests.schema.yaml#/properties/messageUri
required:
- - messageReference
+ - meshMessageId
- senderId
+ - messageReference
- messageUri
diff --git a/src/python-schema-generator/src/model_generator.py b/src/python-schema-generator/src/model_generator.py
index 154b5864..cdb443d0 100644
--- a/src/python-schema-generator/src/model_generator.py
+++ b/src/python-schema-generator/src/model_generator.py
@@ -26,6 +26,7 @@ def generate_pydantic_model(
output_model_type=DataModelType.PydanticV2BaseModel,
class_name=class_name,
use_schema_description=True,
+ use_subclass_enum=True,
custom_file_header='''"""Generated Pydantic model for NHS Notify Digital Letters events.
This file is auto-generated. Do not edit manually.
diff --git a/src/python-schema-generator/tests/test_model_generator.py b/src/python-schema-generator/tests/test_model_generator.py
index f307692f..32adf75b 100644
--- a/src/python-schema-generator/tests/test_model_generator.py
+++ b/src/python-schema-generator/tests/test_model_generator.py
@@ -30,6 +30,7 @@ def test_calls_datamodel_codegen_with_expected_arguments(self, mock_generate):
output_model_type=DataModelType.PydanticV2BaseModel,
class_name="TestModel",
use_schema_description=True,
+ use_subclass_enum=True,
custom_file_header='''"""Generated Pydantic model for NHS Notify Digital Letters events.
This file is auto-generated. Do not edit manually.
diff --git a/tests/playwright/constants/backend-constants.ts b/tests/playwright/constants/backend-constants.ts
index d9d78913..41ad4406 100644
--- a/tests/playwright/constants/backend-constants.ts
+++ b/tests/playwright/constants/backend-constants.ts
@@ -17,7 +17,9 @@ export const CORE_NOTIFIER_LAMBDA_NAME = `${CSI}-core-notifier`;
export const TTL_QUEUE_NAME = `${CSI}-ttl-queue`;
export const TTL_DLQ_NAME = `${CSI}-ttl-dlq`;
export const PDM_UPLOADER_DLQ_NAME = `${CSI}-pdm-uploader-dlq`;
+export const MESH_DOWNLOAD_DLQ_NAME = `${CSI}-mesh-download-dlq`;
export const PDM_POLL_DLQ_NAME = `${CSI}-pdm-poll-dlq`;
+export const MESH_ACKNOWLEDGE_DLQ_NAME = `${CSI}-mesh-acknowledge-dlq`;
export const CORE_NOTIFIER_DLQ_NAME = `${CSI}-core-notifier-dlq`;
export const PRINT_STATUS_HANDLER_DLQ_NAME = `${CSI}-print-status-handler-dlq`;
export const HANDLE_TTL_DLQ_NAME = `${CSI}-ttl-handle-expiry-errors-queue`;
@@ -36,6 +38,8 @@ export const TTL_TABLE_NAME = `${CSI}-ttl`;
// S3
export const LETTERS_S3_BUCKET_NAME = `nhs-${process.env.AWS_ACCOUNT_ID}-${REGION}-${ENV}-dl-letters`;
+export const NON_PII_S3_BUCKET_NAME = `nhs-${process.env.AWS_ACCOUNT_ID}-${REGION}-${ENV}-dl-non-pii-data`;
+export const PII_S3_BUCKET_NAME = `nhs-${process.env.AWS_ACCOUNT_ID}-${REGION}-${ENV}-dl-pii-data`;
export const FILE_SAFE_S3_BUCKET_NAME = `nhs-${process.env.AWS_ACCOUNT_ID}-${REGION}-${ENV}-dl-file-safe`;
// Cloudwatch
diff --git a/tests/playwright/constants/tests-constants.ts b/tests/playwright/constants/tests-constants.ts
index 3936dc85..8f23d49e 100644
--- a/tests/playwright/constants/tests-constants.ts
+++ b/tests/playwright/constants/tests-constants.ts
@@ -1,6 +1,6 @@
// senderIds
export const SENDER_ID_VALID_FOR_NOTIFY_SANDBOX =
- 'componentTestSender_RoutingConfig';
+ '2b8ebb33-8b33-49bd-949e-c12e22d25320';
export const SENDER_ID_THAT_TRIGGERS_ERROR_IN_NOTIFY_SANDBOX =
- 'componentTestSender_RoutingConfigInvalid';
-export const SENDER_ID_SKIPS_NOTIFY = 'test-sender-1';
+ 'f017669b-6da4-4576-9d59-3d2b7f005ae2';
+export const SENDER_ID_SKIPS_NOTIFY = '67403568-166e-41d0-900a-1f31fe93a091';
diff --git a/tests/playwright/digital-letters-component-tests/mesh-acknowledge.component.spec.ts b/tests/playwright/digital-letters-component-tests/mesh-acknowledge.component.spec.ts
new file mode 100644
index 00000000..7d105005
--- /dev/null
+++ b/tests/playwright/digital-letters-component-tests/mesh-acknowledge.component.spec.ts
@@ -0,0 +1,169 @@
+import { expect, test } from '@playwright/test';
+import {
+ ENV,
+ MESH_ACKNOWLEDGE_DLQ_NAME,
+ NON_PII_S3_BUCKET_NAME,
+} from 'constants/backend-constants';
+import { SENDER_ID_SKIPS_NOTIFY } from 'constants/tests-constants';
+import { MESHInboxMessageDownloaded } from 'digital-letters-events';
+import messageDownloadedValidator from 'digital-letters-events/MESHInboxMessageDownloaded.js';
+import { getLogsFromCloudwatch } from 'helpers/cloudwatch-helpers';
+import eventPublisher from 'helpers/event-bus-helpers';
+import expectToPassEventually from 'helpers/expectations';
+import { downloadFromS3 } from 'helpers/s3-helpers';
+import { expectMessageContainingString } from 'helpers/sqs-helpers';
+import { v4 as uuidv4 } from 'uuid';
+
+test.describe('Digital Letters - Mesh Acknowledger', () => {
+ // These values match the ones configured in senders.setup.ts
+ const senderId = SENDER_ID_SKIPS_NOTIFY;
+ const sendersMeshMailboxId = 'test-mesh-sender-1';
+
+ const validMessageDownloadedEvent: MESHInboxMessageDownloaded = {
+ id: uuidv4(),
+ specversion: '1.0',
+ source:
+ '/nhs/england/notify/production/primary/data-plane/digitalletters/mesh',
+ subject:
+ 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
+ type: 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
+ time: '2023-06-20T12:00:00Z',
+ recordedtime: '2023-06-20T12:00:00.250Z',
+ severitynumber: 2,
+ traceparent: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ datacontenttype: 'application/json',
+ dataschema:
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
+ severitytext: 'INFO',
+ datacategory: 'non-sensitive',
+ dataclassification: 'public',
+ dataregulation: 'GDPR',
+ tracestate: 'rojo=00f067aa0ba902b7,congo=t61rcWkgMzE',
+ partitionkey: 'customer-920fca11',
+ sampledrate: 5,
+ sequence: '00000000000000000042',
+ data: {
+ meshMessageId: '12345',
+ messageUri: `https://example.com/ttl/resource/${uuidv4()}`,
+ messageReference: 'ref1',
+ senderId,
+ },
+ };
+
+ test('should send MESH acknowledgement and publish message acknowledged event following message downloaded event', async () => {
+ const letterId = uuidv4();
+ const messageReference = uuidv4();
+ const meshMessageId = '20200601122152994285_D59900';
+
+ await eventPublisher.sendEvents(
+ [
+ {
+ ...validMessageDownloadedEvent,
+ id: letterId,
+ data: {
+ ...validMessageDownloadedEvent.data,
+ messageUri: `https://example.com/ttl/resource/${letterId}`,
+ messageReference,
+ meshMessageId,
+ },
+ },
+ ],
+ messageDownloadedValidator,
+ );
+
+ // The mailbox ID matches the Mock MESH config in SSM.
+ const meshMailboxId = 'mock-mailbox';
+
+ // Verify MESH acknowledgement message was published.
+ await expectToPassEventually(async () => {
+ const messageContent = await downloadFromS3(
+ NON_PII_S3_BUCKET_NAME,
+ `mock-mesh/${meshMailboxId}/out/${sendersMeshMailboxId}/${messageReference}_`,
+ );
+
+ const messageHeaders = messageContent.metadata ?? {};
+ expect(messageHeaders.subject).toEqual('202');
+ expect(messageHeaders.local_id).toEqual(messageReference);
+ expect(messageHeaders.workflow_id).toEqual('NHS_NOTIFY_SEND_REQUEST_ACK');
+
+ const messageBody = JSON.parse(messageContent.body);
+ expect(messageBody).toEqual({
+ meshMessageId,
+ requestId: `${senderId}_${messageReference}`,
+ });
+ });
+
+ // Verify message acknowledged event was published.
+ await expectToPassEventually(async () => {
+ const eventLogEntry = await getLogsFromCloudwatch(
+ `/aws/vendedlogs/events/event-bus/nhs-${ENV}-dl`,
+ [
+ '$.message_type = "EVENT_RECEIPT"',
+ '$.details.detail_type = "uk.nhs.notify.digital.letters.mesh.inbox.message.acknowledged.v1"',
+ `$.details.event_detail = "*\\"messageReference\\":\\"${messageReference}\\"*"`,
+ `$.details.event_detail = "*\\"senderId\\":\\"${senderId}\\"*"`,
+ `$.details.event_detail = "*\\"meshMailboxId\\":\\"${sendersMeshMailboxId}\\"*"`,
+ ],
+ );
+
+ expect(eventLogEntry.length).toEqual(1);
+ });
+ });
+
+ test('should send an event for an unknown sender to dlq', async () => {
+ // We need to leave time to go through the 3 retries before it's sent to the DLQ.
+ test.setTimeout(550_000);
+
+ const letterId = uuidv4();
+
+ await eventPublisher.sendEvents(
+ [
+ {
+ ...validMessageDownloadedEvent,
+ id: letterId,
+ data: {
+ ...validMessageDownloadedEvent.data,
+ senderId: 'unknown-sender-id',
+ },
+ },
+ ],
+ messageDownloadedValidator,
+ );
+
+ await expectMessageContainingString(
+ MESH_ACKNOWLEDGE_DLQ_NAME,
+ letterId,
+ 420,
+ );
+ });
+
+ test('should send invalid event to dlq', async () => {
+ // We need to leave time to go through the 3 retries before it's sent to the DLQ.
+ test.setTimeout(550_000);
+
+ const letterId = uuidv4();
+
+ await eventPublisher.sendEvents<
+ MESHInboxMessageDownloaded & { data: { unexpectedField: string } }
+ >(
+ [
+ {
+ ...validMessageDownloadedEvent,
+ id: letterId,
+ data: {
+ ...validMessageDownloadedEvent.data,
+ unexpectedField: 'I should not be here',
+ },
+ },
+ ],
+ // We don't actually want to validate this event on the way out, as we intend it to be invalid.
+ () => true,
+ );
+
+ await expectMessageContainingString(
+ MESH_ACKNOWLEDGE_DLQ_NAME,
+ letterId,
+ 420,
+ );
+ });
+});
diff --git a/tests/playwright/digital-letters-component-tests/mesh-poll-download.component.spec.ts b/tests/playwright/digital-letters-component-tests/mesh-poll-download.component.spec.ts
new file mode 100644
index 00000000..45ce052d
--- /dev/null
+++ b/tests/playwright/digital-letters-component-tests/mesh-poll-download.component.spec.ts
@@ -0,0 +1,182 @@
+import { expect, test } from '@playwright/test';
+import {
+ ENV,
+ MESH_DOWNLOAD_DLQ_NAME,
+ MESH_POLL_LAMBDA_NAME,
+ NON_PII_S3_BUCKET_NAME,
+ PII_S3_BUCKET_NAME,
+} from 'constants/backend-constants';
+import { getLogsFromCloudwatch } from 'helpers/cloudwatch-helpers';
+import eventPublisher from 'helpers/event-bus-helpers';
+import expectToPassEventually from 'helpers/expectations';
+import { invokeLambda } from 'helpers/lambda-helpers';
+import { downloadFromS3, uploadToS3 } from 'helpers/s3-helpers';
+import { expectMessageContainingString } from 'helpers/sqs-helpers';
+import { v4 as uuidv4 } from 'uuid';
+import messageMessageReceived from 'digital-letters-events/MESHInboxMessageReceived.js';
+import { SENDER_ID_SKIPS_NOTIFY } from 'constants/tests-constants';
+
+test.describe('Digital Letters - MESH Poll and Download', () => {
+ const senderId = SENDER_ID_SKIPS_NOTIFY;
+ const sendersMeshMailboxId = 'test-mesh-sender-1';
+ const meshMailboxId = 'mock-mailbox';
+
+  async function uploadMeshMessage(
+    meshMessageId: string,
+    messageReference: string,
+    messageContent: string,
+    metadata: Record<string, string> = {},
+  ): Promise<void> {
+ const key = `mock-mesh/${meshMailboxId}/in/${meshMessageId}`;
+ const meshMetadata = {
+ sender: sendersMeshMailboxId,
+ subject: '201',
+ workflow_id: 'NHS_NOTIFY_SEND_REQUEST',
+ local_id: messageReference,
+ ...metadata,
+ };
+
+ await uploadToS3(messageContent, NON_PII_S3_BUCKET_NAME, key, meshMetadata);
+ }
+
+ async function expectMeshInboxMessageReceivedEvent(
+ meshMessageId: string,
+  ): Promise<void> {
+ await expectToPassEventually(async () => {
+ const eventLogEntry = await getLogsFromCloudwatch(
+ `/aws/vendedlogs/events/event-bus/nhs-${ENV}-dl`,
+ [
+ '$.message_type = "EVENT_RECEIPT"',
+ '$.details.detail_type = "uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1"',
+ `$.details.event_detail = "*\\"meshMessageId\\":\\"${meshMessageId}\\"*"`,
+ `$.details.event_detail = "*\\"senderId\\":\\"${senderId}\\"*"`,
+ ],
+ );
+
+ expect(eventLogEntry.length).toBeGreaterThanOrEqual(1);
+ }, 120_000);
+ }
+
+ async function expectMeshInboxMessageDownloadedEvent(
+ messageReference: string,
+  ): Promise<void> {
+ await expectToPassEventually(async () => {
+ const eventLogEntry = await getLogsFromCloudwatch(
+ `/aws/vendedlogs/events/event-bus/nhs-${ENV}-dl`,
+ [
+ '$.message_type = "EVENT_RECEIPT"',
+ '$.details.detail_type = "uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1"',
+ `$.details.event_detail = "*\\"messageReference\\":\\"${messageReference}\\"*"`,
+ `$.details.event_detail = "*\\"senderId\\":\\"${senderId}\\"*"`,
+ ],
+ );
+
+ expect(eventLogEntry.length).toBeGreaterThanOrEqual(1);
+ }, 180_000);
+ }
+
+ test('should poll message from MESH inbox, publish received event, download message, and publish downloaded event', async () => {
+ const meshMessageId = `${Date.now()}_TEST_${uuidv4().slice(0, 8)}`;
+ const messageReference = uuidv4();
+ const messageContent = JSON.stringify({
+ senderId,
+ messageReference,
+ testData: 'This is a test letter content',
+ timestamp: new Date().toISOString(),
+ });
+
+ await uploadMeshMessage(meshMessageId, messageReference, messageContent);
+
+ await invokeLambda(MESH_POLL_LAMBDA_NAME);
+
+ await expectMeshInboxMessageReceivedEvent(meshMessageId);
+ await expectMeshInboxMessageDownloadedEvent(messageReference);
+
+ await expectToPassEventually(async () => {
+ const storedMessage = await downloadFromS3(
+ PII_S3_BUCKET_NAME,
+ `document-reference/${senderId}_${messageReference}`,
+ );
+
+ expect(storedMessage.body).toContain(messageContent);
+ }, 60_000);
+
+ await expectToPassEventually(async () => {
+ await expect(async () => {
+ await downloadFromS3(
+ NON_PII_S3_BUCKET_NAME,
+ `mock-mesh/${meshMailboxId}/in/${meshMessageId}`,
+ );
+ }).rejects.toThrow('No objects found');
+ }, 60_000);
+ });
+
+ test('should send message to mesh-download DLQ when download fails', async () => {
+ test.setTimeout(400_000);
+
+ const invalidMeshMessageId = `${Date.now()}_DLQ_${uuidv4().slice(0, 8)}`;
+ const messageReference = uuidv4();
+
+ await eventPublisher.sendEvents(
+ [
+ {
+ id: uuidv4(),
+ specversion: '1.0',
+ source:
+ '/nhs/england/notify/development/primary/data-plane/digitalletters/mesh',
+ subject:
+ 'customer/00000000-0000-0000-0000-000000000000/recipient/00000000-0000-0000-0000-000000000000',
+ type: 'uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1',
+ time: '2026-01-20T15:48:21.636284+00:00',
+ recordedtime: '2026-01-20T15:48:21.636284+00:00',
+ severitynumber: 2,
+ severitytext: 'INFO',
+ traceparent:
+ '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ dataschema:
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-received-data.schema.json',
+ data: {
+ meshMessageId: invalidMeshMessageId,
+ senderId,
+ messageReference,
+ },
+ },
+ ],
+ messageMessageReceived,
+ );
+
+ await expectMessageContainingString(
+ MESH_DOWNLOAD_DLQ_NAME,
+ invalidMeshMessageId,
+ 300,
+ );
+ });
+
+ test('should handle multiple messages in inbox', async () => {
+ test.setTimeout(300_000);
+
+ const messages = Array.from({ length: 3 }, (_, i) => ({
+ meshMessageId: `${Date.now()}_MULTI_${i}_${uuidv4().slice(0, 8)}`,
+ messageReference: uuidv4(),
+ messageContent: JSON.stringify({
+ senderId,
+ messageReference: uuidv4(),
+ testData: `Test message ${i}`,
+ }),
+ }));
+
+ for (const msg of messages) {
+ await uploadMeshMessage(
+ msg.meshMessageId,
+ msg.messageReference,
+ msg.messageContent,
+ );
+ }
+
+ await invokeLambda(MESH_POLL_LAMBDA_NAME);
+
+ for (const msg of messages) {
+ await expectMeshInboxMessageReceivedEvent(msg.meshMessageId);
+ }
+ });
+});
diff --git a/tests/playwright/digital-letters-component-tests/pdm-uploader.component.spec.ts b/tests/playwright/digital-letters-component-tests/pdm-uploader.component.spec.ts
index 5a2d99c4..6e033af7 100644
--- a/tests/playwright/digital-letters-component-tests/pdm-uploader.component.spec.ts
+++ b/tests/playwright/digital-letters-component-tests/pdm-uploader.component.spec.ts
@@ -12,6 +12,7 @@ import expectToPassEventually from 'helpers/expectations';
import { uploadToS3 } from 'helpers/s3-helpers';
import { expectMessageContainingString, purgeQueue } from 'helpers/sqs-helpers';
import { v4 as uuidv4 } from 'uuid';
+import { SENDER_ID_SKIPS_NOTIFY } from 'constants/tests-constants';
const pdmRequest = {
resourceType: 'DocumentReference',
@@ -62,7 +63,8 @@ test.describe('Digital Letters - Upload to PDM', () => {
const resourceKey = `test/${letterId}`;
const messageUri = `s3://${LETTERS_S3_BUCKET_NAME}/${resourceKey}`;
const messageReference = uuidv4();
- const senderId = 'test-sender-1';
+ const senderId = SENDER_ID_SKIPS_NOTIFY;
+ const meshMessageId = '12345';
uploadToS3(JSON.stringify(pdmRequest), LETTERS_S3_BUCKET_NAME, resourceKey);
@@ -72,6 +74,7 @@ test.describe('Digital Letters - Upload to PDM', () => {
...baseEvent,
id: eventId,
data: {
+ meshMessageId,
messageUri,
messageReference,
senderId,
@@ -114,7 +117,8 @@ test.describe('Digital Letters - Upload to PDM', () => {
const resourceKey = `test/${letterId}`;
const messageUri = `s3://${LETTERS_S3_BUCKET_NAME}/${resourceKey}`;
const messageReference = uuidv4();
- const senderId = 'test-sender-1';
+ const senderId = SENDER_ID_SKIPS_NOTIFY;
+ const meshMessageId = '12345';
const invalidPdmRequest = {
...pdmRequest,
unexpectedField: 'I should not be here',
@@ -132,6 +136,7 @@ test.describe('Digital Letters - Upload to PDM', () => {
...baseEvent,
id: eventId,
data: {
+ meshMessageId,
messageUri,
messageReference,
senderId,
@@ -174,7 +179,8 @@ test.describe('Digital Letters - Upload to PDM', () => {
const eventId = uuidv4();
const messageUri = `not-a-valid-s3-uri`;
const messageReference = uuidv4();
- const senderId = 'test-sender-1';
+ const senderId = SENDER_ID_SKIPS_NOTIFY;
+ const meshMessageId = '12345';
await eventPublisher.sendEvents(
[
@@ -182,13 +188,14 @@ test.describe('Digital Letters - Upload to PDM', () => {
...baseEvent,
id: eventId,
data: {
+ meshMessageId,
messageUri,
messageReference,
senderId,
},
},
],
- messageDownloadedValidator,
+ () => true,
);
await expectToPassEventually(async () => {
diff --git a/tests/playwright/digital-letters-component-tests/ttl-create.component.spec.ts b/tests/playwright/digital-letters-component-tests/ttl-create.component.spec.ts
index 8434d27f..eada1588 100644
--- a/tests/playwright/digital-letters-component-tests/ttl-create.component.spec.ts
+++ b/tests/playwright/digital-letters-component-tests/ttl-create.component.spec.ts
@@ -1,5 +1,6 @@
import { expect, test } from '@playwright/test';
import { ENV } from 'constants/backend-constants';
+import { SENDER_ID_SKIPS_NOTIFY } from 'constants/tests-constants';
import { MESHInboxMessageDownloaded } from 'digital-letters-events';
import messageDownloadedValidator from 'digital-letters-events/MESHInboxMessageDownloaded.js';
import { getLogsFromCloudwatch } from 'helpers/cloudwatch-helpers';
@@ -33,9 +34,10 @@ test.describe('Digital Letters - Create TTL', () => {
'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
severitytext: 'INFO',
data: {
+ meshMessageId: '12345',
messageUri,
messageReference: 'ref1',
- senderId: 'test-sender-1',
+ senderId: SENDER_ID_SKIPS_NOTIFY,
},
},
],
diff --git a/tests/playwright/digital-letters-component-tests/ttl-handle.component.spec.ts b/tests/playwright/digital-letters-component-tests/ttl-handle.component.spec.ts
index 66c03131..dac678a8 100644
--- a/tests/playwright/digital-letters-component-tests/ttl-handle.component.spec.ts
+++ b/tests/playwright/digital-letters-component-tests/ttl-handle.component.spec.ts
@@ -29,6 +29,7 @@ test.describe('Digital Letters - Handle TTL', () => {
'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-downloaded-data.schema.json',
severitytext: 'INFO',
data: {
+ meshMessageId: '12345',
messageReference: 'ref1',
senderId: 'sender1',
messageUri: 'https://example.com/ttl/resource/sample',
diff --git a/tests/playwright/helpers/lambda-helpers.ts b/tests/playwright/helpers/lambda-helpers.ts
new file mode 100644
index 00000000..bc23a646
--- /dev/null
+++ b/tests/playwright/helpers/lambda-helpers.ts
@@ -0,0 +1,40 @@
+import { InvokeCommand, LambdaClient } from '@aws-sdk/client-lambda';
+
+const lambda = new LambdaClient({
+ region: process.env.AWS_REGION || 'eu-west-2',
+});
+
+async function invokeLambda(
+  functionName: string,
+  payload?: Record<string, unknown>,
+): Promise<void> {
+ await lambda.send(
+ new InvokeCommand({
+ FunctionName: functionName,
+ InvocationType: 'Event', // Async invocation
+ Payload: payload ? JSON.stringify(payload) : undefined,
+ }),
+ );
+}
+
+async function invokeLambdaSync<T>(
+  functionName: string,
+  payload?: Record<string, unknown>,
+): Promise<T | undefined> {
+ const response = await lambda.send(
+ new InvokeCommand({
+ FunctionName: functionName,
+ InvocationType: 'RequestResponse', // Sync invocation
+ Payload: payload ? JSON.stringify(payload) : undefined,
+ }),
+ );
+
+ if (response.Payload) {
+ const payloadString = new TextDecoder().decode(response.Payload);
+ return JSON.parse(payloadString) as T;
+ }
+
+ return undefined;
+}
+
+export { invokeLambda, invokeLambdaSync };
diff --git a/tests/playwright/helpers/s3-helpers.ts b/tests/playwright/helpers/s3-helpers.ts
index 2b6ec2e6..9a1d77c5 100644
--- a/tests/playwright/helpers/s3-helpers.ts
+++ b/tests/playwright/helpers/s3-helpers.ts
@@ -1,5 +1,7 @@
import {
+ GetObjectCommand,
ListBucketsCommand,
+ ListObjectsV2Command,
PutObjectCommand,
S3Client,
} from '@aws-sdk/client-s3';
@@ -26,14 +28,52 @@ async function uploadToS3(
content: string,
bucket: string,
key: string,
+  metadata?: Record<string, string>,
): Promise {
await s3.send(
new PutObjectCommand({
Bucket: bucket,
Key: key,
Body: content,
+ Metadata: metadata,
}),
);
}
-export { listBuckets, uploadToS3 };
+async function downloadFromS3(
+ bucket: string,
+ keyPrefix: string,
+): Promise<{ body: string; metadata?: Record<string, string> }> {
+ const objects = await s3.send(
+ new ListObjectsV2Command({ Bucket: bucket, Prefix: keyPrefix }),
+ );
+
+ if ((objects.Contents?.length ?? 0) > 1) {
+ throw new Error(
+ `Multiple objects found for prefix s3://${bucket}/${keyPrefix}`,
+ );
+ }
+
+ if ((objects.Contents?.length ?? 0) === 0) {
+ throw new Error(`No objects found for prefix s3://${bucket}/${keyPrefix}`);
+ }
+
+ const key = objects.Contents?.[0]?.Key;
+ const response = await s3.send(
+ new GetObjectCommand({
+ Bucket: bucket,
+ Key: key,
+ }),
+ );
+
+ if (!response.Body) {
+ throw new Error(`No content found for s3://${bucket}/${key}`);
+ }
+
+ return {
+ body: await response.Body.transformToString(),
+ metadata: response.Metadata,
+ };
+}
+
+export { downloadFromS3, listBuckets, uploadToS3 };
diff --git a/utils/event-publisher-py/Makefile b/utils/event-publisher-py/Makefile
deleted file mode 100644
index a9ee1437..00000000
--- a/utils/event-publisher-py/Makefile
+++ /dev/null
@@ -1,24 +0,0 @@
-.PHONY: install install-dev test coverage clean
-
-install:
- pip install -r requirements.txt
-
-install-dev: install
- pip install -r requirements-dev.txt
-
-test:
- cd ../.. && PYTHONPATH=utils/event-publisher-py:$$PYTHONPATH pytest utils/event-publisher-py/event_publisher/__tests__/ -v
-
-coverage:
- cd ../.. && PYTHONPATH=utils/event-publisher-py:$$PYTHONPATH pytest utils/event-publisher-py/event_publisher/__tests__/ \
- --cov=utils/event-publisher-py/event_publisher \
- --cov-config=utils/event-publisher-py/pytest.ini \
- --cov-report=html:utils/event-publisher-py/htmlcov \
- --cov-report=term-missing \
- --cov-report=xml:utils/event-publisher-py/coverage.xml \
- --cov-branch
-
-clean:
- rm -rf dist/ .coverage htmlcov/ .pytest_cache/ coverage.xml
- find . -type d -name __pycache__ -exec rm -rf {} +
- find . -type f -name '*.pyc' -delete
diff --git a/utils/event-publisher-py/event_publisher/__init__.py b/utils/event-publisher-py/event_publisher/__init__.py
deleted file mode 100644
index d618812f..00000000
--- a/utils/event-publisher-py/event_publisher/__init__.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""
-Event Publisher for AWS EventBridge with DLQ support.
-
-This module provides a Python equivalent of the TypeScript EventPublisher
-for publishing CloudEvents to EventBridge.
-"""
-
-from .event_publisher import EventPublisher
-from . import models
-from .mesh_config import (
- BaseMeshConfig,
- InvalidMeshEndpointError,
- InvalidEnvironmentVariableError,
- store_file,
- log
-)
-
-__all__ = [
- 'EventPublisher',
- 'models',
- 'BaseMeshConfig',
- 'InvalidMeshEndpointError',
- 'InvalidEnvironmentVariableError',
- 'store_file',
- 'log'
-]
diff --git a/utils/event-publisher-py/event_publisher/__tests__/test_models.py b/utils/event-publisher-py/event_publisher/__tests__/test_models.py
deleted file mode 100644
index dea3fdf2..00000000
--- a/utils/event-publisher-py/event_publisher/__tests__/test_models.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import pytest
-from pydantic import ValidationError
-from event_publisher.models import CloudEvent, MeshInboxMessageEvent
-
-
-class TestCloudEvent:
- """Test CloudEvent validation"""
-
- @pytest.fixture
- def valid_event(self):
- return {
- 'id': '550e8400-e29b-41d4-a716-446655440001',
- 'specversion': '1.0',
- 'source': '/nhs/england/notify/production/primary/data-plane/digitalletters/mesh',
- 'subject': 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
- 'type': 'uk.nhs.notify.digital.letters.example.v1',
- 'time': '2024-07-10T14:30:00Z',
- 'recordedtime': '2024-07-10T14:30:00.250Z',
- 'severitynumber': 2,
- 'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
- 'dataschema': 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10/digital-letter-base-data.schema.json',
- 'data': {
- 'digital-letter-id': '123e4567-e89b-12d3-a456-426614174000',
- 'messageReference': 'ref1',
- 'senderId': 'sender1',
- },
- }
-
- def test_parses_valid_cloud_event(self, valid_event):
- event = CloudEvent(**valid_event)
- assert str(event.id) == valid_event['id']
- assert event.source == valid_event['source']
- assert event.subject == valid_event['subject']
- assert event.type == valid_event['type']
-
- def test_fails_for_missing_required_fields(self):
- with pytest.raises(ValidationError):
- CloudEvent(**{})
-
- def test_fails_for_invalid_source_pattern(self, valid_event):
- invalid = valid_event.copy()
- invalid['source'] = 'invalid-source'
- with pytest.raises(ValidationError) as exc_info:
- CloudEvent(**invalid)
- assert 'source' in str(exc_info.value).lower()
-
- def test_fails_for_invalid_subject_pattern(self, valid_event):
- invalid = valid_event.copy()
- invalid['subject'] = 'invalid-subject'
- with pytest.raises(ValidationError) as exc_info:
- CloudEvent(**invalid)
- assert 'subject' in str(exc_info.value).lower()
-
- def test_fails_for_invalid_type_pattern(self, valid_event):
- invalid = valid_event.copy()
- invalid['type'] = 'invalid.type'
- with pytest.raises(ValidationError) as exc_info:
- CloudEvent(**invalid)
- assert 'type' in str(exc_info.value).lower()
-
- def test_allows_any_data_structure(self, valid_event):
- """Base CloudEvent accepts any dict as data, but specific event types validate data structure"""
- event_with_empty_data = valid_event.copy()
- event_with_empty_data['data'] = {}
- # Base CloudEvent accepts any dict
- event = CloudEvent(**event_with_empty_data)
- assert event.data == {}
-
- # But MeshInboxMessageEvent should reject empty data
- with pytest.raises(ValidationError) as exc_info:
- MeshInboxMessageEvent(**event_with_empty_data)
- assert 'meshMessageId' in str(exc_info.value).lower() or 'field required' in str(exc_info.value).lower()
diff --git a/utils/event-publisher-py/event_publisher/models.py b/utils/event-publisher-py/event_publisher/models.py
deleted file mode 100644
index 2fa5e889..00000000
--- a/utils/event-publisher-py/event_publisher/models.py
+++ /dev/null
@@ -1,202 +0,0 @@
-from typing import Any, Literal, Optional
-from pydantic import BaseModel, ConfigDict, Field, field_validator
-
-
-class CloudEvent(BaseModel):
- # Required fields - NHS Notify CloudEvents profile
- specversion: Literal['1.0'] = Field(
- default='1.0',
- description='CloudEvents specification version'
- )
- id: str = Field(
- ...,
- description='Unique identifier for this event instance (UUID)'
- )
- source: str = Field(
- ...,
- description='Event source for digital letters domain'
- )
- subject: str = Field(
- ...,
- description='Path in the form customer/{id}/recipient/{id} where each {id} is a UUID'
- )
- type: str = Field(
- ...,
- description='Concrete versioned event type string'
- )
- time: str = Field(
- ...,
- description='Timestamp when the event occurred (RFC 3339)'
- )
- recordedtime: str = Field(
- ...,
- description='Timestamp when the event was recorded/persisted'
- )
- severitynumber: int = Field(
- ...,
- ge=0,
- le=5,
- description='Numeric severity (TRACE=0, DEBUG=1, INFO=2, WARN=3, ERROR=4, FATAL=5)'
- )
- traceparent: str = Field(
- ...,
- description='W3C Trace Context traceparent header value'
- )
- data: dict[str, Any] = Field(
- ...,
- description='Digital letters payload'
- )
-
- # Optional fields
- dataschema: Optional[str] = Field(
- None,
- description='Canonical URI of the event data schema'
- )
- datacontenttype: Optional[Literal['application/json']] = Field(
- None,
- description='Media type for the data field'
- )
- severitytext: Optional[Literal['TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL']] = Field(
- None,
- description='Log severity level name'
- )
- tracestate: Optional[str] = Field(
- None,
- description='Optional W3C Trace Context tracestate header value'
- )
- partitionkey: Optional[str] = Field(
- None,
- min_length=1,
- max_length=64,
- description='Partition / ordering key'
- )
- sequence: Optional[str] = Field(
- None,
- description='Zero-padded 20 digit numeric sequence'
- )
- sampledrate: Optional[int] = Field(
- None,
- ge=1,
- description='Sampling factor: number of similar occurrences this event represents'
- )
- dataclassification: Optional[Literal['public', 'internal', 'confidential', 'restricted']] = Field(
- None,
- description='Data sensitivity classification'
- )
- dataregulation: Optional[Literal['GDPR', 'HIPAA', 'PCI-DSS', 'ISO-27001', 'NIST-800-53', 'CCPA']] = Field(
- None,
- description='Regulatory regime tag'
- )
- datacategory: Optional[Literal['non-sensitive', 'standard', 'sensitive', 'special-category']] = Field(
- None,
- description='Data category classification'
- )
-
- @field_validator('source')
- @classmethod
- def validate_source(cls, v: str) -> str:
- if not v:
- raise ValueError('Source cannot be empty')
- import re
- # Must match NHS Notify CloudEvents pattern
- pattern = r'^/nhs/england/notify/(production|staging|development|uat)/(primary|secondary|dev-\d+)/data-plane/digitalletters/mesh$'
-
- if not re.match(pattern, v):
- raise ValueError(
- f'Invalid source pattern: {v}. '
- 'Must match /nhs/england/notify/{environment}/{instance}/{plane}/digitalletters/mesh'
- )
- return v
-
- @field_validator('subject')
- @classmethod
- def validate_subject(cls, v: str) -> str:
- import re
- if not re.match(
- r'^customer/[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}/recipient/[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$',
- v
- ):
- raise ValueError('Subject must be in format customer/{uuid}/recipient/{uuid}')
- return v
-
- @field_validator('type')
- @classmethod
- def validate_type(cls, v: str) -> str:
- import re
- if not re.match(r'^uk\.nhs\.notify\.digital\.letters\.[a-z0-9.]+\.v\d+$', v):
- raise ValueError(f'Invalid type pattern: {v}')
- return v
-
- @field_validator('traceparent')
- @classmethod
- def validate_traceparent(cls, v: str) -> str:
- import re
- if not re.match(r'^00-[0-9a-f]{32}-[0-9a-f]{16}-[0-9a-f]{2}$', v):
- raise ValueError('Invalid traceparent format')
- return v
-
- @field_validator('partitionkey')
- @classmethod
- def validate_partitionkey(cls, v: Optional[str]) -> Optional[str]:
- if v is None:
- return v
- import re
- if not re.match(r'^[a-z0-9-]+$', v):
- raise ValueError('Partition key must only contain lowercase letters, numbers, and hyphens')
- return v
-
- @field_validator('sequence')
- @classmethod
- def validate_sequence(cls, v: Optional[str]) -> Optional[str]:
- if v is None:
- return v
- import re
- if not re.match(r'^\d{20}$', v):
- raise ValueError('Sequence must be exactly 20 digits')
- return v
-
- model_config = ConfigDict(extra='allow')
-
-
-class MeshInboxMessageData(BaseModel):
- """Data payload for MESH inbox message received event"""
- meshMessageId: str = Field(..., min_length=1)
- senderId: str = Field(..., min_length=1)
- messageReference: str = Field(..., min_length=1)
-
-
-class MeshInboxMessageEvent(CloudEvent):
- """Complete CloudEvent for MESH inbox message received"""
- data: MeshInboxMessageData
-
- @field_validator('data', mode='before')
- @classmethod
- def validate_data(cls, v: Any) -> MeshInboxMessageData:
- """Ensure data is validated as MeshInboxMessageData"""
- if isinstance(v, MeshInboxMessageData):
- return v
- if isinstance(v, dict):
- return MeshInboxMessageData(**v)
- raise ValueError('data must be a dict with meshMessageId and senderId')
-
-
-class MeshDownloadMessageData(BaseModel):
- """Data payload for MESH inbox message downloaded event"""
- messageReference: str = Field(..., min_length=1)
- senderId: str = Field(..., min_length=1)
- messageUri: str = Field(..., min_length=1)
-
-
-class MeshDownloadMessageEvent(CloudEvent):
- """Complete CloudEvent for MESH inbox message downloaded"""
- data: MeshDownloadMessageData
-
- @field_validator('data', mode='before')
- @classmethod
- def validate_data(cls, v: Any) -> MeshDownloadMessageData:
- """Ensure data is validated as MeshDownloadMessageData"""
- if isinstance(v, MeshDownloadMessageData):
- return v
- if isinstance(v, dict):
- return MeshDownloadMessageData(**v)
- raise ValueError('data must be a dict with messageReference, senderId, and messageUri')
diff --git a/utils/get_version.sh b/utils/get_version.sh
old mode 100755
new mode 100644
diff --git a/utils/metric-publishers/Makefile b/utils/metric-publishers/Makefile
deleted file mode 100644
index 1a78108a..00000000
--- a/utils/metric-publishers/Makefile
+++ /dev/null
@@ -1,31 +0,0 @@
-PACKAGE=metric_publishers
-VERSION=0.1.0
-
-install:
- pip install -r requirements.txt
-
-install-dev: install
- pip install -r requirements-dev.txt
-
-test:
- cd ../.. && PYTHONPATH=utils/metric-publishers:$$PYTHONPATH pytest utils/metric-publishers/tests/ -v
-
-coverage:
- cd ../.. && PYTHONPATH=utils/metric-publishers:$$PYTHONPATH pytest utils/metric-publishers/tests/ \
- --cov=utils/metric-publishers/metric_publishers \
- --cov-config=utils/metric-publishers/pytest.ini \
- --cov-report=html:utils/metric-publishers/htmlcov \
- --cov-report=term-missing \
- --cov-report=xml:utils/metric-publishers/coverage.xml \
- --cov-branch
-
-lint:
- pylint metric_publishers
-
-format:
- autopep8 -ri .
-
-clean:
- rm -rf target
-
-.PHONY: audit install install-dev test coverage lint format clean
diff --git a/utils/metric-publishers/README.md b/utils/metric-publishers/README.md
deleted file mode 100644
index 7ddd6d53..00000000
--- a/utils/metric-publishers/README.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# metric-publishers module
-
-This is a module is for reporting the certificate expiry time to CloudWatch metric, currently this is used for monitoring the MeshClient certificate expiry.
-
-## Dependencies
-
-- make
-- [poetry](https://python-poetry.org/docs/#installation) - package management for Python applications
-
-## Test, Build and Package
-
-`make install` - install dependencies into local virtual environment (in `.venv` directory)
-
-`make test` - run unit tests for the package
-
-`make clean` - remove generated files from the project
-
-## Configuration
-
-### VSCode
-
-If using VSCode, after running `make install`, ensure your Python interpreter is set to the `.venv` directory (cmd+shift+p, "Python: Select Interpreter")
diff --git a/utils/metric-publishers/metric_publishers/__init__.py b/utils/metric-publishers/metric_publishers/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/utils/metric-publishers/package.json b/utils/metric-publishers/package.json
deleted file mode 100644
index 7b112beb..00000000
--- a/utils/metric-publishers/package.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
- "description": "python metrics library",
- "name": "@comms/metric-publishers",
- "private": true,
- "scripts": {
- "audit": "make audit",
- "lint": "make lint",
- "lint:fmt": "make format",
- "python-install": "make install",
- "test:unit": "make test",
- "test:unit:coverage": "make test-coverage",
- "typecheck": "echo this package contains no typescript"
- },
- "version": "1.0.0"
-}
diff --git a/utils/metric-publishers/requirements-dev.txt b/utils/metric-publishers/requirements-dev.txt
deleted file mode 100644
index 8a4c6ef1..00000000
--- a/utils/metric-publishers/requirements-dev.txt
+++ /dev/null
@@ -1,6 +0,0 @@
--r requirements.txt
-pylint>=2.17.5
-pytest>=7.4.0
-pytest-cov>=4.1.0
-autopep8>=2.0.2
-jake>=3.0.1
diff --git a/utils/metric-publishers/requirements.txt b/utils/metric-publishers/requirements.txt
deleted file mode 100644
index 5def5b4a..00000000
--- a/utils/metric-publishers/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-boto3>=1.28.62
-urllib3>=1.26.19,<2.0.0
-idna>=3.7
-requests>=2.32.0
-pyopenssl>=24.2.1
diff --git a/utils/package_python_lambda.sh b/utils/package_python_lambda.sh
new file mode 100755
index 00000000..b32d8de6
--- /dev/null
+++ b/utils/package_python_lambda.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+set -e
+
+component_name="$1"
+
+utilsdir=$(realpath "$(dirname "$0")")
+source "${utilsdir}/get_version.sh"
+
+dist_dir="${PWD}/target/dist"
+rm -rf "${dist_dir}"
+mkdir -p "${dist_dir}"
+
+# Extract internal (file://) and external dependencies from requirements.txt
+grep -E '^-e ' requirements.txt | sed 's|^-e ||' > target/internal_requirements.txt || true
+grep -vE '^-e ' requirements.txt > target/external_requirements.txt || true
+
+# Install external dependencies (from PyPI)
+pip install --platform manylinux2014_x86_64 --only-binary=:all: -r target/external_requirements.txt --target "${dist_dir}" --python-version 3.14 --implementation cp
+
+# Install internal dependencies (local packages)
+pip install -r target/internal_requirements.txt --target "${dist_dir}"
+
+# Bundle application code
+pip install . --no-deps --target "${dist_dir}"
diff --git a/utils/py-mock-mesh/py_mock_mesh/mesh_client.py b/utils/py-mock-mesh/py_mock_mesh/mesh_client.py
index 46ae669c..e35729f4 100644
--- a/utils/py-mock-mesh/py_mock_mesh/mesh_client.py
+++ b/utils/py-mock-mesh/py_mock_mesh/mesh_client.py
@@ -73,7 +73,8 @@ def retrieve_message(self, message_id):
self.__log.warning(f"Message {message_id} not found in inbox")
return None
except Exception as e:
- self.__log.error(f"Error retrieving message {message_id}: {str(e)}")
+ self.__log.error(
+ f"Error retrieving message {message_id}: {str(e)}")
return None
def send_message(self, recipient, data, **kwargs):
diff --git a/utils/py-mock-mesh/py_mock_mesh/mesh_message.py b/utils/py-mock-mesh/py_mock_mesh/mesh_message.py
index af627744..1a1c25d8 100644
--- a/utils/py-mock-mesh/py_mock_mesh/mesh_message.py
+++ b/utils/py-mock-mesh/py_mock_mesh/mesh_message.py
@@ -22,6 +22,7 @@ class InvalidHeaderException(Exception):
"""
Indicates an invalid header on a MESH message
"""
+
def __init__(self, header_key, header_value):
self.header_key = header_key
self.header_value = header_value
diff --git a/utils/py-utils/Makefile b/utils/py-utils/Makefile
new file mode 100644
index 00000000..fbc1583e
--- /dev/null
+++ b/utils/py-utils/Makefile
@@ -0,0 +1,24 @@
+.PHONY: install install-dev test coverage clean
+
+install:
+ pip install -r requirements.txt
+
+install-dev: install
+ pip install -r requirements-dev.txt
+
+test:
+ cd ../.. && PYTHONPATH=utils/py-utils:$$PYTHONPATH pytest utils/py-utils/dl_utils/__tests__/ -v
+
+coverage:
+ cd ../.. && PYTHONPATH=utils/py-utils:$$PYTHONPATH pytest utils/py-utils/dl_utils/__tests__/ \
+ --cov=utils/py-utils/dl_utils \
+ --cov-config=utils/py-utils/pytest.ini \
+ --cov-report=html:utils/py-utils/htmlcov \
+ --cov-report=term-missing \
+ --cov-report=xml:utils/py-utils/coverage.xml \
+ --cov-branch
+
+clean:
+ rm -rf dist/ .coverage htmlcov/ .pytest_cache/ coverage.xml
+ find . -type d -name __pycache__ -exec rm -rf {} +
+ find . -type f -name '*.pyc' -delete
diff --git a/utils/py-utils/dl_utils/__init__.py b/utils/py-utils/dl_utils/__init__.py
new file mode 100644
index 00000000..1c8046ba
--- /dev/null
+++ b/utils/py-utils/dl_utils/__init__.py
@@ -0,0 +1,36 @@
+"""
+Utility library for Python projects.
+
+"""
+
+from .event_publisher import EventPublisher
+
+from .mesh_config import (
+ BaseMeshConfig,
+ InvalidMeshEndpointError,
+ InvalidEnvironmentVariableError,
+)
+
+from .log_config import log
+from .store_file import store_file
+
+from .sender_lookup import SenderLookup
+
+from .metric_client import Metric
+from .certificate_monitor import (
+ CertificateExpiryMonitor,
+ report_expiry_time
+)
+
+__all__ = [
+ 'EventPublisher',
+ 'BaseMeshConfig',
+ 'InvalidMeshEndpointError',
+ 'InvalidEnvironmentVariableError',
+ 'store_file',
+ 'log',
+ 'SenderLookup',
+ 'Metric',
+ 'CertificateExpiryMonitor',
+ 'report_expiry_time',
+]
diff --git a/utils/metric-publishers/tests/test_certificate_monitor.py b/utils/py-utils/dl_utils/__tests__/test_certificate_monitor.py
similarity index 95%
rename from utils/metric-publishers/tests/test_certificate_monitor.py
rename to utils/py-utils/dl_utils/__tests__/test_certificate_monitor.py
index 8fcbcc6d..95a8a9e1 100644
--- a/utils/metric-publishers/tests/test_certificate_monitor.py
+++ b/utils/py-utils/dl_utils/__tests__/test_certificate_monitor.py
@@ -1,6 +1,6 @@
from unittest.mock import Mock, patch
-from metric_publishers.certificate_monitor import CertificateExpiryMonitor, report_expiry_time
-from metric_publishers.metric_client import Metric
+from dl_utils.certificate_monitor import CertificateExpiryMonitor, report_expiry_time
+from dl_utils.metric_client import Metric
import OpenSSL
import OpenSSL.crypto
import tempfile
diff --git a/utils/event-publisher-py/event_publisher/__tests__/test_event_publisher.py b/utils/py-utils/dl_utils/__tests__/test_event_publisher.py
similarity index 78%
rename from utils/event-publisher-py/event_publisher/__tests__/test_event_publisher.py
rename to utils/py-utils/dl_utils/__tests__/test_event_publisher.py
index 91ba77fd..9cdd3bfc 100644
--- a/utils/event-publisher-py/event_publisher/__tests__/test_event_publisher.py
+++ b/utils/py-utils/dl_utils/__tests__/test_event_publisher.py
@@ -4,7 +4,7 @@
from uuid import uuid4
from botocore.exceptions import ClientError
-from event_publisher import EventPublisher
+from dl_utils.event_publisher import EventPublisher
@pytest.fixture
@@ -85,6 +85,19 @@ def valid_cloud_event2():
},
}
+@pytest.fixture(name='mock_validator')
+def create_mock_validator():
+ def validator(**_kwargs):
+ ## Validation always succeeds.
+ pass
+ return validator
+
+
+@pytest.fixture(name='mock_failing_validator')
+def create_mock_failing_validator():
+ def validator(**_kwargs):
+ raise ValueError('Validation failed')
+ return validator
@pytest.fixture
def invalid_cloud_event():
@@ -96,15 +109,18 @@ def invalid_cloud_event():
class TestEventPublishing:
- def test_should_return_empty_array_when_no_events_provided(self, test_config, mock_events_client, mock_sqs_client):
+ def test_should_return_empty_array_when_no_events_provided(
+ self, test_config, mock_events_client, mock_sqs_client, mock_validator):
publisher = EventPublisher(**test_config)
- result = publisher.send_events([])
+ result = publisher.send_events([], validator=mock_validator)
assert result == []
mock_events_client.put_events.assert_not_called()
mock_sqs_client.send_message_batch.assert_not_called()
- def test_should_send_valid_events_to_eventbridge(self, test_config, mock_events_client, mock_sqs_client, valid_cloud_event, valid_cloud_event2):
+ def test_should_send_valid_events_to_eventbridge(
+ self, test_config, mock_events_client, mock_sqs_client,
+ valid_cloud_event, valid_cloud_event2, mock_validator):
mock_events_client.put_events.return_value = {
'FailedEntryCount': 0,
'Entries': [{'EventId': 'event-1'}]
@@ -114,7 +130,8 @@ def test_should_send_valid_events_to_eventbridge(self, test_config, mock_events_
}
publisher = EventPublisher(**test_config)
- result = publisher.send_events([valid_cloud_event, valid_cloud_event2])
+ result = publisher.send_events([valid_cloud_event, valid_cloud_event2],
+ validator=mock_validator)
assert result == []
assert mock_events_client.put_events.call_count == 1
@@ -126,13 +143,14 @@ def test_should_send_valid_events_to_eventbridge(self, test_config, mock_events_
assert call_args['Entries'][0]['Detail'] == json.dumps(valid_cloud_event)
assert call_args['Entries'][0]['EventBusName'] == test_config['event_bus_arn']
- def test_should_send_invalid_events_directly_to_dlq(self, test_config, mock_sqs_client, invalid_cloud_event):
+ def test_should_send_invalid_events_directly_to_dlq(
+ self, test_config, mock_sqs_client, invalid_cloud_event, mock_failing_validator):
mock_sqs_client.send_message_batch.return_value = {
'Successful': [{'Id': 'msg-1', 'MessageId': 'success-1', 'MD5OfMessageBody': 'hash1'}]
}
publisher = EventPublisher(**test_config)
- result = publisher.send_events([invalid_cloud_event])
+ result = publisher.send_events([invalid_cloud_event], validator=mock_failing_validator)
assert result == []
assert mock_sqs_client.send_message_batch.call_count == 1
@@ -143,7 +161,9 @@ def test_should_send_invalid_events_directly_to_dlq(self, test_config, mock_sqs_
assert call_args['Entries'][0]['MessageBody'] == json.dumps(invalid_cloud_event)
assert call_args['Entries'][0]['MessageAttributes']['DlqReason']['StringValue'] == 'INVALID_EVENT'
- def test_should_send_failed_eventbridge_events_to_dlq(self, test_config, mock_events_client, mock_sqs_client, valid_cloud_event, valid_cloud_event2):
+ def test_should_send_failed_eventbridge_events_to_dlq(
+ self, test_config, mock_events_client, mock_sqs_client,
+ valid_cloud_event, valid_cloud_event2, mock_validator):
mock_events_client.put_events.return_value = {
'FailedEntryCount': 1,
'Entries': [
@@ -156,7 +176,8 @@ def test_should_send_failed_eventbridge_events_to_dlq(self, test_config, mock_ev
}
publisher = EventPublisher(**test_config)
- result = publisher.send_events([valid_cloud_event, valid_cloud_event2])
+ result = publisher.send_events([valid_cloud_event, valid_cloud_event2],
+ validator=mock_validator)
assert result == []
assert mock_events_client.put_events.call_count == 1
@@ -173,7 +194,9 @@ def test_should_send_failed_eventbridge_events_to_dlq(self, test_config, mock_ev
assert dlq_call_args['Entries'][0]['MessageBody'] == json.dumps(valid_cloud_event)
assert dlq_call_args['Entries'][0]['MessageAttributes']['DlqReason']['StringValue'] == 'EVENTBRIDGE_FAILURE'
- def test_should_handle_eventbridge_send_error_and_send_all_events_to_dlq(self, test_config, mock_events_client, mock_sqs_client, valid_cloud_event, valid_cloud_event2):
+ def test_should_handle_eventbridge_send_error_and_send_all_events_to_dlq(
+ self, test_config, mock_events_client, mock_sqs_client,
+ valid_cloud_event, valid_cloud_event2, mock_validator):
mock_events_client.put_events.side_effect = ClientError(
{'Error': {'Code': 'InternalError', 'Message': 'EventBridge error'}},
'PutEvents'
@@ -183,14 +206,16 @@ def test_should_handle_eventbridge_send_error_and_send_all_events_to_dlq(self, t
}
publisher = EventPublisher(**test_config)
- result = publisher.send_events([valid_cloud_event, valid_cloud_event2])
+ result = publisher.send_events([valid_cloud_event, valid_cloud_event2],
+ validator=mock_validator)
assert result == []
assert mock_events_client.put_events.call_count == 1
# Should call DLQ once for all events after EventBridge failure
assert mock_sqs_client.send_message_batch.call_count == 1
- def test_should_return_failed_events_when_dlq_also_fails(self, test_config, mock_sqs_client, invalid_cloud_event):
+ def test_should_return_failed_events_when_dlq_also_fails(
+ self, test_config, mock_sqs_client, invalid_cloud_event, mock_failing_validator):
def mock_send_message_batch(**kwargs):
first_entry_id = kwargs['Entries'][0]['Id']
return {
@@ -205,24 +230,26 @@ def mock_send_message_batch(**kwargs):
mock_sqs_client.send_message_batch.side_effect = mock_send_message_batch
publisher = EventPublisher(**test_config)
- result = publisher.send_events([invalid_cloud_event])
+ result = publisher.send_events([invalid_cloud_event], validator=mock_failing_validator)
assert result == [invalid_cloud_event]
assert mock_sqs_client.send_message_batch.call_count == 1
- def test_should_handle_dlq_send_error_and_return_all_events_as_failed(self, test_config, mock_sqs_client, invalid_cloud_event):
+ def test_should_handle_dlq_send_error_and_return_all_events_as_failed(
+ self, test_config, mock_sqs_client, invalid_cloud_event, mock_failing_validator):
mock_sqs_client.send_message_batch.side_effect = ClientError(
{'Error': {'Code': 'InternalError', 'Message': 'DLQ error'}},
'SendMessageBatch'
)
publisher = EventPublisher(**test_config)
- result = publisher.send_events([invalid_cloud_event])
+ result = publisher.send_events([invalid_cloud_event], validator=mock_failing_validator)
assert result == [invalid_cloud_event]
assert mock_sqs_client.send_message_batch.call_count == 1
- def test_should_send_to_eventbridge_in_batches(self, test_config, mock_events_client, valid_cloud_event):
+ def test_should_send_to_eventbridge_in_batches(
+ self, test_config, mock_events_client, valid_cloud_event, mock_validator):
large_event_array = [
{**valid_cloud_event, 'id': str(uuid4())}
for _ in range(25)
@@ -234,7 +261,7 @@ def test_should_send_to_eventbridge_in_batches(self, test_config, mock_events_cl
}
publisher = EventPublisher(**test_config)
- result = publisher.send_events(large_event_array)
+ result = publisher.send_events(large_event_array, validator=mock_validator)
assert result == []
assert mock_events_client.put_events.call_count == 3
@@ -245,7 +272,8 @@ def test_should_send_to_eventbridge_in_batches(self, test_config, mock_events_cl
assert len(calls[1][1]['Entries']) == 10
assert len(calls[2][1]['Entries']) == 5
- def test_should_send_to_dlq_in_batches(self, test_config, mock_sqs_client, invalid_cloud_event):
+ def test_should_send_to_dlq_in_batches(
+ self, test_config, mock_sqs_client, invalid_cloud_event, mock_failing_validator):
large_event_array = [
{**invalid_cloud_event, 'id': str(uuid4())}
for _ in range(25)
@@ -264,7 +292,7 @@ def mock_send_message_batch(**kwargs):
mock_sqs_client.send_message_batch.side_effect = mock_send_message_batch
publisher = EventPublisher(**test_config)
- result = publisher.send_events(large_event_array)
+ result = publisher.send_events(large_event_array, validator=mock_failing_validator)
assert len(result) == 25
assert mock_sqs_client.send_message_batch.call_count == 3
@@ -288,7 +316,9 @@ def test_should_throw_error_when_dlq_url_is_missing(self, test_config):
with pytest.raises(ValueError, match='dlq_url has not been specified'):
EventPublisher(**test_config)
- def test_should_be_reusable_for_multiple_calls(self, test_config, mock_events_client, mock_sqs_client, valid_cloud_event, valid_cloud_event2):
+ def test_should_be_reusable_for_multiple_calls(
+ self, test_config, mock_events_client, mock_sqs_client,
+ valid_cloud_event, valid_cloud_event2, mock_validator):
mock_events_client.put_events.return_value = {
'FailedEntryCount': 0,
'Entries': [{'EventId': 'event-1'}]
@@ -300,11 +330,11 @@ def test_should_be_reusable_for_multiple_calls(self, test_config, mock_events_cl
publisher = EventPublisher(**test_config)
# First call
- result1 = publisher.send_events([valid_cloud_event])
+ result1 = publisher.send_events([valid_cloud_event], validator=mock_validator)
assert result1 == []
# Second call with same publisher instance
- result2 = publisher.send_events([valid_cloud_event2])
+ result2 = publisher.send_events([valid_cloud_event2], validator=mock_validator)
assert result2 == []
assert mock_events_client.put_events.call_count == 2
diff --git a/utils/py-utils/dl_utils/__tests__/test_log_config.py b/utils/py-utils/dl_utils/__tests__/test_log_config.py
new file mode 100644
index 00000000..19390d00
--- /dev/null
+++ b/utils/py-utils/dl_utils/__tests__/test_log_config.py
@@ -0,0 +1,29 @@
+import pytest
+from unittest.mock import patch, MagicMock
+import structlog
+from dl_utils import log_config
+import importlib
+import json
+
+class TestLogConfig:
+ def test_log_output_is_json_formatted(self, capsys):
+ """Test that log output is JSON formatted."""
+
+ # Create a new logger with the configured settings
+ test_log = structlog.get_logger()
+
+ # Log a test message
+ test_log.info("test_event", test_key="test_value")
+
+ # Capture output
+ captured = capsys.readouterr()
+
+ # Verify output is valid JSON
+ try:
+ log_output = json.loads(captured.out.strip())
+ assert "event" in log_output
+ assert log_output["event"] == "test_event"
+ assert "test_key" in log_output
+ assert log_output["test_key"] == "test_value"
+ except json.JSONDecodeError:
+ pytest.fail("Log output should be valid JSON")
diff --git a/utils/py-utils/dl_utils/__tests__/test_mesh_config.py b/utils/py-utils/dl_utils/__tests__/test_mesh_config.py
new file mode 100644
index 00000000..a42253b7
--- /dev/null
+++ b/utils/py-utils/dl_utils/__tests__/test_mesh_config.py
@@ -0,0 +1,134 @@
+import pytest
+import os
+from unittest.mock import Mock, patch, MagicMock
+from dl_utils.mesh_config import (
+ BaseMeshConfig,
+ InvalidMeshEndpointError,
+ InvalidEnvironmentVariableError
+)
+
+
+class TestBaseMeshConfig:
+ """Test suite for BaseMeshConfig class"""
+
+ @pytest.fixture
+ def mock_ssm(self):
+ """Mock SSM client"""
+ ssm = Mock()
+ ssm.get_parameter.return_value = {
+ 'Parameter': {
+ 'Value': '{"mesh_endpoint": "TEST", "mesh_mailbox": "test_mailbox", '
+ '"mesh_mailbox_password": "test_password", "mesh_shared_key": "test_key"}'
+ }
+ }
+ return ssm
+
+ @pytest.fixture
+ def mock_s3(self):
+ """Mock S3 client"""
+ return Mock()
+
+ @pytest.fixture
+ def env_vars(self):
+ """Setup required environment variables"""
+ return {
+ 'ENVIRONMENT': 'test',
+ 'SSM_MESH_PREFIX': '/test/mesh'
+ }
+
+ def test_init_without_required_env_vars(self, mock_ssm, mock_s3):
+ """Test initialization fails without required environment variables"""
+
+ class TestConfig(BaseMeshConfig):
+ _REQUIRED_ENV_VAR_MAP = {
+ 'environment': 'ENVIRONMENT',
+ 'ssm_mesh_prefix': 'SSM_MESH_PREFIX'
+ }
+
+ with pytest.raises(InvalidEnvironmentVariableError) as exc_info:
+ TestConfig(ssm=mock_ssm, s3_client=mock_s3)
+
+ assert 'Required environment variables' in str(exc_info.value)
+
+ def test_init_with_required_env_vars(self, mock_ssm, mock_s3, env_vars):
+ """Test successful initialization with required environment variables"""
+
+ class TestConfig(BaseMeshConfig):
+ _REQUIRED_ENV_VAR_MAP = {
+ 'environment': 'ENVIRONMENT',
+ 'ssm_mesh_prefix': 'SSM_MESH_PREFIX'
+ }
+
+ with patch.dict(os.environ, env_vars):
+ config = TestConfig(ssm=mock_ssm, s3_client=mock_s3)
+ assert config.environment == 'test'
+ assert config.ssm_mesh_prefix == '/test/mesh'
+
+ def test_optional_env_vars_use_mesh_mock(self, mock_ssm, mock_s3, env_vars):
+ """Test optional USE_MESH_MOCK environment variable"""
+
+ class TestConfig(BaseMeshConfig):
+ _REQUIRED_ENV_VAR_MAP = {}
+
+ env_with_mock = {**env_vars, 'USE_MESH_MOCK': 'true'}
+
+ with patch.dict(os.environ, env_with_mock, clear=True):
+ config = TestConfig(ssm=mock_ssm, s3_client=mock_s3)
+ assert config.use_mesh_mock is True
+
+ def test_optional_env_vars_use_mesh_mock_false(self, mock_ssm, mock_s3, env_vars):
+ """Test USE_MESH_MOCK set to false"""
+
+ class TestConfig(BaseMeshConfig):
+ _REQUIRED_ENV_VAR_MAP = {}
+
+ env_with_mock = {**env_vars, 'USE_MESH_MOCK': 'false'}
+
+ with patch.dict(os.environ, env_with_mock, clear=True):
+ config = TestConfig(ssm=mock_ssm, s3_client=mock_s3)
+ assert config.use_mesh_mock is False
+
+ @patch('dl_utils.mesh_config.mesh_client')
+ def test_lookup_endpoint_valid(self, mock_mesh_client, mock_ssm, mock_s3):
+ """Test lookup_endpoint with valid endpoint"""
+
+ class TestConfig(BaseMeshConfig):
+ _REQUIRED_ENV_VAR_MAP = {}
+
+ mock_mesh_client.TEST_ENDPOINT = 'https://test.endpoint'
+
+ config = TestConfig(ssm=mock_ssm, s3_client=mock_s3)
+ result = config.lookup_endpoint('TEST')
+
+ assert result == 'https://test.endpoint'
+
+
+ @patch('dl_utils.mesh_config.MockMeshClient')
+ def test_build_mesh_client_mock(self, mock_mesh_client_class, mock_ssm, mock_s3):
+ """Test build_mesh_client with USE_MESH_MOCK=true"""
+
+ class TestConfig(BaseMeshConfig):
+ _REQUIRED_ENV_VAR_MAP = {}
+
+ config = TestConfig(ssm=mock_ssm, s3_client=mock_s3)
+ config.use_mesh_mock = True
+ config.mesh_endpoint = 'https://mock.endpoint'
+ config.mesh_mailbox = 'test_mailbox'
+
+ result = config.build_mesh_client()
+
+ mock_mesh_client_class.assert_called_once()
+ assert result is not None
+
+ def test_lookup_endpoint_invalid(self, mock_ssm, mock_s3):
+ """Test lookup_endpoint with invalid endpoint raises error"""
+
+ class TestConfig(BaseMeshConfig):
+ _REQUIRED_ENV_VAR_MAP = {}
+
+ config = TestConfig(ssm=mock_ssm, s3_client=mock_s3)
+
+ with pytest.raises(InvalidMeshEndpointError) as exc_info:
+ config.lookup_endpoint('INVALID')
+
+ assert 'mesh_client module has no such endpoint INVALID_ENDPOINT' in str(exc_info.value)
diff --git a/utils/metric-publishers/tests/test_metric_client.py b/utils/py-utils/dl_utils/__tests__/test_metric_client.py
similarity index 94%
rename from utils/metric-publishers/tests/test_metric_client.py
rename to utils/py-utils/dl_utils/__tests__/test_metric_client.py
index cb422184..6d22cf0a 100644
--- a/utils/metric-publishers/tests/test_metric_client.py
+++ b/utils/py-utils/dl_utils/__tests__/test_metric_client.py
@@ -1,6 +1,6 @@
import json
from unittest.mock import Mock, patch
-from metric_publishers.metric_client import Metric
+from dl_utils.metric_client import Metric
@patch('builtins.print')
diff --git a/lambdas/mesh-poll/mesh_poll/__tests__/test_sender_lookup.py b/utils/py-utils/dl_utils/__tests__/test_sender_lookup.py
similarity index 71%
rename from lambdas/mesh-poll/mesh_poll/__tests__/test_sender_lookup.py
rename to utils/py-utils/dl_utils/__tests__/test_sender_lookup.py
index 9eec72ac..873df41d 100644
--- a/lambdas/mesh-poll/mesh_poll/__tests__/test_sender_lookup.py
+++ b/utils/py-utils/dl_utils/__tests__/test_sender_lookup.py
@@ -3,14 +3,14 @@
"""
import json
from unittest.mock import Mock, call
-from mesh_poll.sender_lookup import SenderLookup
+from dl_utils.sender_lookup import SenderLookup
def setup_mocks():
ssm = Mock()
config = Mock()
- config.ssm_prefix = "/dl/test"
+ config.ssm_senders_prefix = "/dl/test/senders"
logger = Mock()
@@ -81,7 +81,10 @@ def test_load_valid_senders_multiple_pages(self):
assert ssm.get_parameters_by_path.call_count == 2
ssm.get_parameters_by_path.assert_has_calls([
call(Path="/dl/test/senders/", WithDecryption=True),
- call(Path="/dl/test/senders/", WithDecryption=True, NextToken="token123")
+ call(
+ Path="/dl/test/senders/",
+ WithDecryption=True,
+ NextToken="token123")
], any_order=False)
assert sender_lookup.is_valid_sender("MAILBOX_001")
assert sender_lookup.is_valid_sender("MAILBOX_002")
@@ -192,7 +195,7 @@ def test_load_valid_senders_handles_empty_mailbox_id(self):
def test_load_valid_senders_with_trailing_slash_in_path(self):
"""Test that paths with trailing slashes are handled correctly"""
ssm, config, logger = setup_mocks()
- config.ssm_prefix = "/dl/test/" # Trailing slash
+ config.ssm_senders_prefix = "/dl/test/senders/" # Trailing slash
ssm.get_parameters_by_path.return_value = {
"Parameters": [
@@ -239,6 +242,32 @@ def test_get_sender_id_returns_correct_sender_id(self):
assert sender_lookup.get_sender_id("MAILBOX_001") == "sender1"
assert sender_lookup.get_sender_id("MAILBOX_002") == "sender2"
+ def test_get_sender_id_multiple_pages(self):
+ """Test get_sender_id with paginated SSM responses"""
+ ssm, config, logger = setup_mocks()
+
+ # Simulate paginated response
+ ssm.get_parameters_by_path.side_effect = [
+ {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ create_sender_parameter("sender2", "MAILBOX_002"),
+ ],
+ "NextToken": "token123"
+ },
+ {
+ "Parameters": [
+ create_sender_parameter("sender3", "MAILBOX_003"),
+ ],
+ }
+ ]
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_sender_id("MAILBOX_001") == "sender1"
+ assert sender_lookup.get_sender_id("MAILBOX_002") == "sender2"
+ assert sender_lookup.get_sender_id("MAILBOX_003") == "sender3"
+
def test_get_sender_id_case_insensitive(self):
"""Test that get_sender_id lookup is case-insensitive"""
ssm, config, logger = setup_mocks()
@@ -284,6 +313,93 @@ def test_get_sender_id_returns_none_for_empty_mailbox_id(self):
assert sender_lookup.get_sender_id("") is None
assert sender_lookup.get_sender_id(None) is None
+ def test_get_mailbox_id_returns_correct_mailbox_id(self):
+ """Test that get_mailbox_id returns correct mailbox ID for valid sender IDs"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "mailbox_001"),
+ create_sender_parameter("sender2", "mailbox_002"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_mailbox_id("sender1") == "mailbox_001"
+ assert sender_lookup.get_mailbox_id("sender2") == "mailbox_002"
+
+ def test_get_mailbox_id_multiple_pages(self):
+ """Test get_mailbox_id with paginated SSM responses"""
+ ssm, config, logger = setup_mocks()
+
+ # Simulate paginated response
+ ssm.get_parameters_by_path.side_effect = [
+ {
+ "Parameters": [
+ create_sender_parameter("sender1", "mailbox_001"),
+ create_sender_parameter("sender2", "mailbox_002"),
+ ],
+ "NextToken": "token123"
+ },
+ {
+ "Parameters": [
+ create_sender_parameter("sender3", "mailbox_003"),
+ ],
+ }
+ ]
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_mailbox_id("sender1") == "mailbox_001"
+ assert sender_lookup.get_mailbox_id("sender2") == "mailbox_002"
+ assert sender_lookup.get_mailbox_id("sender3") == "mailbox_003"
+
+ def test_get_mailbox_id_case_insensitive(self):
+ """Test that get_mailbox_id lookup is case-insensitive"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("SenderMixedCase", "mailbox_001"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_mailbox_id("SenderMixedCase") == "mailbox_001"
+ assert sender_lookup.get_mailbox_id("SENDERMIXEDCASE") == "mailbox_001"
+ assert sender_lookup.get_mailbox_id("sendermixedcase") == "mailbox_001"
+
+ def test_get_mailbox_id_returns_none_for_unknown_sender(self):
+ """Test that get_mailbox_id returns None for unknown sender IDs"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "mailbox_001"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_mailbox_id("UNKNOWN_SENDER") is None
+
+ def test_get_mailbox_id_returns_none_for_empty_sender_id(self):
+ """Test that get_mailbox_id returns None for empty/None sender IDs"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "mailbox_001"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_mailbox_id("") is None
+ assert sender_lookup.get_mailbox_id(None) is None
+
def test_load_valid_senders_skips_entries_with_missing_sender_id(self):
"""Test that entries without senderId are skipped from validation and mapping"""
ssm, config, logger = setup_mocks()
@@ -307,9 +423,11 @@ def test_load_valid_senders_skips_entries_with_missing_sender_id(self):
# Entry with missing senderId should not be valid or mapped
assert not sender_lookup.is_valid_sender("MAILBOX_BAD")
assert sender_lookup.get_sender_id("MAILBOX_BAD") is None
+ assert sender_lookup.get_mailbox_id("meshMailboxSenderId") is None
assert sender_lookup.is_valid_sender("MAILBOX_001")
assert sender_lookup.get_sender_id("MAILBOX_001") == "sender1"
+ assert sender_lookup.get_mailbox_id("sender1") == "MAILBOX_001"
def test_load_valid_senders_skips_entries_with_empty_sender_id(self):
"""Test that entries with empty senderId are skipped from validation and mapping"""
@@ -327,6 +445,8 @@ def test_load_valid_senders_skips_entries_with_empty_sender_id(self):
# Entry with empty senderId should not be valid or mapped
assert not sender_lookup.is_valid_sender("MAILBOX_002")
assert sender_lookup.get_sender_id("MAILBOX_002") is None
+ assert sender_lookup.get_mailbox_id("") is None
assert sender_lookup.is_valid_sender("MAILBOX_001")
assert sender_lookup.get_sender_id("MAILBOX_001") == "sender1"
+ assert sender_lookup.get_mailbox_id("sender1") == "MAILBOX_001"
diff --git a/utils/py-utils/dl_utils/__tests__/test_store_file.py b/utils/py-utils/dl_utils/__tests__/test_store_file.py
new file mode 100644
index 00000000..84d3a950
--- /dev/null
+++ b/utils/py-utils/dl_utils/__tests__/test_store_file.py
@@ -0,0 +1,61 @@
+import os
+from dl_utils.store_file import store_file
+
+class TestStoreFile:
+ def test_store_file_creates_temp_file(self):
+ """Test that store_file creates a temporary file"""
+ content = b"test content"
+
+ filename = store_file(content)
+
+ assert filename is not None
+ assert os.path.exists(filename)
+
+ # Cleanup
+ os.unlink(filename)
+
+
+ def test_store_file_writes_content(self):
+ """Test that store_file writes the correct content to the file"""
+ content = b"test content for validation"
+
+ filename = store_file(content)
+
+ with open(filename, 'rb') as f:
+ written_content = f.read()
+
+ assert written_content == content
+
+ # Cleanup
+ os.unlink(filename)
+
+
+ def test_store_file_returns_valid_path(self):
+ """Test that store_file returns a valid file path"""
+ content = b"test"
+
+ filename = store_file(content)
+
+ assert isinstance(filename, str)
+ assert len(filename) > 0
+ assert os.path.isabs(filename)
+
+ # Cleanup
+ os.unlink(filename)
+
+
+ def test_store_file_multiple_calls_create_different_files(self):
+ """Test that multiple calls to store_file create different files"""
+ content1 = b"first"
+ content2 = b"second"
+
+ filename1 = store_file(content1)
+ filename2 = store_file(content2)
+
+ assert filename1 != filename2
+ assert os.path.exists(filename1)
+ assert os.path.exists(filename2)
+
+ # Cleanup
+ os.unlink(filename1)
+ os.unlink(filename2)
diff --git a/utils/metric-publishers/metric_publishers/certificate_monitor.py b/utils/py-utils/dl_utils/certificate_monitor.py
similarity index 100%
rename from utils/metric-publishers/metric_publishers/certificate_monitor.py
rename to utils/py-utils/dl_utils/certificate_monitor.py
diff --git a/utils/py-utils/dl_utils/errors.py b/utils/py-utils/dl_utils/errors.py
new file mode 100644
index 00000000..0f36ec17
--- /dev/null
+++ b/utils/py-utils/dl_utils/errors.py
@@ -0,0 +1,13 @@
+"""
+Error handling utilities.
+"""
+
+import traceback
+
+
+def format_exception(exception):
+ """
+ Returns a nicely formatted exception string
+ """
+ return ''.join(traceback.format_exception(
+ type(exception), exception, exception.__traceback__))
diff --git a/utils/event-publisher-py/event_publisher/event_publisher.py b/utils/py-utils/dl_utils/event_publisher.py
similarity index 93%
rename from utils/event-publisher-py/event_publisher/event_publisher.py
rename to utils/py-utils/dl_utils/event_publisher.py
index 83e22226..b533ccba 100644
--- a/utils/event-publisher-py/event_publisher/event_publisher.py
+++ b/utils/py-utils/dl_utils/event_publisher.py
@@ -6,12 +6,11 @@
import json
import logging
-from typing import List, Dict, Any, Optional, Literal
+from typing import List, Dict, Any, Optional, Literal, Callable
from uuid import uuid4
import boto3
from botocore.exceptions import ClientError
from pydantic import ValidationError
-from .models import CloudEvent
DlqReason = Literal['INVALID_EVENT', 'EVENTBRIDGE_FAILURE']
@@ -48,14 +47,14 @@ def __init__(
self.events_client = events_client or boto3.client('events')
self.sqs_client = sqs_client or boto3.client('sqs')
- def _validate_cloud_event(self, event: Dict[str, Any]) -> tuple[bool, Optional[str]]:
+ def _validate_cloud_event(self, event: Dict[str, Any], validator: Callable[..., Any]) -> tuple[bool, Optional[str]]:
"""
- Validate event using Pydantic CloudEvent model.
+ Validate event using the specified validator function.
"""
try:
- CloudEvent(**event)
+ validator(**event)
return (True, None)
- except ValidationError as e:
+ except Exception as e:
return (False, str(e))
def _send_to_event_bridge(self, events: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
@@ -216,11 +215,12 @@ def _send_to_dlq(
return failed_dlqs
- def send_events(self, events: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+ def send_events(self, events: List[Dict[str, Any]],
+ validator: Callable[..., Any]) -> List[Dict[str, Any]]:
"""
Send CloudEvents to EventBridge with validation and DLQ support.
- 1. Validates events against CloudEvent schema
+ 1. Validates events using the specified validator function
2. Sends valid events to EventBridge
3. Routes failed events to DLQ
"""
@@ -233,13 +233,13 @@ def send_events(self, events: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
# Validate events using Pydantic
for event in events:
- is_valid, error_msg = self._validate_cloud_event(event)
+ is_valid, error_msg = self._validate_cloud_event(event, validator)
if is_valid:
valid_events.append(event)
else:
invalid_events.append(event)
self.logger.warning(
- 'CloudEvent validation failed',
+ 'Event validation failed',
extra={
'event_id': event.get('id', 'unknown'),
'validation_error': error_msg
diff --git a/utils/py-utils/dl_utils/log_config.py b/utils/py-utils/dl_utils/log_config.py
new file mode 100644
index 00000000..11baa2e3
--- /dev/null
+++ b/utils/py-utils/dl_utils/log_config.py
@@ -0,0 +1,4 @@
+import structlog
+
+structlog.configure(processors=[structlog.processors.JSONRenderer()])
+log = structlog.get_logger()
diff --git a/utils/event-publisher-py/event_publisher/mesh_config.py b/utils/py-utils/dl_utils/mesh_config.py
similarity index 86%
rename from utils/event-publisher-py/event_publisher/mesh_config.py
rename to utils/py-utils/dl_utils/mesh_config.py
index 64b9ef2c..5e7b12d1 100644
--- a/utils/event-publisher-py/event_publisher/mesh_config.py
+++ b/utils/py-utils/dl_utils/mesh_config.py
@@ -3,15 +3,12 @@
"""
import json
import os
-import tempfile
import boto3
-import structlog
import mesh_client
from py_mock_mesh.mesh_client import MockMeshClient
-from metric_publishers.certificate_monitor import report_expiry_time
-
-structlog.configure(processors=[structlog.processors.JSONRenderer()])
-log = structlog.get_logger()
+from .certificate_monitor import report_expiry_time
+from .log_config import log
+from .store_file import store_file
class InvalidMeshEndpointError(Exception):
@@ -26,16 +23,6 @@ class InvalidEnvironmentVariableError(Exception):
"""
-def store_file(content):
- """
- Writes a temp file and returns the name
- """
- with tempfile.NamedTemporaryFile(delete=False) as file:
- file.write(content)
- file.close()
- return file.name
-
-
class BaseMeshConfig: # pylint: disable=too-many-instance-attributes
"""
Base configuration class for MESH client applications.
@@ -62,7 +49,7 @@ def __init__(self, ssm=None, s3_client=None):
self.mesh_client = None
# Common configuration attributes
- self.ssm_prefix = None
+ self.ssm_mesh_prefix = None
self.environment = None
self.certificate_expiry_metric_name = None
self.certificate_expiry_metric_namespace = None
@@ -108,7 +95,7 @@ def _load_optional_env_vars(self):
def __enter__(self):
# Load MESH configuration from SSM
ssm_response = self.ssm.get_parameter(
- Name=self.ssm_prefix + '/config',
+ Name=self.ssm_mesh_prefix + '/config',
WithDecryption=True
)
mesh_config = json.loads(ssm_response['Parameter']['Value'])
@@ -120,11 +107,11 @@ def __enter__(self):
# Load client certificates from SSM
client_cert_parameter = self.ssm.get_parameter(
- Name=self.ssm_prefix + '/client-cert',
+ Name=self.ssm_mesh_prefix + '/client-cert',
WithDecryption=True
)
client_key_parameter = self.ssm.get_parameter(
- Name=self.ssm_prefix + '/client-key',
+ Name=self.ssm_mesh_prefix + '/client-key',
WithDecryption=True
)
@@ -169,12 +156,13 @@ def build_mesh_client(self):
)
# Use real MESH client
- report_expiry_time(
- self.client_cert,
- self.certificate_expiry_metric_name,
- self.certificate_expiry_metric_namespace,
- self.environment
- )
+ if self.certificate_expiry_metric_name and self.certificate_expiry_metric_namespace:
+ report_expiry_time(
+ self.client_cert,
+ self.certificate_expiry_metric_name,
+ self.certificate_expiry_metric_namespace,
+ self.environment
+ )
return mesh_client.MeshClient(
self.lookup_endpoint(self.mesh_endpoint),
diff --git a/utils/metric-publishers/metric_publishers/metric_client.py b/utils/py-utils/dl_utils/metric_client.py
similarity index 100%
rename from utils/metric-publishers/metric_publishers/metric_client.py
rename to utils/py-utils/dl_utils/metric_client.py
diff --git a/lambdas/mesh-poll/mesh_poll/sender_lookup.py b/utils/py-utils/dl_utils/sender_lookup.py
similarity index 78%
rename from lambdas/mesh-poll/mesh_poll/sender_lookup.py
rename to utils/py-utils/dl_utils/sender_lookup.py
index f44b69de..01672462 100644
--- a/lambdas/mesh-poll/mesh_poll/sender_lookup.py
+++ b/utils/py-utils/dl_utils/sender_lookup.py
@@ -1,3 +1,4 @@
+"""A tool for looking up MESH sender information from SSM Parameter Store"""
import json
from .errors import format_exception
@@ -13,6 +14,7 @@ def __init__(self, ssm, config, logger):
self.__logger = logger
self.__valid_senders = set()
self.__mailbox_to_sender = {}
+ self.__sender_to_mailbox = {}
self.load_valid_senders()
def is_valid_sender(self, mailbox_id):
@@ -33,24 +35,41 @@ def get_sender_id(self, mailbox_id):
return self.__mailbox_to_sender.get(mailbox_id.upper())
+ def get_mailbox_id(self, sender_id):
+ """
+ Get the MESH mailbox ID for a given sender ID
+ """
+ if not sender_id:
+ return None
+
+ return self.__sender_to_mailbox.get(sender_id.upper())
+
def load_valid_senders(self):
"""
Loads mailbox IDs and their corresponding sender IDs into memory
"""
mailbox_ids = set()
mailbox_to_sender = {}
+ sender_to_mailbox = {}
next_token = ""
page_number = 0
while next_token or page_number < 1:
- (page_mailbox_ids, page_mapping, token) = self.__get_page(next_token)
+ (
+ page_mailbox_ids,
+ page_mailbox_to_sender,
+ page_sender_to_mailbox,
+ token
+ ) = self.__get_page(next_token)
mailbox_ids.update(page_mailbox_ids)
- mailbox_to_sender.update(page_mapping)
+ mailbox_to_sender.update(page_mailbox_to_sender)
+ sender_to_mailbox.update(page_sender_to_mailbox)
next_token = token
page_number += 1
self.__valid_senders = mailbox_ids
self.__mailbox_to_sender = mailbox_to_sender
+ self.__sender_to_mailbox = sender_to_mailbox
self.__logger.debug(
f"Loaded {len(self.__valid_senders)} valid sender mailbox IDs")
@@ -58,7 +77,7 @@ def __get_page(self, next_token=""):
"""
Loads a page of sender data and extracts mailbox IDs and sender IDs
"""
- senders_path = f"{self.__config.ssm_prefix.rstrip('/')}/senders/"
+ senders_path = f"{self.__config.ssm_senders_prefix.rstrip('/')}/"
if len(next_token) == 0:
response = self.__ssm.get_parameters_by_path(
@@ -74,6 +93,7 @@ def __get_page(self, next_token=""):
mailbox_ids = set()
mailbox_to_sender = {}
+ sender_to_mailbox = {}
if "Parameters" in response:
for parameter in response["Parameters"]:
@@ -83,9 +103,10 @@ def __get_page(self, next_token=""):
mailbox_id_upper = mailbox_id.upper()
mailbox_ids.add(mailbox_id_upper)
mailbox_to_sender[mailbox_id_upper] = sender_id
+ sender_to_mailbox[sender_id.upper()] = mailbox_id
new_next_token = response.get("NextToken", "")
- return (mailbox_ids, mailbox_to_sender, new_next_token)
+ return (mailbox_ids, mailbox_to_sender, sender_to_mailbox, new_next_token)
def __extract_mailbox_id(self, parameter):
"""
diff --git a/utils/py-utils/dl_utils/store_file.py b/utils/py-utils/dl_utils/store_file.py
new file mode 100644
index 00000000..83a41da5
--- /dev/null
+++ b/utils/py-utils/dl_utils/store_file.py
@@ -0,0 +1,10 @@
+import tempfile
+
+def store_file(content):
+ """
+ Writes a temp file and returns the name
+ """
+ with tempfile.NamedTemporaryFile(delete=False) as file:
+ file.write(content)
+ file.close()
+ return file.name
diff --git a/utils/event-publisher-py/pytest.ini b/utils/py-utils/pytest.ini
similarity index 75%
rename from utils/event-publisher-py/pytest.ini
rename to utils/py-utils/pytest.ini
index 826ac6e2..f704cd77 100644
--- a/utils/event-publisher-py/pytest.ini
+++ b/utils/py-utils/pytest.ini
@@ -2,13 +2,13 @@
python_files = test_*.py
python_classes = Test*
python_functions = test_*
-testpaths = event_publisher/__tests__
+testpaths = dl_utils/__tests__
addopts = -v --tb=short
[coverage:run]
relative_files = True
omit =
- */event_publisher/__tests__/*
+ */dl_utils/__tests__/*
*/test_*.py
*/__pycache__/*
*/venv/*
diff --git a/utils/event-publisher-py/requirements-dev.txt b/utils/py-utils/requirements-dev.txt
similarity index 100%
rename from utils/event-publisher-py/requirements-dev.txt
rename to utils/py-utils/requirements-dev.txt
diff --git a/utils/event-publisher-py/requirements.txt b/utils/py-utils/requirements.txt
similarity index 81%
rename from utils/event-publisher-py/requirements.txt
rename to utils/py-utils/requirements.txt
index 671c270d..3ae7ecbd 100644
--- a/utils/event-publisher-py/requirements.txt
+++ b/utils/py-utils/requirements.txt
@@ -3,5 +3,4 @@ pydantic>=2.0.0
structlog>=21.5.0
mesh-client>=3.2.3
pyopenssl>=24.0.0
--e ../metric-publishers
-e ../py-mock-mesh
diff --git a/utils/event-publisher-py/setup.py b/utils/py-utils/setup.py
similarity index 77%
rename from utils/event-publisher-py/setup.py
rename to utils/py-utils/setup.py
index 8475aa40..d67e19f7 100644
--- a/utils/event-publisher-py/setup.py
+++ b/utils/py-utils/setup.py
@@ -1,7 +1,7 @@
from setuptools import setup, find_packages
setup(
- name="event-publisher-py",
+    name="py-utils",
version="0.1.0",
packages=find_packages(),
)
diff --git a/utils/sender-management/package.json b/utils/sender-management/package.json
index e008904a..88dfbd78 100644
--- a/utils/sender-management/package.json
+++ b/utils/sender-management/package.json
@@ -20,8 +20,8 @@
"private": true,
"scripts": {
"cli": "tsx ./src/entrypoint/cli/index.ts",
- "lint": "eslint .",
- "lint:fix": "eslint . --fix",
+ "lint": "eslint src",
+ "lint:fix": "eslint src --fix",
"test:unit": "jest",
"typecheck": "tsc --noEmit"
},