diff --git a/addons/migrations/.header.md b/addons/migrations/.header.md
index fb6c7c9..4c32f8e 100644
--- a/addons/migrations/.header.md
+++ b/addons/migrations/.header.md
@@ -1,6 +1,57 @@
-# Migrations addon
-This addon enables automatic migrations for Fleet.
-Due to limitations in the AWS provider for Terraform, only Linux platforms are supported.
-This module uses the local-exec provisioner to call aws-cli to complete migrations.
-Due to this, the following commands must be available to the shell:
-- aws
+# Terraform AWS Fleet Database Migration Module
+
+This Terraform module provides a mechanism to trigger database migrations for a Fleet application running on AWS ECS. It is designed to integrate into an infrastructure deployment pipeline, ensuring that database schema changes are applied gracefully, typically during application upgrades.
+
+The core functionality relies on a `null_resource` that executes the module's `migrate.sh` script whenever its trigger (the `task_definition_revision`) changes. The script handles the actual migration process, which involves the following steps (sketched with the AWS CLI just after this list):
+
+1. Scaling down the main Fleet application ECS service.
+2. Running a one-off ECS task using the *new* task definition revision (which contains the updated application code capable of performing the migration). This task executes the necessary Fleet migration command (e.g., `fleet prepare db`).
+3. Scaling the main Fleet application ECS service back up once the migration is complete.
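+
+A rough illustration of these three steps with the AWS CLI is shown below. All names, the container name, the launch type, and the exact migration command are placeholders or assumptions; the real logic, including waiting and error handling, lives in `migrate.sh`:
+
+```bash
+# 1. Scale the Fleet service down so no tasks from the old revision run during the migration.
+aws ecs update-service --cluster my-fleet-cluster --service my-fleet-service --desired-count 0
+aws ecs wait services-stable --cluster my-fleet-cluster --services my-fleet-service
+
+# 2. Run a one-off task from the *new* task definition revision to apply the migrations.
+#    The container name ("fleet") and the command override are assumptions for illustration.
+TASK_ARN=$(aws ecs run-task \
+  --cluster my-fleet-cluster \
+  --task-definition my-fleet-app:5 \
+  --launch-type FARGATE \
+  --network-configuration 'awsvpcConfiguration={subnets=[subnet-xxxxxxxxxxxxxxxxx],securityGroups=[sg-xxxxxxxxxxxxxxxxx]}' \
+  --overrides '{"containerOverrides":[{"name":"fleet","command":["fleet","prepare","db"]}]}' \
+  --query 'tasks[0].taskArn' --output text)
+aws ecs wait tasks-stopped --cluster my-fleet-cluster --tasks "$TASK_ARN"
+
+# 3. Scale the service back up to its normal desired count.
+aws ecs update-service --cluster my-fleet-cluster --service my-fleet-service --desired-count 2
+```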
+
+## Usage
+
+```hcl
+module "fleet_migration" {
+ source = "./path/to/this/module" # Or Git source
+
+ ecs_cluster = "my-fleet-cluster"
+ ecs_service = "my-fleet-service"
+ task_definition = "arn:aws:ecs:us-west-2:123456789012:task-definition/my-fleet-app" # Base ARN without revision
+ task_definition_revision = 5 # The *new* revision to migrate *to*
+ min_capacity = 0 # Scale down to this during migration
+ desired_count = 2 # Scale back up to this after migration
+ subnets = ["subnet-xxxxxxxxxxxxxxxxx", "subnet-yyyyyyyyyyyyyyyyy"]
+ security_groups = ["sg-xxxxxxxxxxxxxxxxx"]
+
+ # Optional: Specify if a separate vulnerability processing service needs coordination
+ # vuln_service = "my-fleet-vuln-service"
+
+ # Optional: Provide an IAM Role ARN for the local-exec script to assume
+ # assume_role_arn = "arn:aws:iam::123456789012:role/MyMigrationRole"
+ # assume_role_session_name = "TerraformFleetMigration"
+
+ # Ensure this module depends on the resource that creates/updates the task definition revision
+ # For example:
+ # depends_on = [aws_ecs_task_definition.fleet_app]
+}
+```
+
+## Workflow
+
+1. When `var.task_definition_revision` changes, Terraform triggers the `null_resource`.
+2. The `local-exec` provisioner executes the `migrate.sh` script located within the module's directory.
+3. It passes the relevant AWS and ECS details (region, cluster, service, task definition, revision, network configuration, scaling parameters, optional role ARN) to the script as `KEY=VALUE` command-line arguments, which the script exports as environment variables.
+4. The `migrate.sh` script, which ships with this module, performs the migration steps against the Fleet database, using the provided parameters to interact with AWS ECS.
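+
+For reference, the rendered `local-exec` command looks roughly like the following (all values are illustrative and line breaks are added for readability); `migrate.sh` splits each `KEY=VALUE` argument and exports it as an environment variable:
+
+```bash
+/bin/bash ./migrate.sh \
+  REGION=us-west-2 \
+  VULN_SERVICE= \
+  ECS_CLUSTER=my-fleet-cluster \
+  TASK_DEFINITION=arn:aws:ecs:us-west-2:123456789012:task-definition/my-fleet-app \
+  TASK_DEFINITION_REVISION=5 \
+  SUBNETS='["subnet-xxxxxxxxxxxxxxxxx","subnet-yyyyyyyyyyyyyyyyy"]' \
+  SECURITY_GROUPS='["sg-xxxxxxxxxxxxxxxxx"]' \
+  ECS_SERVICE=my-fleet-service \
+  MIN_CAPACITY=0 \
+  DESIRED_COUNT=2 \
+  ASSUME_ROLE_ARN= \
+  ASSUME_ROLE_SESSION_NAME=
+```
+
+In principle the script can also be invoked manually with the same style of arguments (for example, when debugging a failed migration), as long as the shell has AWS credentials with the permissions listed under Prerequisites.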
+
+## Prerequisites
+
+* **`bash` shell:** Must be available in the environment where Terraform is executed.
+* **AWS CLI:** Must be installed and configured with credentials in the environment where Terraform is executed. The credentials need permissions to perform ECS actions (DescribeServices, UpdateService, RunTask, DescribeTasks) and potentially STS AssumeRole if `assume_role_arn` is provided.
+* **`migrate.sh` script:** The `migrate.sh` script bundled in this module's directory (`path.module`) contains the actual logic for scaling services and running the migration task. **The Terraform resources in this module only trigger the script; the migration logic itself lives in `migrate.sh`.**
+* **Existing Resources:** The specified ECS Cluster, Service, Task Definition (base ARN), Subnets, and Security Groups must exist.
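+
+As a starting point for the IAM side, the sketch below creates a policy covering the calls listed above. It is illustrative only: resource scoping should be tightened for real use, `sts:AssumeRole` is only needed when `assume_role_arn` is set, and depending on your task definitions and scaling setup you may additionally need `iam:PassRole` and Application Auto Scaling permissions:
+
+```bash
+# Illustrative only: narrow "Resource" to your cluster/service/task-definition ARNs in practice.
+cat > fleet-migration-policy.json <<'EOF'
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Action": [
+        "ecs:DescribeServices",
+        "ecs:UpdateService",
+        "ecs:RunTask",
+        "ecs:DescribeTasks",
+        "sts:AssumeRole"
+      ],
+      "Resource": "*"
+    }
+  ]
+}
+EOF
+aws iam create-policy --policy-name fleet-migration --policy-document file://fleet-migration-policy.json
+```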
+
+## Important Considerations
+
+* **`local-exec`:** This provisioner runs commands on the machine executing Terraform. Ensure this machine has the necessary tools (bash, AWS CLI) and network access/credentials to interact with your AWS environment. This might require specific configuration in CI/CD pipelines.
+* **IAM Permissions:** The credentials used by `local-exec` (either default AWS credentials or the assumed role specified by `assume_role_arn`) require sufficient IAM permissions to manage the specified ECS services and tasks.
+* **State:** The `null_resource` uses the `task_definition_revision` in its `triggers` map. This ensures that Terraform re-runs the provisioner if (and only if) the revision number changes between applies.
diff --git a/addons/migrations/README.md b/addons/migrations/README.md
index 7a83426..08de5a2 100644
--- a/addons/migrations/README.md
+++ b/addons/migrations/README.md
@@ -1,9 +1,60 @@
-# Migrations addon
-This addon enables automatic migrations for Fleet.
-Due to limitations in the AWS provider for Terraform, only Linux platforms are supported.
-This module uses the local-exec provisioner to call aws-cli to complete migrations.
-Due to this, the following commands must be available to the shell:
-- aws
+# Terraform AWS Fleet Database Migration Module
+
+This Terraform module provides a mechanism to trigger database migrations for a Fleet application running on AWS ECS. It is designed to integrate into an infrastructure deployment pipeline, ensuring that database schema changes are applied gracefully, typically during application upgrades.
+
+The core functionality relies on a `null_resource` that executes the module's `migrate.sh` script whenever its trigger (the `task_definition_revision`) changes. The script handles the actual migration process, which involves the following steps (sketched with the AWS CLI just after this list):
+
+1. Scaling down the main Fleet application ECS service.
+2. Running a one-off ECS task using the *new* task definition revision (which contains the updated application code capable of performing the migration). This task executes the necessary Fleet migration command (e.g., `fleet prepare db`).
+3. Scaling the main Fleet application ECS service back up once the migration is complete.
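+
+A rough illustration of these three steps with the AWS CLI is shown below. All names, the container name, the launch type, and the exact migration command are placeholders or assumptions; the real logic, including waiting and error handling, lives in `migrate.sh`:
+
+```bash
+# 1. Scale the Fleet service down so no tasks from the old revision run during the migration.
+aws ecs update-service --cluster my-fleet-cluster --service my-fleet-service --desired-count 0
+aws ecs wait services-stable --cluster my-fleet-cluster --services my-fleet-service
+
+# 2. Run a one-off task from the *new* task definition revision to apply the migrations.
+#    The container name ("fleet") and the command override are assumptions for illustration.
+TASK_ARN=$(aws ecs run-task \
+  --cluster my-fleet-cluster \
+  --task-definition my-fleet-app:5 \
+  --launch-type FARGATE \
+  --network-configuration 'awsvpcConfiguration={subnets=[subnet-xxxxxxxxxxxxxxxxx],securityGroups=[sg-xxxxxxxxxxxxxxxxx]}' \
+  --overrides '{"containerOverrides":[{"name":"fleet","command":["fleet","prepare","db"]}]}' \
+  --query 'tasks[0].taskArn' --output text)
+aws ecs wait tasks-stopped --cluster my-fleet-cluster --tasks "$TASK_ARN"
+
+# 3. Scale the service back up to its normal desired count.
+aws ecs update-service --cluster my-fleet-cluster --service my-fleet-service --desired-count 2
+```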
+
+## Usage
+
+```hcl
+module "fleet_migration" {
+ source = "./path/to/this/module" # Or Git source
+
+ ecs_cluster = "my-fleet-cluster"
+ ecs_service = "my-fleet-service"
+ task_definition = "arn:aws:ecs:us-west-2:123456789012:task-definition/my-fleet-app" # Base ARN without revision
+ task_definition_revision = 5 # The *new* revision to migrate *to*
+ min_capacity = 0 # Scale down to this during migration
+ desired_count = 2 # Scale back up to this after migration
+ subnets = ["subnet-xxxxxxxxxxxxxxxxx", "subnet-yyyyyyyyyyyyyyyyy"]
+ security_groups = ["sg-xxxxxxxxxxxxxxxxx"]
+
+ # Optional: Specify if a separate vulnerability processing service needs coordination
+ # vuln_service = "my-fleet-vuln-service"
+
+ # Optional: Provide an IAM Role ARN for the local-exec script to assume
+ # assume_role_arn = "arn:aws:iam::123456789012:role/MyMigrationRole"
+ # assume_role_session_name = "TerraformFleetMigration"
+
+ # Ensure this module depends on the resource that creates/updates the task definition revision
+ # For example:
+ # depends_on = [aws_ecs_task_definition.fleet_app]
+}
+```
+
+## Workflow
+
+1. When `var.task_definition_revision` changes, Terraform triggers the `null_resource`.
+2. The `local-exec` provisioner executes the `migrate.sh` script located within the module's directory.
+3. It passes the relevant AWS and ECS details (region, cluster, service, task definition, revision, network configuration, scaling parameters, optional role ARN) to the script as `KEY=VALUE` command-line arguments, which the script exports as environment variables.
+4. The `migrate.sh` script, which ships with this module, performs the migration steps against the Fleet database, using the provided parameters to interact with AWS ECS.
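+
+For reference, the rendered `local-exec` command looks roughly like the following (all values are illustrative and line breaks are added for readability); `migrate.sh` splits each `KEY=VALUE` argument and exports it as an environment variable:
+
+```bash
+/bin/bash ./migrate.sh \
+  REGION=us-west-2 \
+  VULN_SERVICE= \
+  ECS_CLUSTER=my-fleet-cluster \
+  TASK_DEFINITION=arn:aws:ecs:us-west-2:123456789012:task-definition/my-fleet-app \
+  TASK_DEFINITION_REVISION=5 \
+  SUBNETS='["subnet-xxxxxxxxxxxxxxxxx","subnet-yyyyyyyyyyyyyyyyy"]' \
+  SECURITY_GROUPS='["sg-xxxxxxxxxxxxxxxxx"]' \
+  ECS_SERVICE=my-fleet-service \
+  MIN_CAPACITY=0 \
+  DESIRED_COUNT=2 \
+  ASSUME_ROLE_ARN= \
+  ASSUME_ROLE_SESSION_NAME=
+```
+
+In principle the script can also be invoked manually with the same style of arguments (for example, when debugging a failed migration), as long as the shell has AWS credentials with the permissions listed under Prerequisites.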
+
+## Prerequisites
+
+* **`bash` shell:** Must be available in the environment where Terraform is executed.
+* **AWS CLI:** Must be installed and configured with credentials in the environment where Terraform is executed. The credentials need permissions to perform ECS actions (DescribeServices, UpdateService, RunTask, DescribeTasks) and potentially STS AssumeRole if `assume_role_arn` is provided.
+* **`migrate.sh` script:** The `migrate.sh` script bundled in this module's directory (`path.module`) contains the actual logic for scaling services and running the migration task. **The Terraform resources in this module only trigger the script; the migration logic itself lives in `migrate.sh`.**
+* **Existing Resources:** The specified ECS Cluster, Service, Task Definition (base ARN), Subnets, and Security Groups must exist.
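+
+As a starting point for the IAM side, the sketch below creates a policy covering the calls listed above. It is illustrative only: resource scoping should be tightened for real use, `sts:AssumeRole` is only needed when `assume_role_arn` is set, and depending on your task definitions and scaling setup you may additionally need `iam:PassRole` and Application Auto Scaling permissions:
+
+```bash
+# Illustrative only: narrow "Resource" to your cluster/service/task-definition ARNs in practice.
+cat > fleet-migration-policy.json <<'EOF'
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Action": [
+        "ecs:DescribeServices",
+        "ecs:UpdateService",
+        "ecs:RunTask",
+        "ecs:DescribeTasks",
+        "sts:AssumeRole"
+      ],
+      "Resource": "*"
+    }
+  ]
+}
+EOF
+aws iam create-policy --policy-name fleet-migration --policy-document file://fleet-migration-policy.json
+```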
+
+## Important Considerations
+
+* **`local-exec`:** This provisioner runs commands on the machine executing Terraform. Ensure this machine has the necessary tools (bash, AWS CLI) and network access/credentials to interact with your AWS environment. This might require specific configuration in CI/CD pipelines.
+* **IAM Permissions:** The credentials used by `local-exec` (either default AWS credentials or the assumed role specified by `assume_role_arn`) require sufficient IAM permissions to manage the specified ECS services and tasks.
+* **State:** The `null_resource` uses the `task_definition_revision` in its `triggers` map. This ensures that Terraform re-runs the provisioner if (and only if) the revision number changes between applies.
## Requirements
@@ -13,8 +64,8 @@ No requirements.
| Name | Version |
|------|---------|
-| [aws](#provider\_aws) | 5.31.0 |
-| [null](#provider\_null) | 3.2.2 |
+| [aws](#provider\_aws) | n/a |
+| [null](#provider\_null) | n/a |
## Modules
@@ -31,6 +82,8 @@ No modules.
| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
+| [assume\_role\_arn](#input\_assume\_role\_arn) | ARN of the IAM role to assume for ECS permissions | `string` | `""` | no |
+| [assume\_role\_session\_name](#input\_assume\_role\_session\_name) | Session name to use when assuming the IAM role | `string` | `""` | no |
| [desired\_count](#input\_desired\_count) | n/a | `number` | n/a | yes |
| [ecs\_cluster](#input\_ecs\_cluster) | n/a | `string` | n/a | yes |
| [ecs\_service](#input\_ecs\_service) | n/a | `string` | n/a | yes |
@@ -39,6 +92,7 @@ No modules.
| [subnets](#input\_subnets) | n/a | `list(string)` | n/a | yes |
| [task\_definition](#input\_task\_definition) | n/a | `string` | n/a | yes |
| [task\_definition\_revision](#input\_task\_definition\_revision) | n/a | `number` | n/a | yes |
+| [vuln\_service](#input\_vuln\_service) | n/a | `string` | `""` | no |
## Outputs
diff --git a/addons/migrations/main.tf b/addons/migrations/main.tf
index 4d2ae59..4dd0484 100644
--- a/addons/migrations/main.tf
+++ b/addons/migrations/main.tf
@@ -5,6 +5,6 @@ resource "null_resource" "main" {
task_definition_revision = var.task_definition_revision
}
provisioner "local-exec" {
- command = "/bin/bash ${path.module}/migrate.sh REGION=${data.aws_region.current.name} VULN_SERVICE=${var.vuln_service} ECS_CLUSTER=${var.ecs_cluster} TASK_DEFINITION=${var.task_definition} TASK_DEFINITION_REVISION=${var.task_definition_revision} SUBNETS=${jsonencode(var.subnets)} SECURITY_GROUPS=${jsonencode(var.security_groups)} ECS_SERVICE=${var.ecs_service} MIN_CAPACITY=${var.min_capacity} DESIRED_COUNT=${var.desired_count}"
+ command = "/bin/bash ${path.module}/migrate.sh REGION=${data.aws_region.current.name} VULN_SERVICE=${var.vuln_service} ECS_CLUSTER=${var.ecs_cluster} TASK_DEFINITION=${var.task_definition} TASK_DEFINITION_REVISION=${var.task_definition_revision} SUBNETS=${jsonencode(var.subnets)} SECURITY_GROUPS=${jsonencode(var.security_groups)} ECS_SERVICE=${var.ecs_service} MIN_CAPACITY=${var.min_capacity} DESIRED_COUNT=${var.desired_count} ASSUME_ROLE_ARN=${var.assume_role_arn} ASSUME_ROLE_SESSION_NAME=${var.assume_role_session_name}"
}
}
diff --git a/addons/migrations/migrate.sh b/addons/migrations/migrate.sh
index 7d09fe7..bc2be9c 100644
--- a/addons/migrations/migrate.sh
+++ b/addons/migrations/migrate.sh
@@ -39,6 +39,21 @@ do
export "$KEY"="$VALUE"
done
+# If an IAM role ARN is provided, assume it and export AWS temporary credentials
+if [ -n "${ASSUME_ROLE_ARN:-}" ]; then
+ SESSION_NAME="${ASSUME_ROLE_SESSION_NAME:-migrate-$(date +%s)}"
+ echo "Assuming role ${ASSUME_ROLE_ARN} with session name ${SESSION_NAME}" >&2
+ # Retrieve temporary credentials via AWS STS
+ read -r AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN <<< "$(
+ aws sts assume-role \
+ --role-arn "${ASSUME_ROLE_ARN}" \
+ --role-session-name "${SESSION_NAME}" \
+ --query 'Credentials.[AccessKeyId,SecretAccessKey,SessionToken]' \
+ --output text
+ )"
+ export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN
+fi
+
scale_services down "${ECS_SERVICE:?}" true "${DESIRED_COUNT}"
if [ -n "${VULN_SERVICE}" ]; then
diff --git a/addons/migrations/variables.tf b/addons/migrations/variables.tf
index 6a4002b..fafc3cd 100644
--- a/addons/migrations/variables.tf
+++ b/addons/migrations/variables.tf
@@ -41,4 +41,15 @@ variable "security_groups" {
variable "vuln_service" {
default = ""
}
+variable "assume_role_arn" {
+ description = "ARN of the IAM role to assume for ECS permissions"
+ type = string
+ default = ""
+}
+
+variable "assume_role_session_name" {
+ description = "Session name to use when assuming the IAM role"
+ type = string
+ default = ""
+}