Commit 9a32a6d

Merge pull request #3847 from aws/release-v1.103.0
Release 1.103.0 (to main)
2 parents 3cfec2f + 2b2e0d8 commit 9a32a6d

File tree: 39 files changed, +3077 -291 lines changed

integration/combination/test_function_with_msk.py

Lines changed: 9 additions & 2 deletions
@@ -41,14 +41,20 @@ def test_function_with_msk_trigger_and_s3_onfailure_events_destinations(self):
             "combination/function_with_msk_trigger_and_s3_onfailure_events_destinations", parameters
         )
 
-    def test_function_with_msk_trigger_and_confluent_schema_registry(self):
+    def test_function_with_msk_trigger_and_premium_features(self):
         companion_stack_outputs = self.companion_stack_outputs
         parameters = self.get_parameters(companion_stack_outputs)
         cluster_name = "MskCluster4-" + generate_suffix()
         parameters.append(self.generate_parameter("MskClusterName4", cluster_name))
-        self._common_validations_for_MSK(
+        self._common_validations_for_MSK("combination/function_with_msk_trigger_and_premium_features", parameters)
+        event_source_mapping_result = self._common_validations_for_MSK(
             "combination/function_with_msk_trigger_and_confluent_schema_registry", parameters
         )
+        # Verify error handling properties are correctly set
+        self.assertTrue(event_source_mapping_result.get("BisectBatchOnFunctionError"))
+        self.assertEqual(event_source_mapping_result.get("MaximumRecordAgeInSeconds"), 3600)
+        self.assertEqual(event_source_mapping_result.get("MaximumRetryAttempts"), 3)
+        self.assertEqual(event_source_mapping_result.get("FunctionResponseTypes"), ["ReportBatchItemFailures"])
 
     def _common_validations_for_MSK(self, file_name, parameters):
         self.create_and_verify_stack(file_name, parameters)

@@ -74,6 +80,7 @@ def _common_validations_for_MSK(self, file_name, parameters):
 
         self.assertEqual(event_source_mapping_function_arn, lambda_function_arn)
         self.assertEqual(event_source_mapping_kafka_cluster_arn, msk_cluster_arn)
+        return event_source_mapping_result
 
     def get_parameters(self, dictionary):
         parameters = []
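
For reference, the error-handling fields asserted above can also be read back outside the test harness with boto3's get_event_source_mapping API. A minimal standalone sketch (not part of this commit), assuming a deployed stack and a placeholder mapping UUID:

import boto3

lambda_client = boto3.client("lambda")

# Placeholder UUID; the integration test resolves it from the stack's physical resource id.
esm = lambda_client.get_event_source_mapping(UUID="00000000-0000-0000-0000-000000000000")

# The same properties the updated test validates.
assert esm.get("BisectBatchOnFunctionError") is True
assert esm.get("MaximumRecordAgeInSeconds") == 3600
assert esm.get("MaximumRetryAttempts") == 3
assert esm.get("FunctionResponseTypes") == ["ReportBatchItemFailures"]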

integration/combination/test_function_with_self_managed_kafka.py

Lines changed: 26 additions & 1 deletion
@@ -16,7 +16,6 @@ class TestFunctionWithSelfManagedKafka(BaseTest):
     @pytest.mark.flaky(reruns=5)
     @parameterized.expand(
         [
-            "combination/function_with_self_managed_kafka",
             "combination/function_with_self_managed_kafka_intrinsics",
         ]
     )

@@ -30,3 +29,29 @@ def test_function_with_self_managed_kafka(self, file_name):
         event_source_mapping_result = lambda_client.get_event_source_mapping(UUID=event_source_mapping_id)
         event_source_mapping_function_arn = event_source_mapping_result["FunctionArn"]
         self.assertEqual(event_source_mapping_function_arn, lambda_function_arn)
+
+    @parameterized.expand(["combination/function_with_self_managed_kafka"])
+    def test_function_with_self_managed_kafka_with_provisioned_mode(self, file_name):
+        self.create_and_verify_stack(file_name)
+        # Get the notification configuration and make sure Lambda Function connection is added
+        lambda_client = self.client_provider.lambda_client
+        function_name = self.get_physical_id_by_type("AWS::Lambda::Function")
+        lambda_function_arn = lambda_client.get_function_configuration(FunctionName=function_name)["FunctionArn"]
+        event_source_mapping_id = self.get_physical_id_by_type("AWS::Lambda::EventSourceMapping")
+        event_source_mapping_result = lambda_client.get_event_source_mapping(UUID=event_source_mapping_id)
+        event_source_mapping_function_arn = event_source_mapping_result["FunctionArn"]
+        self.assertEqual(event_source_mapping_function_arn, lambda_function_arn)
+
+        # Verify error handling properties are correctly set
+        self.assertTrue(event_source_mapping_result.get("BisectBatchOnFunctionError"))
+        self.assertEqual(event_source_mapping_result.get("MaximumRecordAgeInSeconds"), 3600)
+        self.assertEqual(event_source_mapping_result.get("MaximumRetryAttempts"), 3)
+        self.assertEqual(event_source_mapping_result.get("FunctionResponseTypes"), ["ReportBatchItemFailures"])
+        # Uncomment this once SDK is updated.
+        # provisioned_poller_config = event_source_mapping_result["ProvisionedPollerConfig"]
+        # actual_poller_group_name = provisioned_poller_config["PollerGroupName"]
+        # self.assertEqual(
+        #     actual_poller_group_name,
+        #     "test1",
+        #     f"Expected PollerGroupName to be 'test1' but got '{actual_poller_group_name}'",
+        # )
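
Once the SDK returns ProvisionedPollerConfig in GetEventSourceMapping responses, the commented-out block above can be re-enabled. A hedged, version-tolerant sketch of a drop-in for that block inside the test method (it reuses the test's variable names; PollerGroupName is taken from the comment above, not from a released SDK):

# Sketch only: skip the assertion gracefully on SDK versions that do not yet
# return ProvisionedPollerConfig in the get_event_source_mapping response.
provisioned_poller_config = event_source_mapping_result.get("ProvisionedPollerConfig")
if provisioned_poller_config is not None:
    actual_poller_group_name = provisioned_poller_config.get("PollerGroupName")
    self.assertEqual(
        actual_poller_group_name,
        "test1",
        f"Expected PollerGroupName to be 'test1' but got '{actual_poller_group_name}'",
    )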
Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+[
+  {
+    "LogicalResourceId": "MyApi",
+    "ResourceType": "AWS::ApiGateway::RestApi"
+  },
+  {
+    "LogicalResourceId": "MyApiDeployment",
+    "ResourceType": "AWS::ApiGateway::Deployment"
+  },
+  {
+    "LogicalResourceId": "MyApiProdStage",
+    "ResourceType": "AWS::ApiGateway::Stage"
+  },
+  {
+    "LogicalResourceId": "ApiGatewayDomainName",
+    "ResourceType": "AWS::ApiGateway::DomainName"
+  },
+  {
+    "LogicalResourceId": "MyApiBasePathMapping",
+    "ResourceType": "AWS::ApiGateway::BasePathMapping"
+  }
+]
Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+[
+  {
+    "LogicalResourceId": "MyApi",
+    "ResourceType": "AWS::ApiGateway::RestApi"
+  },
+  {
+    "LogicalResourceId": "MyApiDeployment",
+    "ResourceType": "AWS::ApiGateway::Deployment"
+  },
+  {
+    "LogicalResourceId": "MyApiProdStage",
+    "ResourceType": "AWS::ApiGateway::Stage"
+  }
+]
Lines changed: 9 additions & 0 deletions
@@ -66,8 +66,17 @@ Resources:
              Ref: MyMskCluster
            Topics:
              - SchemaRegistryTestTopic
+           DestinationConfig:
+             OnFailure:
+               Type: Kafka
+               Destination: kafka://testTopic
            ProvisionedPollerConfig:
              MinimumPollers: 1
+           BisectBatchOnFunctionError: true
+           MaximumRecordAgeInSeconds: 3600
+           MaximumRetryAttempts: 3
+           FunctionResponseTypes:
+             - ReportBatchItemFailures
            SchemaRegistryConfig:
              AccessConfigs:
                - Type: BASIC_AUTH

integration/resources/templates/combination/function_with_self_managed_kafka.yaml

Lines changed: 8 additions & 0 deletions
@@ -15,10 +15,18 @@ Resources:
              - 123.45.67.89:9096
            Topics:
              - Topic1
+           ProvisionedPollerConfig:
+             MinimumPollers: 1
+             PollerGroupName: test1
            SourceAccessConfigurations:
              - Type: BASIC_AUTH
                URI:
                  Ref: KafkaUserSecret
+           BisectBatchOnFunctionError: true
+           MaximumRecordAgeInSeconds: 3600
+           MaximumRetryAttempts: 3
+           FunctionResponseTypes:
+             - ReportBatchItemFailures
 
   KafkaUserSecret:
     Type: AWS::SecretsManager::Secret
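
For context, the properties added above map onto parameters of Lambda's CreateEventSourceMapping API, which is what the SAM transform ultimately emits. A hedged boto3 sketch of the equivalent direct call (function name, endpoint, and secret ARN are placeholders; whether the service accepts the error-handling parameters for Kafka sources depends on the service and SDK version, and PollerGroupName is omitted because the SDK does not expose it yet, per the test comment earlier in this commit):

import boto3

lambda_client = boto3.client("lambda")

# Placeholders for illustration only.
lambda_client.create_event_source_mapping(
    FunctionName="my-kafka-consumer",
    StartingPosition="LATEST",
    Topics=["Topic1"],
    SelfManagedEventSource={"Endpoints": {"KAFKA_BOOTSTRAP_SERVERS": ["123.45.67.89:9096"]}},
    SourceAccessConfigurations=[
        {"Type": "BASIC_AUTH", "URI": "arn:aws:secretsmanager:us-east-1:123456789012:secret:KafkaUserSecret"}
    ],
    ProvisionedPollerConfig={"MinimumPollers": 1},
    BisectBatchOnFunctionError=True,
    MaximumRecordAgeInSeconds=3600,
    MaximumRetryAttempts=3,
    FunctionResponseTypes=["ReportBatchItemFailures"],
)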
Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+Parameters:
+  IpAddressType:
+    Type: String
+    Default: dualstack
+  DomainName:
+    Type: String
+  CertificateArn:
+    Type: String
+
+Resources:
+  MyApi:
+    Type: AWS::Serverless::Api
+    Properties:
+      StageName: Prod
+      DefinitionUri: ${definitionuri}
+      Domain:
+        DomainName: !Ref DomainName
+        CertificateArn: !Ref CertificateArn
+        EndpointConfiguration: REGIONAL
+        IpAddressType: !Ref IpAddressType
+
+Metadata:
+  SamTransformTest: true
Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
+Parameters:
+  IpAddressType:
+    Type: String
+    Default: ipv4
+
+Resources:
+  MyApi:
+    Type: AWS::Serverless::Api
+    Properties:
+      StageName: Prod
+      DefinitionUri: ${definitionuri}
+      EndpointConfiguration:
+        Type: REGIONAL
+      IpAddressType: !Ref IpAddressType
+Metadata:
+  SamTransformTest: true
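
A matching runtime check for this template would read the endpoint configuration back from API Gateway. A hedged sketch, assuming a botocore version whose get_rest_api response already includes ipAddressType (the REST API id is a placeholder; the integration tests resolve it from the deployed stack):

import boto3

api_gateway_client = boto3.client("apigateway")

# Placeholder REST API id.
rest_api = api_gateway_client.get_rest_api(restApiId="abc123def4")
endpoint_config = rest_api["endpointConfiguration"]

assert endpoint_config["types"] == ["REGIONAL"]
assert endpoint_config.get("ipAddressType") == "ipv4"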
Lines changed: 37 additions & 0 deletions
@@ -0,0 +1,37 @@
+from unittest.case import skipIf
+
+from integration.config.service_names import CUSTOM_DOMAIN
+from integration.helpers.base_internal_test import BaseInternalTest
+from integration.helpers.base_test import nonblocking
+from integration.helpers.resource import current_region_not_included
+
+
+@skipIf(
+    current_region_not_included([CUSTOM_DOMAIN]),
+    "Custom domain is not supported in this testing region",
+)
+@nonblocking
+class TestApiWithDomainIpAddressType(BaseInternalTest):
+    """
+    Test AWS::Serverless::Api with IpAddressType in Domain configuration
+    """
+
+    def test_api_with_domain_ipaddresstype(self):
+        """
+        Creates an API with custom domain and IpAddressType set to dualstack
+        """
+        self.create_and_verify_stack("single/api_with_domain_ipaddresstype")
+
+        # Verify the domain name resource
+        domain_name_id = self.get_physical_id_by_type("AWS::ApiGateway::DomainName")
+        api_gateway_client = self.client_provider.api_client
+        result = api_gateway_client.get_domain_name(domainName=domain_name_id)
+
+        # Verify endpoint configuration
+        end_point_config = result["endpointConfiguration"]
+        end_point_types = end_point_config["types"]
+        self.assertEqual(1, len(end_point_types))
+        self.assertEqual("REGIONAL", end_point_types[0])
+
+        # Verify IpAddressType is set correctly
+        self.assertEqual("dualstack", end_point_config["ipAddressType"])
