Skip to content

Commit 3baaef9

Browse files
committed
chore(tests): refactor E2E test mechanics to ease maintenance, writing tests and parallelization (aws-powertools#1444)
1 parent f697090 commit 3baaef9

11 files changed

+933
-357
lines changed

poetry.lock

+266-290
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,6 @@ coverage = {extras = ["toml"], version = "^6.2"}
3434
pytest = "^7.0.1"
3535
black = "^21.12b0"
3636
flake8 = "^4.0.1"
37-
flake8-black = "^0.2.3"
3837
flake8-builtins = "^1.5.3"
3938
flake8-comprehensions = "^3.7.0"
4039
flake8-debugger = "^4.0.0"
@@ -66,6 +65,8 @@ pytest-benchmark = "^3.4.1"
6665
mypy-boto3-cloudwatch = "^1.24.35"
6766
mypy-boto3-lambda = "^1.24.0"
6867
mypy-boto3-xray = "^1.24.0"
68+
mypy-boto3-s3 = { version = "^1.24.0", python = ">=3.7" }
69+
mypy-boto3-cloudformation = { version = "^1.24.0", python = ">=3.7" }
6970
types-requests = "^2.28.8"
7071
typing-extensions = { version = "^4.3.0", python = ">=3.7" }
7172
python-snappy = "^0.6.1"

tests/e2e/metrics/conftest.py

+21
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
import pytest

from tests.e2e.metrics.infrastructure import MetricsStack
from tests.e2e.utils.infrastructure import deploy_once


@pytest.fixture(autouse=True, scope="module")
def infrastructure(request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory, worker_id: str):
    """Setup and teardown logic for E2E test infrastructure

    Deploys the Metrics stack once per test module via ``deploy_once`` and
    yields its CloudFormation outputs to every test in the module.

    Parameters
    ----------
    request : pytest.FixtureRequest
        test fixture containing metadata about test execution
    tmp_path_factory : pytest.TempPathFactory
        factory for a temporary directory shared across parallel workers
    worker_id : str
        pytest-xdist worker id; presumably "master" when not running in
        parallel — confirm against deploy_once's handling

    Yields
    ------
    Dict[str, str]
        CloudFormation Outputs from deployed infrastructure
    """
    yield from deploy_once(stack=MetricsStack, request=request, tmp_path_factory=tmp_path_factory, worker_id=worker_id)
+11-8
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,17 @@
from aws_lambda_powertools import Metrics

my_metrics = Metrics()


@my_metrics.log_metrics
def lambda_handler(event, context):
    """Add every metric described in the event payload; flushing is done by log_metrics."""
    # Maintenance: create a public method to set these explicitly
    my_metrics.namespace = event.get("namespace")
    my_metrics.service = event.get("service")

    for metric_payload in event.get("metrics"):
        my_metrics.add_metric(**metric_payload)

    return "success"
+12
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
from aws_lambda_powertools import Metrics

my_metrics = Metrics()


@my_metrics.log_metrics(capture_cold_start_metric=True)
def lambda_handler(event, context):
    """Emit only the ColdStart metric, which the decorator captures automatically."""
    # Maintenance: create a public method to set these explicitly
    my_metrics.namespace = event.get("namespace")
    my_metrics.service = event.get("service")

    return "success"

tests/e2e/metrics/infrastructure.py

+11
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
from pathlib import Path

from tests.e2e.utils.infrastructure import BaseInfrastructureV2


class MetricsStack(BaseInfrastructureV2):
    """Infrastructure stack for the Metrics E2E tests."""

    def __init__(self, handlers_dir: Path, feature_name: str = "metrics") -> None:
        """Create the stack under the given feature name.

        Parameters
        ----------
        handlers_dir : Path
            directory containing the Lambda handler files for this feature
        feature_name : str, optional
            logical feature name used by the base infrastructure, by default "metrics"
        """
        super().__init__(feature_name, handlers_dir)

    def create_resources(self) -> None:
        # Only Lambda functions are needed for metrics tests
        self.create_lambda_functions()

tests/e2e/metrics/test_metrics.py

+56-27
Original file line numberDiff line numberDiff line change
@@ -1,40 +1,69 @@
import json

import pytest

from tests.e2e.utils import helpers


@pytest.fixture
def basic_handler_fn(infrastructure: dict) -> str:
    """Name of the basic handler Lambda from stack outputs."""
    return infrastructure.get("BasicHandler", "")


@pytest.fixture
def basic_handler_fn_arn(infrastructure: dict) -> str:
    """ARN of the basic handler Lambda from stack outputs."""
    return infrastructure.get("BasicHandlerArn", "")


@pytest.fixture
def cold_start_fn(infrastructure: dict) -> str:
    """Name of the cold-start handler Lambda from stack outputs."""
    return infrastructure.get("ColdStart", "")


@pytest.fixture
def cold_start_fn_arn(infrastructure: dict) -> str:
    """ARN of the cold-start handler Lambda from stack outputs."""
    return infrastructure.get("ColdStartArn", "")


METRIC_NAMESPACE = "powertools-e2e-metric"


def test_basic_lambda_metric_is_visible(basic_handler_fn: str, basic_handler_fn_arn: str):
    # GIVEN a unique metric emitted three times under a unique service dimension
    metric_name = helpers.build_metric_name()
    service = helpers.build_service_name()
    dimensions = helpers.build_add_dimensions_input(service=service)
    add_metric_payloads = helpers.build_multiple_add_metric_input(metric_name=metric_name, value=1, quantity=3)

    # WHEN the handler is invoked with those metrics in the payload
    event = json.dumps({"metrics": add_metric_payloads, "service": service, "namespace": METRIC_NAMESPACE})
    _, execution_time = helpers.trigger_lambda(lambda_arn=basic_handler_fn_arn, payload=event)

    fetched_metrics = helpers.get_metrics(
        namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions
    )

    # THEN CloudWatch shows the aggregated value of the three emissions
    metric_values = fetched_metrics.get("Values", [])
    assert metric_values and metric_values[0] == 3.0


def test_cold_start_metric(cold_start_fn_arn: str, cold_start_fn: str):
    # GIVEN the ColdStart metric scoped to this function and a unique service
    metric_name = "ColdStart"
    service = helpers.build_service_name()
    dimensions = helpers.build_add_dimensions_input(function_name=cold_start_fn, service=service)

    # WHEN we invoke twice
    event = json.dumps({"service": service, "namespace": METRIC_NAMESPACE})

    _, execution_time = helpers.trigger_lambda(lambda_arn=cold_start_fn_arn, payload=event)
    _, _ = helpers.trigger_lambda(lambda_arn=cold_start_fn_arn, payload=event)

    fetched_metrics = helpers.get_metrics(
        namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions
    )

    # THEN only the first invocation counts as a cold start
    metric_values = fetched_metrics.get("Values", [])
    assert metric_values and metric_values[0] == 1.0

tests/e2e/utils/asset.py

+120
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,120 @@
1+
import io
2+
import json
3+
import zipfile
4+
from pathlib import Path
5+
from typing import List, Optional
6+
7+
import boto3
8+
import botocore.exceptions
9+
from mypy_boto3_s3 import S3Client
10+
11+
from aws_lambda_powertools import Logger
12+
from tests.e2e.utils.models import AssetTemplateConfig, TemplateAssembly
13+
14+
logger = Logger(service="e2e-utils")
15+
16+
17+
class Asset:
    def __init__(
        self, config: AssetTemplateConfig, account_id: str, region: str, boto3_client: Optional[S3Client] = None
    ) -> None:
        """CDK Asset logic to verify existence and resolve deeply nested configuration

        Parameters
        ----------
        config : AssetTemplateConfig
            CDK Asset configuration found in synthesized template
        account_id : str
            AWS Account ID
        region : str
            AWS Region
        boto3_client : Optional["S3Client"], optional
            S3 client instance for asset operations, by default None
        """
        self.config = config
        self.s3 = boto3_client or boto3.client("s3")
        self.account_id = account_id
        self.region = region
        self.asset_path = config.source.path
        self.asset_packaging = config.source.packaging
        self.object_key = config.destinations.current_account_current_region.object_key
        self._bucket = config.destinations.current_account_current_region.bucket_name
        # Bucket names in the manifest use CDK pseudo-parameters; resolve to a concrete name
        self.bucket_name = self._resolve_bucket_name()

    @property
    def is_zip(self) -> bool:
        """Whether this asset is packaged as a zip archive (the only kind we upload)."""
        return self.asset_packaging == "zip"

    def exists_in_s3(self, key: str) -> bool:
        """Return True if `key` already exists in the asset bucket.

        Any ClientError (e.g. 404 Not Found, 403 Forbidden) is treated as
        "does not exist" so callers simply (re-)upload.
        """
        try:
            return self.s3.head_object(Bucket=self.bucket_name, Key=key) is not None
        except botocore.exceptions.ClientError:
            return False

    def _resolve_bucket_name(self) -> str:
        """Substitute ${AWS::AccountId} and ${AWS::Region} placeholders with real values."""
        return self._bucket.replace("${AWS::AccountId}", self.account_id).replace("${AWS::Region}", self.region)
56+
57+
58+
class Assets:
    def __init__(
        self, asset_manifest: Path, account_id: str, region: str, boto3_client: Optional[S3Client] = None
    ) -> None:
        """CDK Assets logic to find each asset, compress, and upload

        Parameters
        ----------
        asset_manifest : Path
            Asset manifest JSON file (self.__synthesize)
        account_id : str
            AWS Account ID
        region : str
            AWS Region
        boto3_client : Optional[S3Client], optional
            S3 client instance for asset operations, by default None
        """
        self.asset_manifest = asset_manifest
        self.account_id = account_id
        self.region = region
        self.s3 = boto3_client or boto3.client("s3")
        self.assets = self._find_assets_from_template()
        self.assets_location = str(self.asset_manifest.parent)

    def upload(self):
        """Drop-in replacement for cdk-assets package s3 upload part.
        https://www.npmjs.com/package/cdk-assets.
        We use custom solution to avoid dependencies from nodejs ecosystem.
        We follow the same design cdk-assets:
        https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md.
        """
        for asset in self.assets:
            if not asset.is_zip:
                logger.debug(f"Asset '{asset.object_key}' is not zip. Skipping upload.")
                continue

            # Object keys are content-addressed, so an existing key means identical content
            if asset.exists_in_s3(key=asset.object_key):
                logger.debug(f"Asset '{asset.object_key}' already exists in S3. Skipping upload.")
                continue

            archive = self._compress_assets(asset)
            logger.debug("Uploading archive to S3")
            self.s3.upload_fileobj(Fileobj=archive, Bucket=asset.bucket_name, Key=asset.object_key)
            logger.debug("Successfully uploaded")

    def _find_assets_from_template(self) -> List[Asset]:
        """Parse the asset manifest and build one Asset per file entry.

        The shared S3 client is passed down so each Asset reuses it instead of
        constructing its own boto3 client.
        """
        data = json.loads(self.asset_manifest.read_text())
        template = TemplateAssembly(**data)
        return [
            Asset(config=asset_config, account_id=self.account_id, region=self.region, boto3_client=self.s3)
            for asset_config in template.files.values()
        ]

    def _compress_assets(self, asset: Asset) -> io.BytesIO:
        """Zip the asset directory into an in-memory buffer.

        Walks the directory recursively so nested files (e.g. vendored packages
        inside a handler directory) are included; a top-level-only iteration
        would silently produce incomplete archives for assets with subdirectories.
        """
        buf = io.BytesIO()
        asset_dir = Path(self.assets_location) / asset.asset_path
        with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as archive:
            # sorted() keeps archive ordering deterministic across runs
            for asset_file in sorted(asset_dir.rglob("*")):
                if asset_file.is_dir():
                    continue
                logger.debug(f"Adding file '{asset_file}' to the archive.")
                archive.write(asset_file, arcname=asset_file.relative_to(asset_dir))
        buf.seek(0)
        return buf

0 commit comments

Comments
 (0)