Skip to content

chore(tests): refactor E2E test mechanics to ease maintenance, writing tests and parallelization #1444

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 20 commits into from
Aug 12, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
197eb7a
refactor(e2e): encapsulate assets logic; add sample stack
heitorlessa Aug 7, 2022
feaa491
chore(dep): remove flake8-black due to intermittent preview errors
heitorlessa Aug 7, 2022
18dadc8
refactor: setup infra fixture
heitorlessa Aug 7, 2022
6de3ce2
refactor: dummy test using new explicit infra
heitorlessa Aug 7, 2022
0280d9f
refactor: update test_metrics to use new verbose infra
heitorlessa Aug 8, 2022
d1996a6
Merge branch 'develop' into chore/refactor-e2e
heitorlessa Aug 9, 2022
48085d5
refactor(helpers): create builders for data and types to simplify wri…
heitorlessa Aug 10, 2022
54e9550
refactor(metrics): make test more explicit
heitorlessa Aug 10, 2022
4413844
feat(metrics): add cold start test
heitorlessa Aug 10, 2022
f37e988
feat(metrics): safely parallelize all defined tests per module
heitorlessa Aug 10, 2022
074ff4f
refactor(helpers): set good default on end_date metrics; fix yield
heitorlessa Aug 10, 2022
c5f8e4e
feat: create BaseInfrastructureV2 to demonstrate before/after
heitorlessa Aug 11, 2022
47f41e6
Merge branch 'develop' into chore/refactor-e2e
heitorlessa Aug 11, 2022
13df083
chore: document remaining changes, cleanup
heitorlessa Aug 11, 2022
0ba7678
chore: ensure cold start fn is invoked twice for test
heitorlessa Aug 11, 2022
d736e00
Merge branch 'develop' into chore/refactor-e2e
heitorlessa Aug 12, 2022
9c37ffe
chore: add type to worker_id
heitorlessa Aug 12, 2022
56ae2be
chore: increase delay and jitter on get_metrics
heitorlessa Aug 12, 2022
f5c0bf4
chore: rename cfn_template to asset_manifest to fix misunderstanding
heitorlessa Aug 12, 2022
a51973b
chore: abstract parallelization deployment logic
heitorlessa Aug 12, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
556 changes: 266 additions & 290 deletions poetry.lock

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ coverage = {extras = ["toml"], version = "^6.2"}
pytest = "^7.0.1"
black = "^21.12b0"
flake8 = "^4.0.1"
flake8-black = "^0.2.3"
flake8-builtins = "^1.5.3"
flake8-comprehensions = "^3.7.0"
flake8-debugger = "^4.0.0"
Expand Down Expand Up @@ -66,6 +65,8 @@ pytest-benchmark = "^3.4.1"
mypy-boto3-cloudwatch = "^1.24.35"
mypy-boto3-lambda = "^1.24.0"
mypy-boto3-xray = "^1.24.0"
mypy-boto3-s3 = { version = "^1.24.0", python = ">=3.7" }
mypy-boto3-cloudformation = { version = "^1.24.0", python = ">=3.7" }
types-requests = "^2.28.8"
typing-extensions = { version = "^4.3.0", python = ">=3.7" }
python-snappy = "^0.6.1"
Expand Down
21 changes: 21 additions & 0 deletions tests/e2e/metrics/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import pytest

from tests.e2e.metrics.infrastructure import MetricsStack
from tests.e2e.utils.infrastructure import deploy_once


@pytest.fixture(autouse=True, scope="module")
def infrastructure(request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory, worker_id: str):
    """Setup and teardown logic for E2E test infrastructure

    Delegates entirely to ``deploy_once`` (``yield from``), so deployment,
    the yielded outputs, and teardown all live there; this fixture only binds
    it to the Metrics feature stack at module scope.

    Parameters
    ----------
    request : pytest.FixtureRequest
        test fixture containing metadata about test execution
    tmp_path_factory : pytest.TempPathFactory
        factory for temporary directories, forwarded to ``deploy_once``
    worker_id : str
        pytest-xdist worker id — presumably used by ``deploy_once`` to
        coordinate a single deployment across parallel workers; confirm there

    Yields
    ------
    Dict[str, str]
        CloudFormation Outputs from deployed infrastructure
    """
    yield from deploy_once(stack=MetricsStack, request=request, tmp_path_factory=tmp_path_factory, worker_id=worker_id)
19 changes: 11 additions & 8 deletions tests/e2e/metrics/handlers/basic_handler.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
import os

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

METRIC_NAME = os.environ["METRIC_NAME"]

metrics = Metrics()
my_metrics = Metrics()


@metrics.log_metrics
@my_metrics.log_metrics
def lambda_handler(event, context):
metrics.add_metric(name=METRIC_NAME, unit=MetricUnit.Count, value=1)
metrics, namespace, service = event.get("metrics"), event.get("namespace"), event.get("service")

# Maintenance: create a public method to set these explicitly
my_metrics.namespace = namespace
my_metrics.service = service

for metric in metrics:
my_metrics.add_metric(**metric)

return "success"
12 changes: 12 additions & 0 deletions tests/e2e/metrics/handlers/cold_start.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
from aws_lambda_powertools import Metrics

my_metrics = Metrics()


@my_metrics.log_metrics(capture_cold_start_metric=True)
def lambda_handler(event, context):
    """Emit a ColdStart metric; namespace/service are taken from the invoke payload."""
    namespace, service = event.get("namespace"), event.get("service")

    # Maintenance: create a public method to set these explicitly
    my_metrics.namespace = namespace
    my_metrics.service = service

    return "success"
11 changes: 11 additions & 0 deletions tests/e2e/metrics/infrastructure.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from pathlib import Path

from tests.e2e.utils.infrastructure import BaseInfrastructureV2


class MetricsStack(BaseInfrastructureV2):
    # E2E infrastructure stack for the Metrics feature; the base class
    # presumably drives synth/deploy and invokes create_resources() as a hook.
    def __init__(self, handlers_dir: Path, feature_name: str = "metrics") -> None:
        """Define the Metrics feature stack.

        Parameters
        ----------
        handlers_dir : Path
            Directory holding the Lambda handler sources to deploy
        feature_name : str, optional
            Feature identifier, by default "metrics"
        """
        # NOTE(review): base __init__ takes (feature_name, handlers_dir) —
        # order is deliberately swapped relative to this signature
        super().__init__(feature_name, handlers_dir)

    def create_resources(self):
        """Create every resource this feature needs (Lambda functions only)."""
        self.create_lambda_functions()
83 changes: 56 additions & 27 deletions tests/e2e/metrics/test_metrics.py
Original file line number Diff line number Diff line change
@@ -1,40 +1,69 @@
import datetime
import uuid
import json

import boto3
import pytest
from e2e import conftest
from e2e.utils import helpers

from tests.e2e.utils import helpers

@pytest.fixture(scope="module")
def config() -> conftest.LambdaConfig:
return {
"parameters": {},
"environment_variables": {
"POWERTOOLS_METRICS_NAMESPACE": "powertools-e2e-metric",
"POWERTOOLS_SERVICE_NAME": "test-powertools-service",
"METRIC_NAME": f"business-metric-{str(uuid.uuid4()).replace('-','_')}",
},
}

@pytest.fixture
def basic_handler_fn(infrastructure: dict) -> str:
    """The "BasicHandler" CloudFormation output (empty string when absent)."""
    output: str = infrastructure.get("BasicHandler", "")
    return output

def test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig):

@pytest.fixture
def basic_handler_fn_arn(infrastructure: dict) -> str:
    """The "BasicHandlerArn" CloudFormation output (empty string when absent)."""
    output: str = infrastructure.get("BasicHandlerArn", "")
    return output


@pytest.fixture
def cold_start_fn(infrastructure: dict) -> str:
    """The "ColdStart" CloudFormation output (empty string when absent)."""
    output: str = infrastructure.get("ColdStart", "")
    return output


@pytest.fixture
def cold_start_fn_arn(infrastructure: dict) -> str:
    """The "ColdStartArn" CloudFormation output (empty string when absent)."""
    output: str = infrastructure.get("ColdStartArn", "")
    return output


# CloudWatch namespace shared by all metrics emitted from this module's tests
METRIC_NAMESPACE = "powertools-e2e-metric"


def test_basic_lambda_metric_is_visible(basic_handler_fn: str, basic_handler_fn_arn: str):
# GIVEN
start_date = execute_lambda.get_lambda_execution_time()
end_date = start_date + datetime.timedelta(minutes=5)
metric_name = helpers.build_metric_name()
service = helpers.build_service_name()
dimensions = helpers.build_add_dimensions_input(service=service)
metrics = helpers.build_multiple_add_metric_input(metric_name=metric_name, value=1, quantity=3)

# WHEN
event = json.dumps({"metrics": metrics, "service": service, "namespace": METRIC_NAMESPACE})
_, execution_time = helpers.trigger_lambda(lambda_arn=basic_handler_fn_arn, payload=event)

metrics = helpers.get_metrics(
namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions
)

# THEN
metric_data = metrics.get("Values", [])
assert metric_data and metric_data[0] == 3.0


def test_cold_start_metric(cold_start_fn_arn: str, cold_start_fn: str):
# GIVEN
metric_name = "ColdStart"
service = helpers.build_service_name()
dimensions = helpers.build_add_dimensions_input(function_name=cold_start_fn, service=service)

# WHEN we invoke twice
event = json.dumps({"service": service, "namespace": METRIC_NAMESPACE})

_, execution_time = helpers.trigger_lambda(lambda_arn=cold_start_fn_arn, payload=event)
_, _ = helpers.trigger_lambda(lambda_arn=cold_start_fn_arn, payload=event)

metrics = helpers.get_metrics(
start_date=start_date,
end_date=end_date,
namespace=config["environment_variables"]["POWERTOOLS_METRICS_NAMESPACE"],
metric_name=config["environment_variables"]["METRIC_NAME"],
service_name=config["environment_variables"]["POWERTOOLS_SERVICE_NAME"],
cw_client=boto3.client(service_name="cloudwatch"),
namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions
)

# THEN
assert metrics.get("Timestamps") and len(metrics.get("Timestamps")) == 1
assert metrics.get("Values") and len(metrics.get("Values")) == 1
assert metrics.get("Values") and metrics.get("Values")[0] == 1
metric_data = metrics.get("Values", [])
assert metric_data and metric_data[0] == 1.0
120 changes: 120 additions & 0 deletions tests/e2e/utils/asset.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import io
import json
import zipfile
from pathlib import Path
from typing import List, Optional

import boto3
import botocore.exceptions
from mypy_boto3_s3 import S3Client

from aws_lambda_powertools import Logger
from tests.e2e.utils.models import AssetTemplateConfig, TemplateAssembly

logger = Logger(service="e2e-utils")


class Asset:
    def __init__(
        self, config: AssetTemplateConfig, account_id: str, region: str, boto3_client: Optional[S3Client] = None
    ) -> None:
        """Single CDK asset: existence check plus resolution of nested template config

        Parameters
        ----------
        config : AssetTemplateConfig
            CDK Asset configuration found in synthesized template
        account_id : str
            AWS Account ID
        region : str
            AWS Region
        boto3_client : Optional["S3Client"], optional
            S3 client instance for asset operations, by default None
        """
        self.config = config
        self.account_id = account_id
        self.region = region
        self.s3 = boto3_client or boto3.client("s3")
        # Flatten the deeply nested config into plain attributes for convenience
        destination = config.destinations.current_account_current_region
        self.asset_path = config.source.path
        self.asset_packaging = config.source.packaging
        self.object_key = destination.object_key
        self._bucket = destination.bucket_name
        self.bucket_name = self._resolve_bucket_name()

    @property
    def is_zip(self) -> bool:
        """Whether this asset is packaged as a zip archive."""
        return self.asset_packaging == "zip"

    def exists_in_s3(self, key: str) -> bool:
        """Return True when ``key`` already exists in the asset bucket."""
        try:
            # head_object raises ClientError (e.g. 404) when the object is missing
            self.s3.head_object(Bucket=self.bucket_name, Key=key)
        except botocore.exceptions.ClientError:
            return False
        return True

    def _resolve_bucket_name(self) -> str:
        """Substitute CloudFormation pseudo-parameters in the bucket name."""
        resolved = self._bucket
        for placeholder, value in (("${AWS::AccountId}", self.account_id), ("${AWS::Region}", self.region)):
            resolved = resolved.replace(placeholder, value)
        return resolved


class Assets:
    def __init__(
        self, asset_manifest: Path, account_id: str, region: str, boto3_client: Optional[S3Client] = None
    ) -> None:
        """CDK Assets logic to find each asset, compress, and upload

        Parameters
        ----------
        asset_manifest : Path
            Asset manifest JSON file produced by template synthesis
        account_id : str
            AWS Account ID
        region : str
            AWS Region
        boto3_client : Optional[S3Client], optional
            S3 client instance for asset operations, by default None
        """
        self.asset_manifest = asset_manifest
        self.account_id = account_id
        self.region = region
        self.s3 = boto3_client or boto3.client("s3")
        self.assets = self._find_assets_from_template()
        # Assets are laid out relative to the manifest's directory
        self.assets_location = str(self.asset_manifest.parent)

    def upload(self):
        """Drop-in replacement for cdk-assets package s3 upload part.
        https://www.npmjs.com/package/cdk-assets.
        We use custom solution to avoid dependencies from nodejs ecosystem.
        We follow the same design cdk-assets:
        https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md.
        """
        for asset in self.assets:
            # Guard clauses: only zip-packaged assets not yet present in S3 are uploaded
            if not asset.is_zip:
                logger.debug(f"Asset '{asset.object_key}' is not zip. Skipping upload.")
                continue
            if asset.exists_in_s3(key=asset.object_key):
                logger.debug(f"Asset '{asset.object_key}' already exists in S3. Skipping upload.")
                continue

            zip_stream = self._compress_assets(asset)
            logger.debug("Uploading archive to S3")
            self.s3.upload_fileobj(Fileobj=zip_stream, Bucket=asset.bucket_name, Key=asset.object_key)
            logger.debug("Successfully uploaded")

    def _find_assets_from_template(self) -> List[Asset]:
        """Parse the asset manifest and build one Asset per file entry."""
        manifest = TemplateAssembly(**json.loads(self.asset_manifest.read_text()))
        return [
            Asset(config=file_config, account_id=self.account_id, region=self.region)
            for file_config in manifest.files.values()
        ]

    def _compress_assets(self, asset: Asset) -> io.BytesIO:
        """Zip the asset directory's files into an in-memory buffer, rewound to 0."""
        buffer = io.BytesIO()
        asset_dir = Path(f"{self.assets_location}/{asset.asset_path}")
        with zipfile.ZipFile(buffer, "w", compression=zipfile.ZIP_DEFLATED) as archive:
            # NOTE(review): iterdir() is non-recursive — assumes handler assets
            # are flat files with no subdirectories; confirm against synth output
            for entry in asset_dir.iterdir():
                logger.debug(f"Adding file '{entry}' to the archive.")
                archive.write(entry, arcname=entry.relative_to(asset_dir))
        buffer.seek(0)
        return buffer
Loading