From 6930b42dd8f3880c6f599865c91835f2f88b2c4a Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 19 Aug 2022 16:09:17 +0200 Subject: [PATCH] chore(tests): refactor E2E logger to ease maintenance, writing tests and parallelization (#1460) --- aws_lambda_powertools/shared/constants.py | 9 ++ poetry.lock | 44 +++-- pyproject.toml | 1 + tests/e2e/logger/conftest.py | 25 +++ tests/e2e/logger/handlers/basic_handler.py | 14 +- .../e2e/logger/handlers/no_context_handler.py | 14 -- tests/e2e/logger/infrastructure.py | 11 ++ tests/e2e/logger/test_logger.py | 150 +++-------------- tests/e2e/metrics/test_metrics.py | 12 +- tests/e2e/utils/data_fetcher/logs.py | 153 +++++++++++++++--- tests/e2e/utils/data_fetcher/metrics.py | 12 +- 11 files changed, 245 insertions(+), 200 deletions(-) create mode 100644 tests/e2e/logger/conftest.py delete mode 100644 tests/e2e/logger/handlers/no_context_handler.py create mode 100644 tests/e2e/logger/infrastructure.py diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py index 45b46d236f9..48d94d88f1d 100644 --- a/aws_lambda_powertools/shared/constants.py +++ b/aws_lambda_powertools/shared/constants.py @@ -23,3 +23,12 @@ XRAY_SDK_CORE_MODULE: str = "aws_xray_sdk.core" IDEMPOTENCY_DISABLED_ENV: str = "POWERTOOLS_IDEMPOTENCY_DISABLED" + +LOGGER_LAMBDA_CONTEXT_KEYS = [ + "function_arn", + "function_memory_size", + "function_name", + "function_request_id", + "cold_start", + "xray_trace_id", +] diff --git a/poetry.lock b/poetry.lock index c178b0c694c..905c852476c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -678,7 +678,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "8.3.9" +version = "8.4.0" description = "Documentation that simply works" category = "dev" optional = false @@ -774,6 +774,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-logs" +version = "1.24.36.post1" +description = "Type annotations for 
boto3.CloudWatchLogs 1.24.36 service generated with mypy-boto3-builder 7.10.0" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-boto3-s3" version = "1.24.36.post1" @@ -950,12 +961,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.12.0" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false python-versions = ">=3.6" +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pymdown-extensions" version = "9.5" @@ -1254,7 +1268,7 @@ python-versions = ">=3.6" [[package]] name = "types-requests" -version = "2.28.8" +version = "2.28.9" description = "Typing stubs for requests" category = "dev" optional = false @@ -1265,7 +1279,7 @@ types-urllib3 = "<1.27" [[package]] name = "types-urllib3" -version = "1.26.22" +version = "1.26.23" description = "Typing stubs for urllib3" category = "dev" optional = false @@ -1342,7 +1356,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "f9e26c18e24673e05314f2664f1442157e34b70ba4bdb9f912d149df96003eb9" +content-hash = "77b3593db443d2972a854cf7eaf6643e33315d5da218933f360b33a2e3bb945d" [metadata.files] atomicwrites = [ @@ -1633,8 +1647,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash = "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"}, ] mkdocs-material = [ - {file = "mkdocs-material-8.3.9.tar.gz", hash = "sha256:dc82b667d2a83f0de581b46a6d0949732ab77e7638b87ea35b770b33bc02e75a"}, - {file = "mkdocs_material-8.3.9-py2.py3-none-any.whl", hash = "sha256:263f2721f3abe533b61f7c8bed435a0462620912742c919821ac2d698b4bfe67"}, + {file = "mkdocs-material-8.4.0.tar.gz", hash = 
"sha256:6c0a6e6cda8b43956e0c562374588160af8110584a1444f422b1cfd91930f9c7"}, + {file = "mkdocs_material-8.4.0-py2.py3-none-any.whl", hash = "sha256:ef6641e1910d4f217873ac376b4594f3157dca3949901b88b4991ba8e5477577"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, @@ -1685,6 +1699,10 @@ mypy-boto3-lambda = [ {file = "mypy-boto3-lambda-1.24.0.tar.gz", hash = "sha256:ab425f941d0d50a2b8a20cc13cebe03c3097b122259bf00e7b295d284814bd6f"}, {file = "mypy_boto3_lambda-1.24.0-py3-none-any.whl", hash = "sha256:a286a464513adf50847bda8573f2dc7adc348234827d1ac0200e610ee9a09b80"}, ] +mypy-boto3-logs = [ + {file = "mypy-boto3-logs-1.24.36.post1.tar.gz", hash = "sha256:8b00c2d5328e72023b1d1acd65e7cea7854f07827d23ce21c78391ca74271290"}, + {file = "mypy_boto3_logs-1.24.36.post1-py3-none-any.whl", hash = "sha256:f96257ec06099bfda1ce5f35b410e7fb93fb601bc312e8d7a09b13adaefd23f0"}, +] mypy-boto3-s3 = [ {file = "mypy-boto3-s3-1.24.36.post1.tar.gz", hash = "sha256:3bd7e06f9ade5059eae2181d7a9f1a41e7fa807ad3e94c01c9901838e87e0abe"}, {file = "mypy_boto3_s3-1.24.36.post1-py3-none-any.whl", hash = "sha256:30ae59b33c55f8b7b693170f9519ea5b91a2fbf31a73de79cdef57a27d784e5a"}, @@ -1785,8 +1803,8 @@ pyflakes = [ {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] pymdown-extensions = [ {file = 
"pymdown_extensions-9.5-py3-none-any.whl", hash = "sha256:ec141c0f4983755349f0c8710416348d1a13753976c028186ed14f190c8061c4"}, @@ -2007,12 +2025,12 @@ typed-ast = [ {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] types-requests = [ - {file = "types-requests-2.28.8.tar.gz", hash = "sha256:7a9f7b152d594a1c18dd4932cdd2596b8efbeedfd73caa4e4abb3755805b4685"}, - {file = "types_requests-2.28.8-py3-none-any.whl", hash = "sha256:b0421f9f2d0dd0f8df2c75f974686517ca67473f05b466232d4c6384d765ad7a"}, + {file = "types-requests-2.28.9.tar.gz", hash = "sha256:feaf581bd580497a47fe845d506fa3b91b484cf706ff27774e87659837de9962"}, + {file = "types_requests-2.28.9-py3-none-any.whl", hash = "sha256:86cb66d3de2f53eac5c09adc42cf6547eefbd0c7e1210beca1ee751c35d96083"}, ] types-urllib3 = [ - {file = "types-urllib3-1.26.22.tar.gz", hash = "sha256:b05af90e73889e688094008a97ca95788db8bf3736e2776fd43fb6b171485d94"}, - {file = "types_urllib3-1.26.22-py3-none-any.whl", hash = "sha256:09a8783e1002472e8d1e1f3792d4c5cca1fffebb9b48ee1512aae6d16fe186bc"}, + {file = "types-urllib3-1.26.23.tar.gz", hash = "sha256:b78e819f0e350221d0689a5666162e467ba3910737bafda14b5c2c85e9bb1e56"}, + {file = "types_urllib3-1.26.23-py3-none-any.whl", hash = "sha256:333e675b188a1c1fd980b4b352f9e40572413a4c1ac689c23cd546e96310070a"}, ] typing-extensions = [ {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, diff --git a/pyproject.toml b/pyproject.toml index 481652d9c30..ae6e1a5d56a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,6 +65,7 @@ mypy-boto3-lambda = "^1.24.0" mypy-boto3-xray = "^1.24.0" mypy-boto3-s3 = { version = "^1.24.0", python = ">=3.7" } mypy-boto3-cloudformation = { version = "^1.24.0", python = ">=3.7" } +mypy-boto3-logs = { version = "^1.24.0", python = ">=3.7" } types-requests = "^2.28.8" typing-extensions = { version = "^4.3.0", python = ">=3.7" } 
python-snappy = "^0.6.1" diff --git a/tests/e2e/logger/conftest.py b/tests/e2e/logger/conftest.py new file mode 100644 index 00000000000..201a5f7dca1 --- /dev/null +++ b/tests/e2e/logger/conftest.py @@ -0,0 +1,25 @@ +import pytest + +from tests.e2e.logger.infrastructure import LoggerStack +from tests.e2e.utils.infrastructure import deploy_once + + +@pytest.fixture(autouse=True, scope="module") +def infrastructure(request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory, worker_id: str): + """Setup and teardown logic for E2E test infrastructure + + Parameters + ---------- + request : pytest.FixtureRequest + pytest request fixture to introspect absolute path to test being executed + tmp_path_factory : pytest.TempPathFactory + pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up + worker_id : str + pytest-xdist worker identification to detect whether parallelization is enabled + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + yield from deploy_once(stack=LoggerStack, request=request, tmp_path_factory=tmp_path_factory, worker_id=worker_id) diff --git a/tests/e2e/logger/handlers/basic_handler.py b/tests/e2e/logger/handlers/basic_handler.py index 34d7fb4678a..0f0dd46b4aa 100644 --- a/tests/e2e/logger/handlers/basic_handler.py +++ b/tests/e2e/logger/handlers/basic_handler.py @@ -1,17 +1,11 @@ -import os - from aws_lambda_powertools import Logger logger = Logger() -MESSAGE = os.environ["MESSAGE"] -ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"] - -@logger.inject_lambda_context(log_event=True) +@logger.inject_lambda_context def lambda_handler(event, context): - logger.debug(MESSAGE) - logger.info(MESSAGE) - logger.append_keys(**{ADDITIONAL_KEY: "test"}) - logger.info(MESSAGE) + message, append_keys = event.get("message", ""), event.get("append_keys", {}) + logger.append_keys(**append_keys) + logger.info(message) return "success" diff --git 
a/tests/e2e/logger/handlers/no_context_handler.py b/tests/e2e/logger/handlers/no_context_handler.py deleted file mode 100644 index 1347ba98d81..00000000000 --- a/tests/e2e/logger/handlers/no_context_handler.py +++ /dev/null @@ -1,14 +0,0 @@ -import os - -from aws_lambda_powertools import Logger - -logger = Logger() - -MESSAGE = os.environ["MESSAGE"] -ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"] - - -def lambda_handler(event, context): - logger.info(MESSAGE) - logger.append_keys(**{ADDITIONAL_KEY: "test"}) - return "success" diff --git a/tests/e2e/logger/infrastructure.py b/tests/e2e/logger/infrastructure.py new file mode 100644 index 00000000000..76595908206 --- /dev/null +++ b/tests/e2e/logger/infrastructure.py @@ -0,0 +1,11 @@ +from pathlib import Path + +from tests.e2e.utils.infrastructure import BaseInfrastructureV2 + + +class LoggerStack(BaseInfrastructureV2): + def __init__(self, handlers_dir: Path, feature_name: str = "logger") -> None: + super().__init__(feature_name, handlers_dir) + + def create_resources(self): + self.create_lambda_functions() diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 992cf779275..e5c27dd0a8f 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -1,143 +1,37 @@ -import boto3 +import json +from uuid import uuid4 + import pytest -from e2e import conftest +from aws_lambda_powertools.shared.constants import LOGGER_LAMBDA_CONTEXT_KEYS from tests.e2e.utils import data_fetcher -@pytest.fixture(scope="module") -def config() -> conftest.LambdaConfig: - return { - "parameters": {}, - "environment_variables": { - "MESSAGE": "logger message test", - "LOG_LEVEL": "INFO", - "ADDITIONAL_KEY": "extra_info", - }, - } - - -def test_basic_lambda_logs_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): - # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = 
execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") +@pytest.fixture +def basic_handler_fn(infrastructure: dict) -> str: + return infrastructure.get("BasicHandler", "") - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - # THEN - assert any( - log.message == config["environment_variables"]["MESSAGE"] - and log.level == config["environment_variables"]["LOG_LEVEL"] - for log in filtered_logs - ) +@pytest.fixture +def basic_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("BasicHandlerArn", "") -def test_basic_lambda_no_debug_logs_visible( - execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig -): +def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn): # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") + message = "logs should be visible with default settings" + custom_key = "order_id" + additional_keys = {custom_key: f"{uuid4()}"} + payload = json.dumps({"message": message, "append_keys": additional_keys}) # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload) + data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload) # THEN - assert not any( - log.message == config["environment_variables"]["MESSAGE"] and log.level == "DEBUG" for log in filtered_logs - ) - - -def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.InfrastructureOutput): - # GIVEN - required_keys = ( - "xray_trace_id", - "function_request_id", - "function_arn", - "function_memory_size", - "function_name", - "cold_start", - ) - - lambda_name = 
execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) - - -def test_basic_lambda_additional_key_persistence_basic_lambda( - execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig -): - # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert any( - log.extra_info - and log.message == config["environment_variables"]["MESSAGE"] - and log.level == config["environment_variables"]["LOG_LEVEL"] - for log in filtered_logs - ) - - -def test_basic_lambda_empty_event_logged(execute_lambda: conftest.InfrastructureOutput): + logs = data_fetcher.get_logs(function_name=basic_handler_fn, start_time=execution_time) - # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert any(log.message == {} for log in filtered_logs) - - -def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.InfrastructureOutput): - - # GIVEN - required_missing_keys = ( - "function_request_id", - "function_arn", - "function_memory_size", - "function_name", - "cold_start", - ) - - lambda_name = 
execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert not any(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_missing_keys) - - -def test_no_context_lambda_event_not_logged(execute_lambda: conftest.InfrastructureOutput): - - # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert not any(log.message == {} for log in filtered_logs) + assert len(logs) == 2 + assert len(logs.get_cold_start_log()) == 1 + assert len(logs.get_log(key=custom_key)) == 2 + assert logs.have_keys(*LOGGER_LAMBDA_CONTEXT_KEYS) is True diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index 01d1ba2fbf1..516f93ac1f0 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -39,13 +39,12 @@ def test_basic_lambda_metric_is_visible(basic_handler_fn: str, basic_handler_fn_ event = json.dumps({"metrics": metrics, "service": service, "namespace": METRIC_NAMESPACE}) _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=event) - my_metrics = data_fetcher.get_metrics( + metric_values = data_fetcher.get_metrics( namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions ) # THEN - metric_data = my_metrics.get("Values", []) - assert metric_data and metric_data[0] == 3.0 + assert metric_values == [3.0] def test_cold_start_metric(cold_start_fn_arn: str, 
cold_start_fn: str): @@ -58,12 +57,11 @@ def test_cold_start_metric(cold_start_fn_arn: str, cold_start_fn: str): event = json.dumps({"service": service, "namespace": METRIC_NAMESPACE}) _, execution_time = data_fetcher.get_lambda_response(lambda_arn=cold_start_fn_arn, payload=event) - _, _ = data_fetcher.get_lambda_response(lambda_arn=cold_start_fn_arn, payload=event) + data_fetcher.get_lambda_response(lambda_arn=cold_start_fn_arn, payload=event) - my_metrics = data_fetcher.get_metrics( + metric_values = data_fetcher.get_metrics( namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions ) # THEN - metric_data = my_metrics.get("Values", []) - assert metric_data and metric_data[0] == 1.0 + assert metric_values == [1.0] diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index e8211eeea30..a005009f5f5 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -1,13 +1,14 @@ import json -from functools import lru_cache +from datetime import datetime from typing import List, Optional, Union -from mypy_boto3_cloudwatch import CloudWatchClient -from pydantic import BaseModel +import boto3 +from mypy_boto3_logs import CloudWatchLogsClient +from pydantic import BaseModel, Extra from retry import retry -class Log(BaseModel): +class Log(BaseModel, extra=Extra.allow): level: str location: str message: Union[dict, str] @@ -19,21 +20,129 @@ class Log(BaseModel): function_arn: Optional[str] function_request_id: Optional[str] xray_trace_id: Optional[str] - extra_info: Optional[str] - - -@lru_cache(maxsize=10, typed=False) -@retry(ValueError, delay=1, jitter=1, tries=20) -def get_logs(lambda_function_name: str, log_client: CloudWatchClient, start_time: int, **kwargs: dict) -> List[Log]: - response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time) - if not response["events"]: - raise ValueError("Empty response 
from Cloudwatch Logs. Repeating...") - filtered_logs = [] - for event in response["events"]: - try: - message = Log(**json.loads(event["message"])) - except json.decoder.JSONDecodeError: - continue - filtered_logs.append(message) - - return filtered_logs + + +class LogFetcher: + def __init__( + self, + function_name: str, + start_time: datetime, + log_client: Optional[CloudWatchLogsClient] = None, + filter_expression: Optional[str] = None, + ): + """Fetch and expose Powertools Logger logs from CloudWatch Logs + + Parameters + ---------- + function_name : str + Name of Lambda function to fetch logs for + start_time : datetime + Start date range to filter traces + log_client : Optional[CloudWatchLogsClient], optional + Amazon CloudWatch Logs Client, by default boto3.client('logs') + filter_expression : Optional[str], optional + CloudWatch Logs Filter Pattern expression, by default "message" + """ + self.function_name = function_name + self.start_time = int(start_time.timestamp()) + self.log_client = log_client or boto3.client("logs") + self.filter_expression = filter_expression or "message" # Logger message key + self.log_group = f"/aws/lambda/{self.function_name}" + self.logs: List[Log] = self._get_logs() + + def get_log(self, key: str, value: Optional[any] = None) -> List[Log]: + """Get logs based on key or key and value + + Parameters + ---------- + key : str + Log key name + value : Optional[any], optional + Log value, by default None + + Returns + ------- + List[Log] + List of Log instances + """ + logs = [] + for log in self.logs: + log_value = getattr(log, key, None) + if value is not None and log_value == value: + logs.append(log) + elif value is None and hasattr(log, key): + logs.append(log) + return logs + + def get_cold_start_log(self) -> List[Log]: + """Get logs where cold start was true + + Returns + ------- + List[Log] + List of Log instances + """ + return [log for log in self.logs if log.cold_start] + + def have_keys(self, *keys) -> bool: + """Whether 
an arbitrary number of key names exist in each log event + + Returns + ------- + bool + Whether keys are present + """ + return all(hasattr(log, key) for log in self.logs for key in keys) + + def _get_logs(self) -> List[Log]: + ret = self.log_client.filter_log_events( + logGroupName=self.log_group, + startTime=self.start_time, + filterPattern=self.filter_expression, + ) + + if not ret["events"]: + raise ValueError("Empty response from Cloudwatch Logs. Repeating...") + + filtered_logs = [] + for event in ret["events"]: + try: + message = Log(**json.loads(event["message"])) + except json.decoder.JSONDecodeError: + continue + filtered_logs.append(message) + + return filtered_logs + + def __len__(self) -> int: + return len(self.logs) + + +@retry(ValueError, delay=2, jitter=1.5, tries=10) +def get_logs( + function_name: str, + start_time: datetime, + filter_expression: Optional[str] = None, + log_client: Optional[CloudWatchLogsClient] = None, +) -> LogFetcher: + """Fetch and expose Powertools Logger logs from CloudWatch Logs + + Parameters + ---------- + function_name : str + Name of Lambda function to fetch logs for + start_time : datetime + Start date range to filter traces + log_client : Optional[CloudWatchLogsClient], optional + Amazon CloudWatch Logs Client, by default boto3.client('logs') + filter_expression : Optional[str], optional + CloudWatch Logs Filter Pattern expression, by default "message" + + Returns + ------- + LogFetcher + LogFetcher instance with logs available as properties and methods + """ + return LogFetcher( + function_name=function_name, start_time=start_time, filter_expression=filter_expression, log_client=log_client + ) diff --git a/tests/e2e/utils/data_fetcher/metrics.py b/tests/e2e/utils/data_fetcher/metrics.py index 5a017f0a845..18023b18336 100644 --- a/tests/e2e/utils/data_fetcher/metrics.py +++ b/tests/e2e/utils/data_fetcher/metrics.py @@ -3,7 +3,7 @@ import boto3 from mypy_boto3_cloudwatch import CloudWatchClient -from mypy_boto3_cloudwatch.type_defs import DimensionTypeDef, 
MetricDataResultTypeDef +from mypy_boto3_cloudwatch.type_defs import DimensionTypeDef from retry import retry from tests.e2e.utils.data_builder import build_metric_query_data @@ -19,7 +19,7 @@ def get_metrics( end_date: Optional[datetime] = None, period: int = 60, stat: str = "Sum", -) -> MetricDataResultTypeDef: +) -> List[float]: """Fetch CloudWatch Metrics It takes into account eventual consistency with up to 10 retries and 1.5s jitter. @@ -45,8 +45,8 @@ def get_metrics( Returns ------- - MetricDataResultTypeDef - Dict with metric values found + List[float] + List with metric values found Raises ------ @@ -65,7 +65,7 @@ def get_metrics( StartTime=start_date, EndTime=end_date or datetime.utcnow(), ) - result = response["MetricDataResults"][0] - if not result["Values"]: + result = response["MetricDataResults"][0]["Values"] + if not result: raise ValueError("Empty response from Cloudwatch. Repeating...") return result