diff --git a/Makefile b/Makefile index da43c1de67..e14ccf8dde 100644 --- a/Makefile +++ b/Makefile @@ -83,3 +83,6 @@ release: pr changelog: @echo "[+] Pre-generating CHANGELOG for tag: $$(git describe --abbrev=0 --tag)" docker run -v "${PWD}":/workdir quay.io/git-chglog/git-chglog $$(git describe --abbrev=0 --tag).. > TMP_CHANGELOG.md + +mypy: + poetry run mypy aws_lambda_powertools diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 391b1e4a2c..8b6c368af3 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -1,7 +1,9 @@ import base64 import json import logging +import os import re +import traceback import zlib from enum import Enum from http import HTTPStatus @@ -9,6 +11,8 @@ from aws_lambda_powertools.event_handler import content_types from aws_lambda_powertools.event_handler.exceptions import ServiceError +from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.shared.functions import resolve_truthy_env_var_choice from aws_lambda_powertools.shared.json_encoder import Encoder from aws_lambda_powertools.utilities.data_classes import ALBEvent, APIGatewayProxyEvent, APIGatewayProxyEventV2 from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent @@ -28,43 +32,46 @@ class ProxyEventType(Enum): class CORSConfig(object): """CORS Config - Examples -------- Simple cors example using the default permissive cors, not this should only be used during early prototyping - from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver + ```python + from aws_lambda_powertools.event_handler.api_gateway import ApiGatewayResolver - app = ApiGatewayResolver() + app = ApiGatewayResolver() - @app.get("/my/path", cors=True) - def with_cors(): - return {"message": "Foo"} + @app.get("/my/path", cors=True) + def with_cors(): + return {"message": "Foo"} + ``` Using a custom CORSConfig where 
`with_cors` used the custom provided CORSConfig and `without_cors` do not include any cors headers. - from aws_lambda_powertools.event_handler.api_gateway import ( - ApiGatewayResolver, CORSConfig - ) - - cors_config = CORSConfig( - allow_origin="https://wwww.example.com/", - expose_headers=["x-exposed-response-header"], - allow_headers=["x-custom-request-header"], - max_age=100, - allow_credentials=True, - ) - app = ApiGatewayResolver(cors=cors_config) - - @app.get("/my/path") - def with_cors(): - return {"message": "Foo"} + ```python + from aws_lambda_powertools.event_handler.api_gateway import ( + ApiGatewayResolver, CORSConfig + ) + + cors_config = CORSConfig( + allow_origin="https://wwww.example.com/", + expose_headers=["x-exposed-response-header"], + allow_headers=["x-custom-request-header"], + max_age=100, + allow_credentials=True, + ) + app = ApiGatewayResolver(cors=cors_config) + + @app.get("/my/path") + def with_cors(): + return {"message": "Foo"} - @app.get("/another-one", cors=False) - def without_cors(): - return {"message": "Foo"} + @app.get("/another-one", cors=False) + def without_cors(): + return {"message": "Foo"} + ``` """ _REQUIRED_HEADERS = ["Authorization", "Content-Type", "X-Amz-Date", "X-Api-Key", "X-Amz-Security-Token"] @@ -119,7 +126,11 @@ class Response: """Response data class that provides greater control over what is returned from the proxy event""" def __init__( - self, status_code: int, content_type: Optional[str], body: Union[str, bytes, None], headers: Dict = None + self, + status_code: int, + content_type: Optional[str], + body: Union[str, bytes, None], + headers: Optional[Dict] = None, ): """ @@ -160,7 +171,7 @@ def __init__( class ResponseBuilder: """Internally used Response builder""" - def __init__(self, response: Response, route: Route = None): + def __init__(self, response: Response, route: Optional[Route] = None): self.response = response self.route = route @@ -192,7 +203,7 @@ def _route(self, event: BaseProxyEvent, cors: 
Optional[CORSConfig]): if self.route.compress and "gzip" in (event.get_header_value("accept-encoding", "") or ""): self._compress() - def build(self, event: BaseProxyEvent, cors: CORSConfig = None) -> Dict[str, Any]: + def build(self, event: BaseProxyEvent, cors: Optional[CORSConfig] = None) -> Dict[str, Any]: """Build the full response dict to be returned by the lambda""" self._route(event, cors) @@ -240,7 +251,12 @@ def lambda_handler(event, context): current_event: BaseProxyEvent lambda_context: LambdaContext - def __init__(self, proxy_type: Enum = ProxyEventType.APIGatewayProxyEvent, cors: CORSConfig = None): + def __init__( + self, + proxy_type: Enum = ProxyEventType.APIGatewayProxyEvent, + cors: Optional[CORSConfig] = None, + debug: Optional[bool] = None, + ): """ Parameters ---------- @@ -248,14 +264,20 @@ def __init__(self, proxy_type: Enum = ProxyEventType.APIGatewayProxyEvent, cors: Proxy request type, defaults to API Gateway V1 cors: CORSConfig Optionally configure and enabled CORS. Not each route will need to have to cors=True + debug: Optional[bool] + Enables debug mode, by default False. 
Can be also be enabled by "POWERTOOLS_EVENT_HANDLER_DEBUG" + environment variable """ self._proxy_type = proxy_type self._routes: List[Route] = [] self._cors = cors self._cors_enabled: bool = cors is not None self._cors_methods: Set[str] = {"OPTIONS"} + self._debug = resolve_truthy_env_var_choice( + env=os.getenv(constants.EVENT_HANDLER_DEBUG_ENV, "false"), choice=debug + ) - def get(self, rule: str, cors: bool = None, compress: bool = False, cache_control: str = None): + def get(self, rule: str, cors: Optional[bool] = None, compress: bool = False, cache_control: Optional[str] = None): """Get route decorator with GET `method` Examples @@ -280,7 +302,7 @@ def lambda_handler(event, context): """ return self.route(rule, "GET", cors, compress, cache_control) - def post(self, rule: str, cors: bool = None, compress: bool = False, cache_control: str = None): + def post(self, rule: str, cors: Optional[bool] = None, compress: bool = False, cache_control: Optional[str] = None): """Post route decorator with POST `method` Examples @@ -306,7 +328,7 @@ def lambda_handler(event, context): """ return self.route(rule, "POST", cors, compress, cache_control) - def put(self, rule: str, cors: bool = None, compress: bool = False, cache_control: str = None): + def put(self, rule: str, cors: Optional[bool] = None, compress: bool = False, cache_control: Optional[str] = None): """Put route decorator with PUT `method` Examples @@ -332,7 +354,9 @@ def lambda_handler(event, context): """ return self.route(rule, "PUT", cors, compress, cache_control) - def delete(self, rule: str, cors: bool = None, compress: bool = False, cache_control: str = None): + def delete( + self, rule: str, cors: Optional[bool] = None, compress: bool = False, cache_control: Optional[str] = None + ): """Delete route decorator with DELETE `method` Examples @@ -357,7 +381,9 @@ def lambda_handler(event, context): """ return self.route(rule, "DELETE", cors, compress, cache_control) - def patch(self, rule: str, cors: bool = 
None, compress: bool = False, cache_control: str = None): + def patch( + self, rule: str, cors: Optional[bool] = None, compress: bool = False, cache_control: Optional[str] = None + ): """Patch route decorator with PATCH `method` Examples @@ -385,7 +411,14 @@ def lambda_handler(event, context): """ return self.route(rule, "PATCH", cors, compress, cache_control) - def route(self, rule: str, method: str, cors: bool = None, compress: bool = False, cache_control: str = None): + def route( + self, + rule: str, + method: str, + cors: Optional[bool] = None, + compress: bool = False, + cache_control: Optional[str] = None, + ): """Route decorator includes parameter `method`""" def register_resolver(func: Callable): @@ -416,6 +449,8 @@ def resolve(self, event, context) -> Dict[str, Any]: dict Returns the dict response """ + if self._debug: + print(self._json_dump(event)) self.current_event = self._to_proxy_event(event) self.lambda_context = context return self._resolve().build(self.current_event, self._cors) @@ -489,6 +524,19 @@ def _call_route(self, route: Route, args: Dict[str, str]) -> ResponseBuilder: ), route, ) + except Exception: + if self._debug: + # If the user has turned on debug mode, + # we'll let the original exception propagate so + # they get more information about what went wrong. 
+ return ResponseBuilder( + Response( + status_code=500, + content_type=content_types.TEXT_PLAIN, + body="".join(traceback.format_exc()), + ) + ) + raise def _to_response(self, result: Union[Dict, Response]) -> Response: """Convert the route's result to a Response @@ -509,7 +557,9 @@ def _to_response(self, result: Union[Dict, Response]) -> Response: body=self._json_dump(result), ) - @staticmethod - def _json_dump(obj: Any) -> str: - """Does a concise json serialization""" - return json.dumps(obj, separators=(",", ":"), cls=Encoder) + def _json_dump(self, obj: Any) -> str: + """Does a concise json serialization or pretty print when in debug mode""" + if self._debug: + return json.dumps(obj, indent=4, cls=Encoder) + else: + return json.dumps(obj, separators=(",", ":"), cls=Encoder) diff --git a/aws_lambda_powertools/event_handler/appsync.py b/aws_lambda_powertools/event_handler/appsync.py index 021afaa665..7f4cce5c8b 100644 --- a/aws_lambda_powertools/event_handler/appsync.py +++ b/aws_lambda_powertools/event_handler/appsync.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Callable +from typing import Any, Callable, Optional from aws_lambda_powertools.utilities.data_classes import AppSyncResolverEvent from aws_lambda_powertools.utilities.typing import LambdaContext @@ -44,7 +44,7 @@ def common_field() -> str: def __init__(self): self._resolvers: dict = {} - def resolver(self, type_name: str = "*", field_name: str = None): + def resolver(self, type_name: str = "*", field_name: Optional[str] = None): """Registers the resolver for field_name Parameters diff --git a/aws_lambda_powertools/event_handler/content_types.py b/aws_lambda_powertools/event_handler/content_types.py index 00ec3db168..0f55b1088a 100644 --- a/aws_lambda_powertools/event_handler/content_types.py +++ b/aws_lambda_powertools/event_handler/content_types.py @@ -1,4 +1,5 @@ # use mimetypes library to be certain, e.g., mimetypes.types_map[".json"] APPLICATION_JSON = "application/json" -PLAIN_TEXT 
= "text/plain" +TEXT_PLAIN = "text/plain" +TEXT_HTML = "text/html" diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index 7ff9881062..de9254a337 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -60,8 +60,8 @@ def __init__( json_serializer: Optional[Callable[[Dict], str]] = None, json_deserializer: Optional[Callable[[Dict], str]] = None, json_default: Optional[Callable[[Any], Any]] = None, - datefmt: str = None, - log_record_order: List[str] = None, + datefmt: Optional[str] = None, + log_record_order: Optional[List[str]] = None, utc: bool = False, **kwargs ): diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 689409d981..8ac911d4ca 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -4,7 +4,7 @@ import os import random import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union import jmespath @@ -167,11 +167,11 @@ class Logger(logging.Logger): # lgtm [py/missing-call-to-init] def __init__( self, - service: str = None, - level: Union[str, int] = None, + service: Optional[str] = None, + level: Union[str, int, None] = None, child: bool = False, - sampling_rate: float = None, - stream: sys.stdout = None, + sampling_rate: Optional[float] = None, + stream: Optional[IO[str]] = None, logger_formatter: Optional[PowertoolsFormatter] = None, logger_handler: Optional[logging.Handler] = None, **kwargs, @@ -261,10 +261,10 @@ def _configure_sampling(self): def inject_lambda_context( self, - lambda_handler: Callable[[Dict, Any], Any] = None, - log_event: bool = None, - correlation_id_path: str = None, - clear_state: bool = False, + lambda_handler: Optional[Callable[[Dict, Any], Any]] = None, + log_event: Optional[bool] = None, + correlation_id_path: Optional[str] = 
None, + clear_state: Optional[bool] = False, ): """Decorator to capture Lambda contextual info and inject into logger @@ -324,7 +324,7 @@ def handler(event, context): ) log_event = resolve_truthy_env_var_choice( - choice=log_event, env=os.getenv(constants.LOGGER_LOG_EVENT_ENV, "false") + env=os.getenv(constants.LOGGER_LOG_EVENT_ENV, "false"), choice=log_event ) @functools.wraps(lambda_handler) @@ -363,7 +363,7 @@ def registered_handler(self) -> logging.Handler: @property def registered_formatter(self) -> Optional[PowertoolsFormatter]: """Convenience property to access logger formatter""" - return self.registered_handler.formatter + return self.registered_handler.formatter # type: ignore def structure_logs(self, append: bool = False, **keys): """Sets logging formatting to JSON. @@ -384,7 +384,7 @@ def structure_logs(self, append: bool = False, **keys): self.append_keys(**keys) else: log_keys = {**self._default_log_keys, **keys} - formatter = self.logger_formatter or LambdaPowertoolsFormatter(**log_keys) + formatter = self.logger_formatter or LambdaPowertoolsFormatter(**log_keys) # type: ignore self.registered_handler.setFormatter(formatter) def set_correlation_id(self, value: str): @@ -421,7 +421,9 @@ def _get_caller_filename(): def set_package_logger( - level: Union[str, int] = logging.DEBUG, stream: sys.stdout = None, formatter: logging.Formatter = None + level: Union[str, int] = logging.DEBUG, + stream: Optional[IO[str]] = None, + formatter: Optional[logging.Formatter] = None, ): """Set an additional stream handler, formatter, and log level for aws_lambda_powertools package logger. 
diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index dc4fe34ee1..853f06f210 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -5,7 +5,7 @@ import os from collections import defaultdict from enum import Enum -from typing import Any, Dict, List, Union +from typing import Any, Dict, List, Optional, Union from ..shared import constants from ..shared.functions import resolve_env_var_choice @@ -76,11 +76,11 @@ class MetricManager: def __init__( self, - metric_set: Dict[str, Any] = None, - dimension_set: Dict = None, - namespace: str = None, - metadata_set: Dict[str, Any] = None, - service: str = None, + metric_set: Optional[Dict[str, Any]] = None, + dimension_set: Optional[Dict] = None, + namespace: Optional[str] = None, + metadata_set: Optional[Dict[str, Any]] = None, + service: Optional[str] = None, ): self.metric_set = metric_set if metric_set is not None else {} self.dimension_set = dimension_set if dimension_set is not None else {} @@ -136,7 +136,9 @@ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float): # since we could have more than 100 metrics self.metric_set.clear() - def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None, metadata: Dict = None) -> Dict: + def serialize_metric_set( + self, metrics: Optional[Dict] = None, dimensions: Optional[Dict] = None, metadata: Optional[Dict] = None + ) -> Dict: """Serializes metric and dimensions set Parameters diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py index 8bdd0d800b..1ac2bd9450 100644 --- a/aws_lambda_powertools/metrics/metric.py +++ b/aws_lambda_powertools/metrics/metric.py @@ -61,7 +61,7 @@ def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float): @contextmanager -def single_metric(name: str, unit: MetricUnit, value: float, namespace: str = None): +def single_metric(name: str, unit: MetricUnit, value: float, 
namespace: Optional[str] = None): """Context manager to simplify creation of a single metric Example diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index 8cc4895f03..fafc604b50 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -71,7 +71,7 @@ def lambda_handler(): _metadata: Dict[str, Any] = {} _default_dimensions: Dict[str, Any] = {} - def __init__(self, service: str = None, namespace: str = None): + def __init__(self, service: Optional[str] = None, namespace: Optional[str] = None): self.metric_set = self._metrics self.service = service self.namespace: Optional[str] = namespace @@ -125,10 +125,10 @@ def clear_metrics(self): def log_metrics( self, - lambda_handler: Callable[[Any, Any], Any] = None, + lambda_handler: Optional[Callable[[Any, Any], Any]] = None, capture_cold_start_metric: bool = False, raise_on_empty_metrics: bool = False, - default_dimensions: Dict[str, str] = None, + default_dimensions: Optional[Dict[str, str]] = None, ): """Decorator to serialize and publish metrics at the end of a function execution. diff --git a/aws_lambda_powertools/middleware_factory/factory.py b/aws_lambda_powertools/middleware_factory/factory.py index 7727705227..74858bf670 100644 --- a/aws_lambda_powertools/middleware_factory/factory.py +++ b/aws_lambda_powertools/middleware_factory/factory.py @@ -2,7 +2,7 @@ import inspect import logging import os -from typing import Callable +from typing import Callable, Optional from ..shared import constants from ..shared.functions import resolve_truthy_env_var_choice @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) -def lambda_handler_decorator(decorator: Callable = None, trace_execution: bool = None): +def lambda_handler_decorator(decorator: Optional[Callable] = None, trace_execution: Optional[bool] = None): """Decorator factory for decorating Lambda handlers. 
You can use lambda_handler_decorator to create your own middlewares, @@ -106,11 +106,11 @@ def lambda_handler(event, context): return functools.partial(lambda_handler_decorator, trace_execution=trace_execution) trace_execution = resolve_truthy_env_var_choice( - choice=trace_execution, env=os.getenv(constants.MIDDLEWARE_FACTORY_TRACE_ENV, "false") + env=os.getenv(constants.MIDDLEWARE_FACTORY_TRACE_ENV, "false"), choice=trace_execution ) @functools.wraps(decorator) - def final_decorator(func: Callable = None, **kwargs): + def final_decorator(func: Optional[Callable] = None, **kwargs): # If called with kwargs return new func with kwargs if func is None: return functools.partial(final_decorator, **kwargs) diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py index eaad5640df..8388eded65 100644 --- a/aws_lambda_powertools/shared/constants.py +++ b/aws_lambda_powertools/shared/constants.py @@ -10,11 +10,12 @@ METRICS_NAMESPACE_ENV: str = "POWERTOOLS_METRICS_NAMESPACE" +EVENT_HANDLER_DEBUG_ENV: str = "POWERTOOLS_EVENT_HANDLER_DEBUG" + SAM_LOCAL_ENV: str = "AWS_SAM_LOCAL" CHALICE_LOCAL_ENV: str = "AWS_CHALICE_CLI_MODE" SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME" XRAY_TRACE_ID_ENV: str = "_X_AMZN_TRACE_ID" - -XRAY_SDK_MODULE = "aws_xray_sdk" -XRAY_SDK_CORE_MODULE = "aws_xray_sdk.core" +XRAY_SDK_MODULE: str = "aws_xray_sdk" +XRAY_SDK_CORE_MODULE: str = "aws_xray_sdk.core" diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index b8f5cb9f74..0b117cc32b 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -2,14 +2,14 @@ from typing import Any, Optional, Union -def resolve_truthy_env_var_choice(env: Any, choice: bool = None) -> bool: +def resolve_truthy_env_var_choice(env: str, choice: Optional[bool] = None) -> bool: """Pick explicit choice over truthy env value, if available, otherwise return truthy env value NOTE: Environment 
variable should be resolved by the caller. Parameters ---------- - env : Any + env : str environment variable actual value choice : bool explicit choice diff --git a/aws_lambda_powertools/tracing/base.py b/aws_lambda_powertools/tracing/base.py index 1857ed52a7..722652ce08 100644 --- a/aws_lambda_powertools/tracing/base.py +++ b/aws_lambda_powertools/tracing/base.py @@ -2,11 +2,11 @@ import numbers import traceback from contextlib import contextmanager -from typing import Any, AsyncContextManager, ContextManager, List, NoReturn, Set, Union +from typing import Any, AsyncContextManager, ContextManager, List, NoReturn, Optional, Set, Union class BaseProvider(abc.ABC): - @abc.abstractmethod + @abc.abstractmethod # type: ignore @contextmanager def in_subsegment(self, name=None, **kwargs) -> ContextManager: """Return a subsegment context manger. @@ -19,7 +19,7 @@ def in_subsegment(self, name=None, **kwargs) -> ContextManager: Optional parameters to be propagated to segment """ - @abc.abstractmethod + @abc.abstractmethod # type: ignore @contextmanager def in_subsegment_async(self, name=None, **kwargs) -> AsyncContextManager: """Return a subsegment async context manger. @@ -81,7 +81,7 @@ class BaseSegment(abc.ABC): """Holds common properties and methods on segment and subsegment.""" @abc.abstractmethod - def close(self, end_time: int = None): + def close(self, end_time: Optional[int] = None): """Close the trace entity by setting `end_time` and flip the in progress flag to False. 
diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 60b0a3fb0f..5709b1956c 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -5,7 +5,7 @@ import logging import numbers import os -from typing import Any, Callable, Dict, Optional, Sequence, Union +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, TypeVar, Union, cast, overload from ..shared import constants from ..shared.functions import resolve_env_var_choice, resolve_truthy_env_var_choice @@ -18,52 +18,123 @@ aws_xray_sdk = LazyLoader(constants.XRAY_SDK_MODULE, globals(), constants.XRAY_SDK_MODULE) aws_xray_sdk.core = LazyLoader(constants.XRAY_SDK_CORE_MODULE, globals(), constants.XRAY_SDK_CORE_MODULE) - -LambdaHandlerT = Union[Callable[[Dict, Any], Any], Callable[[Dict, Any, Optional[Dict]], Any]] +AnyCallableT = TypeVar("AnyCallableT", bound=Callable[..., Any]) # noqa: VNE001 +AnyAwaitableT = TypeVar("AnyAwaitableT", bound=Awaitable) class Tracer: - """Tracer provides opinionated decorators to trace Lambda functions with AWS X-Ray # noqa E501 + """Tracer using AWS-XRay to provide decorators with known defaults for Lambda functions + + When running locally, it detects whether it's running via SAM CLI, + and if it is it returns dummy segments/subsegments instead. + + By default, it patches all available libraries supported by X-Ray SDK. Patching is + automatically disabled when running locally via SAM CLI or by any other means. \n + Ref: https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/thirdparty.html + + Tracer keeps a copy of its configuration as it can be instantiated more than once. This + is useful when you are using your own middlewares and want to utilize an existing Tracer. + Make sure to set `auto_patch=False` in subsequent Tracer instances to avoid double patching. + + Environment variables + --------------------- + POWERTOOLS_TRACE_DISABLED : str + disable tracer (e.g. 
`"true", "True", "TRUE"`) + POWERTOOLS_SERVICE_NAME : str + service name + POWERTOOLS_TRACER_CAPTURE_RESPONSE : str + disable auto-capture response as metadata (e.g. `"true", "True", "TRUE"`) + POWERTOOLS_TRACER_CAPTURE_ERROR : str + disable auto-capture error as metadata (e.g. `"true", "True", "TRUE"`) + + Parameters + ---------- + service: str + Service name that will be appended in all tracing metadata + auto_patch: bool + Patch existing imported modules during initialization, by default True + disabled: bool + Flag to explicitly disable tracing, useful when running/testing locally + `Env POWERTOOLS_TRACE_DISABLED="true"` + patch_modules: Optional[Sequence[str]] + Tuple of modules supported by tracing provider to patch, by default all modules are patched + provider: BaseProvider + Tracing provider, by default it is aws_xray_sdk.core.xray_recorder + + Returns + ------- + Tracer + Tracer instance with imported modules patched + + Example + ------- + **A Lambda function using Tracer** - By default, it patches all [available libraries supported by X-Ray SDK](https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/thirdparty.html). + from aws_lambda_powertools import Tracer + tracer = Tracer(service="greeting") - When running locally, it disables itself whether it's running via SAM CLI or Chalice. + @tracer.capture_method + def greeting(name: str) -> Dict: + return { + "name": name + } - Note: Reusing Tracer across the codebase. - Tracer keeps a copy of its configuration after the first initialization and reuses it across instances. + @tracer.capture_lambda_handler + def handler(event: dict, context: Any) -> Dict: + print("Received event from Lambda...") + response = greeting(name="Heitor") + return response - Additional instances can override configuration via the constructor. 
+ **Booking Lambda function using Tracer that adds additional annotation/metadata** - ## Environment variables + from aws_lambda_powertools import Tracer + tracer = Tracer(service="booking") - * `POWERTOOLS_TRACE_DISABLED`: disable tracer (`true`) - * `POWERTOOLS_SERVICE_NAME`: service name, (`payment`) - * `POWERTOOLS_TRACER_CAPTURE_RESPONSE`: disable auto-capture response as metadata, (`true`) - * `POWERTOOLS_TRACER_CAPTURE_ERROR`: disable auto-capture error as metadata, (`true`) + @tracer.capture_method + def confirm_booking(booking_id: str) -> Dict: + resp = add_confirmation(booking_id) - ## Examples + tracer.put_annotation("BookingConfirmation", resp["requestId"]) + tracer.put_metadata("Booking confirmation", resp) - ### Reuse an existing instance of Tracer across the codebase + return resp - ```python - # lambda_handler.py - from aws_lambda_powertools import Tracer + @tracer.capture_lambda_handler + def handler(event: dict, context: Any) -> Dict: + print("Received event from Lambda...") + booking_id = event.get("booking_id") + response = confirm_booking(booking_id=booking_id) + return response - tracer = Tracer(service="booking") + **A Lambda function using service name via POWERTOOLS_SERVICE_NAME** - @tracer.capture_lambda_handler - def handler(event: dict, context: Any) -> Dict: - ... + export POWERTOOLS_SERVICE_NAME="booking" + from aws_lambda_powertools import Tracer + tracer = Tracer() + + @tracer.capture_lambda_handler + def handler(event: dict, context: Any) -> Dict: + print("Received event from Lambda...") + response = greeting(name="Lessa") + return response - # utils.py - from aws_lambda_powertools import Tracer + **Reuse an existing instance of Tracer anywhere in the code** - tracer = Tracer(service="booking") - ... - ``` + # lambda_handler.py + from aws_lambda_powertools import Tracer + tracer = Tracer() - ## Limitations + @tracer.capture_lambda_handler + def handler(event: dict, context: Any) -> Dict: + ... 
+ # utils.py + from aws_lambda_powertools import Tracer + tracer = Tracer() + ... + + Limitations + ----------- * Async handler not supported """ @@ -84,24 +155,13 @@ def __init__( patch_modules: Optional[Sequence[str]] = None, provider: Optional[BaseProvider] = None, ): - """Tracer constructor - - Parameters: - - service (str): Service name that will be appended in all tracing metadata - auto_patch (bool): Patch existing imported modules during initialization, by default True - disabled (bool): Flag to explicitly disable tracing, useful when running/testing locally - patch_modules (Sequence[str]): List of modules supported by tracing provider to patch - provider (BaseProvider): Tracing provider, by default it is `aws_xray_sdk.core.xray_recorder` - - """ self.__build_config( service=service, disabled=disabled, auto_patch=auto_patch, patch_modules=patch_modules, provider=provider ) self.provider: BaseProvider = self._config["provider"] - self.disabled: bool = self._config["disabled"] - self.service: str = self._config["service"] - self.auto_patch: Optional[bool] = self._config["auto_patch"] + self.disabled = self._config["disabled"] + self.service = self._config["service"] + self.auto_patch = self._config["auto_patch"] if self.disabled: self._disable_tracer_provider() @@ -117,32 +177,19 @@ def __init__( def put_annotation(self, key: str, value: Union[str, numbers.Number, bool]): """Adds annotation to existing segment or subsegment - Parameters: - key (str): Annotation key - key (Union[str, numbers.Number, bool): Value for annotation - - ## Example - - ```python - from aws_lambda_powertools import Tracer - - tracer = Tracer(service="booking") - - @tracer.capture_method - def confirm_booking(booking_id: str) -> Dict: - resp = add_confirmation(booking_id) - tracer.put_annotation("BookingConfirmation", resp["requestId"]) - - return resp + Parameters + ---------- + key : str + Annotation key + value : Union[str, numbers.Number, bool] + Value for annotation - 
@tracer.capture_lambda_handler - def handler(event: dict, context: Any) -> Dict: - booking_id = event.get("booking_id", "") - tracer.put_annotation("BookingId", booking_id) - response = confirm_booking(booking_id=booking_id) + Example + ------- + Custom annotation for a pseudo service named payment - return response - ``` + tracer = Tracer(service="payment") + tracer.put_annotation("PaymentStatus", "CONFIRMED") """ if self.disabled: logger.debug("Tracing has been disabled, aborting put_annotation") @@ -151,36 +198,25 @@ def handler(event: dict, context: Any) -> Dict: logger.debug(f"Annotating on key '{key}' with '{value}'") self.provider.put_annotation(key=key, value=value) - def put_metadata(self, key: str, value: Any, namespace: str = None): + def put_metadata(self, key: str, value: Any, namespace: Optional[str] = None): """Adds metadata to existing segment or subsegment - Parameters: - - key (str): Metadata key - value (any): Value for metadata - namespace (str): Namespace that metadata will lie under, by default None - - ## Example - - ```python - from aws_lambda_powertools import Tracer - - tracer = Tracer(service="booking") - - @tracer.capture_method - def confirm_booking(booking_id: str) -> Dict: - resp = add_confirmation(booking_id) - tracer.put_metadata("Booking request metadata", resp["Metadata"]) - - return resp["booking"] - - @tracer.capture_lambda_handler - def handler(event: dict, context: Any) -> Dict: - booking_id = event.get("booking_id") - response = confirm_booking(booking_id=booking_id) + Parameters + ---------- + key : str + Metadata key + value : any + Value for metadata + namespace : str, optional + Namespace that metadata will lie under, by default None + + Example + ------- + Custom metadata for a pseudo service named payment - return response - ``` + tracer = Tracer(service="payment") + response = collect_payment() + tracer.put_metadata("Payment collection", response) """ if self.disabled: logger.debug("Tracing has been disabled, aborting 
put_metadata") @@ -190,14 +226,15 @@ def handler(event: dict, context: Any) -> Dict: logger.debug(f"Adding metadata on key '{key}' with '{value}' at namespace '{namespace}'") self.provider.put_metadata(key=key, value=value, namespace=namespace) - def patch(self, modules: Sequence[str] = None): + def patch(self, modules: Optional[Sequence[str]] = None): """Patch modules for instrumentation. Patches all supported modules by default if none are given. - Parameters: - - modules (Sequence[str]): List of modules to be patched, optional by default + Parameters + ---------- + modules : Optional[Sequence[str]] + List of modules to be patched, optional by default """ if self.disabled: logger.debug("Tracing has been disabled, aborting patch") @@ -210,30 +247,44 @@ def patch(self, modules: Sequence[str] = None): def capture_lambda_handler( self, - lambda_handler: LambdaHandlerT = None, + lambda_handler: Union[Callable[[Dict, Any], Any], Optional[Callable[[Dict, Any, Optional[Dict]], Any]]] = None, capture_response: Optional[bool] = None, capture_error: Optional[bool] = None, ): """Decorator to create subsegment for lambda handlers - By default, it automatically captures Lambda Handler's response or exception as metadata. 
+ As Lambda follows (event, context) signature we can remove some of the boilerplate + and also capture any exception any Lambda function throws or its response as metadata + + Parameters + ---------- + lambda_handler : Callable + Method to annotate on + capture_response : bool, optional + Instructs tracer to not include handler's response as metadata + capture_error : bool, optional + Instructs tracer to not include handler's error as metadata, by default True - Parameters: - lambda_handler (LambdaHandlerT): Lambda function's handler - capture_response (bool): Instructs tracer to not include handler's response as metadata - capture_error (bool): Instructs tracer to not include handler's error as metadata, by default `True` + Example + ------- + **Lambda function using capture_lambda_handler decorator** - ## Example + tracer = Tracer(service="payment") + @tracer.capture_lambda_handler + def handler(event, context): + ... - ```python - from aws_lambda_powertools import Tracer + **Preventing Tracer to log response as metadata** - tracer = Tracer(service="booking") + tracer = Tracer(service="payment") + @tracer.capture_lambda_handler(capture_response=False) + def handler(event, context): + ... - @tracer.capture_lambda_handler - def handler(event: dict, context: Any) -> Dict: - ... - ``` + Raises + ------ + err + Exception raised by method """ # If handler is None we've been called with parameters # Return a partial function with args filled @@ -281,149 +332,181 @@ def decorate(event, context, **kwargs): return decorate + # see #465 + @overload + def capture_method(self, method: "AnyCallableT") -> "AnyCallableT": + ... + + @overload def capture_method( - self, method: Callable = None, capture_response: Optional[bool] = None, capture_error: Optional[bool] = None - ): + self, + method: None = None, + capture_response: Optional[bool] = None, + capture_error: Optional[bool] = None, + ) -> Callable[["AnyCallableT"], "AnyCallableT"]: + ... 
+ + def capture_method( + self, + method: Optional[AnyCallableT] = None, + capture_response: Optional[bool] = None, + capture_error: Optional[bool] = None, + ) -> AnyCallableT: """Decorator to create subsegment for arbitrary functions - By default, it automatically captures response or exception as metadata in a subsegment named `## `. # noqa E501 + It also captures both response and exceptions as metadata + and creates a subsegment named `## ` - Warning: Running [async functions concurrently](https://docs.python.org/3/library/asyncio-task.html#id6) - Methods may impact each others subsegment and can trigger `AlreadyEndedException` from X-Ray due to async nature. + When running [async functions concurrently](https://docs.python.org/3/library/asyncio-task.html#id6), + methods may impact each others subsegment, and can trigger + an AlreadyEndedException from X-Ray due to async nature. - For this use case, either use `capture_method` only where`async.gather` is called, - or use `in_subsegment_async` context manager via our escape hatch mechanism - See examples. + For this use case, either use `capture_method` only where + `async.gather` is called, or use `in_subsegment_async` + context manager via our escape hatch mechanism - See examples. 
- Parameters: - method (Callable): Method to annotate on - capture_response (bool): Instructs tracer to not include method's response as metadata, by default `True` - capture_error (bool): Instructs tracer to not include handler's error as metadata, by default `True` + Parameters + ---------- + method : Callable + Method to annotate on + capture_response : bool, optional + Instructs tracer to not include method's response as metadata + capture_error : bool, optional + Instructs tracer to not include handler's error as metadata, by default True - ## Example + Example + ------- + **Custom function using capture_method decorator** - ```python - from aws_lambda_powertools import Tracer - tracer = Tracer(service="greeting") + tracer = Tracer(service="payment") + @tracer.capture_method + def some_function() - @tracer.capture_method - def greeting(name: str) -> Dict: - return { "name": name } + **Custom async method using capture_method decorator** - @tracer.capture_lambda_handler - def handler(event: dict, context: Any) -> Dict: - response = greeting(name="Heitor") + from aws_lambda_powertools import Tracer + tracer = Tracer(service="booking") - return response - ``` + @tracer.capture_method + async def confirm_booking(booking_id: str) -> Dict: + resp = call_to_booking_service() - **Tracing async method** + tracer.put_annotation("BookingConfirmation", resp["requestId"]) + tracer.put_metadata("Booking confirmation", resp) - ```python - from aws_lambda_powertools import Tracer - tracer = Tracer(service="booking") + return resp - @tracer.capture_method - async def confirm_booking(booking_id: str) -> Dict: - resp = call_to_booking_service() + def lambda_handler(event: dict, context: Any) -> Dict: + booking_id = event.get("booking_id") + asyncio.run(confirm_booking(booking_id=booking_id)) - tracer.put_annotation("BookingConfirmation", resp["requestId"]) - tracer.put_metadata("Booking confirmation", resp) + **Custom generator function using capture_method decorator** - return 
resp + from aws_lambda_powertools import Tracer + tracer = Tracer(service="booking") - def lambda_handler(event: dict, context: Any) -> Dict: - booking_id = event.get("booking_id") - asyncio.run(confirm_booking(booking_id=booking_id)) - ``` + @tracer.capture_method + def bookings_generator(booking_id): + resp = call_to_booking_service() + yield resp[0] + yield resp[1] - **Tracing generator function** + def lambda_handler(event: dict, context: Any) -> Dict: + gen = bookings_generator(booking_id=booking_id) + result = list(gen) - ```python - from aws_lambda_powertools import Tracer - tracer = Tracer(service="booking") + **Custom generator context manager using capture_method decorator** - @tracer.capture_method - def bookings_generator(booking_id): - resp = call_to_booking_service() - yield resp[0] - yield resp[1] + from aws_lambda_powertools import Tracer + tracer = Tracer(service="booking") - def lambda_handler(event: dict, context: Any) -> Dict: - gen = bookings_generator(booking_id=booking_id) - result = list(gen) - ``` + @tracer.capture_method + @contextlib.contextmanager + def booking_actions(booking_id): + resp = call_to_booking_service() + yield "example result" + cleanup_stuff() - **Tracing generator context manager** + def lambda_handler(event: dict, context: Any) -> Dict: + booking_id = event.get("booking_id") - ```python - from aws_lambda_powertools import Tracer - tracer = Tracer(service="booking") + with booking_actions(booking_id=booking_id) as booking: + result = booking - @tracer.capture_method - @contextlib.contextmanager - def booking_actions(booking_id): - resp = call_to_booking_service() - yield "example result" - cleanup_stuff() + **Tracing nested async calls** - def lambda_handler(event: dict, context: Any) -> Dict: - booking_id = event.get("booking_id") + from aws_lambda_powertools import Tracer + tracer = Tracer(service="booking") - with booking_actions(booking_id=booking_id) as booking: - result = booking - ``` + @tracer.capture_method + 
async def get_identity(): + ... - **Tracing nested async calls** + @tracer.capture_method + async def long_async_call(): + ... - ```python - from aws_lambda_powertools import Tracer - tracer = Tracer(service="booking") + @tracer.capture_method + async def async_tasks(): + await get_identity() + ret = await long_async_call() - @tracer.capture_method - async def get_identity(): - ... + return { "task": "done", **ret } - @tracer.capture_method - async def long_async_call(): - ... + **Safely tracing concurrent async calls with decorator** - @tracer.capture_method - async def async_tasks(): - await get_identity() - ret = await long_async_call() + This may not needed once [this bug is closed](https://github.com/aws/aws-xray-sdk-python/issues/164) - return { "task": "done", **ret } - ``` + from aws_lambda_powertools import Tracer + tracer = Tracer(service="booking") - **Safely tracing concurrent async calls with decorator** + async def get_identity(): + async with aioboto3.client("sts") as sts: + account = await sts.get_caller_identity() + return account - > This may not be needed once [this bug is closed](https://github.com/aws/aws-xray-sdk-python/issues/164) + async def long_async_call(): + ... - ```python - from aws_lambda_powertools import Tracer - tracer = Tracer(service="booking") + @tracer.capture_method + async def async_tasks(): + _, ret = await asyncio.gather(get_identity(), long_async_call(), return_exceptions=True) - async def get_identity(): - async with aioboto3.client("sts") as sts: - account = await sts.get_caller_identity() - return account + return { "task": "done", **ret } - async def long_async_call(): - ... 
+ **Safely tracing each concurrent async call with escape hatch** - @tracer.capture_method - async def async_tasks(): - _, ret = await asyncio.gather(get_identity(), long_async_call(), return_exceptions=True) + This may not be needed once [this bug is closed](https://github.com/aws/aws-xray-sdk-python/issues/164) + + from aws_lambda_powertools import Tracer + tracer = Tracer(service="booking") + + async def get_identity(): + async with tracer.provider.in_subsegment_async("## get_identity"): + ... + + async def long_async_call(): + async with tracer.provider.in_subsegment_async("## long_async_call"): + ... + + @tracer.capture_method + async def async_tasks(): + _, ret = await asyncio.gather(get_identity(), long_async_call(), return_exceptions=True) + + return { "task": "done", **ret } - return { "task": "done", **ret } - ``` + Raises + ------ + err + Exception raised by method """ # If method is None we've been called with parameters # Return a partial function with args filled if method is None: logger.debug("Decorator called with parameters") - return functools.partial( - self.capture_method, capture_response=capture_response, capture_error=capture_error + return cast( + AnyCallableT, + functools.partial(self.capture_method, capture_response=capture_response, capture_error=capture_error), ) method_name = f"{method.__name__}" @@ -444,7 +527,7 @@ async def async_tasks(): return self._decorate_generator_function( method=method, capture_response=capture_response, capture_error=capture_error, method_name=method_name ) - elif hasattr(method, "__wrapped__") and inspect.isgeneratorfunction(method.__wrapped__): + elif hasattr(method, "__wrapped__") and inspect.isgeneratorfunction(method.__wrapped__): # type: ignore return self._decorate_generator_function_with_context_manager( method=method, capture_response=capture_response, capture_error=capture_error, method_name=method_name ) @@ -458,7 +541,7 @@ def _decorate_async_function( method: Callable, capture_response: Optional[Union[bool, 
str]] = None, capture_error: Optional[Union[bool, str]] = None, - method_name: str = None, + method_name: Optional[str] = None, ): @functools.wraps(method) async def decorate(*args, **kwargs): @@ -485,7 +568,7 @@ def _decorate_generator_function( method: Callable, capture_response: Optional[Union[bool, str]] = None, capture_error: Optional[Union[bool, str]] = None, - method_name: str = None, + method_name: Optional[str] = None, ): @functools.wraps(method) def decorate(*args, **kwargs): @@ -512,7 +595,7 @@ def _decorate_generator_function_with_context_manager( method: Callable, capture_response: Optional[Union[bool, str]] = None, capture_error: Optional[Union[bool, str]] = None, - method_name: str = None, + method_name: Optional[str] = None, ): @functools.wraps(method) @contextlib.contextmanager @@ -537,11 +620,11 @@ def decorate(*args, **kwargs): def _decorate_sync_function( self, - method: Callable, + method: AnyCallableT, capture_response: Optional[Union[bool, str]] = None, capture_error: Optional[Union[bool, str]] = None, - method_name: str = None, - ): + method_name: Optional[str] = None, + ) -> AnyCallableT: @functools.wraps(method) def decorate(*args, **kwargs): with self.provider.in_subsegment(name=f"## {method_name}") as subsegment: @@ -563,13 +646,13 @@ def decorate(*args, **kwargs): return response - return decorate + return cast(AnyCallableT, decorate) def _add_response_as_metadata( self, - method_name: str = None, - data: Any = None, - subsegment: BaseSegment = None, + method_name: Optional[str] = None, + data: Optional[Any] = None, + subsegment: Optional[BaseSegment] = None, capture_response: Optional[Union[bool, str]] = None, ): """Add response as metadata for given subsegment @@ -652,11 +735,11 @@ def _is_tracer_disabled() -> Union[bool, str]: def __build_config( self, - service: str = None, - disabled: bool = None, - auto_patch: bool = None, - patch_modules: Sequence[str] = None, - provider: BaseProvider = None, + service: Optional[str] = None, + 
disabled: Optional[bool] = None, + auto_patch: Optional[bool] = None, + patch_modules: Optional[Sequence[str]] = None, + provider: Optional[BaseProvider] = None, ): """Populates Tracer config for new and existing initializations""" is_disabled = disabled if disabled is not None else self._is_tracer_disabled() diff --git a/aws_lambda_powertools/utilities/feature_toggles/__init__.py b/aws_lambda_powertools/utilities/feature_toggles/__init__.py new file mode 100644 index 0000000000..378f7e23f4 --- /dev/null +++ b/aws_lambda_powertools/utilities/feature_toggles/__init__.py @@ -0,0 +1,16 @@ +"""Advanced feature toggles utility +""" +from .appconfig_fetcher import AppConfigFetcher +from .configuration_store import ConfigurationStore +from .exceptions import ConfigurationException +from .schema import ACTION, SchemaValidator +from .schema_fetcher import SchemaFetcher + +__all__ = [ + "ConfigurationException", + "ConfigurationStore", + "ACTION", + "SchemaValidator", + "AppConfigFetcher", + "SchemaFetcher", +] diff --git a/aws_lambda_powertools/utilities/feature_toggles/appconfig_fetcher.py b/aws_lambda_powertools/utilities/feature_toggles/appconfig_fetcher.py new file mode 100644 index 0000000000..177d4ed0ae --- /dev/null +++ b/aws_lambda_powertools/utilities/feature_toggles/appconfig_fetcher.py @@ -0,0 +1,57 @@ +import logging +from typing import Any, Dict, Optional + +from botocore.config import Config + +from aws_lambda_powertools.utilities.parameters import AppConfigProvider, GetParameterError, TransformParameterError + +from .exceptions import ConfigurationException +from .schema_fetcher import SchemaFetcher + +logger = logging.getLogger(__name__) + + +TRANSFORM_TYPE = "json" + + +class AppConfigFetcher(SchemaFetcher): + def __init__( + self, + environment: str, + service: str, + configuration_name: str, + cache_seconds: int, + config: Optional[Config] = None, + ): + """This class fetches JSON schemas from AWS AppConfig + + Args: + environment (str): what appconfig 
environment to use 'dev/test' etc. + service (str): what service name to use from the supplied environment + configuration_name (str): what configuration to take from the environment & service combination + cache_seconds (int): cache expiration time, how often to call AppConfig to fetch latest configuration + config (Optional[Config]): boto3 client configuration + """ + super().__init__(configuration_name, cache_seconds) + self._logger = logger + self._conf_store = AppConfigProvider(environment=environment, application=service, config=config) + + def get_json_configuration(self) -> Dict[str, Any]: + """Get configuration string from AWS AppConfig and return the parsed JSON dictionary + + Raises: + ConfigurationException: Any validation error or appconfig error that can occur + + Returns: + Dict[str, Any]: parsed JSON dictionary + """ + try: + return self._conf_store.get( + name=self.configuration_name, + transform=TRANSFORM_TYPE, + max_age=self._cache_seconds, + ) # parse result conf as JSON, keep in cache for self.max_age seconds + except (GetParameterError, TransformParameterError) as exc: + error_str = f"unable to get AWS AppConfig configuration file, exception={str(exc)}" + self._logger.error(error_str) + raise ConfigurationException(error_str) diff --git a/aws_lambda_powertools/utilities/feature_toggles/configuration_store.py b/aws_lambda_powertools/utilities/feature_toggles/configuration_store.py new file mode 100644 index 0000000000..e540447737 --- /dev/null +++ b/aws_lambda_powertools/utilities/feature_toggles/configuration_store.py @@ -0,0 +1,191 @@ +import logging +from typing import Any, Dict, List, Optional + +from . 
import schema +from .exceptions import ConfigurationException +from .schema_fetcher import SchemaFetcher + +logger = logging.getLogger(__name__) + + +class ConfigurationStore: + def __init__(self, schema_fetcher: SchemaFetcher): + """constructor + + Args: + schema_fetcher (SchemaFetcher): A schema JSON fetcher, can be AWS AppConfig, Hashicorp Consul etc. + """ + self._logger = logger + self._schema_fetcher = schema_fetcher + self._schema_validator = schema.SchemaValidator(self._logger) + + def _match_by_action(self, action: str, condition_value: Any, context_value: Any) -> bool: + if not context_value: + return False + mapping_by_action = { + schema.ACTION.EQUALS.value: lambda a, b: a == b, + schema.ACTION.STARTSWITH.value: lambda a, b: a.startswith(b), + schema.ACTION.ENDSWITH.value: lambda a, b: a.endswith(b), + schema.ACTION.CONTAINS.value: lambda a, b: a in b, + } + + try: + func = mapping_by_action.get(action, lambda a, b: False) + return func(context_value, condition_value) + except Exception as exc: + self._logger.error(f"caught exception while matching action, action={action}, exception={str(exc)}") + return False + + def _is_rule_matched(self, feature_name: str, rule: Dict[str, Any], rules_context: Dict[str, Any]) -> bool: + rule_name = rule.get(schema.RULE_NAME_KEY, "") + rule_default_value = rule.get(schema.RULE_DEFAULT_VALUE) + conditions: Dict[str, str] = rule.get(schema.CONDITIONS_KEY) + + for condition in conditions: + context_value = rules_context.get(condition.get(schema.CONDITION_KEY)) + if not self._match_by_action( + condition.get(schema.CONDITION_ACTION), + condition.get(schema.CONDITION_VALUE), + context_value, + ): + logger.debug( + f"rule did not match action, rule_name={rule_name}, rule_default_value={rule_default_value}, feature_name={feature_name}, context_value={str(context_value)}" # noqa: E501 + ) + # context doesn't match condition + return False + # if we got here, all conditions match + logger.debug( + f"rule matched, 
rule_name={rule_name}, rule_default_value={rule_default_value}, feature_name={feature_name}" # noqa: E501 + ) + return True + + def _handle_rules( + self, + *, + feature_name: str, + rules_context: Dict[str, Any], + feature_default_value: bool, + rules: List[Dict[str, Any]], + ) -> bool: + for rule in rules: + rule_default_value = rule.get(schema.RULE_DEFAULT_VALUE) + if self._is_rule_matched(feature_name, rule, rules_context): + return rule_default_value + # no rule matched, return default value of feature + logger.debug( + f"no rule matched, returning default value of feature, feature_default_value={feature_default_value}, feature_name={feature_name}" # noqa: E501 + ) + return feature_default_value + + def get_configuration(self) -> Dict[str, Any]: + """Get configuration string from AWS AppConfig and return the parsed JSON dictionary + + Raises: + ConfigurationException: Any validation error or appconfig error that can occur + + Returns: + Dict[str, Any]: parsed JSON dictionary + """ + schema: Dict[ + str, Any + ] = ( + self._schema_fetcher.get_json_configuration() + ) # parse result conf as JSON, keep in cache for self.max_age seconds + # validate schema + self._schema_validator.validate_json_schema(schema) + return schema + + def get_feature_toggle( + self, *, feature_name: str, rules_context: Optional[Dict[str, Any]] = None, value_if_missing: bool + ) -> bool: + """Get a feature toggle boolean value. Value is calculated according to a set of rules and conditions. + See below for explanation. + + Args: + feature_name (str): feature name that you wish to fetch + rules_context (Optional[Dict[str, Any]]): dict of attributes that you would like to match the rules + against, can be {'tenant_id': 'X', 'username': 'Y', 'region': 'Z'} etc. 
+ value_if_missing (bool): this will be the returned value in case the feature toggle doesn't exist in + the schema or there has been an error while fetching the + configuration from appconfig + + Returns: + bool: calculated feature toggle value. several possibilities: + 1. if the feature doesn't appear in the schema or there has been an error fetching the + configuration -> error/warning log would appear and value_if_missing is returned + 2. feature exists and has no rules or no rules have matched -> return feature_default_value of + the defined feature + 3. feature exists and a rule matches -> rule_default_value of rule is returned + """ + if rules_context is None: + rules_context = {} + + try: + toggles_dict: Dict[str, Any] = self.get_configuration() + except ConfigurationException: + logger.error("unable to get feature toggles JSON, returning provided value_if_missing value") # noqa: E501 + return value_if_missing + + feature: Dict[str, Dict] = toggles_dict.get(schema.FEATURES_KEY, {}).get(feature_name, None) + if feature is None: + logger.warning( + f"feature does not appear in configuration, using provided value_if_missing, feature_name={feature_name}, value_if_missing={value_if_missing}" # noqa: E501 + ) + return value_if_missing + + rules_list = feature.get(schema.RULES_KEY) + feature_default_value = feature.get(schema.FEATURE_DEFAULT_VAL_KEY) + if not rules_list: + # not rules but has a value + logger.debug( + f"no rules found, returning feature default value, feature_name={feature_name}, default_value={feature_default_value}" # noqa: E501 + ) + return feature_default_value + # look for first rule match + logger.debug( + f"looking for rule match, feature_name={feature_name}, feature_default_value={feature_default_value}" + ) # noqa: E501 + return self._handle_rules( + feature_name=feature_name, + rules_context=rules_context, + feature_default_value=feature_default_value, + rules=rules_list, + ) + + def get_all_enabled_feature_toggles(self, *, 
rules_context: Optional[Dict[str, Any]] = None) -> List[str]: + """Get all enabled feature toggles while also taking into account rule_context (when a feature has defined rules) + + Args: + rules_context (Optional[Dict[str, Any]]): dict of attributes that you would like to match the rules + against, can be {'tenant_id': 'X', 'username': 'Y', 'region': 'Z'} etc. + + Returns: + List[str]: a list of all feature names that are enabled by also taking into account + rule_context (when a feature has defined rules) + """ + if rules_context is None: + rules_context = {} + try: + toggles_dict: Dict[str, Any] = self.get_configuration() + except ConfigurationException: + logger.error("unable to get feature toggles JSON") # noqa: E501 + return [] + ret_list = [] + features: Dict[str, Any] = toggles_dict.get(schema.FEATURES_KEY, {}) + for feature_name, feature_dict_def in features.items(): + rules_list = feature_dict_def.get(schema.RULES_KEY, []) + feature_default_value = feature_dict_def.get(schema.FEATURE_DEFAULT_VAL_KEY) + if feature_default_value and not rules_list: + self._logger.debug( + f"feature is enabled by default and has no defined rules, feature_name={feature_name}" + ) + ret_list.append(feature_name) + elif self._handle_rules( + feature_name=feature_name, + rules_context=rules_context, + feature_default_value=feature_default_value, + rules=rules_list, + ): + self._logger.debug(f"feature's calculated value is True, feature_name={feature_name}") + ret_list.append(feature_name) + return ret_list diff --git a/aws_lambda_powertools/utilities/feature_toggles/exceptions.py b/aws_lambda_powertools/utilities/feature_toggles/exceptions.py new file mode 100644 index 0000000000..9bbb5f200b --- /dev/null +++ b/aws_lambda_powertools/utilities/feature_toggles/exceptions.py @@ -0,0 +1,2 @@ +class ConfigurationException(Exception): + """When a configuration store raises an exception on config retrieval or parsing""" diff --git 
a/aws_lambda_powertools/utilities/feature_toggles/schema.py b/aws_lambda_powertools/utilities/feature_toggles/schema.py new file mode 100644 index 0000000000..58e75fabfc --- /dev/null +++ b/aws_lambda_powertools/utilities/feature_toggles/schema.py @@ -0,0 +1,83 @@ +from enum import Enum +from typing import Any, Dict + +from .exceptions import ConfigurationException + +FEATURES_KEY = "features" +RULES_KEY = "rules" +FEATURE_DEFAULT_VAL_KEY = "feature_default_value" +CONDITIONS_KEY = "conditions" +RULE_NAME_KEY = "rule_name" +RULE_DEFAULT_VALUE = "value_when_applies" +CONDITION_KEY = "key" +CONDITION_VALUE = "value" +CONDITION_ACTION = "action" + + +class ACTION(str, Enum): + EQUALS = "EQUALS" + STARTSWITH = "STARTSWITH" + ENDSWITH = "ENDSWITH" + CONTAINS = "CONTAINS" + + +class SchemaValidator: + def __init__(self, logger: object): + self._logger = logger + + def _raise_conf_exc(self, error_str: str) -> None: + self._logger.error(error_str) + raise ConfigurationException(error_str) + + def _validate_condition(self, rule_name: str, condition: Dict[str, str]) -> None: + if not condition or not isinstance(condition, dict): + self._raise_conf_exc(f"invalid condition type, not a dictionary, rule_name={rule_name}") + action = condition.get(CONDITION_ACTION, "") + if action not in [ACTION.EQUALS.value, ACTION.STARTSWITH.value, ACTION.ENDSWITH.value, ACTION.CONTAINS.value]: + self._raise_conf_exc(f"invalid action value, rule_name={rule_name}, action={action}") + key = condition.get(CONDITION_KEY, "") + if not key or not isinstance(key, str): + self._raise_conf_exc(f"invalid key value, key has to be a non empty string, rule_name={rule_name}") + value = condition.get(CONDITION_VALUE, "") + if not value: + self._raise_conf_exc(f"missing condition value, rule_name={rule_name}") + + def _validate_rule(self, feature_name: str, rule: Dict[str, Any]) -> None: + if not rule or not isinstance(rule, dict): + self._raise_conf_exc(f"feature rule is not a dictionary, 
feature_name={feature_name}") + rule_name = rule.get(RULE_NAME_KEY) + if not rule_name or rule_name is None or not isinstance(rule_name, str): + self._raise_conf_exc(f"invalid rule_name, feature_name={feature_name}") + rule_default_value = rule.get(RULE_DEFAULT_VALUE) + if rule_default_value is None or not isinstance(rule_default_value, bool): + self._raise_conf_exc(f"invalid rule_default_value, rule_name={rule_name}") + conditions = rule.get(CONDITIONS_KEY, {}) + if not conditions or not isinstance(conditions, list): + self._raise_conf_exc(f"invalid condition, rule_name={rule_name}") + # validate conditions + for condition in conditions: + self._validate_condition(rule_name, condition) + + def _validate_feature(self, feature_name: str, feature_dict_def: Dict[str, Any]) -> None: + if not feature_dict_def or not isinstance(feature_dict_def, dict): + self._raise_conf_exc(f"invalid AWS AppConfig JSON schema detected, feature {feature_name} is invalid") + feature_default_value = feature_dict_def.get(FEATURE_DEFAULT_VAL_KEY) + if feature_default_value is None or not isinstance(feature_default_value, bool): + self._raise_conf_exc(f"missing feature_default_value for feature, feature_name={feature_name}") + # validate rules + rules = feature_dict_def.get(RULES_KEY, []) + if not rules: + return + if not isinstance(rules, list): + self._raise_conf_exc(f"feature rules is not a list, feature_name={feature_name}") + for rule in rules: + self._validate_rule(feature_name, rule) + + def validate_json_schema(self, schema: Dict[str, Any]) -> None: + if not isinstance(schema, dict): + self._raise_conf_exc("invalid AWS AppConfig JSON schema detected, root schema is not a dictionary") + features_dict: Dict = schema.get(FEATURES_KEY) + if not isinstance(features_dict, dict): + self._raise_conf_exc("invalid AWS AppConfig JSON schema detected, missing features dictionary") + for feature_name, feature_dict_def in features_dict.items(): + self._validate_feature(feature_name, 
feature_dict_def) diff --git a/aws_lambda_powertools/utilities/feature_toggles/schema_fetcher.py b/aws_lambda_powertools/utilities/feature_toggles/schema_fetcher.py new file mode 100644 index 0000000000..37dee63f7f --- /dev/null +++ b/aws_lambda_powertools/utilities/feature_toggles/schema_fetcher.py @@ -0,0 +1,20 @@ +from abc import ABC, abstractmethod +from typing import Any, Dict + + +class SchemaFetcher(ABC): + def __init__(self, configuration_name: str, cache_seconds: int): + self.configuration_name = configuration_name + self._cache_seconds = cache_seconds + + @abstractmethod + def get_json_configuration(self) -> Dict[str, Any]: + """Get configuration string from any configuration storing service and return the parsed JSON dictionary + + Raises: + ConfigurationException: Any error that can occur during schema fetch or JSON parse + + Returns: + Dict[str, Any]: parsed JSON dictionary + """ + return None diff --git a/aws_lambda_powertools/utilities/idempotency/config.py b/aws_lambda_powertools/utilities/idempotency/config.py index 52afb3bad8..06468cc74a 100644 --- a/aws_lambda_powertools/utilities/idempotency/config.py +++ b/aws_lambda_powertools/utilities/idempotency/config.py @@ -1,4 +1,4 @@ -from typing import Dict +from typing import Dict, Optional class IdempotencyConfig: @@ -6,7 +6,7 @@ def __init__( self, event_key_jmespath: str = "", payload_validation_jmespath: str = "", - jmespath_options: Dict = None, + jmespath_options: Optional[Dict] = None, raise_on_no_idempotency_key: bool = False, expires_after_seconds: int = 60 * 60, # 1 hour default use_local_cache: bool = False, diff --git a/aws_lambda_powertools/utilities/idempotency/idempotency.py b/aws_lambda_powertools/utilities/idempotency/idempotency.py index 6f73a842af..c2bcc62fd6 100644 --- a/aws_lambda_powertools/utilities/idempotency/idempotency.py +++ b/aws_lambda_powertools/utilities/idempotency/idempotency.py @@ -31,7 +31,7 @@ def idempotent( event: Dict[str, Any], context: LambdaContext, 
persistence_store: BasePersistenceLayer, - config: IdempotencyConfig = None, + config: Optional[IdempotencyConfig] = None, ) -> Any: """ Middleware to handle idempotency diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 31aef6dc0f..eb43a8b30c 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -39,9 +39,9 @@ def __init__( self, idempotency_key, status: str = "", - expiry_timestamp: int = None, + expiry_timestamp: Optional[int] = None, response_data: Optional[str] = "", - payload_hash: str = None, + payload_hash: Optional[str] = None, ) -> None: """ diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py index dc00334277..ae3a1be490 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py @@ -154,7 +154,7 @@ def _update_record(self, data_record: DataRecord): "ExpressionAttributeNames": expression_attr_names, } - self.table.update_item(**kwargs) + self.table.update_item(**kwargs) # type: ignore def _delete_record(self, data_record: DataRecord) -> None: logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") diff --git a/aws_lambda_powertools/utilities/parameters/appconfig.py b/aws_lambda_powertools/utilities/parameters/appconfig.py index 4490e26036..63a8415f1e 100644 --- a/aws_lambda_powertools/utilities/parameters/appconfig.py +++ b/aws_lambda_powertools/utilities/parameters/appconfig.py @@ -149,7 +149,7 @@ def get_app_config( >>> print(value) My configuration value - **Retrieves a confiugration value and decodes it using a JSON decoder** + **Retrieves a configuration value and decodes it using a JSON decoder** >>> from 
aws_lambda_powertools.utilities.parameters import get_parameter >>> diff --git a/aws_lambda_powertools/utilities/parser/models/apigw.py b/aws_lambda_powertools/utilities/parser/models/apigw.py index de968e20ec..4de8ee96cc 100644 --- a/aws_lambda_powertools/utilities/parser/models/apigw.py +++ b/aws_lambda_powertools/utilities/parser/models/apigw.py @@ -46,7 +46,7 @@ class APIGatewayEventAuthorizer(BaseModel): class APIGatewayEventRequestContext(BaseModel): accountId: str apiId: str - authorizer: APIGatewayEventAuthorizer + authorizer: Optional[APIGatewayEventAuthorizer] stage: str protocol: str identity: APIGatewayEventIdentity @@ -70,7 +70,7 @@ class APIGatewayEventRequestContext(BaseModel): class APIGatewayProxyEventModel(BaseModel): - version: str + version: Optional[str] resource: str path: str httpMethod: Literal["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"] diff --git a/aws_lambda_powertools/utilities/validation/base.py b/aws_lambda_powertools/utilities/validation/base.py index ec4165f487..b818f11a40 100644 --- a/aws_lambda_powertools/utilities/validation/base.py +++ b/aws_lambda_powertools/utilities/validation/base.py @@ -1,9 +1,9 @@ import logging -from typing import Any, Dict, Optional +from typing import Any, Dict, Optional, Union -import fastjsonschema +import fastjsonschema # type: ignore import jmespath -from jmespath.exceptions import LexerError +from jmespath.exceptions import LexerError # type: ignore from aws_lambda_powertools.shared.jmespath_functions import PowertoolsFunctions @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) -def validate_data_against_schema(data: Dict, schema: Dict, formats: Optional[Dict] = None): +def validate_data_against_schema(data: Union[Dict, str], schema: Dict, formats: Optional[Dict] = None): """Validate dict data against given JSON Schema Parameters @@ -41,7 +41,7 @@ def validate_data_against_schema(data: Dict, schema: Dict, formats: Optional[Dic raise SchemaValidationError(message) -def 
unwrap_event_from_envelope(data: Dict, envelope: str, jmespath_options: Optional[Dict]) -> Any: +def unwrap_event_from_envelope(data: Union[Dict, str], envelope: str, jmespath_options: Optional[Dict]) -> Any: """Searches data using JMESPath expression Parameters diff --git a/aws_lambda_powertools/utilities/validation/validator.py b/aws_lambda_powertools/utilities/validation/validator.py index 3628d486eb..0497a49a71 100644 --- a/aws_lambda_powertools/utilities/validation/validator.py +++ b/aws_lambda_powertools/utilities/validation/validator.py @@ -12,12 +12,12 @@ def validator( handler: Callable, event: Union[Dict, str], context: Any, - inbound_schema: Dict = None, + inbound_schema: Optional[Dict] = None, inbound_formats: Optional[Dict] = None, - outbound_schema: Dict = None, + outbound_schema: Optional[Dict] = None, outbound_formats: Optional[Dict] = None, - envelope: str = None, - jmespath_options: Dict = None, + envelope: Optional[str] = None, + jmespath_options: Optional[Dict] = None, ) -> Any: """Lambda handler decorator to validate incoming/outbound data using a JSON Schema @@ -135,8 +135,8 @@ def validate( event: Any, schema: Dict, formats: Optional[Dict] = None, - envelope: str = None, - jmespath_options: Dict = None, + envelope: Optional[str] = None, + jmespath_options: Optional[Dict] = None, ): """Standalone function to validate event data using a JSON Schema diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000000..66bed405f5 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,28 @@ +[mypy] +warn_return_any=False +warn_unused_configs=True +no_implicit_optional=True +warn_redundant_casts=True +warn_unused_ignores=True +pretty = True +show_column_numbers = True +show_error_codes = True +show_error_context = True + +[mypy-jmespath] +ignore_missing_imports=True + +[mypy-boto3] +ignore_missing_imports = True + +[mypy-boto3.dynamodb.conditions] +ignore_missing_imports = True + +[mypy-botocore.config] +ignore_missing_imports = True + 
+[mypy-botocore.exceptions] +ignore_missing_imports = True + +[mypy-aws_xray_sdk.ext.aiohttp.client] +ignore_missing_imports = True diff --git a/poetry.lock b/poetry.lock index 7c29d803b0..bfe9934e58 100644 --- a/poetry.lock +++ b/poetry.lock @@ -92,24 +92,24 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "boto3" -version = "1.17.102" +version = "1.18.1" description = "The AWS SDK for Python" category = "main" optional = false -python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.20.102,<1.21.0" +botocore = ">=1.21.1,<1.22.0" jmespath = ">=0.7.1,<1.0.0" -s3transfer = ">=0.4.0,<0.5.0" +s3transfer = ">=0.5.0,<0.6.0" [[package]] name = "botocore" -version = "1.20.102" +version = "1.21.1" description = "Low-level, data-driven core of boto 3." category = "main" optional = false -python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">= 3.6" [package.dependencies] jmespath = ">=0.7.1,<1.0.0" @@ -440,7 +440,7 @@ python-versions = "*" [[package]] name = "isort" -version = "5.9.1" +version = "5.9.2" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false @@ -631,7 +631,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.1.9" +version = "7.1.10" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -671,6 +671,22 @@ mkdocs = ">=1.1.1,<2.0.0" mkdocs-autorefs = ">=0.1,<0.3" pymdown-extensions = ">=6.3,<9.0" pytkdocs = ">=0.2.0,<0.12.0" +name = "mypy" +version = "0.910" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +mypy-extensions = ">=0.4.3,<0.5.0" +toml = "*" +typed-ast = {version = ">=1.4.0,<1.5.0", markers = "python_version < \"3.8\""} +typing-extensions = ">=3.7.4" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<1.5.0)"] [[package]] name = "mypy-extensions" @@ -990,11 +1006,11 @@ python-versions = "*" [[package]] name = "s3transfer" -version = "0.4.2" +version = "0.5.0" description = "An Amazon S3 Transfer Manager" category = "main" optional = false -python-versions = "*" +python-versions = ">= 3.6" [package.dependencies] botocore = ">=1.12.36,<2.0a.0" @@ -1140,7 +1156,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.1" -content-hash = "1873bfb09f928c38d678ecee00ab3d945fa758169e7d9629e6ac0cd53863eb57" +content-hash = "97a8aca56202d6047233c32cf8960b85b1fbee1218663c1654288fa40c3268ef" [metadata.files] appdirs = [ @@ -1171,12 +1187,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.17.102-py2.py3-none-any.whl", hash = "sha256:6300e9ee9a404038113250bd218e2c4827f5e676efb14e77de2ad2dcb67679bc"}, - {file = "boto3-1.17.102.tar.gz", hash = "sha256:be4714f0475c1f5183eea09ddbf568ced6fa41b0fc9976f2698b8442e1b17303"}, + {file = "boto3-1.18.1-py3-none-any.whl", hash = "sha256:a6399df957bfc7944fbd97e9fb0755cba29b1cb135b91d7e43fd298b268ab804"}, + {file = 
"boto3-1.18.1.tar.gz", hash = "sha256:ddfe4a78f04cd2d3a7a37d5cdfa07b4889b24296508786969bc968bee6b8b003"}, ] botocore = [ - {file = "botocore-1.20.102-py2.py3-none-any.whl", hash = "sha256:bdf08a4f7f01ead00d386848f089c08270499711447569c18d0db60023619c06"}, - {file = "botocore-1.20.102.tar.gz", hash = "sha256:2f57f7ceed1598d96cc497aeb45317db5d3b21a5aafea4732d0e561d0fc2a8fa"}, + {file = "botocore-1.21.1-py3-none-any.whl", hash = "sha256:b845220eb580d10f7714798a96e380eb8f94dca89905a41d8a3c35119c757b01"}, + {file = "botocore-1.21.1.tar.gz", hash = "sha256:200887ce5f3b47d7499b7ded75dc65c4649abdaaddd06cebc118a3a954d6fd73"}, ] cached-property = [ {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, @@ -1341,8 +1357,8 @@ iniconfig = [ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] isort = [ - {file = "isort-5.9.1-py3-none-any.whl", hash = "sha256:8e2c107091cfec7286bc0f68a547d0ba4c094d460b732075b6fba674f1035c0c"}, - {file = "isort-5.9.1.tar.gz", hash = "sha256:83510593e07e433b77bd5bff0f6f607dbafa06d1a89022616f02d8b699cfcd56"}, + {file = "isort-5.9.2-py3-none-any.whl", hash = "sha256:eed17b53c3e7912425579853d078a0832820f023191561fcee9d7cae424e0813"}, + {file = "isort-5.9.2.tar.gz", hash = "sha256:f65ce5bd4cbc6abdfbe29afc2f0245538ab358c14590912df638033f157d555e"}, ] jinja2 = [ {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, @@ -1432,8 +1448,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.1.9.tar.gz", hash = "sha256:5a2fd487f769f382a7c979e869e4eab1372af58d7dec44c4365dd97ef5268cb5"}, - {file = "mkdocs_material-7.1.9-py2.py3-none-any.whl", hash = 
"sha256:92c8a2bd3bd44d5948eefc46ba138e2d3285cac658900112b6bf5722c7d067a5"}, + {file = "mkdocs-material-7.1.10.tar.gz", hash = "sha256:890e9be00bfbe4d22ccccbcde1bf9bad67a3ba495f2a7d2422ea4acb5099f014"}, + {file = "mkdocs_material-7.1.10-py2.py3-none-any.whl", hash = "sha256:92ff8c4a8e78555ef7b7ed0ba3043421d18971b48d066ea2cefb50e889fc66db"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, @@ -1442,6 +1458,30 @@ mkdocs-material-extensions = [ mkdocstrings = [ {file = "mkdocstrings-0.15.2-py3-none-any.whl", hash = "sha256:8d6cbe64c07ae66739010979ca01d49dd2f64d1a45009f089d217b9cd2a65e36"}, {file = "mkdocstrings-0.15.2.tar.gz", hash = "sha256:c2fee9a3a644647c06eb2044fdfede1073adfd1a55bf6752005d3db10705fe73"}, +mypy = [ + {file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"}, + {file = "mypy-0.910-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb"}, + {file = "mypy-0.910-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9"}, + {file = "mypy-0.910-cp35-cp35m-win_amd64.whl", hash = "sha256:adaeee09bfde366d2c13fe6093a7df5df83c9a2ba98638c7d76b010694db760e"}, + {file = "mypy-0.910-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921"}, + {file = "mypy-0.910-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d9dd839eb0dc1bbe866a288ba3c1afc33a202015d2ad83b31e875b5905a079b6"}, + {file = "mypy-0.910-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:3e382b29f8e0ccf19a2df2b29a167591245df90c0b5a2542249873b5c1d78212"}, + {file = "mypy-0.910-cp36-cp36m-win_amd64.whl", hash = "sha256:53fd2eb27a8ee2892614370896956af2ff61254c275aaee4c230ae771cadd885"}, + {file = 
"mypy-0.910-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b6fb13123aeef4a3abbcfd7e71773ff3ff1526a7d3dc538f3929a49b42be03f0"}, + {file = "mypy-0.910-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e4dab234478e3bd3ce83bac4193b2ecd9cf94e720ddd95ce69840273bf44f6de"}, + {file = "mypy-0.910-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:7df1ead20c81371ccd6091fa3e2878559b5c4d4caadaf1a484cf88d93ca06703"}, + {file = "mypy-0.910-cp37-cp37m-win_amd64.whl", hash = "sha256:0aadfb2d3935988ec3815952e44058a3100499f5be5b28c34ac9d79f002a4a9a"}, + {file = "mypy-0.910-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504"}, + {file = "mypy-0.910-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:119bed3832d961f3a880787bf621634ba042cb8dc850a7429f643508eeac97b9"}, + {file = "mypy-0.910-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:866c41f28cee548475f146aa4d39a51cf3b6a84246969f3759cb3e9c742fc072"}, + {file = "mypy-0.910-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6e0a6e27fb364fb3853389607cf7eb3a126ad335790fa1e14ed02fba50811"}, + {file = "mypy-0.910-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a85e280d4d217150ce8cb1a6dddffd14e753a4e0c3cf90baabb32cefa41b59e"}, + {file = "mypy-0.910-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42c266ced41b65ed40a282c575705325fa7991af370036d3f134518336636f5b"}, + {file = "mypy-0.910-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3c4b8ca36877fc75339253721f69603a9c7fdb5d4d5a95a1a1b899d8b86a4de2"}, + {file = "mypy-0.910-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c0df2d30ed496a08de5daed2a9ea807d07c21ae0ab23acf541ab88c24b26ab97"}, + {file = "mypy-0.910-cp39-cp39-win_amd64.whl", hash = "sha256:c6c2602dffb74867498f86e6129fd52a2770c48b7cd3ece77ada4fa38f94eba8"}, + {file = "mypy-0.910-py3-none-any.whl", hash = "sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d"}, + {file = "mypy-0.910.tar.gz", hash = 
"sha256:704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, @@ -1662,8 +1702,8 @@ requests = [ {file = "ruamel.yaml.clib-0.2.2.tar.gz", hash = "sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7"}, ] s3transfer = [ - {file = "s3transfer-0.4.2-py2.py3-none-any.whl", hash = "sha256:9b3752887a2880690ce628bc263d6d13a3864083aeacff4890c1c9839a5eb0bc"}, - {file = "s3transfer-0.4.2.tar.gz", hash = "sha256:cb022f4b16551edebbb31a377d3f09600dbada7363d8c5db7976e7f47732e1b2"}, + {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, + {file = "s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, diff --git a/pyproject.toml b/pyproject.toml index 0c2719b1b7..e1766e39ec 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ flake8-debugger = "^4.0.0" flake8-fixme = "^1.1.1" flake8-isort = "^4.0.0" flake8-variables-names = "^0.0.4" -isort = "^5.9.1" +isort = "^5.9.2" pytest-cov = "^2.12.1" pytest-mock = "^3.5.1" pdoc3 = "^0.9.2" @@ -49,11 +49,12 @@ radon = "^4.5.0" xenon = "^0.7.3" flake8-eradicate = "^1.1.0" flake8-bugbear = "^21.3.2" -mkdocs-material = "^7.1.9" +mkdocs-material = "^7.1.10" mkdocs-git-revision-date-plugin = "^0.3.1" mike = "^0.6.0" mkdocstrings = "^0.15.2" pytkdocs = {extras = ["numpy-style"], version = "^0.11.1"} +mypy = "^0.910" [tool.poetry.extras] diff --git a/tests/events/apiGatewayProxyEvent_noVersionAuth.json b/tests/events/apiGatewayProxyEvent_noVersionAuth.json new file mode 100644 index 0000000000..055301f8f1 --- /dev/null +++ b/tests/events/apiGatewayProxyEvent_noVersionAuth.json @@ -0,0 
+1,75 @@ +{ + "resource": "/my/path", + "path": "/my/path", + "httpMethod": "GET", + "headers": { + "Header1": "value1", + "Header2": "value2" + }, + "multiValueHeaders": { + "Header1": [ + "value1" + ], + "Header2": [ + "value1", + "value2" + ] + }, + "queryStringParameters": { + "parameter1": "value1", + "parameter2": "value" + }, + "multiValueQueryStringParameters": { + "parameter1": [ + "value1", + "value2" + ], + "parameter2": [ + "value" + ] + }, + "requestContext": { + "accountId": "123456789012", + "apiId": "id", + "domainName": "id.execute-api.us-east-1.amazonaws.com", + "domainPrefix": "id", + "extendedRequestId": "request-id", + "httpMethod": "GET", + "identity": { + "accessKey": null, + "accountId": null, + "caller": null, + "cognitoAuthenticationProvider": null, + "cognitoAuthenticationType": null, + "cognitoIdentityId": null, + "cognitoIdentityPoolId": null, + "principalOrgId": null, + "sourceIp": "192.168.0.1/32", + "user": null, + "userAgent": "user-agent", + "userArn": null, + "clientCert": { + "clientCertPem": "CERT_CONTENT", + "subjectDN": "www.example.com", + "issuerDN": "Example issuer", + "serialNumber": "a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1:a1", + "validity": { + "notBefore": "May 28 12:30:02 2019 GMT", + "notAfter": "Aug 5 09:36:04 2021 GMT" + } + } + }, + "path": "/my/path", + "protocol": "HTTP/1.1", + "requestId": "id=", + "requestTime": "04/Mar/2020:19:15:17 +0000", + "requestTimeEpoch": 1583349317135, + "resourceId": null, + "resourcePath": "/my/path", + "stage": "$default" + }, + "pathParameters": null, + "stageVariables": null, + "body": "Hello from Lambda!", + "isBase64Encoded": true +} diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index e542483d73..b39dccc608 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -5,6 +5,8 @@ from pathlib import Path from typing import Dict +import pytest 
+ from aws_lambda_powertools.event_handler import content_types from aws_lambda_powertools.event_handler.api_gateway import ( ApiGatewayResolver, @@ -20,6 +22,7 @@ ServiceError, UnauthorizedError, ) +from aws_lambda_powertools.shared import constants from aws_lambda_powertools.shared.json_encoder import Encoder from aws_lambda_powertools.utilities.data_classes import ALBEvent, APIGatewayProxyEvent, APIGatewayProxyEventV2 from tests.functional.utils import load_event @@ -31,7 +34,6 @@ def read_media(file_name: str) -> bytes: LOAD_GW_EVENT = load_event("apiGatewayProxyEvent.json") -TEXT_HTML = "text/html" def test_alb_event(): @@ -42,7 +44,7 @@ def test_alb_event(): def foo(): assert isinstance(app.current_event, ALBEvent) assert app.lambda_context == {} - return Response(200, TEXT_HTML, "foo") + return Response(200, content_types.TEXT_HTML, "foo") # WHEN calling the event handler result = app(load_event("albEvent.json"), {}) @@ -50,7 +52,7 @@ def foo(): # THEN process event correctly # AND set the current_event type as ALBEvent assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == TEXT_HTML + assert result["headers"]["Content-Type"] == content_types.TEXT_HTML assert result["body"] == "foo" @@ -80,7 +82,7 @@ def test_api_gateway(): @app.get("/my/path") def get_lambda() -> Response: assert isinstance(app.current_event, APIGatewayProxyEvent) - return Response(200, TEXT_HTML, "foo") + return Response(200, content_types.TEXT_HTML, "foo") # WHEN calling the event handler result = app(LOAD_GW_EVENT, {}) @@ -88,7 +90,7 @@ def get_lambda() -> Response: # THEN process event correctly # AND set the current_event type as APIGatewayProxyEvent assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == TEXT_HTML + assert result["headers"]["Content-Type"] == content_types.TEXT_HTML assert result["body"] == "foo" @@ -100,7 +102,7 @@ def test_api_gateway_v2(): def my_path() -> Response: assert isinstance(app.current_event, 
APIGatewayProxyEventV2) post_data = app.current_event.json_body - return Response(200, content_types.PLAIN_TEXT, post_data["username"]) + return Response(200, content_types.TEXT_PLAIN, post_data["username"]) # WHEN calling the event handler result = app(load_event("apiGatewayProxyV2Event.json"), {}) @@ -108,7 +110,7 @@ def my_path() -> Response: # THEN process event correctly # AND set the current_event type as APIGatewayProxyEventV2 assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.PLAIN_TEXT + assert result["headers"]["Content-Type"] == content_types.TEXT_PLAIN assert result["body"] == "tom" @@ -119,14 +121,14 @@ def test_include_rule_matching(): @app.get("//") def get_lambda(my_id: str, name: str) -> Response: assert name == "my" - return Response(200, TEXT_HTML, my_id) + return Response(200, content_types.TEXT_HTML, my_id) # WHEN calling the event handler result = app(LOAD_GW_EVENT, {}) # THEN assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == TEXT_HTML + assert result["headers"]["Content-Type"] == content_types.TEXT_HTML assert result["body"] == "path" @@ -187,11 +189,11 @@ def test_cors(): @app.get("/my/path", cors=True) def with_cors() -> Response: - return Response(200, TEXT_HTML, "test") + return Response(200, content_types.TEXT_HTML, "test") @app.get("/without-cors") def without_cors() -> Response: - return Response(200, TEXT_HTML, "test") + return Response(200, content_types.TEXT_HTML, "test") def handler(event, context): return app.resolve(event, context) @@ -202,7 +204,7 @@ def handler(event, context): # THEN the headers should include cors headers assert "headers" in result headers = result["headers"] - assert headers["Content-Type"] == TEXT_HTML + assert headers["Content-Type"] == content_types.TEXT_HTML assert headers["Access-Control-Allow-Origin"] == "*" assert "Access-Control-Allow-Credentials" not in headers assert headers["Access-Control-Allow-Headers"] == 
",".join(sorted(CORSConfig._REQUIRED_HEADERS)) @@ -268,7 +270,7 @@ def test_compress_no_accept_encoding(): @app.get("/my/path", compress=True) def return_text() -> Response: - return Response(200, content_types.PLAIN_TEXT, expected_value) + return Response(200, content_types.TEXT_PLAIN, expected_value) # WHEN calling the event handler result = app({"path": "/my/path", "httpMethod": "GET", "headers": {}}, None) @@ -284,7 +286,7 @@ def test_cache_control_200(): @app.get("/success", cache_control="max-age=600") def with_cache_control() -> Response: - return Response(200, TEXT_HTML, "has 200 response") + return Response(200, content_types.TEXT_HTML, "has 200 response") def handler(event, context): return app.resolve(event, context) @@ -295,7 +297,7 @@ def handler(event, context): # THEN return the set Cache-Control headers = result["headers"] - assert headers["Content-Type"] == TEXT_HTML + assert headers["Content-Type"] == content_types.TEXT_HTML assert headers["Cache-Control"] == "max-age=600" @@ -305,7 +307,7 @@ def test_cache_control_non_200(): @app.delete("/fails", cache_control="max-age=600") def with_cache_control_has_500() -> Response: - return Response(503, TEXT_HTML, "has 503 response") + return Response(503, content_types.TEXT_HTML, "has 503 response") def handler(event, context): return app.resolve(event, context) @@ -316,7 +318,7 @@ def handler(event, context): # THEN return a Cache-Control of "no-cache" headers = result["headers"] - assert headers["Content-Type"] == TEXT_HTML + assert headers["Content-Type"] == content_types.TEXT_HTML assert headers["Cache-Control"] == "no-cache" @@ -479,7 +481,7 @@ def test_custom_preflight_response(): def custom_preflight(): return Response( status_code=200, - content_type=TEXT_HTML, + content_type=content_types.TEXT_HTML, body="Foo", headers={"Access-Control-Allow-Methods": "CUSTOM"}, ) @@ -495,7 +497,7 @@ def custom_method(): assert result["statusCode"] == 200 assert result["body"] == "Foo" headers = result["headers"] 
- assert headers["Content-Type"] == TEXT_HTML + assert headers["Content-Type"] == content_types.TEXT_HTML assert "Access-Control-Allow-Origin" in result["headers"] assert headers["Access-Control-Allow-Methods"] == "CUSTOM" @@ -582,3 +584,83 @@ def service_error(): assert "Access-Control-Allow-Origin" in result["headers"] expected = {"statusCode": 502, "message": "Something went wrong!"} assert result["body"] == json_dump(expected) + + +def test_debug_unhandled_exceptions_debug_on(): + # GIVEN debug is enabled + # AND an unhandled exception is raised + app = ApiGatewayResolver(debug=True) + assert app._debug + + @app.get("/raises-error") + def raises_error(): + raise RuntimeError("Foo") + + # WHEN calling the handler + result = app({"path": "/raises-error", "httpMethod": "GET"}, None) + + # THEN return a 500 + # AND Content-Type is set to text/plain + # AND include the exception traceback in the response + assert result["statusCode"] == 500 + assert "Traceback (most recent call last)" in result["body"] + headers = result["headers"] + assert headers["Content-Type"] == content_types.TEXT_PLAIN + + +def test_debug_unhandled_exceptions_debug_off(): + # GIVEN debug is disabled + # AND an unhandled exception is raised + app = ApiGatewayResolver(debug=False) + assert not app._debug + + @app.get("/raises-error") + def raises_error(): + raise RuntimeError("Foo") + + # WHEN calling the handler + # THEN raise the original exception + with pytest.raises(RuntimeError) as e: + app({"path": "/raises-error", "httpMethod": "GET"}, None) + + # AND include the original error + assert e.value.args == ("Foo",) + + +def test_debug_mode_environment_variable(monkeypatch): + # GIVEN a debug mode environment variable is set + monkeypatch.setenv(constants.EVENT_HANDLER_DEBUG_ENV, "true") + app = ApiGatewayResolver() + + # WHEN calling app._debug + # THEN the debug mode is enabled + assert app._debug + + +def test_debug_json_formatting(): + # GIVEN debug is True + app = 
ApiGatewayResolver(debug=True) + response = {"message": "Foo"} + + @app.get("/foo") + def foo(): + return response + + # WHEN calling the handler + result = app({"path": "/foo", "httpMethod": "GET"}, None) + + # THEN return a pretty print json in the body + assert result["body"] == json.dumps(response, indent=4) + + +def test_debug_print_event(capsys): + # GIVEN debug is True + app = ApiGatewayResolver(debug=True) + + # WHEN calling resolve + event = {"path": "/foo", "httpMethod": "GET"} + app(event, None) + + # THEN print the event + out, err = capsys.readouterr() + assert json.loads(out) == event diff --git a/tests/functional/feature_toggles/__init__.py b/tests/functional/feature_toggles/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/functional/feature_toggles/test_feature_toggles.py b/tests/functional/feature_toggles/test_feature_toggles.py new file mode 100644 index 0000000000..27f89eb151 --- /dev/null +++ b/tests/functional/feature_toggles/test_feature_toggles.py @@ -0,0 +1,422 @@ +from typing import Dict, List + +import pytest # noqa: F401 +from botocore.config import Config + +from aws_lambda_powertools.utilities.feature_toggles.appconfig_fetcher import AppConfigFetcher +from aws_lambda_powertools.utilities.feature_toggles.configuration_store import ConfigurationStore +from aws_lambda_powertools.utilities.feature_toggles.schema import ACTION + + +@pytest.fixture(scope="module") +def config(): + return Config(region_name="us-east-1") + + +def init_configuration_store(mocker, mock_schema: Dict, config: Config) -> ConfigurationStore: + mocked_get_conf = mocker.patch("aws_lambda_powertools.utilities.parameters.AppConfigProvider.get") + mocked_get_conf.return_value = mock_schema + + app_conf_fetcher = AppConfigFetcher( + environment="test_env", + service="test_app", + configuration_name="test_conf_name", + cache_seconds=600, + config=config, + ) + conf_store: ConfigurationStore = ConfigurationStore(schema_fetcher=app_conf_fetcher) 
+ return conf_store + + +# this test checks that we get correct value of feature that exists in the schema. +# we also don't send an empty rules_context dict in this case +def test_toggles_rule_does_not_match(mocker, config): + expected_value = True + mocked_app_config_schema = { + "features": { + "my_feature": { + "feature_default_value": expected_value, + "rules": [ + { + "rule_name": "tenant id equals 345345435", + "value_when_applies": False, + "conditions": [ + { + "action": ACTION.EQUALS.value, + "key": "tenant_id", + "value": "345345435", + } + ], + }, + ], + } + }, + } + + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + toggle = conf_store.get_feature_toggle(feature_name="my_feature", rules_context={}, value_if_missing=False) + assert toggle == expected_value + + +# this test checks that if you try to get a feature that doesn't exist in the schema, +# you get the default value of False that was sent to the get_feature_toggle API +def test_toggles_no_conditions_feature_does_not_exist(mocker, config): + expected_value = False + mocked_app_config_schema = {"features": {"my_fake_feature": {"feature_default_value": True}}} + + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + toggle = conf_store.get_feature_toggle(feature_name="my_feature", rules_context={}, value_if_missing=expected_value) + assert toggle == expected_value + + +# check that feature match works when there are no rules and we send rules_context. +# default value is False but the feature has a True default_value. 
+def test_toggles_no_rules(mocker, config): + expected_value = True + mocked_app_config_schema = {"features": {"my_feature": {"feature_default_value": expected_value}}} + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + toggle = conf_store.get_feature_toggle( + feature_name="my_feature", rules_context={"tenant_id": "6", "username": "a"}, value_if_missing=False + ) + assert toggle == expected_value + + +# check a case where the feature exists but the rule doesn't match so we revert to the default value of the feature +def test_toggles_conditions_no_match(mocker, config): + expected_value = True + mocked_app_config_schema = { + "features": { + "my_feature": { + "feature_default_value": expected_value, + "rules": [ + { + "rule_name": "tenant id equals 345345435", + "value_when_applies": False, + "conditions": [ + { + "action": ACTION.EQUALS.value, + "key": "tenant_id", + "value": "345345435", + } + ], + }, + ], + } + }, + } + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + toggle = conf_store.get_feature_toggle( + feature_name="my_feature", + rules_context={"tenant_id": "6", "username": "a"}, # rule will not match + value_if_missing=False, + ) + assert toggle == expected_value + + +# check that a rule can match when it has multiple conditions, see rule name for further explanation +def test_toggles_conditions_rule_match_equal_multiple_conditions(mocker, config): + expected_value = False + tenant_id_val = "6" + username_val = "a" + mocked_app_config_schema = { + "features": { + "my_feature": { + "feature_default_value": True, + "rules": [ + { + "rule_name": "tenant id equals 6 and username is a", + "value_when_applies": expected_value, + "conditions": [ + { + "action": ACTION.EQUALS.value, # this rule will match, it has multiple conditions + "key": "tenant_id", + "value": tenant_id_val, + }, + { + "action": ACTION.EQUALS.value, + "key": "username", + "value": username_val, + }, + ], + }, + ], + } + }, + 
} + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + toggle = conf_store.get_feature_toggle( + feature_name="my_feature", + rules_context={ + "tenant_id": tenant_id_val, + "username": username_val, + }, + value_if_missing=True, + ) + assert toggle == expected_value + + +# check a case when rule doesn't match and it has multiple conditions, +# different tenant id causes the rule to not match. +# default value of the feature in this case is True +def test_toggles_conditions_no_rule_match_equal_multiple_conditions(mocker, config): + expected_val = True + mocked_app_config_schema = { + "features": { + "my_feature": { + "feature_default_value": expected_val, + "rules": [ + { + "rule_name": "tenant id equals 645654 and username is a", # rule will not match + "value_when_applies": False, + "conditions": [ + { + "action": ACTION.EQUALS.value, + "key": "tenant_id", + "value": "645654", + }, + { + "action": ACTION.EQUALS.value, + "key": "username", + "value": "a", + }, + ], + }, + ], + } + }, + } + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + toggle = conf_store.get_feature_toggle( + feature_name="my_feature", rules_context={"tenant_id": "6", "username": "a"}, value_if_missing=False + ) + assert toggle == expected_val + + +# check rule match for multiple of action types +def test_toggles_conditions_rule_match_multiple_actions_multiple_rules_multiple_conditions(mocker, config): + expected_value_first_check = True + expected_value_second_check = False + expected_value_third_check = False + expected_value_fourth_case = False + mocked_app_config_schema = { + "features": { + "my_feature": { + "feature_default_value": expected_value_third_check, + "rules": [ + { + "rule_name": "tenant id equals 6 and username startswith a", + "value_when_applies": expected_value_first_check, + "conditions": [ + { + "action": ACTION.EQUALS.value, + "key": "tenant_id", + "value": "6", + }, + { + "action": ACTION.STARTSWITH.value, 
+ "key": "username", + "value": "a", + }, + ], + }, + { + "rule_name": "tenant id equals 4446 and username startswith a and endswith z", + "value_when_applies": expected_value_second_check, + "conditions": [ + { + "action": ACTION.EQUALS.value, + "key": "tenant_id", + "value": "4446", + }, + { + "action": ACTION.STARTSWITH.value, + "key": "username", + "value": "a", + }, + { + "action": ACTION.ENDSWITH.value, + "key": "username", + "value": "z", + }, + ], + }, + ], + } + }, + } + + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + # match first rule + toggle = conf_store.get_feature_toggle( + feature_name="my_feature", + rules_context={"tenant_id": "6", "username": "abcd"}, + value_if_missing=False, + ) + assert toggle == expected_value_first_check + # match second rule + toggle = conf_store.get_feature_toggle( + feature_name="my_feature", + rules_context={"tenant_id": "4446", "username": "az"}, + value_if_missing=False, + ) + assert toggle == expected_value_second_check + # match no rule + toggle = conf_store.get_feature_toggle( + feature_name="my_feature", + rules_context={"tenant_id": "11114446", "username": "ab"}, + value_if_missing=False, + ) + assert toggle == expected_value_third_check + # feature doesn't exist + toggle = conf_store.get_feature_toggle( + feature_name="my_fake_feature", + rules_context={"tenant_id": "11114446", "username": "ab"}, + value_if_missing=expected_value_fourth_case, + ) + assert toggle == expected_value_fourth_case + + +# check a case where the feature exists and a rule with a contains action matches, so the rule value overrides the default value of the feature +def test_toggles_match_rule_with_contains_action(mocker, config): + expected_value = True + mocked_app_config_schema = { + "features": { + "my_feature": { + "feature_default_value": False, + "rules": [ + { + "rule_name": "tenant id is contained in [6,2] ", + "value_when_applies": expected_value, + "conditions": [ + { + "action": ACTION.CONTAINS.value, + "key": 
"tenant_id", + "value": ["6", "2"], + } + ], + }, + ], + } + }, + } + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + toggle = conf_store.get_feature_toggle( + feature_name="my_feature", + rules_context={"tenant_id": "6", "username": "a"}, # rule will match + value_if_missing=False, + ) + assert toggle == expected_value + + +def test_toggles_no_match_rule_with_contains_action(mocker, config): + expected_value = False + mocked_app_config_schema = { + "features": { + "my_feature": { + "feature_default_value": expected_value, + "rules": [ + { + "rule_name": "tenant id is contained in [6,2] ", + "value_when_applies": True, + "conditions": [ + { + "action": ACTION.CONTAINS.value, + "key": "tenant_id", + "value": ["8", "2"], + } + ], + }, + ], + } + }, + } + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + toggle = conf_store.get_feature_toggle( + feature_name="my_feature", + rules_context={"tenant_id": "6", "username": "a"}, # rule will not match + value_if_missing=False, + ) + assert toggle == expected_value + + +def test_multiple_features_enabled(mocker, config): + expected_value = ["my_feature", "my_feature2"] + mocked_app_config_schema = { + "features": { + "my_feature": { + "feature_default_value": False, + "rules": [ + { + "rule_name": "tenant id is contained in [6,2] ", + "value_when_applies": True, + "conditions": [ + { + "action": ACTION.CONTAINS.value, + "key": "tenant_id", + "value": ["6", "2"], + } + ], + }, + ], + }, + "my_feature2": { + "feature_default_value": True, + }, + }, + } + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + enabled_list: List[str] = conf_store.get_all_enabled_feature_toggles( + rules_context={"tenant_id": "6", "username": "a"} + ) + assert enabled_list == expected_value + + +def test_multiple_features_only_some_enabled(mocker, config): + expected_value = ["my_feature", "my_feature2", "my_feature4"] + mocked_app_config_schema = { + 
"features": { + "my_feature": { # rule will match here, feature is enabled due to rule match + "feature_default_value": False, + "rules": [ + { + "rule_name": "tenant id is contained in [6,2] ", + "value_when_applies": True, + "conditions": [ + { + "action": ACTION.CONTAINS.value, + "key": "tenant_id", + "value": ["6", "2"], + } + ], + }, + ], + }, + "my_feature2": { + "feature_default_value": True, + }, + "my_feature3": { + "feature_default_value": False, + }, + "my_feature4": { # rule will not match here, feature is enabled by default + "feature_default_value": True, + "rules": [ + { + "rule_name": "tenant id equals 7", + "value_when_applies": False, + "conditions": [ + { + "action": ACTION.EQUALS.value, + "key": "tenant_id", + "value": "7", + } + ], + }, + ], + }, + }, + } + conf_store = init_configuration_store(mocker, mocked_app_config_schema, config) + enabled_list: List[str] = conf_store.get_all_enabled_feature_toggles( + rules_context={"tenant_id": "6", "username": "a"} + ) + assert enabled_list == expected_value diff --git a/tests/functional/feature_toggles/test_schema_validation.py b/tests/functional/feature_toggles/test_schema_validation.py new file mode 100644 index 0000000000..3b024c854b --- /dev/null +++ b/tests/functional/feature_toggles/test_schema_validation.py @@ -0,0 +1,264 @@ +import logging + +import pytest # noqa: F401 + +from aws_lambda_powertools.utilities.feature_toggles.exceptions import ConfigurationException +from aws_lambda_powertools.utilities.feature_toggles.schema import ( + ACTION, + CONDITION_ACTION, + CONDITION_KEY, + CONDITION_VALUE, + CONDITIONS_KEY, + FEATURE_DEFAULT_VAL_KEY, + FEATURES_KEY, + RULE_DEFAULT_VALUE, + RULE_NAME_KEY, + RULES_KEY, + SchemaValidator, +) + +logger = logging.getLogger(__name__) + + +def test_invalid_features_dict(): + schema = {} + # empty dict + validator = SchemaValidator(logger) + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + schema = [] + # invalid type + 
with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # invalid features key + schema = {FEATURES_KEY: []} + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + +def test_empty_features_not_fail(): + schema = {FEATURES_KEY: {}} + validator = SchemaValidator(logger) + validator.validate_json_schema(schema) + + +def test_invalid_feature_dict(): + # invalid feature type, not dict + schema = {FEATURES_KEY: {"my_feature": []}} + validator = SchemaValidator(logger) + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # empty feature dict + schema = {FEATURES_KEY: {"my_feature": {}}} + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # invalid FEATURE_DEFAULT_VAL_KEY type, not boolean + schema = {FEATURES_KEY: {"my_feature": {FEATURE_DEFAULT_VAL_KEY: "False"}}} + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # invalid FEATURE_DEFAULT_VAL_KEY type, not boolean #2 + schema = {FEATURES_KEY: {"my_feature": {FEATURE_DEFAULT_VAL_KEY: 5}}} + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # invalid rules type, not list + schema = {FEATURES_KEY: {"my_feature": {FEATURE_DEFAULT_VAL_KEY: False, RULES_KEY: "4"}}} + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + +def test_valid_feature_dict(): + # no rules list at all + schema = {FEATURES_KEY: {"my_feature": {FEATURE_DEFAULT_VAL_KEY: False}}} + validator = SchemaValidator(logger) + validator.validate_json_schema(schema) + + # empty rules list + schema = {FEATURES_KEY: {"my_feature": {FEATURE_DEFAULT_VAL_KEY: False, RULES_KEY: []}}} + validator.validate_json_schema(schema) + + +def test_invalid_rule(): + # rules list is not a list of dict + schema = { + FEATURES_KEY: { + "my_feature": { + FEATURE_DEFAULT_VAL_KEY: False, + RULES_KEY: [ + "a", + "b", + ], + } + } + 
} + validator = SchemaValidator(logger) + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # rules RULE_DEFAULT_VALUE is not bool + schema = { + FEATURES_KEY: { + "my_feature": { + FEATURE_DEFAULT_VAL_KEY: False, + RULES_KEY: [ + { + RULE_NAME_KEY: "tenant id equals 345345435", + RULE_DEFAULT_VALUE: "False", + }, + ], + } + } + } + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # missing conditions list + schema = { + FEATURES_KEY: { + "my_feature": { + FEATURE_DEFAULT_VAL_KEY: False, + RULES_KEY: [ + { + RULE_NAME_KEY: "tenant id equals 345345435", + RULE_DEFAULT_VALUE: False, + }, + ], + } + } + } + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # condition list is empty + schema = { + FEATURES_KEY: { + "my_feature": { + FEATURE_DEFAULT_VAL_KEY: False, + RULES_KEY: [ + {RULE_NAME_KEY: "tenant id equals 345345435", RULE_DEFAULT_VALUE: False, CONDITIONS_KEY: []}, + ], + } + } + } + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # condition is invalid type, not list + schema = { + FEATURES_KEY: { + "my_feature": { + FEATURE_DEFAULT_VAL_KEY: False, + RULES_KEY: [ + {RULE_NAME_KEY: "tenant id equals 345345435", RULE_DEFAULT_VALUE: False, CONDITIONS_KEY: {}}, + ], + } + } + } + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + +def test_invalid_condition(): + # invalid condition action + schema = { + FEATURES_KEY: { + "my_feature": { + FEATURE_DEFAULT_VAL_KEY: False, + RULES_KEY: [ + { + RULE_NAME_KEY: "tenant id equals 345345435", + RULE_DEFAULT_VALUE: False, + CONDITIONS_KEY: {CONDITION_ACTION: "stuff", CONDITION_KEY: "a", CONDITION_VALUE: "a"}, + }, + ], + } + } + } + validator = SchemaValidator(logger) + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # missing condition key and value + schema = { + FEATURES_KEY: { + 
"my_feature": { + FEATURE_DEFAULT_VAL_KEY: False, + RULES_KEY: [ + { + RULE_NAME_KEY: "tenant id equals 345345435", + RULE_DEFAULT_VALUE: False, + CONDITIONS_KEY: {CONDITION_ACTION: ACTION.EQUALS.value}, + }, + ], + } + } + } + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + # invalid condition key type, not string + schema = { + FEATURES_KEY: { + "my_feature": { + FEATURE_DEFAULT_VAL_KEY: False, + RULES_KEY: [ + { + RULE_NAME_KEY: "tenant id equals 345345435", + RULE_DEFAULT_VALUE: False, + CONDITIONS_KEY: { + CONDITION_ACTION: ACTION.EQUALS.value, + CONDITION_KEY: 5, + CONDITION_VALUE: "a", + }, + }, + ], + } + } + } + with pytest.raises(ConfigurationException): + validator.validate_json_schema(schema) + + +def test_valid_condition_all_actions(): + validator = SchemaValidator(logger) + schema = { + FEATURES_KEY: { + "my_feature": { + FEATURE_DEFAULT_VAL_KEY: False, + RULES_KEY: [ + { + RULE_NAME_KEY: "tenant id equals 645654 and username is a", + RULE_DEFAULT_VALUE: True, + CONDITIONS_KEY: [ + { + CONDITION_ACTION: ACTION.EQUALS.value, + CONDITION_KEY: "tenant_id", + CONDITION_VALUE: "645654", + }, + { + CONDITION_ACTION: ACTION.STARTSWITH.value, + CONDITION_KEY: "username", + CONDITION_VALUE: "a", + }, + { + CONDITION_ACTION: ACTION.ENDSWITH.value, + CONDITION_KEY: "username", + CONDITION_VALUE: "a", + }, + { + CONDITION_ACTION: ACTION.CONTAINS.value, + CONDITION_KEY: "username", + CONDITION_VALUE: ["a", "b"], + }, + ], + }, + ], + } + }, + } + validator.validate_json_schema(schema) diff --git a/tests/functional/py.typed b/tests/functional/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index e65ae94dd2..cbbaf83437 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -743,6 +743,70 @@ def test_seq_trigger_event(): assert record.aws_region == "us-east-2" +def 
test_default_api_gateway_proxy_event(): + event = APIGatewayProxyEvent(load_event("apiGatewayProxyEvent_noVersionAuth.json")) + + assert event.get("version") is None + assert event.resource == event["resource"] + assert event.path == event["path"] + assert event.http_method == event["httpMethod"] + assert event.headers == event["headers"] + assert event.multi_value_headers == event["multiValueHeaders"] + assert event.query_string_parameters == event["queryStringParameters"] + assert event.multi_value_query_string_parameters == event["multiValueQueryStringParameters"] + + request_context = event.request_context + assert request_context.account_id == event["requestContext"]["accountId"] + assert request_context.api_id == event["requestContext"]["apiId"] + + assert request_context.get("authorizer") is None + + assert request_context.domain_name == event["requestContext"]["domainName"] + assert request_context.domain_prefix == event["requestContext"]["domainPrefix"] + assert request_context.extended_request_id == event["requestContext"]["extendedRequestId"] + assert request_context.http_method == event["requestContext"]["httpMethod"] + + identity = request_context.identity + assert identity.access_key == event["requestContext"]["identity"]["accessKey"] + assert identity.account_id == event["requestContext"]["identity"]["accountId"] + assert identity.caller == event["requestContext"]["identity"]["caller"] + assert ( + identity.cognito_authentication_provider == event["requestContext"]["identity"]["cognitoAuthenticationProvider"] + ) + assert identity.cognito_authentication_type == event["requestContext"]["identity"]["cognitoAuthenticationType"] + assert identity.cognito_identity_id == event["requestContext"]["identity"]["cognitoIdentityId"] + assert identity.cognito_identity_pool_id == event["requestContext"]["identity"]["cognitoIdentityPoolId"] + assert identity.principal_org_id == event["requestContext"]["identity"]["principalOrgId"] + assert identity.source_ip == 
event["requestContext"]["identity"]["sourceIp"] + assert identity.user == event["requestContext"]["identity"]["user"] + assert identity.user_agent == event["requestContext"]["identity"]["userAgent"] + assert identity.user_arn == event["requestContext"]["identity"]["userArn"] + + assert request_context.path == event["requestContext"]["path"] + assert request_context.protocol == event["requestContext"]["protocol"] + assert request_context.request_id == event["requestContext"]["requestId"] + assert request_context.request_time == event["requestContext"]["requestTime"] + assert request_context.request_time_epoch == event["requestContext"]["requestTimeEpoch"] + assert request_context.resource_id == event["requestContext"]["resourceId"] + assert request_context.resource_path == event["requestContext"]["resourcePath"] + assert request_context.stage == event["requestContext"]["stage"] + + assert event.path_parameters == event["pathParameters"] + assert event.stage_variables == event["stageVariables"] + assert event.body == event["body"] + assert event.is_base64_encoded == event["isBase64Encoded"] + + assert request_context.connected_at is None + assert request_context.connection_id is None + assert request_context.event_type is None + assert request_context.message_direction is None + assert request_context.message_id is None + assert request_context.route_key is None + assert request_context.operation_name is None + assert identity.api_key is None + assert identity.api_key_id is None + + def test_api_gateway_proxy_event(): event = APIGatewayProxyEvent(load_event("apiGatewayProxyEvent.json"))