diff --git a/.github/ISSUE_TEMPLATE/rfc.md b/.github/ISSUE_TEMPLATE/rfc.md
index 4b666a16800..0381a352dc5 100644
--- a/.github/ISSUE_TEMPLATE/rfc.md
+++ b/.github/ISSUE_TEMPLATE/rfc.md
@@ -4,7 +4,7 @@ about: Feature design and proposals
 title: 'RFC: '
 labels: RFC, triage
 assignees: ''
-
+approved by: ''
 ---

 ## Key information
@@ -13,6 +13,8 @@ assignees: ''
 * Related issue(s), if known:
 * Area: (i.e. Tracer, Metrics, Logger, etc.)
 * Meet [tenets](https://awslabs.github.io/aws-lambda-powertools-python/#tenets): (Yes/no)
+* Approved by: ''
+* Reviewed by: ''

 ## Summary
 [summary]: #summary
@@ -31,7 +33,15 @@ assignees: ''

 > Explain the design in enough detail for somebody familiar with Powertools to understand it, and for somebody familiar with the implementation to implement it.

-> This should get into specifics and corner-cases, and include examples of how the feature is used. Any new terminology should be defined here.
+**If this feature should be available in other runtimes (e.g. Java, Typescript), what would this look like to ensure consistency?**
+
+## User Experience
+
+**How would customers use it?**
+
+**Any configuration or corner cases you'd expect?**
+
+**Demonstration of before and after, showing how the experience will be better**

 ## Drawbacks
 [drawbacks]: #drawbacks
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 3c504f63282..489a5ccccc2 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -6,7 +6,7 @@ name: Publish to PyPi
 #
 # 1. Document human readable changes in CHANGELOG
 # 2. Bump package version using poetry version
-# 3. Create a PR to develop branch, and merge if all tests pass
+# 3. Merge version changes to develop branch
 # 4. Edit the current draft release notes
 # 5. If not already set, use `v` as a tag, and select develop as target branch
 #
@@ -17,17 +17,23 @@ name: Publish to PyPi
 # 3. Run tests, linting, security and complexity base line
 # 4. Publish package to PyPi test repository
 # 5. Publish package to PyPi prod repository
-# 6. Push latest release source code to master using release title as the commit message
+# 6. Kick off Lambda Layer pipeline to publish latest version with minimal dependencies as a SAR App
+# 7. Kick off Lambda Layer pipeline to publish latest version with extra dependencies as a SAR App
+# 8. Build a fresh version of docs including Changelog updates
+# 9. Push latest release source code to master using release title as the commit message
+# 10. Build latest documentation for the new release, and update the latest alias to point to the new release tag

 on:
   release:
     types: [published]

 jobs:
-  upload:
+  release:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
       - name: Set up Python
         uses: actions/setup-python@v1
         with:
@@ -65,11 +71,24 @@ jobs:
           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           AWS_DEFAULT_REGION: eu-west-1
           AWS_DEFAULT_OUTPUT: json
-
-
+      - name: Setup doc deploy
+        run: |
+          git config --global user.name Docs deploy
+          git config --global user.email docs@dummy.bot.com
+      - name: Build docs website and API reference
+        run: |
+          make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest"
+          poetry run mike set-default --push latest
+      - name: Deploy all docs
+        uses: peaceiris/actions-gh-pages@v3
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          publish_dir: ./api
+          keep_files: true
+          destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api
   sync_master:
-    needs: upload
+    needs: release
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
diff --git a/.github/workflows/python_docs.yml b/.github/workflows/python_docs.yml
index 5a721b00414..dcdd409b835 100644
--- a/.github/workflows/python_docs.yml
+++ b/.github/workflows/python_docs.yml
@@ -1,59 +1,41 @@
 name: Docs

 on:
-  pull_request:
-    branches:
-      - master
   push:
     branches:
-      - master
-      - docs/mkdocs
-  # Disabled until docs support versioning per branch/release
-  # - develop
-  release:
-    types: [published] # update Docs upon new release
+      - develop

 jobs:
-  api-docs:
+  docs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v1
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
       - name: Set up Python
         uses: actions/setup-python@v1
         with:
           python-version: "3.8"
-      - uses: actions/setup-node@v1
-        with:
-          node-version: '12'
-      - name: Capture branch and tag
-        id: branch_name
+      - name: Install dependencies
+        run: make dev
+      - name: Setup doc deploy
         run: |
-          echo "SOURCE_BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV
-          echo "SOURCE_TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-      - name: Build docs website
-        run: make build-docs-website
-      - name: Build docs API reference
-        # if: env.SOURCE_BRANCH == 'master' # Disabled until docs support versioning per branch/release
-        run: make build-docs-api
+          git config --global user.name Docs deploy
+          git config --global user.email docs@dummy.bot.com
+      - name: Build docs website and API reference
+        run: make release-docs VERSION="develop" ALIAS="stage"
       - name: Deploy all docs
         uses: peaceiris/actions-gh-pages@v3
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
-          publish_dir: ./dist
-
-      # Disabled until docs support versioning per branch/release
-      # - name: Deploy docs website
-      #   uses: peaceiris/actions-gh-pages@v3
-      #   with:
-      #     github_token: ${{ secrets.GITHUB_TOKEN }}
-      #     publish_dir: ./dist
-      #     # when deploying docs website only
-      #     # we need to keep existing API docs checked in
-      #     # but only for dev branch
-      #     keep_files: true
-      # - name: Deploy all docs
-      #   uses: peaceiris/actions-gh-pages@v3
-      #   if: env.SOURCE_BRANCH == 'master'
-      #   with:
-      #     github_token: ${{ secrets.GITHUB_TOKEN }}
-      #     publish_dir: ./dist
+          publish_dir: ./api
+          keep_files: true
+          destination_dir: develop/api
+      - name: Create redirect from old docs
+        run: |
+          git checkout gh-pages
+          test -f 404.html && echo "Redirect already set" && exit 0
+          git checkout develop -- 404.html
+          git add 404.html
+          git commit -m "chore: set docs redirect" --no-verify
+          git push origin gh-pages -f
diff --git a/.gitignore b/.gitignore
index 
9b1b99a95e4..ace70c8192d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -300,3 +300,6 @@ wheelhouse
 docs/.cache/
 docs/public
 node_modules
+api/
+site/
+!404.html
diff --git a/404.html b/404.html
new file mode 100644
index 00000000000..2ee34f1988f
--- /dev/null
+++ b/404.html
@@ -0,0 +1,20 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9e1430a3c78..bfa5085a13d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,28 @@ This project follows [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) fo

 ## [Unreleased]

+## [1.14.0] - 2021-04-09
+
+### Added
+
+* **Event handlers**: New core utility to easily handle incoming requests tightly integrated with Data Classes; AppSync being the first as we gauge from the community what additional ones would be helpful
+* **Documentation**: Enabled versioning to access docs on a per-release basis or staging docs (`develop` branch)
+* **Documentation**: Links now open in a new tab and improved snippet line highlights
+* **Documentation(validation)**: JSON Schema snippets and more complete examples
+* **Documentation(idempotency)**: Table with expected configuration values for hash key and TTL attribute name when using the default behaviour
+* **Documentation(logger)**: New example on how to set logging record timestamps in UTC
+* **Parser(S3)**: Support for the new S3 Object Lambda Event model (`S3ObjectLambdaEvent`)
+* **Parameters**: Support for DynamoDB Local via `endpoint_url` parameter, including docs
+* **Internal**: Include `make pr` in pre-commit hooks when contributing to shorten feedback loop on pre-commit specific linting
+
+### Fixed
+
+* **Parser**: S3Model now supports keys with 0 length
+* **Tracer**: Lock X-Ray SDK to 2.6.0 as there's been a regression upstream in 2.7.0 on serializing & capturing exceptions
+* **Data Classes(API Gateway)**: Add missing property `operationName` within request context
+* **Misc.**: Numerous typing fixes to better support MyPy across all utilities
+* **Internal**: Downgraded poetry to 1.1.4 as there's been a regression with `importlib-metadata` in 1.1.5 that is not yet fixed
+
 ## [1.13.0] - 2021-03-23

 ### Added
diff --git a/Makefile b/Makefile
index beac0cbcacb..b0a075d39a2 100644
--- a/Makefile
+++ b/Makefile
@@ -5,7 +5,7 @@ target:
 	@$(MAKE) pr

 dev:
-	pip install --upgrade pip poetry pre-commit
+	pip install --upgrade pip pre-commit poetry==1.1.4
 	poetry install --extras "pydantic"
 	pre-commit install

@@ -23,25 +23,26 @@ test:
 coverage-html:
 	poetry run pytest -m "not perf" --cov=aws_lambda_powertools --cov-report=html

-pr: lint test security-baseline complexity-baseline
+pre-commit:
+	pre-commit run --show-diff-on-failure
+
+pr: lint pre-commit test security-baseline complexity-baseline

 build: pr
 	poetry build

-build-docs:
-	@$(MAKE) build-docs-website
+release-docs:
+	@echo "Rebuilding docs"
+	rm -rf site api
+	@echo "Updating website docs"
+	poetry run mike deploy --push --update-aliases ${VERSION} ${ALIAS}
+	@echo "Building API docs"
 	@$(MAKE) build-docs-api

-build-docs-api: dev
-	mkdir -p dist/api
-	poetry run pdoc --html --output-dir dist/api/ ./aws_lambda_powertools --force
-	mv -f dist/api/aws_lambda_powertools/* dist/api/
-	rm -rf dist/api/aws_lambda_powertools
-
-build-docs-website: dev
-	mkdir -p dist
-	poetry run mkdocs build
-	cp -R site/* dist/
+build-docs-api:
+	poetry run pdoc --html --output-dir ./api/ ./aws_lambda_powertools --force
+	mv -f ./api/aws_lambda_powertools/* ./api/
+	rm -rf ./api/aws_lambda_powertools

 docs-local:
 	poetry run mkdocs serve
diff 
--git a/aws_lambda_powertools/event_handler/__init__.py b/aws_lambda_powertools/event_handler/__init__.py new file mode 100644 index 00000000000..0475982e377 --- /dev/null +++ b/aws_lambda_powertools/event_handler/__init__.py @@ -0,0 +1,7 @@ +""" +Event handler decorators for common Lambda events +""" + +from .appsync import AppSyncResolver + +__all__ = ["AppSyncResolver"] diff --git a/aws_lambda_powertools/event_handler/appsync.py b/aws_lambda_powertools/event_handler/appsync.py new file mode 100644 index 00000000000..021afaa6654 --- /dev/null +++ b/aws_lambda_powertools/event_handler/appsync.py @@ -0,0 +1,113 @@ +import logging +from typing import Any, Callable + +from aws_lambda_powertools.utilities.data_classes import AppSyncResolverEvent +from aws_lambda_powertools.utilities.typing import LambdaContext + +logger = logging.getLogger(__name__) + + +class AppSyncResolver: + """ + AppSync resolver decorator + + Example + ------- + + **Sample usage** + + from aws_lambda_powertools.event_handler import AppSyncResolver + + app = AppSyncResolver() + + @app.resolver(type_name="Query", field_name="listLocations") + def list_locations(page: int = 0, size: int = 10) -> list: + # Your logic to fetch locations with arguments passed in + return [{"id": 100, "name": "Smooth Grooves"}] + + @app.resolver(type_name="Merchant", field_name="extraInfo") + def get_extra_info() -> dict: + # Can use "app.current_event.source" to filter within the parent context + account_type = app.current_event.source["accountType"] + method = "BTC" if account_type == "NEW" else "USD" + return {"preferredPaymentMethod": method} + + @app.resolver(field_name="commonField") + def common_field() -> str: + # Would match all fieldNames matching 'commonField' + return str(uuid.uuid4()) + """ + + current_event: AppSyncResolverEvent + lambda_context: LambdaContext + + def __init__(self): + self._resolvers: dict = {} + + def resolver(self, type_name: str = "*", field_name: str = None): + """Registers the resolver for field_name + + Parameters + ---------- + type_name : str + Type name + field_name : str + Field name + """ + + def register_resolver(func): + logger.debug(f"Adding resolver `{func.__name__}` for field `{type_name}.{field_name}`") + self._resolvers[f"{type_name}.{field_name}"] = {"func": func} + return func + + return register_resolver + + def resolve(self, event: dict, context: LambdaContext) -> Any: + """Resolve field_name + + Parameters + ---------- + event : dict + Lambda event + context : LambdaContext + Lambda context + + Returns + ------- + Any + Returns the result of the resolver + + Raises + ------- + ValueError + If we could not find a field resolver + """ + self.current_event = AppSyncResolverEvent(event) + self.lambda_context = context + resolver = self._get_resolver(self.current_event.type_name, self.current_event.field_name) + return resolver(**self.current_event.arguments) + + def _get_resolver(self, type_name: str, field_name: str) -> Callable: + """Get resolver for field_name + + Parameters + ---------- + type_name : str + Type name + field_name : str + Field name + + Returns + ------- + Callable + callable function and configuration + """ + full_name = f"{type_name}.{field_name}" + resolver = self._resolvers.get(full_name, self._resolvers.get(f"*.{field_name}")) + if not resolver: + raise ValueError(f"No resolver found for '{full_name}'") + return resolver["func"] + + def __call__(self, event, context) -> Any: + """Implicit lambda handler which internally calls `resolve`""" + return self.resolve(event, 
context) diff --git a/aws_lambda_powertools/logging/lambda_context.py b/aws_lambda_powertools/logging/lambda_context.py index 75da8711f03..65e9e652a92 100644 --- a/aws_lambda_powertools/logging/lambda_context.py +++ b/aws_lambda_powertools/logging/lambda_context.py @@ -1,3 +1,6 @@ +from typing import Any + + class LambdaContextModel: """A handful of Lambda Runtime Context fields @@ -31,7 +34,7 @@ def __init__( self.function_request_id = function_request_id -def build_lambda_context_model(context: object) -> LambdaContextModel: +def build_lambda_context_model(context: Any) -> LambdaContextModel: """Captures Lambda function runtime info to be used across all log statements Parameters diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 98ecfc4c449..2e9cbb78d2e 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -4,7 +4,7 @@ import os import random import sys -from typing import Any, Callable, Dict, Union +from typing import Any, Callable, Dict, Optional, Union import jmespath @@ -318,12 +318,12 @@ def set_correlation_id(self, value: str): self.structure_logs(append=True, correlation_id=value) @staticmethod - def _get_log_level(level: Union[str, int]) -> Union[str, int]: + def _get_log_level(level: Union[str, int, None]) -> Union[str, int]: """ Returns preferred log level set by the customer in upper case """ if isinstance(level, int): return level - log_level: str = level or os.getenv("LOG_LEVEL") + log_level: Optional[str] = level or os.getenv("LOG_LEVEL") if log_level is None: return logging.INFO diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index b7f3862b590..dc4fe34ee12 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -88,7 +88,7 @@ def __init__( self.service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV)) self._metric_units = [unit.value for unit in MetricUnit] self._metric_unit_options = list(MetricUnit.__members__) - self.metadata_set = self.metadata_set if metadata_set is not None else {} + self.metadata_set = metadata_set if metadata_set is not None else {} def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float): """Adds given metric diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py index 353defab0c6..8bdd0d800b8 100644 --- a/aws_lambda_powertools/metrics/metric.py +++ b/aws_lambda_powertools/metrics/metric.py @@ -1,7 +1,7 @@ import json import logging from contextlib import contextmanager -from typing import Dict +from typing import Dict, Optional, Union from .base import MetricManager, MetricUnit @@ -42,7 +42,7 @@ class SingleMetric(MetricManager): Inherits from `aws_lambda_powertools.metrics.base.MetricManager` """ - def add_metric(self, name: str, unit: MetricUnit, value: float): + def add_metric(self, name: str, unit: Union[MetricUnit, str], value: float): """Method to prevent more than one metric being created Parameters @@ -109,11 +109,11 @@ def single_metric(name: str, unit: MetricUnit, value: float, namespace: str = No SchemaValidationError When metric object fails EMF schema validation """ - metric_set = None + metric_set: Optional[Dict] = None try: metric: SingleMetric = SingleMetric(namespace=namespace) metric.add_metric(name=name, unit=unit, value=value) yield metric - metric_set: Dict = metric.serialize_metric_set() + metric_set = metric.serialize_metric_set() finally: 
print(json.dumps(metric_set, separators=(",", ":"))) diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index 5178116a717..59d3b18e0e4 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -2,7 +2,7 @@ import json import logging import warnings -from typing import Any, Callable +from typing import Any, Callable, Dict, Optional from .base import MetricManager, MetricUnit from .metric import single_metric @@ -71,15 +71,15 @@ def do_something(): When metric object fails EMF schema validation """ - _metrics = {} - _dimensions = {} - _metadata = {} + _metrics: Dict[str, Any] = {} + _dimensions: Dict[str, str] = {} + _metadata: Dict[str, Any] = {} def __init__(self, service: str = None, namespace: str = None): self.metric_set = self._metrics self.dimension_set = self._dimensions self.service = service - self.namespace = namespace + self.namespace: Optional[str] = namespace self.metadata_set = self._metadata super().__init__( diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index 079b5d29c49..b8f5cb9f74b 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -1,5 +1,5 @@ from distutils.util import strtobool -from typing import Any, Union +from typing import Any, Optional, Union def resolve_truthy_env_var_choice(env: Any, choice: bool = None) -> bool: @@ -22,7 +22,7 @@ def resolve_truthy_env_var_choice(env: Any, choice: bool = None) -> bool: return choice if choice is not None else strtobool(env) -def resolve_env_var_choice(env: Any, choice: bool = None) -> Union[bool, Any]: +def resolve_env_var_choice(env: Any, choice: Optional[Any] = None) -> Union[bool, Any]: """Pick explicit choice over env, if available, otherwise return env value received NOTE: Environment variable should be resolved by the caller. 
diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 698ac6fb8b6..5e2e545e356 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -244,7 +244,7 @@ def patch(self, modules: Tuple[str] = None): def capture_lambda_handler( self, - lambda_handler: Callable[[Dict, Any, Optional[Dict]], Any] = None, + lambda_handler: Union[Callable[[Dict, Any], Any], Callable[[Dict, Any, Optional[Dict]], Any]] = None, capture_response: Optional[bool] = None, capture_error: Optional[bool] = None, ): @@ -517,7 +517,7 @@ async def async_tasks(): def _decorate_async_function( self, - method: Callable = None, + method: Callable, capture_response: Optional[Union[bool, str]] = None, capture_error: Optional[Union[bool, str]] = None, method_name: str = None, @@ -544,7 +544,7 @@ async def decorate(*args, **kwargs): def _decorate_generator_function( self, - method: Callable = None, + method: Callable, capture_response: Optional[Union[bool, str]] = None, capture_error: Optional[Union[bool, str]] = None, method_name: str = None, @@ -571,7 +571,7 @@ def decorate(*args, **kwargs): def _decorate_generator_function_with_context_manager( self, - method: Callable = None, + method: Callable, capture_response: Optional[Union[bool, str]] = None, capture_error: Optional[Union[bool, str]] = None, method_name: str = None, @@ -599,7 +599,7 @@ def decorate(*args, **kwargs): def _decorate_sync_function( self, - method: Callable = None, + method: Callable, capture_response: Optional[Union[bool, str]] = None, capture_error: Optional[Union[bool, str]] = None, method_name: str = None, @@ -654,20 +654,20 @@ def _add_response_as_metadata( def _add_full_exception_as_metadata( self, - method_name: str = None, - error: Exception = None, - subsegment: BaseSegment = None, + method_name: str, + error: Exception, + subsegment: BaseSegment, capture_error: Optional[bool] = None, ): """Add full exception object as metadata for given subsegment Parameters ---------- - method_name : str, optional + method_name : str method name to add as metadata key, by default None - error : Exception, optional + error : Exception error to add as subsegment metadata, by default None - subsegment : BaseSegment, optional + subsegment : BaseSegment existing subsegment to add metadata on, by default None capture_error : bool, optional Do not include error as metadata, by default True @@ -717,7 +717,7 @@ def __build_config( service: str = None, disabled: bool = None, auto_patch: bool = None, - patch_modules: List = None, + patch_modules: Union[List, Tuple] = None, provider: BaseProvider = None, ): """ Populates Tracer config for new and existing initializations """ diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py index 56d99c6d19c..a0ad18a9ec1 100644 --- a/aws_lambda_powertools/utilities/batch/base.py +++ b/aws_lambda_powertools/utilities/batch/base.py @@ -104,7 +104,7 @@ def failure_handler(self, record: Any, exception: Tuple): @lambda_handler_decorator def batch_processor( - handler: Callable, event: Dict, context: Dict, record_handler: Callable, processor: BasePartialProcessor = None + handler: Callable, event: Dict, context: Dict, record_handler: Callable, processor: BasePartialProcessor ): """ Middleware to handle batch event processing diff --git a/aws_lambda_powertools/utilities/batch/sqs.py b/aws_lambda_powertools/utilities/batch/sqs.py index 597faa4c72e..e37fdbd3fb5 100644 --- 
a/aws_lambda_powertools/utilities/batch/sqs.py +++ b/aws_lambda_powertools/utilities/batch/sqs.py @@ -71,7 +71,7 @@ def _get_queue_url(self) -> Optional[str]: Format QueueUrl from first records entry """ if not getattr(self, "records", None): - return + return None *_, account_id, queue_name = self.records[0]["eventSourceARN"].split(":") return f"{self.client._endpoint.host}/{account_id}/{queue_name}" diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py index 28179bfd291..58464ebcf99 100644 --- a/aws_lambda_powertools/utilities/data_classes/__init__.py +++ b/aws_lambda_powertools/utilities/data_classes/__init__.py @@ -1,7 +1,10 @@ -from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import AppSyncResolverEvent +""" +Event Source Data Classes utility provides classes self-describing Lambda event sources. +""" from .alb_event import ALBEvent from .api_gateway_proxy_event import APIGatewayProxyEvent, APIGatewayProxyEventV2 +from .appsync_resolver_event import AppSyncResolverEvent from .cloud_watch_logs_event import CloudWatchLogsEvent from .connect_contact_flow_event import ConnectContactFlowEvent from .dynamo_db_stream_event import DynamoDBStreamEvent diff --git a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py index 756842ad347..6c06e48e63e 100644 --- a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py @@ -195,6 +195,11 @@ def route_key(self) -> Optional[str]: """The selected route key.""" return self["requestContext"].get("routeKey") + @property + def operation_name(self) -> Optional[str]: + """The name of the operation being performed""" + return self["requestContext"].get("operationName") + class APIGatewayProxyEvent(BaseProxyEvent): """AWS Lambda proxy V1 diff --git a/aws_lambda_powertools/utilities/data_classes/appsync/resolver_utils.py b/aws_lambda_powertools/utilities/data_classes/appsync/resolver_utils.py deleted file mode 100644 index b4b49888e24..00000000000 --- a/aws_lambda_powertools/utilities/data_classes/appsync/resolver_utils.py +++ /dev/null @@ -1,50 +0,0 @@ -from typing import Any, Dict - -from aws_lambda_powertools.utilities.data_classes import AppSyncResolverEvent -from aws_lambda_powertools.utilities.typing import LambdaContext - - -class AppSyncResolver: - def __init__(self): - self._resolvers: dict = {} - - def resolver( - self, - type_name: str = "*", - field_name: str = None, - include_event: bool = False, - include_context: bool = False, - **kwargs, - ): - def register_resolver(func): - kwargs["include_event"] = include_event - kwargs["include_context"] = include_context - self._resolvers[f"{type_name}.{field_name}"] = { - "func": func, - "config": kwargs, - } - return func - - return register_resolver - - def resolve(self, event: dict, context: LambdaContext) -> Any: - event = AppSyncResolverEvent(event) - resolver, config = self._resolver(event.type_name, event.field_name) - kwargs = self._kwargs(event, context, config) - return resolver(**kwargs) - - def _resolver(self, type_name: str, field_name: str) -> tuple: - full_name = f"{type_name}.{field_name}" - resolver = self._resolvers.get(full_name, self._resolvers.get(f"*.{field_name}")) - if not resolver: - raise ValueError(f"No resolver found for '{full_name}'") - return resolver["func"], resolver["config"] - - 
@staticmethod - def _kwargs(event: AppSyncResolverEvent, context: LambdaContext, config: dict) -> Dict[str, Any]: - kwargs = {**event.arguments} - if config.get("include_event", False): - kwargs["event"] = event - if config.get("include_context", False): - kwargs["context"] = context - return kwargs diff --git a/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py b/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py index 461b906b230..e467875305f 100644 --- a/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py +++ b/aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py @@ -292,26 +292,26 @@ class CustomMessageTriggerEventResponse(DictWrapper): def sms_message(self) -> str: return self["response"]["smsMessage"] - @property - def email_message(self) -> str: - return self["response"]["emailMessage"] - - @property - def email_subject(self) -> str: - return self["response"]["emailSubject"] - @sms_message.setter def sms_message(self, value: str): """The custom SMS message to be sent to your users. Must include the codeParameter value received in the request.""" self["response"]["smsMessage"] = value + @property + def email_message(self) -> str: + return self["response"]["emailMessage"] + @email_message.setter def email_message(self, value: str): """The custom email message to be sent to your users. Must include the codeParameter value received in the request.""" self["response"]["emailMessage"] = value + @property + def email_subject(self) -> str: + return self["response"]["emailSubject"] + @email_subject.setter def email_subject(self, value: str): """The subject line for the custom message.""" @@ -471,26 +471,26 @@ class ClaimsOverrideDetails(DictWrapper): def claims_to_add_or_override(self) -> Optional[Dict[str, str]]: return self.get("claimsToAddOrOverride") - @property - def claims_to_suppress(self) -> Optional[List[str]]: - return self.get("claimsToSuppress") - - @property - def group_configuration(self) -> Optional[GroupOverrideDetails]: - group_override_details = self.get("groupOverrideDetails") - return None if group_override_details is None else GroupOverrideDetails(group_override_details) - @claims_to_add_or_override.setter def claims_to_add_or_override(self, value: Dict[str, str]): """A map of one or more key-value pairs of claims to add or override. For group related claims, use groupOverrideDetails instead.""" self._data["claimsToAddOrOverride"] = value + @property + def claims_to_suppress(self) -> Optional[List[str]]: + return self.get("claimsToSuppress") + @claims_to_suppress.setter def claims_to_suppress(self, value: List[str]): """A list that contains claims to be suppressed from the identity token.""" self._data["claimsToSuppress"] = value + @property + def group_configuration(self) -> Optional[GroupOverrideDetails]: + group_override_details = self.get("groupOverrideDetails") + return None if group_override_details is None else GroupOverrideDetails(group_override_details) + @group_configuration.setter def group_configuration(self, value: Dict[str, Any]): """The output object containing the current group configuration. 
@@ -609,25 +609,25 @@ class DefineAuthChallengeTriggerEventResponse(DictWrapper): def challenge_name(self) -> str: return self["response"]["challengeName"] - @property - def fail_authentication(self) -> bool: - return bool(self["response"]["failAuthentication"]) - - @property - def issue_tokens(self) -> bool: - return bool(self["response"]["issueTokens"]) - @challenge_name.setter def challenge_name(self, value: str): """A string containing the name of the next challenge. If you want to present a new challenge to your user, specify the challenge name here.""" self["response"]["challengeName"] = value + @property + def fail_authentication(self) -> bool: + return bool(self["response"]["failAuthentication"]) + @fail_authentication.setter def fail_authentication(self, value: bool): """Set to true if you want to terminate the current authentication process, or false otherwise.""" self["response"]["failAuthentication"] = value + @property + def issue_tokens(self) -> bool: + return bool(self["response"]["issueTokens"]) + @issue_tokens.setter def issue_tokens(self, value: bool): """Set to true if you determine that the user has been sufficiently authenticated by @@ -695,14 +695,6 @@ class CreateAuthChallengeTriggerEventResponse(DictWrapper): def public_challenge_parameters(self) -> Dict[str, str]: return self["response"]["publicChallengeParameters"] - @property - def private_challenge_parameters(self) -> Dict[str, str]: - return self["response"]["privateChallengeParameters"] - - @property - def challenge_metadata(self) -> str: - return self["response"]["challengeMetadata"] - @public_challenge_parameters.setter def public_challenge_parameters(self, value: Dict[str, str]): """One or more key-value pairs for the client app to use in the challenge to be presented to the user. @@ -710,6 +702,10 @@ def public_challenge_parameters(self, value: Dict[str, str]): the user.""" self["response"]["publicChallengeParameters"] = value + @property + def private_challenge_parameters(self) -> Dict[str, str]: + return self["response"]["privateChallengeParameters"] + @private_challenge_parameters.setter def private_challenge_parameters(self, value: Dict[str, str]): """This parameter is only used by the Verify Auth Challenge Response Lambda trigger. 
@@ -719,6 +715,10 @@ def private_challenge_parameters(self, value: Dict[str, str]): for the question.""" self["response"]["privateChallengeParameters"] = value + @property + def challenge_metadata(self) -> str: + return self["response"]["challengeMetadata"] + @challenge_metadata.setter def challenge_metadata(self, value: str): """Your name for the custom challenge, if this is a custom challenge.""" diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py index 65dfb4a0173..6f393cccb60 100644 --- a/aws_lambda_powertools/utilities/data_classes/common.py +++ b/aws_lambda_powertools/utilities/data_classes/common.py @@ -25,7 +25,9 @@ def raw_event(self) -> Dict[str, Any]: return self._data -def get_header_value(headers: Dict[str, str], name: str, default_value: str, case_sensitive: bool) -> Optional[str]: +def get_header_value( + headers: Dict[str, str], name: str, default_value: Optional[str], case_sensitive: Optional[bool] +) -> Optional[str]: """Get header value by name""" if case_sensitive: return headers.get(name, default_value) diff --git a/aws_lambda_powertools/utilities/idempotency/idempotency.py b/aws_lambda_powertools/utilities/idempotency/idempotency.py index b77c3013cbb..6f73a842af4 100644 --- a/aws_lambda_powertools/utilities/idempotency/idempotency.py +++ b/aws_lambda_powertools/utilities/idempotency/idempotency.py @@ -11,6 +11,7 @@ IdempotencyInconsistentStateError, IdempotencyItemAlreadyExistsError, IdempotencyItemNotFoundError, + IdempotencyKeyError, IdempotencyPersistenceLayerError, IdempotencyValidationError, ) @@ -132,6 +133,8 @@ def handle(self) -> Any: # We call save_inprogress first as an optimization for the most common case where no idempotent record # already exists. If it succeeds, there's no need to call get_record. 
self.persistence_store.save_inprogress(event=self.event, context=self.context) + except IdempotencyKeyError: + raise except IdempotencyItemAlreadyExistsError: # Now we know the item already exists, we can retrieve it record = self._get_idempotency_record() diff --git a/aws_lambda_powertools/utilities/idempotency/persistence/base.py b/aws_lambda_powertools/utilities/idempotency/persistence/base.py index 37c9968b3e0..263414a9573 100644 --- a/aws_lambda_powertools/utilities/idempotency/persistence/base.py +++ b/aws_lambda_powertools/utilities/idempotency/persistence/base.py @@ -40,7 +40,7 @@ def __init__( idempotency_key, status: str = "", expiry_timestamp: int = None, - response_data: str = "", + response_data: Optional[str] = "", payload_hash: str = None, ) -> None: """ diff --git a/aws_lambda_powertools/utilities/parameters/appconfig.py b/aws_lambda_powertools/utilities/parameters/appconfig.py index 03aac781c92..4490e260364 100644 --- a/aws_lambda_powertools/utilities/parameters/appconfig.py +++ b/aws_lambda_powertools/utilities/parameters/appconfig.py @@ -4,7 +4,7 @@ import os -from typing import Dict, Optional, Union +from typing import Any, Dict, Optional, Union from uuid import uuid4 import boto3 @@ -58,7 +58,7 @@ class AppConfigProvider(BaseProvider): """ - client = None + client: Any = None def __init__(self, environment: str, application: Optional[str] = None, config: Optional[Config] = None): """ diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py index da73a26c07d..b3b907bc18b 100644 --- a/aws_lambda_powertools/utilities/parameters/base.py +++ b/aws_lambda_powertools/utilities/parameters/base.py @@ -7,14 +7,14 @@ from abc import ABC, abstractmethod from collections import namedtuple from datetime import datetime, timedelta -from typing import Dict, Optional, Tuple, Union +from typing import Any, Dict, Optional, Tuple, Union from .exceptions import GetParameterError, TransformParameterError DEFAULT_MAX_AGE_SECS = 5 ExpirableValue = namedtuple("ExpirableValue", ["value", "ttl"]) # These providers will be dynamically initialized on first use of the helper functions -DEFAULT_PROVIDERS = {} +DEFAULT_PROVIDERS: Dict[str, Any] = {} TRANSFORM_METHOD_JSON = "json" TRANSFORM_METHOD_BINARY = "binary" SUPPORTED_TRANSFORM_METHODS = [TRANSFORM_METHOD_JSON, TRANSFORM_METHOD_BINARY] @@ -25,7 +25,7 @@ class BaseProvider(ABC): Abstract Base Class for Parameter providers """ - store = None + store: Any = None def __init__(self): """ diff --git a/aws_lambda_powertools/utilities/parameters/dynamodb.py b/aws_lambda_powertools/utilities/parameters/dynamodb.py index dcb447b6060..5edae643ec0 100644 --- a/aws_lambda_powertools/utilities/parameters/dynamodb.py +++ b/aws_lambda_powertools/utilities/parameters/dynamodb.py @@ -3,7 +3,7 @@ """ -from typing import Dict, Optional +from typing import Any, Dict, Optional import boto3 from boto3.dynamodb.conditions import Key @@ -26,6 +26,8 @@ class DynamoDBProvider(BaseProvider): Name of the DynamoDB table sort key (defaults to 'sk'), used only for get_multiple value_attr: str, optional Attribute that contains the values in the DynamoDB table (defaults to 'value') + endpoint_url: str, optional + Complete url to reference local DynamoDB instance, e.g. 
http://localhost:8080 config: botocore.config.Config, optional Botocore configuration to pass during client initialization @@ -139,7 +141,7 @@ class DynamoDBProvider(BaseProvider): c Parameter value c """ - table = None + table: Any = None key_attr = None sort_attr = None value_attr = None @@ -150,6 +152,7 @@ def __init__( key_attr: str = "id", sort_attr: str = "sk", value_attr: str = "value", + endpoint_url: Optional[str] = None, config: Optional[Config] = None, ): """ @@ -157,7 +160,7 @@ def __init__( """ config = config or Config() - self.table = boto3.resource("dynamodb", config=config).Table(table_name) + self.table = boto3.resource("dynamodb", endpoint_url=endpoint_url, config=config).Table(table_name) self.key_attr = key_attr self.sort_attr = sort_attr diff --git a/aws_lambda_powertools/utilities/parameters/secrets.py b/aws_lambda_powertools/utilities/parameters/secrets.py index f14e4703ba8..6b7ea21fdf6 100644 --- a/aws_lambda_powertools/utilities/parameters/secrets.py +++ b/aws_lambda_powertools/utilities/parameters/secrets.py @@ -3,7 +3,7 @@ """ -from typing import Dict, Optional, Union +from typing import Any, Dict, Optional, Union import boto3 from botocore.config import Config @@ -56,7 +56,7 @@ class SecretsProvider(BaseProvider): My parameter value """ - client = None + client: Any = None def __init__(self, config: Optional[Config] = None): """ diff --git a/aws_lambda_powertools/utilities/parameters/ssm.py b/aws_lambda_powertools/utilities/parameters/ssm.py index 97910eda8ed..4bbef8bfc15 100644 --- a/aws_lambda_powertools/utilities/parameters/ssm.py +++ b/aws_lambda_powertools/utilities/parameters/ssm.py @@ -3,7 +3,7 @@ """ -from typing import Dict, Optional, Union +from typing import Any, Dict, Optional, Union import boto3 from botocore.config import Config @@ -72,7 +72,7 @@ class SSMProvider(BaseProvider): /my/path/prefix/c Parameter value c """ - client = None + client: Any = None def __init__(self, config: Optional[Config] = None): """ diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py index 45230b8c79e..923d5d057c3 100644 --- a/aws_lambda_powertools/utilities/parser/models/__init__.py +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -4,9 +4,29 @@ from .event_bridge import EventBridgeModel from .kinesis import KinesisDataStreamModel, KinesisDataStreamRecord, KinesisDataStreamRecordPayload from .s3 import S3Model, S3RecordModel -from .ses import SesModel, SesRecordModel +from .s3_object_event import ( + S3ObjectConfiguration, + S3ObjectContext, + S3ObjectLambdaEvent, + S3ObjectSessionAttributes, + S3ObjectSessionContext, + S3ObjectSessionIssuer, + S3ObjectUserIdentity, + S3ObjectUserRequest, +) +from .ses import ( + SesMail, + SesMailCommonHeaders, + SesMailHeaders, + SesMessage, + SesModel, + SesReceipt, + SesReceiptAction, + SesReceiptVerdict, + SesRecordModel, +) from .sns import SnsModel, SnsNotificationModel, SnsRecordModel -from .sqs import SqsModel, SqsRecordModel +from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel __all__ = [ "CloudWatchLogsData", @@ -20,16 +40,34 @@ "EventBridgeModel", "DynamoDBStreamChangedRecordModel", "DynamoDBStreamRecordModel", + "DynamoDBStreamChangedRecordModel", "KinesisDataStreamModel", "KinesisDataStreamRecord", "KinesisDataStreamRecordPayload", "S3Model", "S3RecordModel", + "S3ObjectLambdaEvent", + "S3ObjectUserIdentity", + "S3ObjectSessionContext", + "S3ObjectSessionAttributes", + 
"S3ObjectSessionIssuer", + "S3ObjectUserRequest", + "S3ObjectConfiguration", + "S3ObjectContext", "SesModel", "SesRecordModel", + "SesMessage", + "SesMail", + "SesMailCommonHeaders", + "SesMailHeaders", + "SesReceipt", + "SesReceiptAction", + "SesReceiptVerdict", "SnsModel", "SnsNotificationModel", "SnsRecordModel", "SqsModel", "SqsRecordModel", + "SqsMsgAttributeModel", + "SqsAttributesModel", ] diff --git a/aws_lambda_powertools/utilities/parser/models/s3.py b/aws_lambda_powertools/utilities/parser/models/s3.py index f3559d39908..4ec6a717f58 100644 --- a/aws_lambda_powertools/utilities/parser/models/s3.py +++ b/aws_lambda_powertools/utilities/parser/models/s3.py @@ -4,7 +4,7 @@ from pydantic import BaseModel from pydantic.fields import Field from pydantic.networks import IPvAnyNetwork -from pydantic.types import PositiveInt +from pydantic.types import NonNegativeFloat from ..types import Literal @@ -43,7 +43,7 @@ class S3Bucket(BaseModel): class S3Object(BaseModel): key: str - size: PositiveInt + size: NonNegativeFloat eTag: str sequencer: str versionId: Optional[str] diff --git a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py new file mode 100644 index 00000000000..1fc10672746 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py @@ -0,0 +1,57 @@ +from typing import Dict, Optional + +from pydantic import BaseModel, HttpUrl + + +class S3ObjectContext(BaseModel): + inputS3Url: HttpUrl + outputRoute: str + outputToken: str + + +class S3ObjectConfiguration(BaseModel): + accessPointArn: str + supportingAccessPointArn: str + payload: str + + +class S3ObjectUserRequest(BaseModel): + url: str + headers: Dict[str, str] + + +class S3ObjectSessionIssuer(BaseModel): + type: str # noqa: A003, VNE003 + userName: Optional[str] + principalId: str + arn: str + accountId: str + + +class S3ObjectSessionAttributes(BaseModel): + creationDate: str + mfaAuthenticated: bool + + +class S3ObjectSessionContext(BaseModel): + sessionIssuer: S3ObjectSessionIssuer + attributes: S3ObjectSessionAttributes + + +class S3ObjectUserIdentity(BaseModel): + type: str # noqa003 + accountId: str + accessKeyId: str + userName: Optional[str] + principalId: str + arn: str + sessionContext: Optional[S3ObjectSessionContext] + + +class S3ObjectLambdaEvent(BaseModel): + xAmzRequestId: str + getObjectContext: S3ObjectContext + configuration: S3ObjectConfiguration + userRequest: S3ObjectUserRequest + userIdentity: S3ObjectUserIdentity + protocolVersion: str diff --git a/aws_lambda_powertools/utilities/parser/types.py b/aws_lambda_powertools/utilities/parser/types.py index b19f9c9a87f..2565e52c764 100644 --- a/aws_lambda_powertools/utilities/parser/types.py +++ b/aws_lambda_powertools/utilities/parser/types.py @@ -1,13 +1,14 @@ """Generics and other shared types used across parser""" +import sys from typing import TypeVar from pydantic import BaseModel # We only need typing_extensions for python versions <3.8 -try: +if sys.version_info >= (3, 8): from typing import Literal # noqa: F401 -except ImportError: +else: from typing_extensions import Literal # noqa: F401 Model = TypeVar("Model", bound=BaseModel) diff --git a/aws_lambda_powertools/utilities/validation/base.py b/aws_lambda_powertools/utilities/validation/base.py index a5c82503735..eb84f300ded 100644 --- a/aws_lambda_powertools/utilities/validation/base.py +++ b/aws_lambda_powertools/utilities/validation/base.py @@ -40,7 +40,7 @@ def validate_data_against_schema(data: 
Dict, schema: Dict, formats: Optional[Dic
     raise SchemaValidationError(message)


-def unwrap_event_from_envelope(data: Dict, envelope: str, jmespath_options: Dict) -> Any:
+def unwrap_event_from_envelope(data: Dict, envelope: str, jmespath_options: Optional[Dict]) -> Any:
     """Searches data using JMESPath expression

     Parameters
diff --git a/aws_lambda_powertools/utilities/validation/validator.py b/aws_lambda_powertools/utilities/validation/validator.py
index 23a7241fd32..c962f8fff76 100644
--- a/aws_lambda_powertools/utilities/validation/validator.py
+++ b/aws_lambda_powertools/utilities/validation/validator.py
@@ -133,7 +133,7 @@ def handler(event, context):

 def validate(
     event: Dict,
-    schema: Dict = None,
+    schema: Dict,
     formats: Optional[Dict] = None,
     envelope: str = None,
     jmespath_options: Dict = None,
diff --git a/docs/core/event_handler/appsync.md b/docs/core/event_handler/appsync.md
new file mode 100644
index 00000000000..3f61a4ad311
--- /dev/null
+++ b/docs/core/event_handler/appsync.md
@@ -0,0 +1,634 @@
+---
+title: AppSync
+description: Core utility
+---
+
+Event handler for AWS AppSync Direct Lambda Resolver and Amplify GraphQL Transformer.
+
+### Key Features
+
+
+
+* Automatically parse API arguments to function arguments
+* Choose between strictly matching a GraphQL field name to a function, or matching all fields with that name
+* Integrates with [Data classes utilities](../../utilities/data_classes.md){target="_blank"} to access resolver and identity information
+* Works with both Direct Lambda Resolver and Amplify GraphQL Transformer `@function` directive
+* Supports async Python 3.8+ functions and generators
+
+## Terminology
+
+**[Direct Lambda Resolver](https://docs.aws.amazon.com/appsync/latest/devguide/direct-lambda-reference.html){target="_blank"}**. A custom AppSync Resolver to bypass the use of Apache Velocity Templates (VTL) and automatically map your function's response to a GraphQL field.
+
+**[Amplify GraphQL Transformer](https://docs.amplify.aws/cli/graphql-transformer/function){target="_blank"}**. Custom GraphQL directives to define your application's data model using Schema Definition Language (SDL). Amplify CLI uses these directives to convert GraphQL SDL into fully descriptive AWS CloudFormation templates.
+
+## Getting started
+
+### Required resources
+
+You must have an existing AppSync GraphQL API and IAM permissions to invoke your Lambda function. That said, no additional permissions are needed to use this utility.
+
+This is the sample infrastructure we are using for the initial examples with an AppSync Direct Lambda Resolver.
+
+=== "schema.graphql"
+
+    !!! tip "Designing GraphQL Schemas for the first time?"
+        Visit [AWS AppSync schema documentation](https://docs.aws.amazon.com/appsync/latest/devguide/designing-your-schema.html){target="_blank"} for understanding how to define types, nesting, and pagination. 
+ + ```typescript + --8<-- "docs/shared/getting_started_schema.graphql" + ``` + +=== "template.yml" + + ```yaml hl_lines="37-42 50-55 61-62 78-91 96-120" + AWSTemplateFormatVersion: '2010-09-09' + Transform: AWS::Serverless-2016-10-31 + Description: Hello world Direct Lambda Resolver + + Globals: + Function: + Timeout: 5 + Runtime: python3.8 + Tracing: Active + Environment: + Variables: + # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/latest/#environment-variables + LOG_LEVEL: INFO + POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 + POWERTOOLS_LOGGER_LOG_EVENT: true + POWERTOOLS_SERVICE_NAME: sample_resolver + + Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Description: Sample Lambda Powertools Direct Lambda Resolver + Tags: + SOLUTION: LambdaPowertoolsPython + + # IAM Permissions and Roles + + AppSyncServiceRole: + Type: "AWS::IAM::Role" + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - + Effect: "Allow" + Principal: + Service: + - "appsync.amazonaws.com" + Action: + - "sts:AssumeRole" + + InvokeLambdaResolverPolicy: + Type: "AWS::IAM::Policy" + Properties: + PolicyName: "DirectAppSyncLambda" + PolicyDocument: + Version: "2012-10-17" + Statement: + - + Effect: "Allow" + Action: "lambda:invokeFunction" + Resource: + - !GetAtt HelloWorldFunction.Arn + Roles: + - !Ref AppSyncServiceRole + + # GraphQL API + + HelloWorldApi: + Type: "AWS::AppSync::GraphQLApi" + Properties: + Name: HelloWorldApi + AuthenticationType: "API_KEY" + XrayEnabled: true + + HelloWorldApiKey: + Type: AWS::AppSync::ApiKey + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + + HelloWorldApiSchema: + Type: "AWS::AppSync::GraphQLSchema" + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + Definition: | + schema { + query:Query + } + + type Query { + getTodo(id: ID!): Todo + listTodos: [Todo] + } + + type Todo { + id: ID! + title: String + description: String + done: Boolean + } + + # Lambda Direct Data Source and Resolver + + HelloWorldFunctionDataSource: + Type: "AWS::AppSync::DataSource" + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + Name: "HelloWorldLambdaDirectResolver" + Type: "AWS_LAMBDA" + ServiceRoleArn: !GetAtt AppSyncServiceRole.Arn + LambdaConfig: + LambdaFunctionArn: !GetAtt HelloWorldFunction.Arn + + ListTodosResolver: + Type: "AWS::AppSync::Resolver" + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + TypeName: "Query" + FieldName: "listTodos" + DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name + + GetTodoResolver: + Type: "AWS::AppSync::Resolver" + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + TypeName: "Query" + FieldName: "getTodo" + DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name + + + Outputs: + HelloWorldFunction: + Description: "Hello World Lambda Function ARN" + Value: !GetAtt HelloWorldFunction.Arn + + HelloWorldAPI: + Value: !GetAtt HelloWorldApi.Arn + ``` + + +### Resolver decorator + +You can define your functions to match GraphQL types and fields with the `app.resolver()` decorator. + +Here's an example where we have two separate functions to resolve `getTodo` and `listTodos` fields within the `Query` type. For completion, we use Scalar type utilities to generate the right output based on our schema definition. + +!!! 
info "GraphQL arguments are passed as function arguments" + +=== "app.py" + + ```python hl_lines="3-5 9 31-32 39-40 47" + from aws_lambda_powertools import Logger, Tracer + + from aws_lambda_powertools.logging import correlation_paths + from aws_lambda_powertools.event_handler import AppSyncResolver + from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils + + tracer = Tracer(service="sample_resolver") + logger = Logger(service="sample_resolver") + app = AppSyncResolver() + + # Note that `creation_time` isn't available in the schema + # This utility also takes into account what info you make available at API level vs what's stored + TODOS = [ + { + "id": scalar_types_utils.make_id(), # type ID or String + "title": "First task", + "description": "String", + "done": False, + "creation_time": scalar_types_utils.aws_datetime(), # type AWSDateTime + }, + { + "id": scalar_types_utils.make_id(), + "title": "Second task", + "description": "String", + "done": True, + "creation_time": scalar_types_utils.aws_datetime(), + }, + ] + + + @app.resolver(type_name="Query", field_name="getTodo") + def get_todo(id: str = ""): + logger.info(f"Fetching Todo {id}") + todo = [todo for todo in TODOS if todo["id"] == id] + + return todo + + + @app.resolver(type_name="Query", field_name="listTodos") + def list_todos(): + return TODOS + + + @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + +=== "schema.graphql" + + ```typescript + --8<-- "docs/shared/getting_started_schema.graphql" + ``` + +=== "getTodo_event.json" + ```json + { + "arguments": { + "id": "7e362732-c8cd-4405-b090-144ac9b38960" + }, + "identity": null, + "source": null, + "request": { + "headers": { + "x-forwarded-for": "1.2.3.4, 5.6.7.8", + "accept-encoding": "gzip, deflate, br", + "cloudfront-viewer-country": "NL", + "cloudfront-is-tablet-viewer": "false", + "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", + "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", + "cloudfront-forwarded-proto": "https", + "origin": "https://eu-west-1.console.aws.amazon.com", + "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", + "content-type": "application/json", + "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", + "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", + "content-length": "114", + "x-amz-user-agent": "AWS-Console-AppSync/", + "x-forwarded-proto": "https", + "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", + "accept-language": "en-US,en;q=0.5", + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", + "cloudfront-is-desktop-viewer": "true", + "cloudfront-is-mobile-viewer": "false", + "accept": "*/*", + "x-forwarded-port": "443", + "cloudfront-is-smarttv-viewer": "false" + } + }, + "prev": null, + "info": { + "parentTypeName": "Query", + "selectionSetList": [ + "title", + "id" + ], + "selectionSetGraphQL": "{\n title\n id\n}", + "fieldName": "getTodo", + "variables": {} + }, + "stash": {} + } + ``` + +=== "listTodos_event.json" + ```json + { + "arguments": {}, + "identity": null, + "source": null, + "request": { + "headers": { + "x-forwarded-for": "1.2.3.4, 5.6.7.8", + "accept-encoding": "gzip, deflate, br", + "cloudfront-viewer-country": "NL", + "cloudfront-is-tablet-viewer": 
"false", + "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", + "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", + "cloudfront-forwarded-proto": "https", + "origin": "https://eu-west-1.console.aws.amazon.com", + "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", + "content-type": "application/json", + "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", + "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", + "content-length": "114", + "x-amz-user-agent": "AWS-Console-AppSync/", + "x-forwarded-proto": "https", + "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", + "accept-language": "en-US,en;q=0.5", + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", + "cloudfront-is-desktop-viewer": "true", + "cloudfront-is-mobile-viewer": "false", + "accept": "*/*", + "x-forwarded-port": "443", + "cloudfront-is-smarttv-viewer": "false" + } + }, + "prev": null, + "info": { + "parentTypeName": "Query", + "selectionSetList": [ + "id", + "title" + ], + "selectionSetGraphQL": "{\n id\n title\n}", + "fieldName": "listTodos", + "variables": {} + }, + "stash": {} + } + ``` + +## Advanced + +### Nested mappings + +You can nest `app.resolver()` decorator multiple times when resolving fields with the same return. + +=== "nested_mappings.py" + + ```python hl_lines="4 8 10-12 18" + from aws_lambda_powertools import Logger, Tracer + + from aws_lambda_powertools.logging import correlation_paths + from aws_lambda_powertools.event_handler import AppSyncResolver + + tracer = Tracer(service="sample_resolver") + logger = Logger(service="sample_resolver") + app = AppSyncResolver() + + @app.resolver(field_name="listLocations") + @app.resolver(field_name="locations") + def get_locations(name: str, description: str = ""): + return name + description + + @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) + @tracer.capture_lambda_handler + def lambda_handler(event, context): + return app.resolve(event, context) + ``` + +=== "schema.graphql" + + ```typescript hl_lines="6 20" + schema { + query: Query + } + + type Query { + listLocations: [Todo] + } + + type Location { + id: ID! + name: String! + description: String + address: String + } + + type Merchant { + id: String! + name: String! + description: String + locations: [Location] + } + ``` + +### Async functions + +For Lambda Python3.8+ runtime, this utility supports async functions when you use in conjunction with `asyncio.run`. 
+ +=== "async_resolver.py" + ```python hl_lines="4 8 10-12 20" + from aws_lambda_powertools import Logger, Tracer + + from aws_lambda_powertools.logging import correlation_paths + from aws_lambda_powertools.event_handler import AppSyncResolver + + tracer = Tracer(service="sample_resolver") + logger = Logger(service="sample_resolver") + app = AppSyncResolver() + + @app.resolver(type_name="Query", field_name="listTodos") + async def list_todos(): + todos = await some_async_io_call() + return todos + + @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) + @tracer.capture_lambda_handler + def lambda_handler(event, context): + result = app.resolve(event, context) + + return asyncio.run(result) + ``` + +### Amplify GraphQL Transformer + +Assuming you have [Amplify CLI installed](https://docs.amplify.aws/cli/start/install){target="_blank"}, create a new API using `amplify add api` and use the following GraphQL Schema. + + + +=== "schema.graphql" + + ```typescript hl_lines="7 15 20 22" + @model + type Merchant { + id: String! + name: String! + description: String + # Resolves to `common_field` + commonField: String @function(name: "merchantInfo-${env}") + } + + type Location { + id: ID! + name: String! + address: String + # Resolves to `common_field` + commonField: String @function(name: "merchantInfo-${env}") + } + + type Query { + # List of locations resolves to `list_locations` + listLocations(page: Int, size: Int): [Location] @function(name: "merchantInfo-${env}") + # List of locations resolves to `list_locations` + findMerchant(search: str): [Merchant] @function(name: "searchMerchant-${env}") + } + ``` + +[Create two new basic Python functions](https://docs.amplify.aws/cli/function#set-up-a-function){target="_blank"} via `amplify add function`. + +!!! note "Amplify CLI generated functions use `Pipenv` as a dependency manager" + Your function source code is located at **`amplify/backend/function/your-function-name`**. + +Within your function's folder, add Lambda Powertools as a dependency with `pipenv install aws-lambda-powertools`. + +Use the following code for `merchantInfo` and `searchMerchant` functions respectively. 
+
+### Amplify GraphQL Transformer
+
+Assuming you have [Amplify CLI installed](https://docs.amplify.aws/cli/start/install){target="_blank"}, create a new API using `amplify add api` and use the following GraphQL Schema.
+
+=== "schema.graphql"
+
+    ```typescript hl_lines="6 14 19 21"
+    type Merchant @model {
+        id: String!
+        name: String!
+        description: String
+        # Resolves to `common_field`
+        commonField: String @function(name: "merchantInfo-${env}")
+    }
+
+    type Location {
+        id: ID!
+        name: String!
+        address: String
+        # Resolves to `common_field`
+        commonField: String @function(name: "merchantInfo-${env}")
+    }
+
+    type Query {
+        # List of locations resolves to `list_locations`
+        listLocations(page: Int, size: Int): [Location] @function(name: "merchantInfo-${env}")
+        # Merchant search resolves to `find_merchant`
+        findMerchant(search: String): [Merchant] @function(name: "searchMerchant-${env}")
+    }
+    ```
+
+[Create two new basic Python functions](https://docs.amplify.aws/cli/function#set-up-a-function){target="_blank"} via `amplify add function`.
+
+!!! note "Amplify CLI generated functions use `Pipenv` as a dependency manager"
+    Your function source code is located at **`amplify/backend/function/your-function-name`**.
+
+Within your function's folder, add Lambda Powertools as a dependency with `pipenv install aws-lambda-powertools`.
+
+Use the following code for the `merchantInfo` and `searchMerchant` functions respectively.
+
+=== "merchantInfo/src/app.py"
+
+    ```python hl_lines="4-5 9 11-12 15-16 23"
+    from aws_lambda_powertools import Logger, Tracer
+
+    from aws_lambda_powertools.logging import correlation_paths
+    from aws_lambda_powertools.event_handler import AppSyncResolver
+    from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils
+
+    tracer = Tracer(service="sample_graphql_transformer_resolver")
+    logger = Logger(service="sample_graphql_transformer_resolver")
+    app = AppSyncResolver()
+
+    @app.resolver(type_name="Query", field_name="listLocations")
+    def list_locations(page: int = 0, size: int = 10):
+        return [{"id": 100, "name": "Smooth Grooves"}]
+
+    @app.resolver(field_name="commonField")
+    def common_field():
+        # Would match all fieldNames matching 'commonField'
+        return scalar_types_utils.make_id()
+
+    @tracer.capture_lambda_handler
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+    def lambda_handler(event, context):
+        return app.resolve(event, context)
+    ```
+
+=== "searchMerchant/src/app.py"
+
+    ```python hl_lines="1 4 6-7"
+    from aws_lambda_powertools.event_handler import AppSyncResolver
+    from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils
+
+    app = AppSyncResolver()
+
+    @app.resolver(type_name="Query", field_name="findMerchant")
+    def find_merchant(search: str):
+        return [
+            {
+                "id": scalar_types_utils.make_id(),
+                "name": "Brewer Brewing",
+                "description": "Mike Brewer's IPA brewing place"
+            },
+            {
+                "id": scalar_types_utils.make_id(),
+                "name": "Serverlessa's Bakery",
+                "description": "Lessa's sourdough place"
+            },
+        ]
+    ```
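+
+The `scalar_types_utils` helpers used above generate values matching common AppSync scalar types. A quick illustration (the output values shown in comments are examples, not literal results):
+
+=== "scalar_types_utils_example.py"
+
+    ```python
+    from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils
+
+    scalar_types_utils.make_id()        # ID scalar, e.g. "e5c7e143-..." (UUID-based)
+    scalar_types_utils.aws_datetime()   # AWSDateTime scalar, e.g. "2021-04-06T10:52:33.124Z"
+    scalar_types_utils.aws_timestamp()  # AWSTimestamp scalar, epoch time in seconds
+    ```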
+
+**Example AppSync GraphQL Transformer Function resolver events**
+
+=== "Query.listLocations event"
+
+    ```json hl_lines="2-7"
+    {
+      "typeName": "Query",
+      "fieldName": "listLocations",
+      "arguments": {
+        "page": 2,
+        "size": 1
+      },
+      "identity": {
+        "claims": {
+          "iat": 1615366261
+          ...
+        },
+        "username": "mike",
+        ...
+      },
+      "request": {
+        "headers": {
+          "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+          "x-forwarded-for": "127.0.0.1"
+          ...
+        }
+      },
+      ...
+    }
+    ```
+
+=== "*.commonField event"
+
+    ```json hl_lines="2 3"
+    {
+      "typeName": "Merchant",
+      "fieldName": "commonField",
+      "arguments": {
+      },
+      "identity": {
+        "claims": {
+          "iat": 1615366261
+          ...
+        },
+        "username": "mike",
+        ...
+      },
+      "request": {
+        "headers": {
+          "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+          "x-forwarded-for": "127.0.0.1"
+          ...
+        }
+      },
+      ...
+    }
+    ```
+
+=== "Query.findMerchant event"
+
+    ```json hl_lines="2-6"
+    {
+      "typeName": "Query",
+      "fieldName": "findMerchant",
+      "arguments": {
+        "search": "Brewers Coffee"
+      },
+      "identity": {
+        "claims": {
+          "iat": 1615366261
+          ...
+        },
+        "username": "mike",
+        ...
+      },
+      "request": {
+        "headers": {
+          "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+          "x-forwarded-for": "127.0.0.1"
+          ...
+        }
+      },
+      ...
+    }
+    ```
+
+## Testing your code
+
+You can test your resolvers by passing a mocked or actual AppSync Lambda event that you're expecting.
+
+You can use either `app.resolve(event, context)` or simply `app(event, context)`.
+
+Here's an example from our internal functional test.
+
+=== "test_direct_resolver.py"
+    ```python
+    def test_direct_resolver():
+        # Check whether we can handle an example appsync direct resolver
+        # load_event primarily deserializes the JSON event into a dict
+        mock_event = load_event("appSyncDirectResolver.json")
+
+        app = AppSyncResolver()
+
+        @app.resolver(field_name="createSomething")
+        def create_something(id: str):
+            assert app.lambda_context == {}
+            return id
+
+        # Call the implicit handler
+        result = app(mock_event, {})
+
+        assert result == "my identifier"
+    ```
+
+=== "appSyncDirectResolver.json"
+    ```json
+    --8<-- "tests/events/appSyncDirectResolver.json"
+    ```
diff --git a/docs/core/logger.md b/docs/core/logger.md
index 6fb4fa28017..ae842d6a613 100644
--- a/docs/core/logger.md
+++ b/docs/core/logger.md
@@ -3,7 +3,6 @@ title: Logger
 description: Core utility
 ---
 
-
 Logger provides an opinionated logger with output structured as JSON.
 
 ## Key features
@@ -532,6 +531,7 @@ Service is what defines the Logger name, including what the Lambda function is r
 For Logger, the `service` is the logging key customers can use to search log operations for one or more functions - For example, **search for all errors, or messages like X, where service is payment**.
 
 ??? tip "Logging output example"
+
     ```json hl_lines="5"
     {
         "timestamp": "2020-05-24 18:17:33,774",
@@ -572,6 +572,7 @@ A common issue when migrating from other Loggers is that `service` might be defi
     logger = Logger(child=True)
     ```
 
+
 === "correct_logger_inheritance.py"
 
     ```python hl_lines="4 10"
@@ -653,6 +654,26 @@ You can also change the order of the following log record keys via the `log_reco
     }
     ```
 
+#### Setting timestamp to UTC
+
+By default, Logger and the standard logging library emit records using the local time timestamp. You can override this behaviour by updating the converter set in our formatter:
+
+=== "app.py"
+
+    ```python hl_lines="1 3 9"
+    from aws_lambda_powertools import Logger
+
+    import time
+
+    logger = Logger(service="sample_service")
+
+    logger.info("Local time")
+
+    logger._logger.handlers[0].formatter.converter = time.gmtime
+
+    logger.info("GMT time")
+    ```
+
 ## Testing your code
 
 When unit testing your code that makes use of `inject_lambda_context` decorator, you need to pass a dummy Lambda Context, or else Logger will fail.
diff --git a/docs/core/tracer.md b/docs/core/tracer.md
index 177f861709b..3dcb5da1e7c 100644
--- a/docs/core/tracer.md
+++ b/docs/core/tracer.md
@@ -41,7 +41,7 @@ Before your use this utility, your AWS Lambda function [must have permissions](h
 
 You can quickly start by importing the `Tracer` class, initializing it outside the Lambda handler, and using the `capture_lambda_handler` decorator.
 
=== "app.py" - ```python hl_lines="1 3 7" + ```python hl_lines="1 3 6" from aws_lambda_powertools import Tracer tracer = Tracer() # Sets service via env var diff --git a/docs/index.md b/docs/index.md index 1f347b017e1..bb3d4925c21 100644 --- a/docs/index.md +++ b/docs/index.md @@ -141,17 +141,18 @@ aws serverlessrepo list-application-versions \ | Utility | Description | ------------------------------------------------- | --------------------------------------------------------------------------------- -| [Tracing](./core/tracer) | Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions -| [Logger](./core/logger) | Structured logging made easier, and decorator to enrich structured logging with key Lambda context details -| [Metrics](./core/metrics) | Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF) -| [Middleware factory](./utilities/middleware_factory) | Decorator factory to create your own middleware to run logic before, and after each Lambda invocation -| [Parameters](./utilities/parameters) | Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a specific amount of time -| [Batch processing](./utilities/batch) | Handle partial failures for AWS SQS batch processing -| [Typing](./utilities/typing) | Static typing classes to speedup development in your IDE -| [Validation](./utilities/validation) | JSON Schema validator for inbound events and responses -| [Event source data classes](./utilities/data_classes) | Data classes describing the schema of common Lambda event triggers -| [Parser](./utilities/parser) | Data parsing and deep validation using Pydantic -| [Idempotency](./utilities/idempotency) | Idempotent Lambda handler +| [Tracing](./core/tracer.md) | Decorators and utilities to trace Lambda function handlers, and both synchronous and asynchronous functions +| [Logger](./core/logger.md) | Structured logging made easier, and decorator to enrich structured logging with key Lambda context details +| [Metrics](./core/metrics.md) | Custom Metrics created asynchronously via CloudWatch Embedded Metric Format (EMF) +| [Event handler - AppSync](./core/event_handler/appsync.md) | AppSync event handler for Lambda Direct Resolver and Amplify GraphQL Transformer function +| [Middleware factory](./utilities/middleware_factory.md) | Decorator factory to create your own middleware to run logic before, and after each Lambda invocation +| [Parameters](./utilities/parameters.md) | Retrieve parameter values from AWS Systems Manager Parameter Store, AWS Secrets Manager, or Amazon DynamoDB, and cache them for a specific amount of time +| [Batch processing](./utilities/batch.md) | Handle partial failures for AWS SQS batch processing +| [Typing](./utilities/typing.md) | Static typing classes to speedup development in your IDE +| [Validation](./utilities/validation.md) | JSON Schema validator for inbound events and responses +| [Event source data classes](./utilities/data_classes.md) | Data classes describing the schema of common Lambda event triggers +| [Parser](./utilities/parser.md) | Data parsing and deep validation using Pydantic +| [Idempotency](./utilities/idempotency.md) | Idempotent Lambda handler ## Environment variables diff --git a/docs/shared/getting_started_schema.graphql b/docs/shared/getting_started_schema.graphql new file mode 100644 index 00000000000..c738156bd73 --- /dev/null +++ b/docs/shared/getting_started_schema.graphql @@ -0,0 
+1,15 @@ +schema { + query: Query +} + +type Query { + getTodo(id: ID!): Todo + listTodos: [Todo] +} + +type Todo { + id: ID! + title: String + description: String + done: Boolean +} diff --git a/docs/shared/validation_basic_eventbridge_event.json b/docs/shared/validation_basic_eventbridge_event.json new file mode 100644 index 00000000000..aa0912e751b --- /dev/null +++ b/docs/shared/validation_basic_eventbridge_event.json @@ -0,0 +1,15 @@ +{ + "id": "cdc73f9d-aea9-11e3-9d5a-835b769c0d9c", + "detail-type": "Scheduled Event", + "source": "aws.events", + "account": "123456789012", + "time": "1970-01-01T00:00:00Z", + "region": "us-east-1", + "resources": [ + "arn:aws:events:us-east-1:123456789012:rule/ExampleRule" + ], + "detail": { + "message": "hello hello", + "username": "blah blah" + } +} diff --git a/docs/shared/validation_basic_jsonschema.py b/docs/shared/validation_basic_jsonschema.py new file mode 100644 index 00000000000..afb8a723d18 --- /dev/null +++ b/docs/shared/validation_basic_jsonschema.py @@ -0,0 +1,39 @@ +INPUT = { + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "http://example.com/example.json", + "type": "object", + "title": "Sample schema", + "description": "The root schema comprises the entire JSON document.", + "examples": [{"message": "hello world", "username": "lessa"}], + "required": ["message", "username"], + "properties": { + "message": { + "$id": "#/properties/message", + "type": "string", + "title": "The message", + "examples": ["hello world"], + "maxLength": 100, + }, + "username": { + "$id": "#/properties/username", + "type": "string", + "title": "The username", + "examples": ["lessa"], + "maxLength": 30, + }, + }, +} + +OUTPUT = { + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "http://example.com/example.json", + "type": "object", + "title": "Sample outgoing schema", + "description": "The root schema comprises the entire JSON document.", + "examples": [{"statusCode": 200, "body": "response"}], + "required": ["statusCode", "body"], + "properties": { + "statusCode": {"$id": "#/properties/statusCode", "type": "integer", "title": "The statusCode"}, + "body": {"$id": "#/properties/body", "type": "string", "title": "The response"}, + }, +} diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md index ca4606e0f40..26006427a14 100644 --- a/docs/utilities/batch.md +++ b/docs/utilities/batch.md @@ -54,7 +54,7 @@ Before your use this utility, your AWS Lambda function must have `sqs:DeleteMess ### Processing messages from SQS -You can use either **[sqs_batch_processor](#sqs_batch_processor-decorator)** decorator, or **[PartialSQSProcessor](#partialsqsprocessor-context-manager)** as a context manager if you'd like access to the processed results. +You can use either `sqs_batch_processor` decorator, or `PartialSQSProcessor` as a context manager if you'd like access to the processed results. You need to create a function to handle each record from the batch - We call it `record_handler` from here on. diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index c7c11b6b2f9..dc56ed8ec41 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -3,8 +3,7 @@ title: Event Source Data Classes description: Utility --- -Event Source Data Classes utility provides classes self-describing Lambda event sources, including API decorators when -applicable. 
+Event Source Data Classes utility provides classes self-describing Lambda event sources. ## Key Features @@ -554,7 +553,7 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda === "app.py" - ```python hl_lines="4-5 10 12" + ```python hl_lines="5-6 12 14" import boto3 import requests diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 09e8567344f..ecadbe530ae 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -30,19 +30,26 @@ times with the same parameters**. This makes idempotent operations safe to retry ### Required resources -Before getting started, you need to create a persistent storage layer where the idempotency utility can store its -state - your lambda functions will need read and write access to it. +Before getting started, you need to create a persistent storage layer where the idempotency utility can store its state - your lambda functions will need read and write access to it. As of now, Amazon DynamoDB is the only supported persistent storage layer, so you'll need to create a table first. -> Example using AWS Serverless Application Model (SAM) +**Default table configuration** + +If you're not [changing the default configuration for the DynamoDB persistence layer](#dynamodbpersistencelayer), this is the expected default configuration: + +Configuration | Value | Notes +------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- +Partition key | `id` | +TTL attribute name | `expiration` | This can only be configured after your table is created if you're using AWS Console -=== "template.yml" - !!! tip "You can share a single state table for all functions" - > New in 1.12.0 +!!! tip "You can share a single state table for all functions" + You can reuse the same DynamoDB table to store idempotency state. We add your `function_name` in addition to the idempotency key as a hash key. - You can reuse the same DynamoDB table to store idempotency state. We add your function_name in addition to the idempotency key as a hash key. +> Example using AWS Serverless Application Model (SAM) + +=== "template.yml" ```yaml hl_lines="5-13 21-23" Resources: @@ -124,7 +131,7 @@ You can quickly start by initializing the `DynamoDBPersistenceLayer` class and u !!! tip "Dealing with always changing payloads" When dealing with a more elaborate payload, where parts of the payload always change, you should use **`event_key_jmespath`** parameter. -Use [`IdempotencyConfig`](#customizing-the-default-behaviour) to instruct the idempotent decorator to only use a portion of your payload to verify whether a request is idempotent, and therefore it should not be retried. +Use [`IdempotencyConfig`](#customizing-the-default-behavior) to instruct the idempotent decorator to only use a portion of your payload to verify whether a request is idempotent, and therefore it should not be retried. > **Payment scenario** diff --git a/docs/utilities/middleware_factory.md b/docs/utilities/middleware_factory.md index f7b22a11848..b0f5d4a1ccd 100644 --- a/docs/utilities/middleware_factory.md +++ b/docs/utilities/middleware_factory.md @@ -3,11 +3,9 @@ title: Middleware factory description: Utility --- - - Middleware factory provides a decorator factory to create your own middleware to run logic before, and after each Lambda invocation synchronously. 
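+
+As a quick taste, here is a minimal no-op middleware sketch built with the `lambda_handler_decorator` factory this utility exposes:
+
+=== "minimal_middleware.py"
+
+    ```python
+    from aws_lambda_powertools.middleware_factory import lambda_handler_decorator
+
+    @lambda_handler_decorator
+    def middleware_before_after(handler, event, context):
+        # logic before the handler runs
+        response = handler(event, context)
+        # logic after the handler returns
+        return response
+
+    @middleware_before_after
+    def lambda_handler(event, context):
+        return {"statusCode": 200}
+    ```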
-**Key features** +## Key features * Run logic before, after, and handle exceptions * Trace each middleware when requested @@ -80,9 +78,9 @@ This makes use of an existing Tracer instance that you may have initialized anyw ... ``` -When executed, your middleware name will [appear in AWS X-Ray Trace details as](../core/tracer) `## middleware_name`. +When executed, your middleware name will [appear in AWS X-Ray Trace details as](../core/tracer.md) `## middleware_name`. -For advanced use cases, you can instantiate [Tracer](../core/tracer) inside your middleware, and add annotations as well as metadata for additional operational insights. +For advanced use cases, you can instantiate [Tracer](../core/tracer.md) inside your middleware, and add annotations as well as metadata for additional operational insights. === "app.py" @@ -102,7 +100,7 @@ For advanced use cases, you can instantiate [Tracer](../core/tracer) inside your * Use `trace_execution` to quickly understand the performance impact of your middlewares, and reduce or merge tasks when necessary * When nesting multiple middlewares, always return the handler with event and context, or response -* Keep in mind [Python decorators execution order](https://realpython.com/primer-on-python-decorators/#nesting-decorators). Lambda handler is actually called once (top-down) +* Keep in mind [Python decorators execution order](https://realpython.com/primer-on-python-decorators/#nesting-decorators){target="_blank"}. Lambda handler is actually called once (top-down) * Async middlewares are not supported ## Testing your code diff --git a/docs/utilities/parameters.md b/docs/utilities/parameters.md index e50f3f85b81..8fc3227e2c6 100644 --- a/docs/utilities/parameters.md +++ b/docs/utilities/parameters.md @@ -4,7 +4,7 @@ description: Utility --- -The parameters utility provides high-level functions to retrieve one or multiple parameter values from [AWS Systems Manager Parameter Store](https://docs.aws.amazon.com/systems-manager/latest/userguide/systems-manager-parameter-store.html){target="_blank"}, [AWS Secrets Manager](https://aws.amazon.com/secrets-manager/), [AWS AppConfig](https://aws.amazon.com/appconfig/){target="_blank"}, [Amazon DynamoDB](https://aws.amazon.com/dynamodb/){target="_blank"}, or bring your own. +The parameters utility provides high-level functions to retrieve one or multiple parameter values from [AWS Systems Manager Parameter Store](https://docs.aws.amazon.com/systems-manager/latest/userguide/systems-manager-parameter-store.html){target="_blank"}, [AWS Secrets Manager](https://aws.amazon.com/secrets-manager/){target="_blank"}, [AWS AppConfig](https://docs.aws.amazon.com/appconfig/latest/userguide/what-is-appconfig.html){target="_blank"}, [Amazon DynamoDB](https://aws.amazon.com/dynamodb/){target="_blank"}, or bring your own. ## Key features @@ -196,6 +196,17 @@ The AWS Systems Manager Parameter Store provider supports two additional argumen The DynamoDB Provider does not have any high-level functions, as it needs to know the name of the DynamoDB table containing the parameters. 
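+
+For instance, a minimal sketch of initializing the provider and fetching a single value - `my-table` and `my-parameter` are placeholder names:
+
+=== "dynamodb_provider_quickstart.py"
+
+    ```python
+    from aws_lambda_powertools.utilities import parameters
+
+    # The provider needs the table name upfront; values are then fetched by key
+    dynamodb_provider = parameters.DynamoDBProvider(table_name="my-table")
+
+    value = dynamodb_provider.get("my-parameter")
+    ```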
+**Local testing with DynamoDB Local** + +You can initialize the DynamoDB provider pointing to [DynamoDB Local](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.html) using **`endpoint_url`** parameter: + +=== "dynamodb_local.py" + ```python hl_lines="3" + from aws_lambda_powertools.utilities import parameters + + dynamodb_provider = parameters.DynamoDBProvider(table_name="my-table", endpoint_url="http://localhost:8000") + ``` + **DynamoDB table structure for single parameters** For single parameters, you must use `id` as the [partition key](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.CoreComponents.html#HowItWorks.CoreComponents.PrimaryKey) for that table. diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 1c2605adc08..7c39b1ffd0a 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -5,7 +5,7 @@ description: Utility This utility provides data parsing and deep validation using [Pydantic](https://pydantic-docs.helpmanual.io/). -**Key features** +## Key features * Defines data in pure Python classes, then parse, validate and extract only what you want * Built-in envelopes to unwrap, extend, and validate popular event sources payloads @@ -149,17 +149,18 @@ Use this standalone function when you want more control over the data validation Parser comes with the following built-in models: -Model name | Description -------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- -**DynamoDBStreamModel** | Lambda Event Source payload for Amazon DynamoDB Streams -**EventBridgeModel** | Lambda Event Source payload for Amazon EventBridge -**SqsModel** | Lambda Event Source payload for Amazon SQS -**AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer -**CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs -**S3Model** | Lambda Event Source payload for Amazon S3 -**KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams -**SesModel** | Lambda Event Source payload for Amazon Simple Email Service -**SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service +| Model name | Description | +| -------------------------- | ------------------------------------------------------------------ | +| **DynamoDBStreamModel** | Lambda Event Source payload for Amazon DynamoDB Streams | +| **EventBridgeModel** | Lambda Event Source payload for Amazon EventBridge | +| **SqsModel** | Lambda Event Source payload for Amazon SQS | +| **AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer | +| **CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs | +| **S3Model** | Lambda Event Source payload for Amazon S3 | +| **S3ObjectLambdaEvent** | Lambda Event Source payload for Amazon S3 Object Lambda | +| **KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams | +| **SesModel** | Lambda Event Source payload for Amazon Simple Email Service | +| **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service | ### extending built-in models @@ -293,15 +294,15 @@ Here's an example of parsing a model found in an event coming from EventBridge, Parser comes with the following built-in envelopes, where `Model` in the return section is your given model. 
-Envelope name | Behaviour | Return -------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ------------------------------------ -**DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`.
2. Parses records in `NewImage` and `OldImage` keys using your model.
3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` -**EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`.
2. Parses `detail` key using your model and returns it. | `Model` -**SqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` -**CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it.
2. Parses records in `message` key using your model and return them in a list. | `List[Model]` -**KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it.
2. Parses records in in `Records` key using your model and returns them in a list. | `List[Model]` -**SnsEnvelope** | 1. Parses data using `SnsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` -**SnsSqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses SNS records in `body` key using `SnsNotificationModel`.
3. Parses data in `Message` key using your model and return them in a list. | `List[Model]` +| Envelope name | Behaviour | Return | +| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- | +| **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`.
2. Parses records in `NewImage` and `OldImage` keys using your model.
3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` | +| **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`.
2. Parses `detail` key using your model and returns it. | `Model` | +| **SqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | +| **CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it.
2. Parses records in `message` key using your model and return them in a list. | `List[Model]` | +| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it.
2. Parses records in `Records` key using your model and returns them in a list. | `List[Model]` |
+| **SnsEnvelope**               | 1. Parses data using `SnsModel`. <br/>
2. Parses records in `body` key using your model and return them in a list. | `List[Model]` | +| **SnsSqsEnvelope** | 1. Parses data using `SqsModel`.
2. Parses SNS records in `body` key using `SnsNotificationModel`.
3. Parses data in `Message` key using your model and return them in a list. | `List[Model]` | ### bringing your own envelope diff --git a/docs/utilities/validation.md b/docs/utilities/validation.md index 3060a1fcfc4..3a32500f122 100644 --- a/docs/utilities/validation.md +++ b/docs/utilities/validation.md @@ -3,27 +3,28 @@ title: Validation description: Utility --- - This utility provides JSON Schema validation for events and responses, including JMESPath support to unwrap events before validation. -**Key features** +## Key features * Validate incoming event and response * JMESPath support to unwrap events before validation applies * Built-in envelopes to unwrap popular event sources payloads -## Validating events +## Getting started + +!!! tip "Using JSON Schemas for the first time?" + Check this [step-by-step tour in the official JSON Schema website](https://json-schema.org/learn/getting-started-step-by-step.html){target="_blank"}. -You can validate inbound and outbound events using `validator` decorator. +You can validate inbound and outbound events using [`validator` decorator](#validator-decorator). You can also use the standalone `validate` function, if you want more control over the validation process such as handling a validation error. -We support any JSONSchema draft supported by [fastjsonschema](https://horejsek.github.io/python-fastjsonschema/) library. +We support any JSONSchema draft supported by [fastjsonschema](https://horejsek.github.io/python-fastjsonschema/){target="_blank"} library. !!! warning Both `validator` decorator and `validate` standalone function expects your JSON Schema to be a **dictionary**, not a filename. - ### Validator decorator **Validator** decorator is typically used to validate either inbound or functions' response. @@ -32,17 +33,31 @@ It will fail fast with `SchemaValidationError` exception if event or response do === "validator_decorator.py" - ```python + ```python hl_lines="3 5" from aws_lambda_powertools.utilities.validation import validator - json_schema_dict = {..} - response_json_schema_dict = {..} + import schemas - @validator(inbound_schema=json_schema_dict, outbound_schema=response_json_schema_dict) + @validator(inbound_schema=schemas.INPUT, outbound_schema=schemas.OUTPUT) def handler(event, context): return event ``` +=== "event.json" + + ```json + { + "message": "hello world", + "username": "lessa" + } + ``` + +=== "schemas.py" + + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" + ``` + !!! note It's not a requirement to validate both inbound and outbound schemas - You can either use one, or both. @@ -54,15 +69,15 @@ You can also gracefully handle schema validation errors by catching `SchemaValid === "validator_decorator.py" - ```python + ```python hl_lines="8" from aws_lambda_powertools.utilities.validation import validate from aws_lambda_powertools.utilities.validation.exceptions import SchemaValidationError - json_schema_dict = {..} + import schemas def handler(event, context): try: - validate(event=event, schema=json_schema_dict) + validate(event=event, schema=schemas.INPUT) except SchemaValidationError as e: # do something before re-raising raise @@ -70,97 +85,87 @@ You can also gracefully handle schema validation errors by catching `SchemaValid return event ``` -### Validating custom formats +=== "event.json" -!!! 
note "New in 1.10.0" - JSON Schema DRAFT 7 [has many new built-in formats](https://json-schema.org/understanding-json-schema/reference/string.html#format) such as date, time, and specifically a regex format which might be a better replacement for a custom format, if you do have control over the schema. + ```json + { + "data": "hello world", + "username": "lessa" + } + ``` -If you have JSON Schemas with custom formats, for example having a `int64` for high precision integers, you can pass an optional validation to handle each type using `formats` parameter - Otherwise it'll fail validation: +=== "schemas.py" -**Example of custom integer format** + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" + ``` -```json -{ - "lastModifiedTime": { - "format": "int64", - "type": "integer" - } -} -``` +### Unwrapping events prior to validation -For each format defined in a dictionary key, you must use a regex, or a function that returns a boolean to instruct the validator on how to proceed when encountering that type. +You might want to validate only a portion of your event - This is where the `envelope` parameter is for. -```python -from aws_lambda_powertools.utilities.validation import validate +Envelopes are [JMESPath expressions](https://jmespath.org/tutorial.html) to extract a portion of JSON you want before applying JSON Schema validation. -event = {} # some event -schema_with_custom_format = {} # some JSON schema that defines a custom format +Here is a sample custom EventBridge event, where we only validate what's inside the `detail` key: -custom_format = { - "int64": True, # simply ignore it, - "positive": lambda x: False if x < 0 else True -} +=== "unwrapping_events.py" -validate(event=event, schema=schema_with_custom_format, formats=custom_format) -``` + We use the `envelope` parameter to extract the payload inside the `detail` key before validating. -## Unwrapping events prior to validation + ```python hl_lines="5" + from aws_lambda_powertools.utilities.validation import validator -You might want to validate only a portion of your event - This is where the `envelope` parameter is for. + import schemas -Envelopes are [JMESPath expressions](https://jmespath.org/tutorial.html) to extract a portion of JSON you want before applying JSON Schema validation. 
- -Here is a sample custom EventBridge event, where we only validate what's inside the `detail` key: + @validator(inbound_schema=schemas.INPUT, envelope="detail") + def handler(event, context): + return event + ``` === "sample_wrapped_event.json" - ```json hl_lines="9" - { - "id": "cdc73f9d-aea9-11e3-9d5a-835b769c0d9c", - "detail-type": "Scheduled Event", - "source": "aws.events", - "account": "123456789012", - "time": "1970-01-01T00:00:00Z", - "region": "us-east-1", - "resources": ["arn:aws:events:us-east-1:123456789012:rule/ExampleRule"], - "detail": {"message": "hello hello", "username": "blah blah"} - } + ```python hl_lines="11-14" + --8<-- "docs/shared/validation_basic_eventbridge_event.json" ``` -Here is how you'd use the `envelope` parameter to extract the payload inside the `detail` key before validating: - -=== "unwrapping_events.py" - - ```python hl_lines="5 7" - from aws_lambda_powertools.utilities.validation import validator, validate - - json_schema_dict = {..} +=== "schemas.py" - @validator(inbound_schema=json_schema_dict, envelope="detail") - def handler(event, context): - validate(event=event, schema=json_schema_dict, envelope="detail") - return event + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" ``` + This is quite powerful because you can use JMESPath Query language to extract records from [arrays, slice and dice](https://jmespath.org/tutorial.html#list-and-slice-projections), to [pipe expressions](https://jmespath.org/tutorial.html#pipe-expressions) and [function expressions](https://jmespath.org/tutorial.html#functions), where you'd extract what you need before validating the actual payload. -## Built-in envelopes +### Built-in envelopes This utility comes with built-in envelopes to easily extract the payload from popular event sources. === "unwrapping_popular_event_sources.py" ```python hl_lines="5 7" - from aws_lambda_powertools.utilities.validation import envelopes, validate, validator + from aws_lambda_powertools.utilities.validation import envelopes, validator - json_schema_dict = {..} + import schemas - @validator(inbound_schema=json_schema_dict, envelope=envelopes.EVENTBRIDGE) + @validator(inbound_schema=schemas.INPUT, envelope=envelopes.EVENTBRIDGE) def handler(event, context): - validate(event=event, schema=json_schema_dict, envelope=envelopes.EVENTBRIDGE) return event ``` +=== "sample_wrapped_event.json" + + ```python hl_lines="11-14" + --8<-- "docs/shared/validation_basic_eventbridge_event.json" + ``` + +=== "schemas.py" + + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" + ``` + + Here is a handy table with built-in envelopes along with their JMESPath expressions in case you want to build your own. Envelope name | JMESPath expression @@ -174,7 +179,253 @@ Envelope name | JMESPath expression **KINESIS_DATA_STREAM** | "Records[*].kinesis.powertools_json(powertools_base64(data))" **CLOUDWATCH_LOGS** | "awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]" -## Built-in JMESPath functions +## Advanced + +### Validating custom formats + +!!! note "New in 1.10.0" + JSON Schema DRAFT 7 [has many new built-in formats](https://json-schema.org/understanding-json-schema/reference/string.html#format){target="_blank"} such as date, time, and specifically a regex format which might be a better replacement for a custom format, if you do have control over the schema. 
+
+JSON Schemas with custom formats like `int64` will fail validation. If you have these, you can pass them using the `formats` parameter:
+
+=== "custom_json_schema_type_format.json"
+    ```json
+    {
+        "lastModifiedTime": {
+          "format": "int64",
+          "type": "integer"
+        }
+    }
+    ```
+
+For each format defined in a dictionary key, you must use a regex, or a function that returns a boolean to instruct the validator on how to proceed when encountering that type.
+
+=== "validate_custom_format.py"
+
+    ```python hl_lines="5-8 11"
+    from aws_lambda_powertools.utilities.validation import validate
+
+    import schemas
+
+    custom_format = {
+        "int64": True,  # simply ignore it
+        "positive": lambda x: False if x < 0 else True
+    }
+
+    # "event" is the inbound event, e.g. the sample under the event.json tab
+    validate(event=event, schema=schemas.INPUT, formats=custom_format)
+    ```
+
+=== "schemas.py"
+
+    ```python hl_lines="68 91 93"
+    INPUT = {
+        "$schema": "http://json-schema.org/draft-04/schema#",
+        "definitions": {
+            "AWSAPICallViaCloudTrail": {
+                "properties": {
+                    "additionalEventData": {"$ref": "#/definitions/AdditionalEventData"},
+                    "awsRegion": {"type": "string"},
+                    "errorCode": {"type": "string"},
+                    "errorMessage": {"type": "string"},
+                    "eventID": {"type": "string"},
+                    "eventName": {"type": "string"},
+                    "eventSource": {"type": "string"},
+                    "eventTime": {"format": "date-time", "type": "string"},
+                    "eventType": {"type": "string"},
+                    "eventVersion": {"type": "string"},
+                    "recipientAccountId": {"type": "string"},
+                    "requestID": {"type": "string"},
+                    "requestParameters": {"$ref": "#/definitions/RequestParameters"},
+                    "resources": {"items": {"type": "object"}, "type": "array"},
+                    "responseElements": {"type": ["object", "null"]},
+                    "sourceIPAddress": {"type": "string"},
+                    "userAgent": {"type": "string"},
+                    "userIdentity": {"$ref": "#/definitions/UserIdentity"},
+                    "vpcEndpointId": {"type": "string"},
+                    "x-amazon-open-api-schema-readOnly": {"type": "boolean"},
+                },
+                "required": [
+                    "eventID",
+                    "awsRegion",
+                    "eventVersion",
+                    "responseElements",
+                    "sourceIPAddress",
+                    "eventSource",
+                    "requestParameters",
+                    "resources",
+                    "userAgent",
+                    "readOnly",
+                    "userIdentity",
+                    "eventType",
+                    "additionalEventData",
+                    "vpcEndpointId",
+                    "requestID",
+                    "eventTime",
+                    "eventName",
+                    "recipientAccountId",
+                ],
+                "type": "object",
+            },
+            "AdditionalEventData": {
+                "properties": {
+                    "objectRetentionInfo": {"$ref": "#/definitions/ObjectRetentionInfo"},
+                    "x-amz-id-2": {"type": "string"},
+                },
+                "required": ["x-amz-id-2"],
+                "type": "object",
+            },
+            "Attributes": {
+                "properties": {
+                    "creationDate": {"format": "date-time", "type": "string"},
+                    "mfaAuthenticated": {"type": "string"},
+                },
+                "required": ["mfaAuthenticated", "creationDate"],
+                "type": "object",
+            },
+            "LegalHoldInfo": {
+                "properties": {
+                    "isUnderLegalHold": {"type": "boolean"},
+                    "lastModifiedTime": {"format": "int64", "type": "integer"},
+                },
+                "type": "object",
+            },
+            "ObjectRetentionInfo": {
+                "properties": {
+                    "legalHoldInfo": {"$ref": "#/definitions/LegalHoldInfo"},
+                    "retentionInfo": {"$ref": "#/definitions/RetentionInfo"},
+                },
+                "type": "object",
+            },
+            "RequestParameters": {
+                "properties": {
+                    "bucketName": {"type": "string"},
+                    "key": {"type": "string"},
+                    "legal-hold": {"type": "string"},
+                    "retention": {"type": "string"},
+                },
+                "required": ["bucketName", "key"],
+                "type": "object",
+            },
+            "RetentionInfo": {
+                "properties": {
+                    "lastModifiedTime": {"format": "int64", "type": "integer"},
+                    "retainUntilMode": {"type": "string"},
+                    "retainUntilTime": {"format": "int64", "type": "integer"},
+                },
+                "type": "object",
+            },
+            "SessionContext": {
+                "properties": {"attributes": {"$ref": "#/definitions/Attributes"}},
+                "required": ["attributes"],
+                "type": "object",
+            },
+            "UserIdentity": {
+                "properties": {
+                    "accessKeyId": {"type": "string"},
+                    "accountId": {"type": "string"},
+                    "arn": {"type": "string"},
+                    "principalId": {"type": "string"},
+                    "sessionContext": {"$ref": "#/definitions/SessionContext"},
+                    "type": {"type": "string"},
+                },
+                "required": ["accessKeyId", "sessionContext", "accountId", "principalId", "type", "arn"],
+                "type": "object",
+            },
+        },
+        "properties": {
+            "account": {"type": "string"},
+            "detail": {"$ref": "#/definitions/AWSAPICallViaCloudTrail"},
+            "detail-type": {"type": "string"},
+            "id": {"type": "string"},
+            "region": {"type": "string"},
+            "resources": {"items": {"type": "string"}, "type": "array"},
+            "source": {"type": "string"},
+            "time": {"format": "date-time", "type": "string"},
+            "version": {"type": "string"},
+        },
+        "required": ["detail-type", "resources", "id", "source", "time", "detail", "region", "version", "account"],
+        "title": "AWSAPICallViaCloudTrail",
+        "type": "object",
+        "x-amazon-events-detail-type": "AWS API Call via CloudTrail",
+        "x-amazon-events-source": "aws.s3",
+    }
+    ```
+
+=== "event.json"
+    ```json
+    {
+        "account": "123456789012",
+        "detail": {
+            "additionalEventData": {
+                "AuthenticationMethod": "AuthHeader",
+                "CipherSuite": "ECDHE-RSA-AES128-GCM-SHA256",
+                "SignatureVersion": "SigV4",
+                "bytesTransferredIn": 0,
+                "bytesTransferredOut": 0,
+                "x-amz-id-2": "ejUr9Nd/4IO1juF/a6GOcu+PKrVX6dOH6jDjQOeCJvtARUqzxrhHGrhEt04cqYtAZVqcSEXYqo0="
+            },
+            "awsRegion": "us-west-1",
+            "eventCategory": "Data",
+            "eventID": "be4fdb30-9508-4984-b071-7692221899ae",
+            "eventName": "HeadObject",
+            "eventSource": "s3.amazonaws.com",
+            "eventTime": "2020-12-22T10:05:29Z",
+            "eventType": "AwsApiCall",
+            "eventVersion": "1.07",
+            "managementEvent": false,
+            "readOnly": true,
+            "recipientAccountId": "123456789012",
+            "requestID": "A123B1C123D1E123",
+            "requestParameters": {
+                "Host": "lambda-artifacts-deafc19498e3f2df.s3.us-west-1.amazonaws.com",
+                "bucketName": "lambda-artifacts-deafc19498e3f2df",
+                "key": "path1/path2/path3/file.zip"
+            },
+            "resources": [
+                {
+                    "ARN": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df/path1/path2/path3/file.zip",
+                    "type": "AWS::S3::Object"
+                },
+                {
+                    "ARN": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df",
+                    "accountId": "123456789012",
+                    "type": "AWS::S3::Bucket"
+                }
+            ],
+            "responseElements": null,
+            "sourceIPAddress": "AWS Internal",
+            "userAgent": "AWS Internal",
+            "userIdentity": {
+                "accessKeyId": "ABCDEFGHIJKLMNOPQR12",
+                "accountId": "123456789012",
+                "arn": "arn:aws:sts::123456789012:assumed-role/role-name1/1234567890123",
+                "invokedBy": "AWS Internal",
+                "principalId": "ABCDEFGHIJKLMN1OPQRST:1234567890123",
+                "sessionContext": {
+                    "attributes": {"creationDate": "2020-12-09T09:58:24Z", "mfaAuthenticated": "false"},
+                    "sessionIssuer": {
+                        "accountId": "123456789012",
+                        "arn": "arn:aws:iam::123456789012:role/role-name1",
+                        "principalId": "ABCDEFGHIJKLMN1OPQRST",
+                        "type": "Role",
+                        "userName": "role-name1"
+                    }
+                },
+                "type": "AssumedRole"
+            },
+            "vpcEndpointId": "vpce-a123cdef"
+        },
+        "detail-type": "AWS API Call via CloudTrail",
+        "id": "e0bad426-0a70-4424-b53a-eb902ebf5786",
+        "region": "us-west-1",
+        "resources": [],
+        "source": "aws.s3",
+        "time": "2020-12-22T10:05:29Z",
+        "version": "0"
+    }
+    ```
+
+### Built-in JMESPath functions
 
 You might have events or responses that contain non-encoded JSON, where you need to
decode before validating them. @@ -183,7 +434,7 @@ You can use our built-in JMESPath functions within your expressions to do exactl !!! info We use these for built-in envelopes to easily to decode and unwrap events from sources like Kinesis, CloudWatch Logs, etc. -### powertools_json function +#### powertools_json function Use `powertools_json` function to decode any JSON String. @@ -194,19 +445,23 @@ This sample will decode the value within the `data` key into a valid JSON before ```python hl_lines="9" from aws_lambda_powertools.utilities.validation import validate - json_schema_dict = {..} + import schemas + sample_event = { 'data': '{"payload": {"message": "hello hello", "username": "blah blah"}}' } - def handler(event, context): - validate(event=event, schema=json_schema_dict, envelope="powertools_json(data)") - return event + validate(event=sample_event, schema=schemas.INPUT, envelope="powertools_json(data)") + ``` + +=== "schemas.py" - handler(event=sample_event, context={}) + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" ``` -### powertools_base64 function + +#### powertools_base64 function Use `powertools_base64` function to decode any base64 data. @@ -214,22 +469,29 @@ This sample will decode the base64 value within the `data` key, and decode the J === "powertools_json_jmespath_function.py" - ```python hl_lines="9" + ```python hl_lines="12" from aws_lambda_powertools.utilities.validation import validate - json_schema_dict = {..} + import schemas + sample_event = { "data": "eyJtZXNzYWdlIjogImhlbGxvIGhlbGxvIiwgInVzZXJuYW1lIjogImJsYWggYmxhaCJ9=" } - def handler(event, context): - validate(event=event, schema=json_schema_dict, envelope="powertools_json(powertools_base64(data))") - return event + validate( + event=sample_event, + schema=schemas.INPUT, + envelope="powertools_json(powertools_base64(data))" + ) + ``` - handler(event=sample_event, context={}) +=== "schemas.py" + + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" ``` -### powertools_base64_gzip function +#### powertools_base64_gzip function Use `powertools_base64_gzip` function to decompress and decode base64 data. @@ -237,37 +499,44 @@ This sample will decompress and decode base64 data, then use JMESPath pipeline e === "powertools_json_jmespath_function.py" - ```python hl_lines="9" + ```python hl_lines="12" from aws_lambda_powertools.utilities.validation import validate - json_schema_dict = {..} + import schemas + sample_event = { "data": "H4sIACZAXl8C/52PzUrEMBhFX2UILpX8tPbHXWHqIOiq3Q1F0ubrWEiakqTWofTdTYYB0YWL2d5zvnuTFellBIOedoiyKH5M0iwnlKH7HZL6dDB6ngLDfLFYctUKjie9gHFaS/sAX1xNEq525QxwFXRGGMEkx4Th491rUZdV3YiIZ6Ljfd+lfSyAtZloacQgAkqSJCGhxM6t7cwwuUGPz4N0YKyvO6I9WDeMPMSo8Z4Ca/kJ6vMEYW5f1MX7W1lVxaG8vqX8hNFdjlc0iCBBSF4ERT/3Pl7RbMGMXF2KZMh/C+gDpNS7RRsp0OaRGzx0/t8e0jgmcczyLCWEePhni/23JWalzjdu0a3ZvgEaNLXeugEAAA==" } - def handler(event, context): - validate(event=event, schema=json_schema_dict, envelope="powertools_base64_gzip(data) | powertools_json(@)") - return event + validate( + event=sample_event, + schema=schemas.INPUT, + envelope="powertools_base64_gzip(data) | powertools_json(@)" + ) + ``` - handler(event=sample_event, context={}) +=== "schemas.py" + + ```python hl_lines="7 14 16 23 39 45 47 52" + --8<-- "docs/shared/validation_basic_jsonschema.py" ``` -## Bring your own JMESPath function +### Bring your own JMESPath function !!! 
warning
    This should only be used for advanced use cases where you have special formats not covered by the built-in functions.

    This will **replace all provided built-in functions such as `powertools_json`, so you will no longer be able to use them**.

-For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions) and any additional option via `jmespath_options` param.
+For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} and any additional option via `jmespath_options` param.
 
 === "custom_jmespath_function.py"
 
-    ```python hl_lines="15"
-    from aws_lambda_powertools.utilities.validation import validate
+    ```python hl_lines="2 6-10 14"
+    from aws_lambda_powertools.utilities.validation import validator
     from jmespath import functions
 
-    json_schema_dict = {..}
+    import schemas
 
     class CustomFunctions(functions.Functions):
 
@@ -277,7 +546,13 @@ For special binary formats that you want to decode before applying JSON Schema v
 
     custom_jmespath_options = {"custom_functions": CustomFunctions()}
 
+    @validator(inbound_schema=schemas.INPUT, jmespath_options=custom_jmespath_options)
     def handler(event, context):
-        validate(event=event, schema=json_schema_dict, envelope="", jmespath_options=**custom_jmespath_options)
         return event
     ```
+
+=== "schemas.py"
+
+    ```python hl_lines="7 14 16 23 39 45 47 52"
+    --8<-- "docs/shared/validation_basic_jsonschema.py"
+    ```
diff --git a/mkdocs.yml b/mkdocs.yml
index d8d37830369..43a7e125696 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,14 +1,19 @@
 site_name: Lambda Powertools Python
 site_description: AWS Lambda Powertools for Python
 site_author: Amazon Web Services
+repo_url: https://github.com/awslabs/aws-lambda-powertools-python
+edit_uri: edit/develop/docs
+
 nav:
   - Homepage: index.md
   - Changelog: changelog.md
-  - API reference: https://awslabs.github.io/aws-lambda-powertools-python/api/" target="_blank
+  - API reference: api/" target="_blank
   - Core utilities:
       - core/tracer.md
      - core/logger.md
      - core/metrics.md
+  - Event Handler:
+      - core/event_handler/appsync.md
   - Utilities:
      - utilities/middleware_factory.md
      - utilities/parameters.md
@@ -26,6 +31,7 @@ theme:
   features:
     - navigation.sections
     - navigation.expand
+    - navigation.top
   icon:
     repo: fontawesome/brands/github
   logo: media/aws-logo-light.svg
@@ -48,6 +54,7 @@ markdown_extensions:
     toc_depth: 4
   - attr_list
   - pymdownx.emoji
+  - pymdownx.inlinehilite
 
 copyright: Copyright © 2021 Amazon Web Services
@@ -61,6 +68,6 @@ extra_javascript:
   - javascript/aws-amplify.min.js
   - javascript/extra.js
 
-
-repo_url: https://github.com/awslabs/aws-lambda-powertools-python
-edit_uri: edit/develop/docs
+extra:
+  version:
+    provider: mike
diff --git a/poetry.lock b/poetry.lock
index d00c3e10833..2a92a013fc1 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -82,20 +82,20 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
 
 [[package]]
 name = "boto3"
-version = "1.17.33"
+version = "1.17.44"
 description = "The AWS SDK for Python"
 category = "main"
 optional = false
 python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
 
 [package.dependencies]
-botocore = ">=1.20.33,<1.21.0"
+botocore = ">=1.20.44,<1.21.0"
 jmespath = ">=0.7.1,<1.0.0"
 s3transfer = 
">=0.3.0,<0.4.0" [[package]] name = "botocore" -version = "1.20.33" +version = "1.20.44" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -109,22 +109,6 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.10.8)"] -[[package]] -name = "certifi" -version = "2020.12.5" -description = "Python package for providing Mozilla's CA Bundle." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "chardet" -version = "4.0.0" -description = "Universal encoding detector for Python 2 and 3" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "click" version = "7.1.2" @@ -237,7 +221,7 @@ flake8 = ">=3.0.0" [[package]] name = "flake8-bugbear" -version = "21.3.2" +version = "21.4.3" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false @@ -355,14 +339,14 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "gitdb" -version = "4.0.5" +version = "4.0.7" description = "Git Object Database" category = "dev" optional = false python-versions = ">=3.4" [package.dependencies] -smmap = ">=3.0.1,<4" +smmap = ">=3.0.1,<5" [[package]] name = "gitpython" @@ -377,15 +361,15 @@ gitdb = ">=4.0.1,<5" [[package]] name = "idna" -version = "2.10" +version = "3.1" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +optional = true +python-versions = ">=3.4" [[package]] name = "importlib-metadata" -version = "3.7.3" +version = "3.10.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -397,7 +381,7 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -553,6 +537,24 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "mike" +version = "0.6.0" +description = "Manage multiple versions of your MkDocs-powered documentation" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +jinja2 = "*" +mkdocs = ">=1.0" +packaging = "*" +"ruamel.yaml" = "*" + +[package.extras] +dev = ["coverage", "flake8 (>=3.0)", "pypandoc (>=1.4)"] +test = ["coverage", "flake8 (>=3.0)"] + [[package]] name = "mkdocs" version = "1.1.2" @@ -585,7 +587,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "7.0.6" +version = "7.1.0" description = "A Material Design theme for MkDocs" category = "dev" optional = false @@ -710,23 +712,23 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.7.3" +version = "1.8.1" description = "Data validation and settings management using python 3.6 type hinting" category = "main" optional = true -python-versions = ">=3.6" +python-versions = ">=3.6.1" [package.dependencies] 
dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} +typing-extensions = ">=3.7.4.3" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] -typing_extensions = ["typing-extensions (>=3.7.2)"] [[package]] name = "pyflakes" -version = "2.3.0" +version = "2.3.1" description = "passive checker of Python programs" category = "dev" optional = false @@ -761,7 +763,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "pytest" -version = "6.2.2" +version = "6.2.3" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -862,7 +864,7 @@ flake8 = ["flake8-polyfill"] [[package]] name = "regex" -version = "2021.3.17" +version = "2021.4.4" description = "Alternative regular expression module, to replace re." category = "dev" optional = false @@ -870,21 +872,38 @@ python-versions = "*" [[package]] name = "requests" -version = "2.25.1" +version = "2.15.1" description = "Python HTTP for Humans." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "*" + +[package.extras] +security = ["cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] + +[[package]] +name = "ruamel.yaml" +version = "0.17.2" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "dev" +optional = false +python-versions = ">=3" [package.dependencies] -certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" -urllib3 = ">=1.21.1,<1.27" +"ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.10\""} [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel.yaml.clib" +version = "0.2.2" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "dev" +optional = false +python-versions = "*" [[package]] name = "s3transfer" @@ -907,11 +926,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "smmap" -version = "3.0.5" +version = "4.0.0" description = "A pure Python implementation of a sliding window memory map manager" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "stevedore" @@ -1030,12 +1049,12 @@ docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] -pydantic = ["pydantic", "typing_extensions", "email-validator"] +pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" -python-versions = "^3.6" -content-hash = "c0a85833f33ef1addfe2aef991d7264761c7b0ac382da712e233a3da2a974ea3" +python-versions = "^3.6.1" +content-hash = "5b211499bc362d34629e46a0d4a66a4ef844fc1540cf66e3250284a9431bffe8" [metadata.files] appdirs = [ @@ -1062,20 +1081,12 @@ black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] boto3 = [ - {file = "boto3-1.17.33-py2.py3-none-any.whl", hash = 
"sha256:3306dad87f993703b102a0a70ca19c549b7f41e7f70fa7b4c579735c9f79351d"}, - {file = "boto3-1.17.33.tar.gz", hash = "sha256:0cac2fffc1ba915f7bb5ecee539318532db51f218c928a228fafe3e501e9472e"}, + {file = "boto3-1.17.44-py2.py3-none-any.whl", hash = "sha256:e74da1da74fbefbe2db7a9c53082018d862433f35e2ecd4c173632efc5742f40"}, + {file = "boto3-1.17.44.tar.gz", hash = "sha256:ffb9b192b2b52ab88cde09e2af7d9fd6e541287e5719098be97ffd7144f47eb1"}, ] botocore = [ - {file = "botocore-1.20.33-py2.py3-none-any.whl", hash = "sha256:a33e862685259fe22d9790d9c9f3567feda8b824d44d3c62a3617af1133543a4"}, - {file = "botocore-1.20.33.tar.gz", hash = "sha256:e355305309699d3aca1e0050fc21d48595b40db046cb0d2491cd57ff5b26920b"}, -] -certifi = [ - {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, - {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, -] -chardet = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, + {file = "botocore-1.20.44-py2.py3-none-any.whl", hash = "sha256:8a7f85bf05ad62551b0e6dfeeec471147b330cb2b5c7f48795057e811e6a2e77"}, + {file = "botocore-1.20.44.tar.gz", hash = "sha256:2958e3912939558fd789a64b23a10039d8b0c0c84a23b573f3f2e3154de357ad"}, ] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, @@ -1166,8 +1177,8 @@ flake8-black = [ {file = "flake8-black-0.2.1.tar.gz", hash = "sha256:f26651bc10db786c03f4093414f7c9ea982ed8a244cec323c984feeffdf4c118"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-21.3.2.tar.gz", hash = "sha256:cadce434ceef96463b45a7c3000f23527c04ea4b531d16c7ac8886051f516ca0"}, - {file = "flake8_bugbear-21.3.2-py36.py37.py38-none-any.whl", hash = "sha256:5d6ccb0c0676c738a6e066b4d50589c408dcc1c5bf1d73b464b18b73cd6c05c2"}, + {file = "flake8-bugbear-21.4.3.tar.gz", hash = "sha256:2346c81f889955b39e4a368eb7d508de723d9de05716c287dc860a4073dc57e7"}, + {file = "flake8_bugbear-21.4.3-py36.py37.py38-none-any.whl", hash = "sha256:4f305dca96be62bf732a218fe6f1825472a621d3452c5b994d8f89dae21dbafa"}, ] flake8-builtins = [ {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, @@ -1204,20 +1215,20 @@ future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] gitdb = [ - {file = "gitdb-4.0.5-py3-none-any.whl", hash = "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac"}, - {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, + {file = "gitdb-4.0.7-py3-none-any.whl", hash = "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0"}, + {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"}, ] gitpython = [ {file = "GitPython-3.1.14-py3-none-any.whl", hash = "sha256:3283ae2fba31c913d857e12e5ba5f9a7772bbc064ae2bb09efafa71b0dd4939b"}, {file = "GitPython-3.1.14.tar.gz", hash = "sha256:be27633e7509e58391f10207cd32b2a6cf5b908f92d9cd30da2e514e1137af61"}, ] idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - 
{file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.1-py3-none-any.whl", hash = "sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16"}, + {file = "idna-3.1.tar.gz", hash = "sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1"}, ] importlib-metadata = [ - {file = "importlib_metadata-3.7.3-py3-none-any.whl", hash = "sha256:b74159469b464a99cb8cc3e21973e4d96e05d3024d337313fedb618a6e86e6f4"}, - {file = "importlib_metadata-3.7.3.tar.gz", hash = "sha256:742add720a20d0467df2f444ae41704000f50e1234f46174b51f9c6031a1bd71"}, + {file = "importlib_metadata-3.10.0-py3-none-any.whl", hash = "sha256:d2d46ef77ffc85cbf7dac7e81dd663fde71c45326131bea8033b9bad42268ebe"}, + {file = "importlib_metadata-3.10.0.tar.gz", hash = "sha256:c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1320,6 +1331,10 @@ mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] +mike = [ + {file = "mike-0.6.0-py3-none-any.whl", hash = "sha256:cef9b9c803ff5c3fbb410f51f5ceb00902a9fe16d9fabd93b69c65cf481ab5a1"}, + {file = "mike-0.6.0.tar.gz", hash = "sha256:6d6239de2a60d733da2f34617e9b9a14c4b5437423b47e524f14dc96d6ce5f2f"}, +] mkdocs = [ {file = "mkdocs-1.1.2-py3-none-any.whl", hash = "sha256:096f52ff52c02c7e90332d2e53da862fde5c062086e1b5356a6e392d5d60f5e9"}, {file = "mkdocs-1.1.2.tar.gz", hash = "sha256:f0b61e5402b99d7789efa032c7a74c90a20220a9c81749da06dbfbcbd52ffb39"}, @@ -1329,8 +1344,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.1-py3-none-any.whl", hash = "sha256:8ae50b45eb75d07b150a69726041860801615aae5f4adbd6b1cf4d51abaa03d5"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.0.6.tar.gz", hash = "sha256:e1423286dcb2ac6b9417e9e04a3f63a97f12f7f64802af09c8257561e9f3a319"}, - {file = "mkdocs_material-7.0.6-py2.py3-none-any.whl", hash = "sha256:a89f8a08a5f0a5ecce2c7a4a61a1ddd2c2cbac86f17978264eb8b8ce2ca5411b"}, + {file = "mkdocs-material-7.1.0.tar.gz", hash = "sha256:1afaa5b174265eaa4a886f73187bb0e302a9596e9bfedb5aa2cb260d8b1d994e"}, + {file = "mkdocs_material-7.1.0-py2.py3-none-any.whl", hash = "sha256:13e73b3571d36f7e4a7dc11093323cff92095f4f219a00ba19c77a5e53aa6c55"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, @@ -1371,32 +1386,32 @@ pycodestyle = [ {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pydantic = [ - {file = "pydantic-1.7.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c59ea046aea25be14dc22d69c97bee629e6d48d2b2ecb724d7fe8806bf5f61cd"}, - {file = "pydantic-1.7.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a4143c8d0c456a093387b96e0f5ee941a950992904d88bc816b4f0e72c9a0009"}, - {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:d8df4b9090b595511906fa48deda47af04e7d092318bfb291f4d45dfb6bb2127"}, - {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:514b473d264671a5c672dfb28bdfe1bf1afd390f6b206aa2ec9fed7fc592c48e"}, - {file = 
"pydantic-1.7.3-cp36-cp36m-win_amd64.whl", hash = "sha256:dba5c1f0a3aeea5083e75db9660935da90216f8a81b6d68e67f54e135ed5eb23"}, - {file = "pydantic-1.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59e45f3b694b05a69032a0d603c32d453a23f0de80844fb14d55ab0c6c78ff2f"}, - {file = "pydantic-1.7.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5b24e8a572e4b4c18f614004dda8c9f2c07328cb5b6e314d6e1bbd536cb1a6c1"}, - {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:b2b054d095b6431cdda2f852a6d2f0fdec77686b305c57961b4c5dd6d863bf3c"}, - {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:025bf13ce27990acc059d0c5be46f416fc9b293f45363b3d19855165fee1874f"}, - {file = "pydantic-1.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6e3874aa7e8babd37b40c4504e3a94cc2023696ced5a0500949f3347664ff8e2"}, - {file = "pydantic-1.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e682f6442ebe4e50cb5e1cfde7dda6766fb586631c3e5569f6aa1951fd1a76ef"}, - {file = "pydantic-1.7.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:185e18134bec5ef43351149fe34fda4758e53d05bb8ea4d5928f0720997b79ef"}, - {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:f5b06f5099e163295b8ff5b1b71132ecf5866cc6e7f586d78d7d3fd6e8084608"}, - {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:24ca47365be2a5a3cc3f4a26dcc755bcdc9f0036f55dcedbd55663662ba145ec"}, - {file = "pydantic-1.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:d1fe3f0df8ac0f3a9792666c69a7cd70530f329036426d06b4f899c025aca74e"}, - {file = "pydantic-1.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f6864844b039805add62ebe8a8c676286340ba0c6d043ae5dea24114b82a319e"}, - {file = "pydantic-1.7.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ecb54491f98544c12c66ff3d15e701612fc388161fd455242447083350904730"}, - {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:ffd180ebd5dd2a9ac0da4e8b995c9c99e7c74c31f985ba090ee01d681b1c4b95"}, - {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8d72e814c7821125b16f1553124d12faba88e85405b0864328899aceaad7282b"}, - {file = "pydantic-1.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:475f2fa134cf272d6631072554f845d0630907fce053926ff634cc6bc45bf1af"}, - {file = "pydantic-1.7.3-py3-none-any.whl", hash = "sha256:38be427ea01a78206bcaf9a56f835784afcba9e5b88fbdce33bbbfbcd7841229"}, - {file = "pydantic-1.7.3.tar.gz", hash = "sha256:213125b7e9e64713d16d988d10997dabc6a1f73f3991e1ff8e35ebb1409c7dc9"}, + {file = "pydantic-1.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0c40162796fc8d0aa744875b60e4dc36834db9f2a25dbf9ba9664b1915a23850"}, + {file = "pydantic-1.8.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fff29fe54ec419338c522b908154a2efabeee4f483e48990f87e189661f31ce3"}, + {file = "pydantic-1.8.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:fbfb608febde1afd4743c6822c19060a8dbdd3eb30f98e36061ba4973308059e"}, + {file = "pydantic-1.8.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:eb8ccf12295113ce0de38f80b25f736d62f0a8d87c6b88aca645f168f9c78771"}, + {file = "pydantic-1.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:20d42f1be7c7acc352b3d09b0cf505a9fab9deb93125061b376fbe1f06a5459f"}, + {file = "pydantic-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dde4ca368e82791de97c2ec019681ffb437728090c0ff0c3852708cf923e0c7d"}, + {file = "pydantic-1.8.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:3bbd023c981cbe26e6e21c8d2ce78485f85c2e77f7bab5ec15b7d2a1f491918f"}, + {file = 
"pydantic-1.8.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:830ef1a148012b640186bf4d9789a206c56071ff38f2460a32ae67ca21880eb8"}, + {file = "pydantic-1.8.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:fb77f7a7e111db1832ae3f8f44203691e15b1fa7e5a1cb9691d4e2659aee41c4"}, + {file = "pydantic-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3bcb9d7e1f9849a6bdbd027aabb3a06414abd6068cb3b21c49427956cce5038a"}, + {file = "pydantic-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2287ebff0018eec3cc69b1d09d4b7cebf277726fa1bd96b45806283c1d808683"}, + {file = "pydantic-1.8.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4bbc47cf7925c86a345d03b07086696ed916c7663cb76aa409edaa54546e53e2"}, + {file = "pydantic-1.8.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:6388ef4ef1435364c8cc9a8192238aed030595e873d8462447ccef2e17387125"}, + {file = "pydantic-1.8.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:dd4888b300769ecec194ca8f2699415f5f7760365ddbe243d4fd6581485fa5f0"}, + {file = "pydantic-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:8fbb677e4e89c8ab3d450df7b1d9caed23f254072e8597c33279460eeae59b99"}, + {file = "pydantic-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2f2736d9a996b976cfdfe52455ad27462308c9d3d0ae21a2aa8b4cd1a78f47b9"}, + {file = "pydantic-1.8.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3114d74329873af0a0e8004627f5389f3bb27f956b965ddd3e355fe984a1789c"}, + {file = "pydantic-1.8.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:258576f2d997ee4573469633592e8b99aa13bda182fcc28e875f866016c8e07e"}, + {file = "pydantic-1.8.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c17a0b35c854049e67c68b48d55e026c84f35593c66d69b278b8b49e2484346f"}, + {file = "pydantic-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8bc082afef97c5fd3903d05c6f7bb3a6af9fc18631b4cc9fedeb4720efb0c58"}, + {file = "pydantic-1.8.1-py3-none-any.whl", hash = "sha256:e3f8790c47ac42549dc8b045a67b0ca371c7f66e73040d0197ce6172b385e520"}, + {file = "pydantic-1.8.1.tar.gz", hash = "sha256:26cf3cb2e68ec6c0cfcb6293e69fb3450c5fd1ace87f46b64f678b0d29eac4c3"}, ] pyflakes = [ - {file = "pyflakes-2.3.0-py2.py3-none-any.whl", hash = "sha256:910208209dcea632721cb58363d0f72913d9e8cf64dc6f8ae2e02a3609aba40d"}, - {file = "pyflakes-2.3.0.tar.gz", hash = "sha256:e59fd8e750e588358f1b8885e5a4751203a0516e0ee6d34811089ac294c8806f"}, + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pygments = [ {file = "Pygments-2.8.1-py3-none-any.whl", hash = "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8"}, @@ -1411,8 +1426,8 @@ pyparsing = [ {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] pytest = [ - {file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"}, - {file = "pytest-6.2.2.tar.gz", hash = "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9"}, + {file = "pytest-6.2.3-py3-none-any.whl", hash = "sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc"}, + {file = "pytest-6.2.3.tar.gz", hash = "sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634"}, ] pytest-asyncio = [ {file = "pytest-asyncio-0.14.0.tar.gz", hash = "sha256:9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700"}, @@ -1466,51 +1481,88 
@@ radon = [ {file = "radon-4.5.0.tar.gz", hash = "sha256:7afa65db14d759616ab68033e0e1caf1f624c97308dd256afa47518ecebddf6e"}, ] regex = [ - {file = "regex-2021.3.17-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b97ec5d299c10d96617cc851b2e0f81ba5d9d6248413cd374ef7f3a8871ee4a6"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:cb4ee827857a5ad9b8ae34d3c8cc51151cb4a3fe082c12ec20ec73e63cc7c6f0"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:633497504e2a485a70a3268d4fc403fe3063a50a50eed1039083e9471ad0101c"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:a59a2ee329b3de764b21495d78c92ab00b4ea79acef0f7ae8c1067f773570afa"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f85d6f41e34f6a2d1607e312820971872944f1661a73d33e1e82d35ea3305e14"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4651f839dbde0816798e698626af6a2469eee6d9964824bb5386091255a1694f"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:39c44532d0e4f1639a89e52355b949573e1e2c5116106a395642cbbae0ff9bcd"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3d9a7e215e02bd7646a91fb8bcba30bc55fd42a719d6b35cf80e5bae31d9134e"}, - {file = "regex-2021.3.17-cp36-cp36m-win32.whl", hash = "sha256:159fac1a4731409c830d32913f13f68346d6b8e39650ed5d704a9ce2f9ef9cb3"}, - {file = "regex-2021.3.17-cp36-cp36m-win_amd64.whl", hash = "sha256:13f50969028e81765ed2a1c5fcfdc246c245cf8d47986d5172e82ab1a0c42ee5"}, - {file = "regex-2021.3.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9d8d286c53fe0cbc6d20bf3d583cabcd1499d89034524e3b94c93a5ab85ca90"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:201e2619a77b21a7780580ab7b5ce43835e242d3e20fef50f66a8df0542e437f"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d47d359545b0ccad29d572ecd52c9da945de7cd6cf9c0cfcb0269f76d3555689"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:ea2f41445852c660ba7c3ebf7d70b3779b20d9ca8ba54485a17740db49f46932"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:486a5f8e11e1f5bbfcad87f7c7745eb14796642323e7e1829a331f87a713daaa"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:18e25e0afe1cf0f62781a150c1454b2113785401ba285c745acf10c8ca8917df"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:a2ee026f4156789df8644d23ef423e6194fad0bc53575534101bb1de5d67e8ce"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:4c0788010a93ace8a174d73e7c6c9d3e6e3b7ad99a453c8ee8c975ddd9965643"}, - {file = "regex-2021.3.17-cp37-cp37m-win32.whl", hash = "sha256:575a832e09d237ae5fedb825a7a5bc6a116090dd57d6417d4f3b75121c73e3be"}, - {file = "regex-2021.3.17-cp37-cp37m-win_amd64.whl", hash = "sha256:8e65e3e4c6feadf6770e2ad89ad3deb524bcb03d8dc679f381d0568c024e0deb"}, - {file = "regex-2021.3.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a0df9a0ad2aad49ea3c7f65edd2ffb3d5c59589b85992a6006354f6fb109bb18"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b98bc9db003f1079caf07b610377ed1ac2e2c11acc2bea4892e28cc5b509d8d5"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:808404898e9a765e4058bf3d7607d0629000e0a14a6782ccbb089296b76fa8fe"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2010_i686.whl", hash = 
"sha256:5770a51180d85ea468234bc7987f5597803a4c3d7463e7323322fe4a1b181578"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:976a54d44fd043d958a69b18705a910a8376196c6b6ee5f2596ffc11bff4420d"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:63f3ca8451e5ff7133ffbec9eda641aeab2001be1a01878990f6c87e3c44b9d5"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bcd945175c29a672f13fce13a11893556cd440e37c1b643d6eeab1988c8b209c"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:3d9356add82cff75413bec360c1eca3e58db4a9f5dafa1f19650958a81e3249d"}, - {file = "regex-2021.3.17-cp38-cp38-win32.whl", hash = "sha256:f5d0c921c99297354cecc5a416ee4280bd3f20fd81b9fb671ca6be71499c3fdf"}, - {file = "regex-2021.3.17-cp38-cp38-win_amd64.whl", hash = "sha256:14de88eda0976020528efc92d0a1f8830e2fb0de2ae6005a6fc4e062553031fa"}, - {file = "regex-2021.3.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4c2e364491406b7888c2ad4428245fc56c327e34a5dfe58fd40df272b3c3dab3"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8bd4f91f3fb1c9b1380d6894bd5b4a519409135bec14c0c80151e58394a4e88a"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:882f53afe31ef0425b405a3f601c0009b44206ea7f55ee1c606aad3cc213a52c"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:07ef35301b4484bce843831e7039a84e19d8d33b3f8b2f9aab86c376813d0139"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:360a01b5fa2ad35b3113ae0c07fb544ad180603fa3b1f074f52d98c1096fa15e"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:709f65bb2fa9825f09892617d01246002097f8f9b6dde8d1bb4083cf554701ba"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:c66221e947d7207457f8b6f42b12f613b09efa9669f65a587a2a71f6a0e4d106"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c782da0e45aff131f0bed6e66fbcfa589ff2862fc719b83a88640daa01a5aff7"}, - {file = "regex-2021.3.17-cp39-cp39-win32.whl", hash = "sha256:dc9963aacb7da5177e40874585d7407c0f93fb9d7518ec58b86e562f633f36cd"}, - {file = "regex-2021.3.17-cp39-cp39-win_amd64.whl", hash = "sha256:a0d04128e005142260de3733591ddf476e4902c0c23c1af237d9acf3c96e1b38"}, - {file = "regex-2021.3.17.tar.gz", hash = "sha256:4b8a1fb724904139149a43e172850f35aa6ea97fb0545244dc0b805e0154ed68"}, + {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, + {file = 
"regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, + {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, + {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, + {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, + {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, + {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, + {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, + {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, + {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, + {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, + {file = 
"regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, + {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, + {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, + {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, ] requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, + {file = "requests-2.15.1-py2.py3-none-any.whl", hash = "sha256:ff753b2196cd18b1bbeddc9dcd5c864056599f7a7d9a4fb5677e723efa2b7fb9"}, + {file = "requests-2.15.1.tar.gz", hash = "sha256:e5659b9315a0610505e050bb7190bf6fa2ccee1ac295f2b760ef9d8a03ebbb2e"}, +] +"ruamel.yaml" = [ + {file = "ruamel.yaml-0.17.2-py3-none-any.whl", hash = "sha256:0850def9ebca23b3a8c64c4b4115ebb6b364a10d49f89d289a26ee965e1e7d9d"}, + {file = "ruamel.yaml-0.17.2.tar.gz", hash = "sha256:8f1e15421668b9edf30ed02899f5f81aff9808a4271935776f61a99a569a13da"}, +] +"ruamel.yaml.clib" = [ + {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc"}, + {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:daf21aa33ee9b351f66deed30a3d450ab55c14242cfdfcd377798e2c0d25c9f1"}, + {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win32.whl", hash = "sha256:30dca9bbcbb1cc858717438218d11eafb78666759e5094dd767468c0d577a7e7"}, + {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win_amd64.whl", hash = "sha256:f6061a31880c1ed6b6ce341215336e2f3d0c1deccd84957b6fa8ca474b41e89f"}, + {file = "ruamel.yaml.clib-0.2.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:73b3d43e04cc4b228fa6fa5d796409ece6fcb53a6c270eb2048109cbcbc3b9c2"}, + {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:53b9dd1abd70e257a6e32f934ebc482dac5edb8c93e23deb663eac724c30b026"}, + {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:839dd72545ef7ba78fd2aa1a5dd07b33696adf3e68fae7f31327161c1093001b"}, + {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1236df55e0f73cd138c0eca074ee086136c3f16a97c2ac719032c050f7e0622f"}, + {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win32.whl", hash = "sha256:b1e981fe1aff1fd11627f531524826a4dcc1f26c726235a52fcb62ded27d150f"}, + {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4e52c96ca66de04be42ea2278012a2342d89f5e82b4512fb6fb7134e377e2e62"}, + 
{file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a873e4d4954f865dcb60bdc4914af7eaae48fb56b60ed6daa1d6251c72f5337c"}, + {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ab845f1f51f7eb750a78937be9f79baea4a42c7960f5a94dde34e69f3cce1988"}, + {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:2fd336a5c6415c82e2deb40d08c222087febe0aebe520f4d21910629018ab0f3"}, + {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win32.whl", hash = "sha256:e9f7d1d8c26a6a12c23421061f9022bb62704e38211fe375c645485f38df34a2"}, + {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2602e91bd5c1b874d6f93d3086f9830f3e907c543c7672cf293a97c3fabdcd91"}, + {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:44c7b0498c39f27795224438f1a6be6c5352f82cb887bc33d962c3a3acc00df6"}, + {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8e8fd0a22c9d92af3a34f91e8a2594eeb35cba90ab643c5e0e643567dc8be43e"}, + {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:75f0ee6839532e52a3a53f80ce64925ed4aed697dd3fa890c4c918f3304bd4f4"}, + {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win32.whl", hash = "sha256:464e66a04e740d754170be5e740657a3b3b6d2bcc567f0c3437879a6e6087ff6"}, + {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:52ae5739e4b5d6317b52f5b040b1b6639e8af68a5b8fd606a8b08658fbd0cab5"}, + {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df5019e7783d14b79217ad9c56edf1ba7485d614ad5a385d1b3c768635c81c0"}, + {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5254af7d8bdf4d5484c089f929cb7f5bafa59b4f01d4f48adda4be41e6d29f99"}, + {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8be05be57dc5c7b4a0b24edcaa2f7275866d9c907725226cdde46da09367d923"}, + {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win32.whl", hash = "sha256:74161d827407f4db9072011adcfb825b5258a5ccb3d2cd518dd6c9edea9e30f1"}, + {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:058a1cc3df2a8aecc12f983a48bda99315cebf55a3b3a5463e37bb599b05727b"}, + {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6ac7e45367b1317e56f1461719c853fd6825226f45b835df7436bb04031fd8a"}, + {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b4b0d31f2052b3f9f9b5327024dc629a253a83d8649d4734ca7f35b60ec3e9e5"}, + {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1f8c0a4577c0e6c99d208de5c4d3fd8aceed9574bb154d7a2b21c16bb924154c"}, + {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-win32.whl", hash = "sha256:46d6d20815064e8bb023ea8628cfb7402c0f0e83de2c2227a88097e239a7dffd"}, + {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:6c0a5dc52fc74eb87c67374a4e554d4761fd42a4d01390b7e868b30d21f4b8bb"}, + {file = "ruamel.yaml.clib-0.2.2.tar.gz", hash = "sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7"}, ] s3transfer = [ {file = "s3transfer-0.3.6-py2.py3-none-any.whl", hash = "sha256:5d48b1fd2232141a9d5fb279709117aaba506cacea7f86f11bc392f06bfa8fc2"}, @@ -1521,8 +1573,8 @@ six = [ {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, ] smmap = [ - {file = "smmap-3.0.5-py2.py3-none-any.whl", hash = "sha256:7bfcf367828031dc893530a29cb35eb8c8f2d7c8f2d0989354d75d24c8573714"}, - {file = 
"smmap-3.0.5.tar.gz", hash = "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50"}, + {file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"}, + {file = "smmap-4.0.0.tar.gz", hash = "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"}, ] stevedore = [ {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, diff --git a/pyproject.toml b/pyproject.toml index 8d8d7d1b7af..3c16f373756 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.13.0" +version = "1.14.0" description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric" authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed"] @@ -19,14 +19,13 @@ keywords = ["aws_lambda_powertools", "aws", "tracing", "logging", "lambda", "pow license = "MIT-0" [tool.poetry.dependencies] -python = "^3.6" -aws-xray-sdk = "^2.5.0" +python = "^3.6.1" +aws-xray-sdk = "2.6.0" fastjsonschema = "^2.14.5" boto3 = "^1.12" jmespath = "^0.10.0" -pydantic = {version = "^1.6.0", optional = true } +pydantic = {version = "^1.8.1", optional = true } email-validator = {version = "*", optional = true } -typing_extensions = {version = "^3.7.4.2", optional = true, python= "<3.8" } [tool.poetry.dev-dependencies] coverage = {extras = ["toml"], version = "^5.5"} @@ -50,12 +49,13 @@ radon = "^4.5.0" xenon = "^0.7.1" flake8-eradicate = "^1.0.0" flake8-bugbear = "^21.3.2" -mkdocs-material = "^7.0.6" +mkdocs-material = "^7.1.0" mkdocs-git-revision-date-plugin = "^0.3.1" +mike = "^0.6.0" [tool.poetry.extras] -pydantic = ["pydantic", "typing_extensions", "email-validator"] +pydantic = ["pydantic", "email-validator"] [tool.coverage.run] source = ["aws_lambda_powertools"] diff --git a/tests/functional/event_handler/__init__.py b/tests/functional/event_handler/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/appsync/test_appsync_resolver_utils.py b/tests/functional/event_handler/test_appsync.py similarity index 52% rename from tests/functional/appsync/test_appsync_resolver_utils.py rename to tests/functional/event_handler/test_appsync.py index a1388a1fb5c..c72331c32f1 100644 --- a/tests/functional/appsync/test_appsync_resolver_utils.py +++ b/tests/functional/event_handler/test_appsync.py @@ -1,28 +1,18 @@ import asyncio -import datetime import json -import os import sys +from pathlib import Path import pytest +from aws_lambda_powertools.event_handler import AppSyncResolver from aws_lambda_powertools.utilities.data_classes import AppSyncResolverEvent -from aws_lambda_powertools.utilities.data_classes.appsync.resolver_utils import AppSyncResolver -from aws_lambda_powertools.utilities.data_classes.appsync.scalar_types_utils import ( - _formatted_time, - aws_date, - aws_datetime, - aws_time, - aws_timestamp, - make_id, -) from aws_lambda_powertools.utilities.typing import LambdaContext def load_event(file_name: str) -> dict: - full_file_name = os.path.dirname(os.path.realpath(__file__)) + "/../../events/" + file_name - with open(full_file_name) as fp: - return json.load(fp) + path = Path(str(Path(__file__).parent.parent.parent) + "/events/" + file_name) + return json.loads(path.read_text()) def test_direct_resolver(): @@ -31,15 +21,14 @@ def test_direct_resolver(): app = AppSyncResolver() - 
@app.resolver(field_name="createSomething", include_context=True) - def create_something(context, id: str): # noqa AA03 VNE003 - assert context == {} + @app.resolver(field_name="createSomething") + def create_something(id: str): # noqa AA03 VNE003 + assert app.lambda_context == {} return id - def handler(event, context): - return app.resolve(event, context) + # Call the implicit handler + result = app(mock_event, {}) - result = handler(mock_event, {}) assert result == "my identifier" @@ -49,14 +38,16 @@ def test_amplify_resolver(): app = AppSyncResolver() - @app.resolver(type_name="Merchant", field_name="locations", include_event=True) - def get_location(event: AppSyncResolverEvent, page: int, size: int, name: str): - assert event is not None + @app.resolver(type_name="Merchant", field_name="locations") + def get_location(page: int, size: int, name: str): + assert app.current_event is not None + assert isinstance(app.current_event, AppSyncResolverEvent) assert page == 2 assert size == 1 return name def handler(event, context): + # Call the explicit resolve function return app.resolve(event, context) result = handler(mock_event, {}) @@ -80,42 +71,6 @@ def no_params(): assert result == "no_params has no params" -def test_resolver_include_event(): - # GIVEN - app = AppSyncResolver() - - mock_event = {"typeName": "Query", "fieldName": "field", "arguments": {}} - - @app.resolver(field_name="field", include_event=True) - def get_value(event: AppSyncResolverEvent): - return event - - # WHEN - result = app.resolve(mock_event, LambdaContext()) - - # THEN - assert result._data == mock_event - assert isinstance(result, AppSyncResolverEvent) - - -def test_resolver_include_context(): - # GIVEN - app = AppSyncResolver() - - mock_event = {"typeName": "Query", "fieldName": "field", "arguments": {}} - - @app.resolver(field_name="field", include_context=True) - def get_value(context: LambdaContext): - return context - - # WHEN - mock_context = LambdaContext() - result = app.resolve(mock_event, mock_context) - - # THEN - assert result == mock_context - - def test_resolver_value_error(): # GIVEN no defined field resolver app = AppSyncResolver() @@ -189,46 +144,3 @@ async def get_async(): # THEN assert asyncio.run(result) == "value" - - -def test_make_id(): - uuid: str = make_id() - assert isinstance(uuid, str) - assert len(uuid) == 36 - - -def test_aws_date_utc(): - date_str = aws_date() - assert isinstance(date_str, str) - assert datetime.datetime.strptime(date_str, "%Y-%m-%dZ") - - -def test_aws_time_utc(): - time_str = aws_time() - assert isinstance(time_str, str) - assert datetime.datetime.strptime(time_str, "%H:%M:%SZ") - - -def test_aws_datetime_utc(): - datetime_str = aws_datetime() - assert isinstance(datetime_str, str) - assert datetime.datetime.strptime(datetime_str, "%Y-%m-%dT%H:%M:%SZ") - - -def test_aws_timestamp(): - timestamp = aws_timestamp() - assert isinstance(timestamp, int) - - -def test_format_time_positive(): - now = datetime.datetime(2022, 1, 22) - datetime_str = _formatted_time(now, "%Y-%m-%d", 8) - assert isinstance(datetime_str, str) - assert datetime_str == "2022-01-22+08:00:00" - - -def test_format_time_negative(): - now = datetime.datetime(2022, 1, 22, 14, 22, 33) - datetime_str = _formatted_time(now, "%H:%M:%S", -12) - assert isinstance(datetime_str, str) - assert datetime_str == "02:22:33-12:00:00" diff --git a/tests/functional/idempotency/test_idempotency.py b/tests/functional/idempotency/test_idempotency.py index 503ec7d6183..25f76af48be 100644 --- 
a/tests/functional/idempotency/test_idempotency.py +++ b/tests/functional/idempotency/test_idempotency.py @@ -828,3 +828,20 @@ def lambda_handler(event, context): stubber.assert_no_pending_responses() stubber.deactivate() assert "Failed to save in progress record to idempotency store" == e.value.args[0] + + +def test_handler_raise_idempotency_key_error(persistence_store: DynamoDBPersistenceLayer, lambda_context): + # GIVEN raise_on_no_idempotency_key is True + idempotency_config = IdempotencyConfig(event_key_jmespath="idemKey", raise_on_no_idempotency_key=True) + + # WHEN handling the idempotent call + # AND save_inprogress raises an IdempotencyKeyError + @idempotent(persistence_store=persistence_store, config=idempotency_config) + def handler(event, context): + raise ValueError("Should not be raised") + + # THEN idempotent should re-raise the IdempotencyKeyError + with pytest.raises(IdempotencyKeyError) as e: + handler({}, lambda_context) + + assert "No data found to create a hashed idempotency_key" == e.value.args[0] diff --git a/tests/functional/parser/test_s3object_event.py b/tests/functional/parser/test_s3object_event.py new file mode 100644 index 00000000000..da015338cf4 --- /dev/null +++ b/tests/functional/parser/test_s3object_event.py @@ -0,0 +1,67 @@ +from aws_lambda_powertools.utilities.parser import event_parser +from aws_lambda_powertools.utilities.parser.models import S3ObjectLambdaEvent +from aws_lambda_powertools.utilities.typing import LambdaContext +from tests.functional.parser.utils import load_event + + +@event_parser(model=S3ObjectLambdaEvent) +def handle_s3_object_event_iam(event: S3ObjectLambdaEvent, _: LambdaContext): + return event + + +def test_s3_object_event(): + event = load_event("s3ObjectEventIAMUser.json") + parsed_event: S3ObjectLambdaEvent = handle_s3_object_event_iam(event, LambdaContext()) + assert parsed_event.xAmzRequestId == event["xAmzRequestId"] + assert parsed_event.getObjectContext is not None + object_context = parsed_event.getObjectContext + assert str(object_context.inputS3Url) == event["getObjectContext"]["inputS3Url"] + assert object_context.outputRoute == event["getObjectContext"]["outputRoute"] + assert object_context.outputToken == event["getObjectContext"]["outputToken"] + assert parsed_event.configuration is not None + configuration = parsed_event.configuration + assert configuration.accessPointArn == event["configuration"]["accessPointArn"] + assert configuration.supportingAccessPointArn == event["configuration"]["supportingAccessPointArn"] + assert configuration.payload == event["configuration"]["payload"] + assert parsed_event.userRequest is not None + user_request = parsed_event.userRequest + assert user_request.url == event["userRequest"]["url"] + assert user_request.headers == event["userRequest"]["headers"] + assert user_request.headers["Accept-Encoding"] == "identity" + assert parsed_event.userIdentity is not None + user_identity = parsed_event.userIdentity + assert user_identity.type == event["userIdentity"]["type"] + assert user_identity.principalId == event["userIdentity"]["principalId"] + assert user_identity.arn == event["userIdentity"]["arn"] + assert user_identity.accountId == event["userIdentity"]["accountId"] + assert user_identity.accessKeyId == event["userIdentity"]["accessKeyId"] + assert user_identity.userName == event["userIdentity"]["userName"] + assert user_identity.sessionContext is None + assert parsed_event.protocolVersion == event["protocolVersion"] + + +@event_parser(model=S3ObjectLambdaEvent) +def 
handle_s3_object_event_temp_creds(event: S3ObjectLambdaEvent, _: LambdaContext): + return event + + +def test_s3_object_event_temp_credentials(): + event = load_event("s3ObjectEventTempCredentials.json") + parsed_event: S3ObjectLambdaEvent = handle_s3_object_event_temp_creds(event, LambdaContext()) + assert parsed_event.xAmzRequestId == event["xAmzRequestId"] + session_context = parsed_event.userIdentity.sessionContext + assert session_context is not None + session_issuer = session_context.sessionIssuer + assert session_issuer is not None + assert session_issuer.type == event["userIdentity"]["sessionContext"]["sessionIssuer"]["type"] + assert session_issuer.userName == event["userIdentity"]["sessionContext"]["sessionIssuer"]["userName"] + assert session_issuer.principalId == event["userIdentity"]["sessionContext"]["sessionIssuer"]["principalId"] + assert session_issuer.arn == event["userIdentity"]["sessionContext"]["sessionIssuer"]["arn"] + assert session_issuer.accountId == event["userIdentity"]["sessionContext"]["sessionIssuer"]["accountId"] + session_attributes = session_context.attributes + assert session_attributes is not None + assert ( + str(session_attributes.mfaAuthenticated).lower() + == event["userIdentity"]["sessionContext"]["attributes"]["mfaAuthenticated"] + ) + assert session_attributes.creationDate == event["userIdentity"]["sessionContext"]["attributes"]["creationDate"] diff --git a/tests/functional/parser/test_s3.py b/tests/functional/parser/test_s3.py index 5d8a19a933e..a9c325f3a97 100644 --- a/tests/functional/parser/test_s3.py +++ b/tests/functional/parser/test_s3.py @@ -1,4 +1,4 @@ -from aws_lambda_powertools.utilities.parser import event_parser +from aws_lambda_powertools.utilities.parser import event_parser, parse from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.parser.utils import load_event @@ -87,3 +87,9 @@ def test_s3_trigger_event(): def test_s3_glacier_trigger_event(): event_dict = load_event("s3EventGlacier.json") handle_s3_glacier(event_dict, LambdaContext()) + + +def test_s3_empty_object(): + event_dict = load_event("s3Event.json") + event_dict["Records"][0]["s3"]["object"]["size"] = 0 + parse(event=event_dict, model=S3Model) diff --git a/tests/functional/test_lambda_trigger_events.py b/tests/functional/test_data_classes.py similarity index 97% rename from tests/functional/test_lambda_trigger_events.py rename to tests/functional/test_data_classes.py index 62bcb50762c..0221acc6853 100644 --- a/tests/functional/test_lambda_trigger_events.py +++ b/tests/functional/test_data_classes.py @@ -1,4 +1,5 @@ import base64 +import datetime import json import os from secrets import compare_digest @@ -17,6 +18,14 @@ SNSEvent, SQSEvent, ) +from aws_lambda_powertools.utilities.data_classes.appsync.scalar_types_utils import ( + _formatted_time, + aws_date, + aws_datetime, + aws_time, + aws_timestamp, + make_id, +) from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import ( AppSyncIdentityCognito, AppSyncIdentityIAM, @@ -704,6 +713,7 @@ def test_api_gateway_proxy_event(): assert request_context.message_direction is None assert request_context.message_id is None assert request_context.route_key is None + assert request_context.operation_name is None assert identity.api_key is None assert identity.api_key_id is None @@ -1059,3 +1069,46 @@ def test_s3_object_event_temp_credentials(): assert session_attributes is not None assert 
session_attributes.mfa_authenticated == session_context["attributes"]["mfaAuthenticated"] assert session_attributes.creation_date == session_context["attributes"]["creationDate"] + + +def test_make_id(): + uuid: str = make_id() + assert isinstance(uuid, str) + assert len(uuid) == 36 + + +def test_aws_date_utc(): + date_str = aws_date() + assert isinstance(date_str, str) + assert datetime.datetime.strptime(date_str, "%Y-%m-%dZ") + + +def test_aws_time_utc(): + time_str = aws_time() + assert isinstance(time_str, str) + assert datetime.datetime.strptime(time_str, "%H:%M:%SZ") + + +def test_aws_datetime_utc(): + datetime_str = aws_datetime() + assert isinstance(datetime_str, str) + assert datetime.datetime.strptime(datetime_str, "%Y-%m-%dT%H:%M:%SZ") + + +def test_aws_timestamp(): + timestamp = aws_timestamp() + assert isinstance(timestamp, int) + + +def test_format_time_positive(): + now = datetime.datetime(2022, 1, 22) + datetime_str = _formatted_time(now, "%Y-%m-%d", 8) + assert isinstance(datetime_str, str) + assert datetime_str == "2022-01-22+08:00:00" + + +def test_format_time_negative(): + now = datetime.datetime(2022, 1, 22, 14, 22, 33) + datetime_str = _formatted_time(now, "%H:%M:%S", -12) + assert isinstance(datetime_str, str) + assert datetime_str == "02:22:33-12:00:00" diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py index 6386e76e42f..3090a1228d2 100644 --- a/tests/functional/test_metrics.py +++ b/tests/functional/test_metrics.py @@ -739,3 +739,13 @@ def test_serialize_metric_set_metric_definition_multiple_values( assert "Timestamp" in metric_definition_output["_aws"] remove_timestamp(metrics=[metric_definition_output, expected_metric_definition]) assert metric_definition_output == expected_metric_definition + + +def test_metric_manage_metadata_set(): + expected_dict = {"setting": "On"} + + try: + metric = MetricManager(metadata_set=expected_dict) + assert metric.metadata_set == expected_dict + except AttributeError: + pytest.fail("AttributeError should not be raised") diff --git a/tests/performance/test_high_level_imports.py b/tests/performance/test_high_level_imports.py index 70a8d993bdf..e3914b26f57 100644 --- a/tests/performance/test_high_level_imports.py +++ b/tests/performance/test_high_level_imports.py @@ -6,7 +6,7 @@ import pytest -LOGGER_INIT_SLA: float = 0.001 +LOGGER_INIT_SLA: float = 0.005 METRICS_INIT_SLA: float = 0.005 TRACER_INIT_SLA: float = 0.5 IMPORT_INIT_SLA: float = 0.035 @@ -48,7 +48,7 @@ def test_import_times_ceiling(): elapsed = t() if elapsed > IMPORT_INIT_SLA: - pytest.fail(f"High level imports should be below 35ms: {elapsed}") + pytest.fail(f"High level imports should be below {IMPORT_INIT_SLA}s: {elapsed}") @pytest.mark.perf @@ -64,7 +64,7 @@ def test_tracer_init(): elapsed = t() if elapsed > TRACER_INIT_SLA: - pytest.fail(f"High level imports should be below 50ms: {elapsed}") + pytest.fail(f"Tracer init should be below {TRACER_INIT_SLA}s: {elapsed}") @pytest.mark.perf @@ -78,7 +78,7 @@ def test_metrics_init(): elapsed = t() if elapsed > METRICS_INIT_SLA: - pytest.fail(f"High level imports should be below 40ms: {elapsed}") + pytest.fail(f"Metrics init should be below {METRICS_INIT_SLA}s: {elapsed}") @pytest.mark.perf @@ -92,4 +92,4 @@ def test_logger_init(): elapsed = t() if elapsed > LOGGER_INIT_SLA: - pytest.fail(f"High level imports should be below 40ms: {elapsed}") + pytest.fail(f"Logger init should be below {LOGGER_INIT_SLA}s: {elapsed}")
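
The AppSyncResolver test changes earlier in this diff exercise the reworked resolver contract: the removed `include_event`/`include_context` flags give way to the `app.current_event` and `app.lambda_context` properties, and the resolver instance is itself callable as a Lambda handler. A minimal sketch of that pattern, consolidated from those tests; the `Merchant`/`locations` names come from the mock event used in the tests, and the function names here are illustrative only:

from aws_lambda_powertools.event_handler import AppSyncResolver
from aws_lambda_powertools.utilities.data_classes import AppSyncResolverEvent

app = AppSyncResolver()

@app.resolver(type_name="Merchant", field_name="locations")
def get_locations(page: int, size: int, name: str):
    # Field arguments arrive as keyword arguments; the raw event and the
    # Lambda context are read from properties instead of handler parameters.
    assert isinstance(app.current_event, AppSyncResolverEvent)
    _ = app.lambda_context  # LambdaContext passed by the Lambda runtime
    return name

def lambda_handler(event, context):
    return app.resolve(event, context)  # equivalently: app(event, context)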