diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2ed1322608a..fed13e3e577 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,6 +1,5 @@ version: 2 updates: - - package-ecosystem: "github-actions" directory: "/" schedule: @@ -20,6 +19,10 @@ updates: ignore: # 2022-04-23: Ignoring boto3 changes until we need to care about them. - dependency-name: "boto3" + groups: + boto-typing: + patterns: + - "mypy-boto3-*" - package-ecosystem: "npm" directory: "/" @@ -33,46 +36,54 @@ updates: # Allow updates for AWS CDK - dependency-name: "aws-cdk" -# - package-ecosystem: "pip" -# directory: "/" -# schedule: -# interval: "daily" -# target-branch: "develop" -# update_types: -# - "semver:major" -# labels: -# - "do-not-merge" -# - "dependencies" -# commit-message: -# prefix: chore -# include: scope - - package-ecosystem: pip directory: /benchmark/src/instrumented + commit-message: + prefix: chore + include: scope schedule: interval: daily - package-ecosystem: pip directory: /benchmark/src/reference + commit-message: + prefix: chore + include: scope schedule: interval: daily - package-ecosystem: docker directory: /docs + commit-message: + prefix: chore + include: scope schedule: interval: daily - package-ecosystem: pip - directory: /examples/event_handler_graphql/src + directory: /docs + commit-message: + prefix: chore + include: scope schedule: interval: daily - - package-ecosystem: gomod - directory: /layer/scripts/layer-balancer + - package-ecosystem: pip + directory: /examples/event_handler_graphql/src + commit-message: + prefix: chore + include: scope schedule: interval: daily - - package-ecosystem: pip - directory: /docs + - package-ecosystem: gomod + directory: /layer/scripts/layer-balancer + commit-message: + prefix: chore + include: scope schedule: interval: daily + groups: + layer-balancer: + patterns: + - "*" diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 1ffd05de307..072a9ae47c4 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -19,4 +19,4 @@ jobs: - name: 'Checkout Repository' uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 - name: 'Dependency Review' - uses: actions/dependency-review-action@1360a344ccb0ab6e9475edef90ad2f46bf8003b1 # v3.0.6 + uses: actions/dependency-review-action@f6fff72a3217f580d5afd49a46826795305b63c7 # v3.0.8 diff --git a/.github/workflows/ossf_scorecard.yml b/.github/workflows/ossf_scorecard.yml index 4d7d2b6fbe4..40d908e8674 100644 --- a/.github/workflows/ossf_scorecard.yml +++ b/.github/workflows/ossf_scorecard.yml @@ -15,6 +15,7 @@ jobs: analysis: name: Scorecard analysis runs-on: ubuntu-latest + environment: scorecard permissions: security-events: write # update code-scanning dashboard id-token: write # confirm org+repo identity before publish results @@ -31,6 +32,7 @@ jobs: results_file: results.sarif results_format: sarif publish_results: true # publish to OSSF Scorecard REST API + repo_token: ${{ secrets.SCORECARD_TOKEN }} # read-only fine-grained token to read branch protection settings - name: "Upload results" uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml index 1b5c668d79d..d62bd12cead 100644 --- a/.github/workflows/publish_v2_layer.yml +++ b/.github/workflows/publish_v2_layer.yml @@ -101,7 +101,7 @@ jobs: - name: Install poetry run: pipx install 
git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0 - name: Setup Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: "16.12" - name: Setup python diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 351da7f67b7..613f3278893 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -206,7 +206,7 @@ jobs: # NOTE: provenance fails if we use action pinning... it's a Github limitation # because SLSA needs to trace & attest it came from a given branch; pinning doesn't expose that information # https://github.com/slsa-framework/slsa-github-generator/blob/main/internal/builders/generic/README.md#referencing-the-slsa-generator - uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.7.0 + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.8.0 with: base64-subjects: ${{ needs.build.outputs.attestation_hashes }} upload-assets: false # we upload its attestation in create_tag job, otherwise it creates a new release @@ -237,12 +237,12 @@ jobs: - name: Upload to PyPi prod if: ${{ !inputs.skip_pypi }} - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 + uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e # v1.8.10 # PyPi test maintenance affected us numerous times, leaving for history purposes # - name: Upload to PyPi test # if: ${{ !inputs.skip_pypi }} - # uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 + # uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e # v1.8.10 # with: # repository-url: https://test.pypi.org/legacy/ diff --git a/.github/workflows/reusable_deploy_v2_layer_stack.yml b/.github/workflows/reusable_deploy_v2_layer_stack.yml index dc2e143a778..1f7f0818f22 100644 --- a/.github/workflows/reusable_deploy_v2_layer_stack.yml +++ b/.github/workflows/reusable_deploy_v2_layer_stack.yml @@ -120,6 +120,8 @@ jobs: has_arm64_support: "true" - region: "eu-west-3" has_arm64_support: "true" + - region: "il-central-1" + has_arm64_support: "false" - region: "me-central-1" has_arm64_support: "false" - region: "me-south-1" @@ -154,7 +156,7 @@ jobs: aws-region: ${{ matrix.region }} role-to-assume: ${{ secrets.AWS_LAYERS_ROLE_ARN }} - name: Setup Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: "16.12" - name: Setup python diff --git a/.github/workflows/reusable_deploy_v2_sar.yml b/.github/workflows/reusable_deploy_v2_sar.yml index beab36f24c2..a381d8832c2 100644 --- a/.github/workflows/reusable_deploy_v2_sar.yml +++ b/.github/workflows/reusable_deploy_v2_sar.yml @@ -111,7 +111,7 @@ jobs: aws-region: ${{ env.AWS_REGION }} role-to-assume: ${{ secrets.AWS_SAR_V2_ROLE_ARN }} - name: Setup Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} - name: Download artifact diff --git a/.github/workflows/reusable_publish_changelog.yml b/.github/workflows/reusable_publish_changelog.yml index 1df1ceb5953..0a678d9ee7d 100644 --- a/.github/workflows/reusable_publish_changelog.yml +++ 
b/.github/workflows/reusable_publish_changelog.yml @@ -15,6 +15,7 @@ permissions: jobs: publish_changelog: + if: github.repository == 'aws-powertools/powertools-lambda-python' # Force Github action to run only a single job at a time (based on the group name) # This is to prevent race-condition and inconsistencies with changelog push concurrency: diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml index 83f5198f1a9..8d86664d87e 100644 --- a/.github/workflows/reusable_publish_docs.yml +++ b/.github/workflows/reusable_publish_docs.yml @@ -32,6 +32,7 @@ permissions: jobs: publish_docs: + if: github.repository == 'aws-powertools/powertools-lambda-python' # Force Github action to run only a single job at a time (based on the group name) # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` concurrency: diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index af6e884041b..d3531db71f3 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -48,7 +48,7 @@ jobs: fail-fast: false # needed so if a version fails, the others will still be able to complete and cleanup matrix: version: ["3.7", "3.8", "3.9", "3.10", "3.11"] - if: ${{ github.actor != 'dependabot[bot]' }} + if: ${{ github.actor != 'dependabot[bot]' && github.repository == 'aws-powertools/powertools-lambda-python' }} steps: - name: "Checkout" uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 @@ -61,7 +61,7 @@ jobs: architecture: "x64" cache: "poetry" - name: Setup Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: "16.12" - name: Install CDK CLI diff --git a/.gitleaks.toml b/.gitleaks.toml new file mode 100644 index 00000000000..0622b57c118 --- /dev/null +++ b/.gitleaks.toml @@ -0,0 +1,15 @@ +# Title for the gitleaks configuration file. +title = "Gitleaks" + +[extend] +# useDefault will extend the base configuration with the default gitleaks config: +# https://github.com/zricethezav/gitleaks/blob/master/config/gitleaks.toml +useDefault = true + +[allowlist] +description = "Allow list false positive" + +# Allow list paths to ignore due to false positives. 
+paths = [
+    '''tests/unit/parser/test_kinesis_firehose\.py''',
+]
diff --git a/.gitleaksignore b/.gitleaksignore
new file mode 100644
index 00000000000..d501e5cc212
--- /dev/null
+++ b/.gitleaksignore
@@ -0,0 +1,3 @@
+examples/batch_processing/src/context_manager_access_output_pydantic.txt:aws-access-token:10
+examples/batch_processing/src/context_manager_access_output_pydantic.txt:aws-access-token:15
+examples/batch_processing/src/context_manager_access_output.txt:aws-access-token:10
diff --git a/.markdownlintignore b/.markdownlintignore
new file mode 100644
index 00000000000..11b6d7ffe29
--- /dev/null
+++ b/.markdownlintignore
@@ -0,0 +1,2 @@
+docs/core/metrics/index.md
+includes/abbreviations.md
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fbe488d678d..efc7033629d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,101 @@

 # Unreleased

+## Bug Fixes
+
+* **logger:** strip xray_trace_id when explicitly disabled ([#2852](https://github.com/aws-powertools/powertools-lambda-python/issues/2852))
+* **metrics:** proxy service and namespace attrs to provider ([#2910](https://github.com/aws-powertools/powertools-lambda-python/issues/2910))
+* **parser:** API Gateway V2 request context scope field should be optional ([#2961](https://github.com/aws-powertools/powertools-lambda-python/issues/2961))
+
+## Code Refactoring
+
+* **e2e:** support fail fast in get_lambda_response ([#2912](https://github.com/aws-powertools/powertools-lambda-python/issues/2912))
+* **metrics:** move from protocol to ABC; split provider tests ([#2934](https://github.com/aws-powertools/powertools-lambda-python/issues/2934))
+
+## Documentation
+
+* **batch:** new visuals and error handling section ([#2857](https://github.com/aws-powertools/powertools-lambda-python/issues/2857))
+* **batch:** explain record type discrepancy in failure and success handler ([#2868](https://github.com/aws-powertools/powertools-lambda-python/issues/2868))
+* **metrics:** update Datadog integration diagram ([#2954](https://github.com/aws-powertools/powertools-lambda-python/issues/2954))
+* **navigation:** remove nofollow attribute for internal links ([#2867](https://github.com/aws-powertools/powertools-lambda-python/issues/2867))
+* **navigation:** add nofollow attribute ([#2842](https://github.com/aws-powertools/powertools-lambda-python/issues/2842))
+* **roadmap:** update roadmap themes ([#2915](https://github.com/aws-powertools/powertools-lambda-python/issues/2915))
+* **roadmap:** add GovCloud and China region item ([#2960](https://github.com/aws-powertools/powertools-lambda-python/issues/2960))
+* **tutorial:** add support for Python 3.11 ([#2860](https://github.com/aws-powertools/powertools-lambda-python/issues/2860))
+
+## Features
+
+* **event_handler:** allow stripping route prefixes using regexes ([#2521](https://github.com/aws-powertools/powertools-lambda-python/issues/2521))
+* **layers:** add new commercial region Israel (Tel Aviv) ([#2907](https://github.com/aws-powertools/powertools-lambda-python/issues/2907))
+* **metrics:** add Datadog observability provider ([#2906](https://github.com/aws-powertools/powertools-lambda-python/issues/2906))
+* **metrics:** support bringing your own metrics provider ([#2194](https://github.com/aws-powertools/powertools-lambda-python/issues/2194))
+
+## Maintenance
+
+* **ci:** enable protected branch auditing ([#2913](https://github.com/aws-powertools/powertools-lambda-python/issues/2913))
+* **ci:** group dependabot updates
([#2896](https://github.com/aws-powertools/powertools-lambda-python/issues/2896)) +* **deps:** bump github.com/aws/aws-sdk-go-v2 from 1.19.0 to 1.19.1 in /layer/scripts/layer-balancer ([#2877](https://github.com/aws-powertools/powertools-lambda-python/issues/2877)) +* **deps:** bump gitpython from 3.1.31 to 3.1.32 in /docs ([#2948](https://github.com/aws-powertools/powertools-lambda-python/issues/2948)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.0 to 1.38.1 in /layer/scripts/layer-balancer ([#2876](https://github.com/aws-powertools/powertools-lambda-python/issues/2876)) +* **deps:** bump actions/dependency-review-action from 3.0.6 to 3.0.7 ([#2941](https://github.com/aws-powertools/powertools-lambda-python/issues/2941)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.29 to 1.18.30 in /layer/scripts/layer-balancer ([#2875](https://github.com/aws-powertools/powertools-lambda-python/issues/2875)) +* **deps:** bump pypa/gh-action-pypi-publish from 1.8.9 to 1.8.10 ([#2946](https://github.com/aws-powertools/powertools-lambda-python/issues/2946)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.28 to 1.18.29 in /layer/scripts/layer-balancer ([#2844](https://github.com/aws-powertools/powertools-lambda-python/issues/2844)) +* **deps:** bump actions/setup-node from 3.7.0 to 3.8.0 ([#2957](https://github.com/aws-powertools/powertools-lambda-python/issues/2957)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#2933](https://github.com/aws-powertools/powertools-lambda-python/issues/2933)) +* **deps:** bump actions/setup-node from 3.8.0 to 3.8.1 ([#2970](https://github.com/aws-powertools/powertools-lambda-python/issues/2970)) +* **deps:** bump actions/dependency-review-action from 3.0.7 to 3.0.8 ([#2963](https://github.com/aws-powertools/powertools-lambda-python/issues/2963)) +* **deps:** bump slsa-framework/slsa-github-generator from 1.7.0 to 1.8.0 ([#2927](https://github.com/aws-powertools/powertools-lambda-python/issues/2927)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.37.1 to 1.38.0 in /layer/scripts/layer-balancer ([#2843](https://github.com/aws-powertools/powertools-lambda-python/issues/2843)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.1 to 1.39.0 in /layer/scripts/layer-balancer ([#2890](https://github.com/aws-powertools/powertools-lambda-python/issues/2890)) +* **deps:** bump pydantic from 1.10.11 to 1.10.12 ([#2846](https://github.com/aws-powertools/powertools-lambda-python/issues/2846)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#2971](https://github.com/aws-powertools/powertools-lambda-python/issues/2971)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 2 updates ([#2904](https://github.com/aws-powertools/powertools-lambda-python/issues/2904)) +* **deps:** bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.9 ([#2943](https://github.com/aws-powertools/powertools-lambda-python/issues/2943)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.30 to 1.18.31 in /layer/scripts/layer-balancer ([#2889](https://github.com/aws-powertools/powertools-lambda-python/issues/2889)) +* **deps:** bump squidfunk/mkdocs-material from `33e28bd` to `cd3a522` in /docs ([#2859](https://github.com/aws-powertools/powertools-lambda-python/issues/2859)) +* **deps-dev:** bump sentry-sdk from 1.28.1 to 1.29.0 
([#2900](https://github.com/aws-powertools/powertools-lambda-python/issues/2900)) +* **deps-dev:** bump cfn-lint from 0.79.5 to 0.79.6 ([#2899](https://github.com/aws-powertools/powertools-lambda-python/issues/2899)) +* **deps-dev:** bump the boto-typing group with 11 updates ([#2901](https://github.com/aws-powertools/powertools-lambda-python/issues/2901)) +* **deps-dev:** bump ruff from 0.0.280 to 0.0.281 ([#2891](https://github.com/aws-powertools/powertools-lambda-python/issues/2891)) +* **deps-dev:** bump aws-cdk from 2.88.0 to 2.89.0 ([#2887](https://github.com/aws-powertools/powertools-lambda-python/issues/2887)) +* **deps-dev:** bump the boto-typing group with 3 updates ([#2967](https://github.com/aws-powertools/powertools-lambda-python/issues/2967)) +* **deps-dev:** bump mkdocs-material from 9.1.19 to 9.1.21 ([#2894](https://github.com/aws-powertools/powertools-lambda-python/issues/2894)) +* **deps-dev:** bump radon from 5.1.0 to 6.0.1 ([#2964](https://github.com/aws-powertools/powertools-lambda-python/issues/2964)) +* **deps-dev:** bump the boto-typing group with 4 updates ([#2928](https://github.com/aws-powertools/powertools-lambda-python/issues/2928)) +* **deps-dev:** bump aws-cdk from 2.89.0 to 2.90.0 ([#2932](https://github.com/aws-powertools/powertools-lambda-python/issues/2932)) +* **deps-dev:** bump mypy-boto3-logs from 1.28.1 to 1.28.15 ([#2880](https://github.com/aws-powertools/powertools-lambda-python/issues/2880)) +* **deps-dev:** bump mypy-boto3-appconfigdata from 1.28.0 to 1.28.15 ([#2879](https://github.com/aws-powertools/powertools-lambda-python/issues/2879)) +* **deps-dev:** bump mypy-boto3-lambda from 1.28.11 to 1.28.15 ([#2878](https://github.com/aws-powertools/powertools-lambda-python/issues/2878)) +* **deps-dev:** bump mypy-boto3-xray from 1.28.0 to 1.28.15 ([#2881](https://github.com/aws-powertools/powertools-lambda-python/issues/2881)) +* **deps-dev:** bump ruff from 0.0.282 to 0.0.283 ([#2937](https://github.com/aws-powertools/powertools-lambda-python/issues/2937)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.28.0 to 1.28.11 ([#2847](https://github.com/aws-powertools/powertools-lambda-python/issues/2847)) +* **deps-dev:** bump ruff from 0.0.283 to 0.0.284 ([#2940](https://github.com/aws-powertools/powertools-lambda-python/issues/2940)) +* **deps-dev:** bump cfn-lint from 0.79.4 to 0.79.5 ([#2870](https://github.com/aws-powertools/powertools-lambda-python/issues/2870)) +* **deps-dev:** bump cfn-lint from 0.79.6 to 0.79.7 ([#2956](https://github.com/aws-powertools/powertools-lambda-python/issues/2956)) +* **deps-dev:** bump mypy-boto3-cloudformation from 1.28.10 to 1.28.12 ([#2864](https://github.com/aws-powertools/powertools-lambda-python/issues/2864)) +* **deps-dev:** bump mypy-boto3-cloudwatch from 1.28.0 to 1.28.12 ([#2865](https://github.com/aws-powertools/powertools-lambda-python/issues/2865)) +* **deps-dev:** bump cfn-lint from 0.79.3 to 0.79.4 ([#2862](https://github.com/aws-powertools/powertools-lambda-python/issues/2862)) +* **deps-dev:** bump mypy-boto3-appconfig from 1.28.0 to 1.28.12 ([#2861](https://github.com/aws-powertools/powertools-lambda-python/issues/2861)) +* **deps-dev:** bump mypy-boto3-ssm from 1.28.0 to 1.28.12 ([#2863](https://github.com/aws-powertools/powertools-lambda-python/issues/2863)) +* **deps-dev:** bump the boto-typing group with 1 update ([#2944](https://github.com/aws-powertools/powertools-lambda-python/issues/2944)) +* **deps-dev:** bump aws-cdk from 2.90.0 to 2.91.0 
([#2947](https://github.com/aws-powertools/powertools-lambda-python/issues/2947)) +* **deps-dev:** bump cfn-lint from 0.78.2 to 0.79.3 ([#2854](https://github.com/aws-powertools/powertools-lambda-python/issues/2854)) +* **deps-dev:** bump mypy-boto3-lambda from 1.28.0 to 1.28.11 ([#2845](https://github.com/aws-powertools/powertools-lambda-python/issues/2845)) +* **deps-dev:** bump xenon from 0.9.0 to 0.9.1 ([#2955](https://github.com/aws-powertools/powertools-lambda-python/issues/2955)) +* **deps-dev:** bump aws-cdk from 2.91.0 to 2.92.0 ([#2965](https://github.com/aws-powertools/powertools-lambda-python/issues/2965)) +* **deps-dev:** bump ruff from 0.0.281 to 0.0.282 ([#2905](https://github.com/aws-powertools/powertools-lambda-python/issues/2905)) +* **docs:** include the environment variables section in the utilities documentation ([#2925](https://github.com/aws-powertools/powertools-lambda-python/issues/2925)) +* **docs:** disable line length rule using older syntax ([#2920](https://github.com/aws-powertools/powertools-lambda-python/issues/2920)) +* **maintenance:** enables publishing docs and changelog, running e2e tests only in the main repository ([#2924](https://github.com/aws-powertools/powertools-lambda-python/issues/2924)) + + + +## [v2.22.0] - 2023-07-25 +## Bug Fixes + +* **parameters:** distinct cache key for single vs path with same name ([#2839](https://github.com/aws-powertools/powertools-lambda-python/issues/2839)) + ## Documentation * **community:** new batch processing article ([#2828](https://github.com/aws-powertools/powertools-lambda-python/issues/2828)) @@ -15,13 +110,14 @@ ## Maintenance +* version bump * **ci:** add baking time for layer build ([#2834](https://github.com/aws-powertools/powertools-lambda-python/issues/2834)) * **ci:** build changelog on a schedule only ([#2832](https://github.com/aws-powertools/powertools-lambda-python/issues/2832)) * **deps:** bump actions/setup-python from 4.6.1 to 4.7.0 ([#2821](https://github.com/aws-powertools/powertools-lambda-python/issues/2821)) -* **deps-dev:** bump ruff from 0.0.279 to 0.0.280 ([#2836](https://github.com/aws-powertools/powertools-lambda-python/issues/2836)) * **deps-dev:** bump ruff from 0.0.278 to 0.0.279 ([#2822](https://github.com/aws-powertools/powertools-lambda-python/issues/2822)) -* **deps-dev:** bump mypy-boto3-cloudformation from 1.28.0 to 1.28.10 ([#2837](https://github.com/aws-powertools/powertools-lambda-python/issues/2837)) * **deps-dev:** bump cfn-lint from 0.78.1 to 0.78.2 ([#2823](https://github.com/aws-powertools/powertools-lambda-python/issues/2823)) +* **deps-dev:** bump ruff from 0.0.279 to 0.0.280 ([#2836](https://github.com/aws-powertools/powertools-lambda-python/issues/2836)) +* **deps-dev:** bump mypy-boto3-cloudformation from 1.28.0 to 1.28.10 ([#2837](https://github.com/aws-powertools/powertools-lambda-python/issues/2837)) @@ -3597,7 +3693,8 @@ * Merge pull request [#5](https://github.com/aws-powertools/powertools-lambda-python/issues/5) from jfuss/feat/python38 -[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.21.0...HEAD +[Unreleased]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.22.0...HEAD +[v2.22.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.21.0...v2.22.0 [v2.21.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.20.0...v2.21.0 [v2.20.0]: https://github.com/aws-powertools/powertools-lambda-python/compare/v2.19.0...v2.20.0 [v2.19.0]: 
https://github.com/aws-powertools/powertools-lambda-python/compare/v2.18.0...v2.19.0
diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py
index 446b1eca856..1e6fe2a50bb 100644
--- a/aws_lambda_powertools/event_handler/api_gateway.py
+++ b/aws_lambda_powertools/event_handler/api_gateway.py
@@ -520,7 +520,7 @@ def __init__(
         cors: Optional[CORSConfig] = None,
         debug: Optional[bool] = None,
         serializer: Optional[Callable[[Dict], str]] = None,
-        strip_prefixes: Optional[List[str]] = None,
+        strip_prefixes: Optional[List[Union[str, Pattern]]] = None,
     ):
         """
         Parameters
@@ -534,9 +534,10 @@ def __init__(
             environment variable
         serializer : Callable, optional
             function to serialize `obj` to a JSON formatted `str`, by default json.dumps
-        strip_prefixes: List[str], optional
-            optional list of prefixes to be removed from the request path before doing the routing. This is often used
-            with api gateways with multiple custom mappings.
+        strip_prefixes: List[Union[str, Pattern]], optional
+            optional list of prefixes to be removed from the request path before routing.
+            This is often used with API Gateways that have multiple custom mappings.
+            Each prefix can be a static string or a compiled regex pattern.
         """
         self._proxy_type = proxy_type
         self._dynamic_routes: List[Route] = []
@@ -713,10 +714,21 @@ def _remove_prefix(self, path: str) -> str:
             return path

         for prefix in self._strip_prefixes:
-            if path == prefix:
-                return "/"
-            if self._path_starts_with(path, prefix):
-                return path[len(prefix) :]
+            if isinstance(prefix, str):
+                if path == prefix:
+                    return "/"
+
+                if self._path_starts_with(path, prefix):
+                    return path[len(prefix) :]
+
+            if isinstance(prefix, Pattern):
+                path = re.sub(prefix, "", path)
+
+                # When using regexes, we might reach a point where everything is removed
+                # from the path, so we check whether it's empty and return /, since there's
+                # nothing left to strip.
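The regex branch above removes every match with `re.sub` and, as the following lines show, collapses the path to `/` when nothing is left. A minimal sketch of the new capability in use; the resolver choice, pattern, and route are illustrative, not taken from the repository:

```python
import re

from aws_lambda_powertools.event_handler import APIGatewayRestResolver

# Static strings are matched literally; compiled patterns are removed with re.sub().
# "/pay/v1/subscriptions" and "/pay/v2/subscriptions" are both routed as "/subscriptions".
app = APIGatewayRestResolver(strip_prefixes=[re.compile(r"^/pay/v[12]")])


@app.get("/subscriptions")
def get_subscriptions():
    return {"subscriptions": []}


def lambda_handler(event, context):
    return app.resolve(event, context)
```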
+ if not path: + return "/" return path @@ -911,7 +923,7 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): """Amazon API Gateway REST and HTTP API v1 payload resolver""" super().__init__(ProxyEventType.APIGatewayProxyEvent, cors, debug, serializer, strip_prefixes) @@ -942,7 +954,7 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): """Amazon API Gateway HTTP API v2 payload resolver""" super().__init__(ProxyEventType.APIGatewayProxyEventV2, cors, debug, serializer, strip_prefixes) @@ -956,7 +968,7 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): """Amazon Application Load Balancer (ALB) resolver""" super().__init__(ProxyEventType.ALBEvent, cors, debug, serializer, strip_prefixes) diff --git a/aws_lambda_powertools/event_handler/lambda_function_url.py b/aws_lambda_powertools/event_handler/lambda_function_url.py index 6978b29f451..433a013ab0b 100644 --- a/aws_lambda_powertools/event_handler/lambda_function_url.py +++ b/aws_lambda_powertools/event_handler/lambda_function_url.py @@ -1,4 +1,4 @@ -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, List, Optional, Pattern, Union from aws_lambda_powertools.event_handler import CORSConfig from aws_lambda_powertools.event_handler.api_gateway import ( @@ -51,6 +51,6 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): super().__init__(ProxyEventType.LambdaFunctionUrlEvent, cors, debug, serializer, strip_prefixes) diff --git a/aws_lambda_powertools/event_handler/vpc_lattice.py b/aws_lambda_powertools/event_handler/vpc_lattice.py index 1150f7224fb..b3cb042b40b 100644 --- a/aws_lambda_powertools/event_handler/vpc_lattice.py +++ b/aws_lambda_powertools/event_handler/vpc_lattice.py @@ -1,4 +1,4 @@ -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, List, Optional, Pattern, Union from aws_lambda_powertools.event_handler import CORSConfig from aws_lambda_powertools.event_handler.api_gateway import ( @@ -47,7 +47,7 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): """Amazon VPC Lattice resolver""" super().__init__(ProxyEventType.VPCLatticeEvent, cors, debug, serializer, strip_prefixes) diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index 03b290fde64..3a7ede4ce4e 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -236,8 +236,12 @@ def _build_default_keys(): "timestamp": "%(asctime)s", } - @staticmethod - def _get_latest_trace_id(): + def _get_latest_trace_id(self): + xray_trace_id_key = self.log_format.get("xray_trace_id", "") + 
if xray_trace_id_key is None: + # key is explicitly disabled; ignore it. e.g., Logger(xray_trace_id=None) + return None + xray_trace_id = os.getenv(constants.XRAY_TRACE_ID_ENV) return xray_trace_id.split(";")[0].replace("Root=", "") if xray_trace_id else None diff --git a/aws_lambda_powertools/metrics/__init__.py b/aws_lambda_powertools/metrics/__init__.py index 5f30f14102d..b8c94478816 100644 --- a/aws_lambda_powertools/metrics/__init__.py +++ b/aws_lambda_powertools/metrics/__init__.py @@ -1,23 +1,22 @@ """CloudWatch Embedded Metric Format utility """ -from .base import MetricResolution, MetricUnit -from .exceptions import ( +from aws_lambda_powertools.metrics.base import MetricResolution, MetricUnit, single_metric +from aws_lambda_powertools.metrics.exceptions import ( MetricResolutionError, MetricUnitError, MetricValueError, SchemaValidationError, ) -from .metric import single_metric -from .metrics import EphemeralMetrics, Metrics +from aws_lambda_powertools.metrics.metrics import EphemeralMetrics, Metrics __all__ = [ - "Metrics", - "EphemeralMetrics", "single_metric", - "MetricUnit", "MetricUnitError", - "MetricResolution", "MetricResolutionError", "SchemaValidationError", "MetricValueError", + "Metrics", + "EphemeralMetrics", + "MetricResolution", + "MetricUnit", ] diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index 6a5e7282392..2c45aa1fb3e 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime import functools import json @@ -7,59 +9,28 @@ import warnings from collections import defaultdict from contextlib import contextmanager -from enum import Enum from typing import Any, Callable, Dict, Generator, List, Optional, Union -from ..shared import constants -from ..shared.functions import resolve_env_var_choice -from .exceptions import ( +from aws_lambda_powertools.metrics.exceptions import ( MetricResolutionError, MetricUnitError, MetricValueError, SchemaValidationError, ) -from .types import MetricNameUnitResolution +from aws_lambda_powertools.metrics.provider import cold_start +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.constants import MAX_DIMENSIONS, MAX_METRICS +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import MetricResolution, MetricUnit +from aws_lambda_powertools.metrics.provider.cold_start import ( + reset_cold_start_flag, # noqa: F401 # backwards compatibility +) +from aws_lambda_powertools.metrics.types import MetricNameUnitResolution +from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.shared.functions import resolve_env_var_choice logger = logging.getLogger(__name__) -MAX_METRICS = 100 -MAX_DIMENSIONS = 29 - -is_cold_start = True - - -class MetricResolution(Enum): - Standard = 60 - High = 1 - - -class MetricUnit(Enum): - Seconds = "Seconds" - Microseconds = "Microseconds" - Milliseconds = "Milliseconds" - Bytes = "Bytes" - Kilobytes = "Kilobytes" - Megabytes = "Megabytes" - Gigabytes = "Gigabytes" - Terabytes = "Terabytes" - Bits = "Bits" - Kilobits = "Kilobits" - Megabits = "Megabits" - Gigabits = "Gigabits" - Terabits = "Terabits" - Percent = "Percent" - Count = "Count" - BytesPerSecond = "Bytes/Second" - KilobytesPerSecond = "Kilobytes/Second" - MegabytesPerSecond = "Megabytes/Second" - GigabytesPerSecond = "Gigabytes/Second" - TerabytesPerSecond = "Terabytes/Second" - BitsPerSecond = "Bits/Second" - KilobitsPerSecond = "Kilobits/Second" - 
MegabitsPerSecond = "Megabits/Second" - GigabitsPerSecond = "Gigabits/Second" - TerabitsPerSecond = "Terabits/Second" - CountPerSecond = "Count/Second" +# Maintenance: alias due to Hyrum's law +is_cold_start = cold_start.is_cold_start class MetricManager: @@ -94,11 +65,11 @@ class MetricManager: def __init__( self, - metric_set: Optional[Dict[str, Any]] = None, - dimension_set: Optional[Dict] = None, - namespace: Optional[str] = None, - metadata_set: Optional[Dict[str, Any]] = None, - service: Optional[str] = None, + metric_set: Dict[str, Any] | None = None, + dimension_set: Dict | None = None, + namespace: str | None = None, + metadata_set: Dict[str, Any] | None = None, + service: str | None = None, ): self.metric_set = metric_set if metric_set is not None else {} self.dimension_set = dimension_set if dimension_set is not None else {} @@ -112,9 +83,9 @@ def __init__( def add_metric( self, name: str, - unit: Union[MetricUnit, str], + unit: MetricUnit | str, value: float, - resolution: Union[MetricResolution, int] = 60, + resolution: MetricResolution | int = 60, ) -> None: """Adds given metric @@ -173,9 +144,9 @@ def add_metric( def serialize_metric_set( self, - metrics: Optional[Dict] = None, - dimensions: Optional[Dict] = None, - metadata: Optional[Dict] = None, + metrics: Dict | None = None, + dimensions: Dict | None = None, + metadata: Dict | None = None, ) -> Dict: """Serializes metric and dimensions set @@ -355,10 +326,10 @@ def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None: def log_metrics( self, - lambda_handler: Union[Callable[[Dict, Any], Any], Optional[Callable[[Dict, Any, Optional[Dict]], Any]]] = None, + lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None, capture_cold_start_metric: bool = False, raise_on_empty_metrics: bool = False, - default_dimensions: Optional[Dict[str, str]] = None, + default_dimensions: Dict[str, str] | None = None, ): """Decorator to serialize and publish metrics at the end of a function execution. 
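These signature updates keep `add_metric` accepting both enums and raw values for unit and resolution. A short sketch of equivalent calls, mirroring the docstring examples elsewhere in this change (the namespace, service, and metric names are illustrative):

```python
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricResolution, MetricUnit

metrics = Metrics(namespace="ServerlessAirline", service="payment")

# Enum members and raw values are interchangeable:
# MetricUnit.Count serializes to "Count"; MetricResolution.High is 1, Standard is 60.
metrics.add_metric(name="BookingConfirmation", unit=MetricUnit.Count, value=1)
metrics.add_metric(name="BookingLatency", unit="Milliseconds", value=128, resolution=MetricResolution.High)
```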
@@ -537,9 +508,9 @@ class SingleMetric(MetricManager):
     def add_metric(
         self,
         name: str,
-        unit: Union[MetricUnit, str],
+        unit: MetricUnit | str,
         value: float,
-        resolution: Union[MetricResolution, int] = 60,
+        resolution: MetricResolution | int = 60,
     ) -> None:
         """Method to prevent more than one metric being created
@@ -565,9 +536,9 @@ def single_metric(
     name: str,
     unit: MetricUnit,
     value: float,
-    resolution: Union[MetricResolution, int] = 60,
-    namespace: Optional[str] = None,
-    default_dimensions: Optional[Dict[str, str]] = None,
+    resolution: MetricResolution | int = 60,
+    namespace: str | None = None,
+    default_dimensions: Dict[str, str] | None = None,
 ) -> Generator[SingleMetric, None, None]:
     """Context manager to simplify creation of a single metric
@@ -622,7 +593,7 @@ def single_metric(
     SchemaValidationError
         When metric object fails EMF schema validation
     """  # noqa: E501
-    metric_set: Optional[Dict] = None
+    metric_set: Dict | None = None
     try:
         metric: SingleMetric = SingleMetric(namespace=namespace)
         metric.add_metric(name=name, unit=unit, value=value, resolution=resolution)
@@ -635,9 +606,3 @@ def single_metric(
         metric_set = metric.serialize_metric_set()
     finally:
         print(json.dumps(metric_set, separators=(",", ":")))
-
-
-def reset_cold_start_flag():
-    global is_cold_start
-    if not is_cold_start:
-        is_cold_start = True
diff --git a/aws_lambda_powertools/metrics/exceptions.py b/aws_lambda_powertools/metrics/exceptions.py
index 94f492d14d7..30a4996d67e 100644
--- a/aws_lambda_powertools/metrics/exceptions.py
+++ b/aws_lambda_powertools/metrics/exceptions.py
@@ -1,13 +1,4 @@
-class MetricUnitError(Exception):
-    """When metric unit is not supported by CloudWatch"""
-
-    pass
-
-
-class MetricResolutionError(Exception):
-    """When metric resolution is not supported by CloudWatch"""
-
-    pass
+from aws_lambda_powertools.metrics.provider.cloudwatch_emf.exceptions import MetricResolutionError, MetricUnitError


 class SchemaValidationError(Exception):
@@ -20,3 +11,6 @@ class MetricValueError(Exception):
     """When metric value isn't a valid number"""

     pass
+
+
+__all__ = ["MetricUnitError", "MetricResolutionError", "SchemaValidationError", "MetricValueError"]
diff --git a/aws_lambda_powertools/metrics/functions.py b/aws_lambda_powertools/metrics/functions.py
new file mode 100644
index 00000000000..d951c0749a3
--- /dev/null
+++ b/aws_lambda_powertools/metrics/functions.py
@@ -0,0 +1,72 @@
+from __future__ import annotations
+
+from typing import List
+
+from aws_lambda_powertools.metrics.provider.cloudwatch_emf.exceptions import (
+    MetricResolutionError,
+    MetricUnitError,
+)
+from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import MetricResolution, MetricUnit
+
+
+def extract_cloudwatch_metric_resolution_value(metric_resolutions: List, resolution: int | MetricResolution) -> int:
+    """Return the metric resolution value, whether the input is an int or a MetricResolution enum
+
+    Parameters
+    ----------
+    resolution : Union[int, MetricResolution]
+        Metric resolution
+
+    Returns
+    -------
+    int
+        Metric resolution value (must be 1 or 60)
+
+    Raises
+    ------
+    MetricResolutionError
+        When metric resolution is not supported by CloudWatch
+    """
+    if isinstance(resolution, MetricResolution):
+        return resolution.value
+
+    if isinstance(resolution, int) and resolution in metric_resolutions:
+        return resolution
+
+    raise MetricResolutionError(
+        f"Invalid metric resolution '{resolution}', expected either option: {metric_resolutions}",  # noqa: E501
+    )
+
+
+def extract_cloudwatch_metric_unit_value(metric_units: List, metric_valid_options: List, unit: str | MetricUnit) -> str:
+    """Return the metric unit value, whether the input is a str or a MetricUnit enum
+
+    Parameters
+    ----------
+    unit : Union[str, MetricUnit]
+        Metric unit
+
+    Returns
+    -------
+    str
+        Metric unit value (e.g. "Seconds", "Count/Second")
+
+    Raises
+    ------
+    MetricUnitError
+        When metric unit is not supported by CloudWatch
+    """
+
+    if isinstance(unit, str):
+        if unit in metric_valid_options:
+            unit = MetricUnit[unit].value
+
+        if unit not in metric_units:
+            raise MetricUnitError(
+                f"Invalid metric unit '{unit}', expected either option: {metric_valid_options}",
+            )
+
+    if isinstance(unit, MetricUnit):
+        unit = unit.value
+
+    return unit
diff --git a/aws_lambda_powertools/metrics/metric.py b/aws_lambda_powertools/metrics/metric.py
index 5465889f1f0..e2ac49df489 100644
--- a/aws_lambda_powertools/metrics/metric.py
+++ b/aws_lambda_powertools/metrics/metric.py
@@ -1,4 +1,4 @@
 # NOTE: prevents circular inheritance import
-from .base import SingleMetric, single_metric
+from aws_lambda_powertools.metrics.base import SingleMetric, single_metric

 __all__ = ["SingleMetric", "single_metric"]
diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py
index 487f2ab9b2f..cb970fcfdc0 100644
--- a/aws_lambda_powertools/metrics/metrics.py
+++ b/aws_lambda_powertools/metrics/metrics.py
@@ -1,10 +1,15 @@
-from typing import Any, Dict, Optional
+# NOTE: kept for backwards compatibility
+from __future__ import annotations

-from .base import MetricManager
+from typing import Any, Callable, Dict, Optional

+from aws_lambda_powertools.metrics.base import MetricResolution, MetricUnit
+from aws_lambda_powertools.metrics.provider.cloudwatch_emf.cloudwatch import AmazonCloudWatchEMFProvider
+from aws_lambda_powertools.metrics.provider.cloudwatch_emf.types import CloudWatchEMFOutput

-class Metrics(MetricManager):
-    """Metrics create an EMF object with up to 100 metrics
+
+class Metrics:
+    """Metrics creates a CloudWatch EMF object with up to 100 metrics

     Use Metrics when you need to create multiple metrics that have dimensions in common (e.g. service_name="payment").
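The two helpers above normalize user input before serialization. A sketch of their behavior, with the option lists built from the enums the same way `AmazonCloudWatchEMFProvider.__init__` builds them later in this diff:

```python
from aws_lambda_powertools.metrics.functions import (
    extract_cloudwatch_metric_resolution_value,
    extract_cloudwatch_metric_unit_value,
)
from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import (
    MetricResolution,
    MetricUnit,
)

metric_units = [unit.value for unit in MetricUnit]  # EMF values, e.g. "Bytes/Second"
metric_valid_options = list(MetricUnit.__members__)  # member names, e.g. "BytesPerSecond"
metric_resolutions = [res.value for res in MetricResolution]  # [60, 1]

# Member names and enum members both normalize to the EMF string value
assert extract_cloudwatch_metric_unit_value(metric_units, metric_valid_options, "Count") == "Count"
assert extract_cloudwatch_metric_unit_value(metric_units, metric_valid_options, MetricUnit.BytesPerSecond) == "Bytes/Second"

# Resolutions normalize to 1 (high) or 60 (standard); anything else raises MetricResolutionError
assert extract_cloudwatch_metric_resolution_value(metric_resolutions, MetricResolution.High) == 1
assert extract_cloudwatch_metric_resolution_value(metric_resolutions, 60) == 60
```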
@@ -46,6 +51,8 @@ def lambda_handler(): service name to be used as metric dimension, by default "service_undefined" namespace : str, optional Namespace for metrics + provider: AmazonCloudWatchEMFProvider, optional + Pre-configured AmazonCloudWatchEMFProvider provider Raises ------ @@ -69,22 +76,73 @@ def lambda_handler(): _metadata: Dict[str, Any] = {} _default_dimensions: Dict[str, Any] = {} - def __init__(self, service: Optional[str] = None, namespace: Optional[str] = None): + def __init__( + self, + service: str | None = None, + namespace: str | None = None, + provider: AmazonCloudWatchEMFProvider | None = None, + ): self.metric_set = self._metrics self.metadata_set = self._metadata self.default_dimensions = self._default_dimensions self.dimension_set = self._dimensions self.dimension_set.update(**self._default_dimensions) - super().__init__( - namespace=namespace, - service=service, - metric_set=self.metric_set, - dimension_set=self.dimension_set, - metadata_set=self.metadata_set, + + if provider is None: + self.provider = AmazonCloudWatchEMFProvider( + namespace=namespace, + service=service, + metric_set=self.metric_set, + dimension_set=self.dimension_set, + metadata_set=self.metadata_set, + default_dimensions=self._default_dimensions, + ) + else: + self.provider = provider + + def add_metric( + self, + name: str, + unit: MetricUnit | str, + value: float, + resolution: MetricResolution | int = 60, + ) -> None: + self.provider.add_metric(name=name, unit=unit, value=value, resolution=resolution) + + def add_dimension(self, name: str, value: str) -> None: + self.provider.add_dimension(name=name, value=value) + + def serialize_metric_set( + self, + metrics: Dict | None = None, + dimensions: Dict | None = None, + metadata: Dict | None = None, + ) -> CloudWatchEMFOutput: + return self.provider.serialize_metric_set(metrics=metrics, dimensions=dimensions, metadata=metadata) + + def add_metadata(self, key: str, value: Any) -> None: + self.provider.add_metadata(key=key, value=value) + + def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None: + self.provider.flush_metrics(raise_on_empty_metrics=raise_on_empty_metrics) + + def log_metrics( + self, + lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None, + capture_cold_start_metric: bool = False, + raise_on_empty_metrics: bool = False, + default_dimensions: Dict[str, str] | None = None, + ): + return self.provider.log_metrics( + lambda_handler=lambda_handler, + capture_cold_start_metric=capture_cold_start_metric, + raise_on_empty_metrics=raise_on_empty_metrics, + default_dimensions=default_dimensions, ) def set_default_dimensions(self, **dimensions) -> None: + self.provider.set_default_dimensions(**dimensions) """Persist dimensions across Lambda invocations Parameters @@ -111,63 +169,36 @@ def lambda_handler(): self.default_dimensions.update(**dimensions) def clear_default_dimensions(self) -> None: + self.provider.default_dimensions.clear() self.default_dimensions.clear() def clear_metrics(self) -> None: - super().clear_metrics() - # re-add default dimensions - self.set_default_dimensions(**self.default_dimensions) - - -class EphemeralMetrics(MetricManager): - """Non-singleton version of Metrics to not persist metrics across instances - - NOTE: This is useful when you want to: + self.provider.clear_metrics() - - Create metrics for distinct namespaces - - Create the same metrics with different dimensions more than once - """ + # We now allow customers to bring their own instance + # 
of the AmazonCloudWatchEMFProvider provider + # So we need to define getter/setter for namespace and service properties + # To access these attributes on the provider instance. + @property + def namespace(self): + return self.provider.namespace - _dimensions: Dict[str, str] = {} - _default_dimensions: Dict[str, Any] = {} + @namespace.setter + def namespace(self, namespace): + self.provider.namespace = namespace - def __init__(self, service: Optional[str] = None, namespace: Optional[str] = None): - self.default_dimensions = self._default_dimensions - self.dimension_set = self._dimensions + @property + def service(self): + return self.provider.service - self.dimension_set.update(**self._default_dimensions) - super().__init__(namespace=namespace, service=service) + @service.setter + def service(self, service): + self.provider.service = service - def set_default_dimensions(self, **dimensions) -> None: - """Persist dimensions across Lambda invocations - Parameters - ---------- - dimensions : Dict[str, Any], optional - metric dimensions as key=value +# Maintenance: until v3, we can't afford to break customers. +# AmazonCloudWatchEMFProvider has the exact same functionality (non-singleton) +# so we simply alias. If a customer subclassed `EphemeralMetrics` and somehow relied on __name__ +# we can quickly revert and duplicate code while using self.provider - Example - ------- - **Sets some default dimensions that will always be present across metrics and invocations** - - from aws_lambda_powertools import Metrics - - metrics = Metrics(namespace="ServerlessAirline", service="payment") - metrics.set_default_dimensions(environment="demo", another="one") - - @metrics.log_metrics() - def lambda_handler(): - return True - """ - for name, value in dimensions.items(): - self.add_dimension(name, value) - - self.default_dimensions.update(**dimensions) - - def clear_default_dimensions(self) -> None: - self.default_dimensions.clear() - - def clear_metrics(self) -> None: - super().clear_metrics() - # re-add default dimensions - self.set_default_dimensions(**self.default_dimensions) +EphemeralMetrics = AmazonCloudWatchEMFProvider diff --git a/aws_lambda_powertools/metrics/provider/__init__.py b/aws_lambda_powertools/metrics/provider/__init__.py new file mode 100644 index 00000000000..30019199c52 --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/__init__.py @@ -0,0 +1,5 @@ +from aws_lambda_powertools.metrics.provider.base import BaseProvider + +__all__ = [ + "BaseProvider", +] diff --git a/aws_lambda_powertools/metrics/provider/base.py b/aws_lambda_powertools/metrics/provider/base.py new file mode 100644 index 00000000000..702b4b3d2ba --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/base.py @@ -0,0 +1,234 @@ +from __future__ import annotations + +import functools +import logging +from abc import ABC, abstractmethod +from typing import Any, Callable, Dict, Optional + +from aws_lambda_powertools.metrics.provider import cold_start +from aws_lambda_powertools.utilities.typing import LambdaContext + +logger = logging.getLogger(__name__) + + +class BaseProvider(ABC): + """ + Interface to create a metrics provider. + + BaseProvider implements `log_metrics` decorator for every provider as a value add feature. + + Usage: + 1. Inherit from this class. + 2. Implement the required methods specific to your metric provider. + 3. Customize the behavior and functionality of the metric provider in your subclass. 
+    """
+
+    @abstractmethod
+    def add_metric(self, *args: Any, **kwargs: Any) -> Any:
+        """
+        Abstract method for adding a metric.
+
+        This method must be implemented in subclasses to add a metric and return a combined metrics dictionary.
+
+        Parameters
+        ----------
+        *args:
+            Positional arguments.
+        **kwargs:
+            Keyword arguments.
+
+        Returns
+        -------
+        Dict
+            A combined metrics dictionary.
+
+        Raises
+        ------
+        NotImplementedError
+            This method must be implemented in subclasses.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def serialize_metric_set(self, *args: Any, **kwargs: Any) -> Any:
+        """
+        Abstract method for serializing the metric set.
+
+        This method must be implemented in subclasses to serialize the stored metrics and return them as a dictionary.
+
+        Parameters
+        ----------
+        *args:
+            Positional arguments.
+        **kwargs:
+            Keyword arguments.
+
+        Returns
+        -------
+        Dict
+            Serialized metrics
+
+        Raises
+        ------
+        NotImplementedError
+            This method must be implemented in subclasses.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def flush_metrics(self, *args: Any, **kwargs) -> Any:
+        """
+        Abstract method for flushing metrics.
+
+        This method must be implemented in subclasses to publish the stored metrics.
+
+        Parameters
+        ----------
+        *args:
+            Positional arguments.
+        **kwargs:
+            Keyword arguments.
+
+        Raises
+        ------
+        NotImplementedError
+            This method must be implemented in subclasses.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def clear_metrics(self, *args: Any, **kwargs) -> None:
+        """
+        Abstract method for clearing the metric instance.
+
+        This method must be implemented in subclasses to clear the metric instance.
+
+        Parameters
+        ----------
+        *args:
+            Positional arguments.
+        **kwargs:
+            Keyword arguments.
+
+        Raises
+        ------
+        NotImplementedError
+            This method must be implemented in subclasses.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def add_cold_start_metric(self, context: LambdaContext) -> Any:
+        """
+        Abstract method for adding a cold start metric.
+
+        This method must be implemented in subclasses to add a cold start metric.
+
+        Parameters
+        ----------
+        context : LambdaContext
+            Lambda context object.
+
+        Raises
+        ------
+        NotImplementedError
+            This method must be implemented in subclasses.
+        """
+        raise NotImplementedError
+
+    def log_metrics(
+        self,
+        lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None,
+        capture_cold_start_metric: bool = False,
+        raise_on_empty_metrics: bool = False,
+        **kwargs,
+    ):
+        """Decorator to serialize and publish metrics at the end of a function execution.
+
+        Be aware that the log_metrics **does call** the decorated function (e.g. lambda_handler).
+
+        Example
+        -------
+        **Lambda function using tracer and metrics decorators**
+
+            from aws_lambda_powertools import Metrics, Tracer
+
+            metrics = Metrics(service="payment")
+            tracer = Tracer(service="payment")
+
+            @tracer.capture_lambda_handler
+            @metrics.log_metrics
+            def handler(event, context):
+                ...
+ + Parameters + ---------- + lambda_handler : Callable[[Any, Any], Any], optional + lambda function handler, by default None + capture_cold_start_metric : bool, optional + captures cold start metric, by default False + raise_on_empty_metrics : bool, optional + raise exception if no metrics are emitted, by default False + default_dimensions: Dict[str, str], optional + metric dimensions as key=value that will always be present + + Raises + ------ + e + Propagate error received + """ + extra_args = {} + + if kwargs.get("default_dimensions"): + extra_args.update({"default_dimensions": kwargs.get("default_dimensions")}) + + if kwargs.get("default_tags"): + extra_args.update({"default_tags": kwargs.get("default_tags")}) + + # If handler is None we've been called with parameters + # Return a partial function with args filled + if lambda_handler is None: + logger.debug("Decorator called with parameters") + return functools.partial( + self.log_metrics, + capture_cold_start_metric=capture_cold_start_metric, + raise_on_empty_metrics=raise_on_empty_metrics, + **extra_args, + ) + + @functools.wraps(lambda_handler) + def decorate(event, context): + try: + response = lambda_handler(event, context) + if capture_cold_start_metric: + self._add_cold_start_metric(context=context) + finally: + self.flush_metrics(raise_on_empty_metrics=raise_on_empty_metrics) + + return response + + return decorate + + def _add_cold_start_metric(self, context: Any) -> None: + """ + Add cold start metric + + Parameters + ---------- + context : Any + Lambda context + """ + if not cold_start.is_cold_start: + return + + logger.debug("Adding cold start metric and function_name dimension") + self.add_cold_start_metric(context=context) + + cold_start.is_cold_start = False + + +def reset_cold_start_flag_provider(): + if not cold_start.is_cold_start: + cold_start.is_cold_start = True diff --git a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/__init__.py b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/cloudwatch.py b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/cloudwatch.py new file mode 100644 index 00000000000..16be60112c3 --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/cloudwatch.py @@ -0,0 +1,426 @@ +from __future__ import annotations + +import datetime +import json +import logging +import numbers +import os +import warnings +from collections import defaultdict +from typing import Any, Callable, Dict, List, Optional + +from aws_lambda_powertools.metrics.base import single_metric +from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError +from aws_lambda_powertools.metrics.functions import ( + extract_cloudwatch_metric_resolution_value, + extract_cloudwatch_metric_unit_value, +) +from aws_lambda_powertools.metrics.provider.base import BaseProvider +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.constants import MAX_DIMENSIONS, MAX_METRICS +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import MetricResolution, MetricUnit +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.types import CloudWatchEMFOutput +from aws_lambda_powertools.metrics.types import MetricNameUnitResolution +from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.shared.functions import resolve_env_var_choice +from aws_lambda_powertools.utilities.typing import LambdaContext + 
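Before the CloudWatch EMF implementation that follows, it is worth seeing how the new `BaseProvider` ABC is meant to be extended: implement the five abstract methods and inherit `log_metrics` plus the cold-start bookkeeping for free. A minimal in-memory sketch; `InMemoryProvider` and its storage are illustrative, not part of the library:

```python
from typing import Any, Dict, List

from aws_lambda_powertools.metrics.provider.base import BaseProvider
from aws_lambda_powertools.utilities.typing import LambdaContext


class InMemoryProvider(BaseProvider):
    """Illustrative provider that buffers metrics in memory and prints them on flush."""

    def __init__(self) -> None:
        self.metrics: List[Dict[str, Any]] = []

    def add_metric(self, name: str, value: float, *args: Any, **kwargs: Any) -> None:
        self.metrics.append({"name": name, "value": value})

    def serialize_metric_set(self, *args: Any, **kwargs: Any) -> Dict[str, Any]:
        return {"metrics": self.metrics}

    def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
        if raise_on_empty_metrics and not self.metrics:
            raise ValueError("No metrics to flush")
        print(self.serialize_metric_set())
        self.clear_metrics()

    def clear_metrics(self, *args: Any, **kwargs: Any) -> None:
        self.metrics.clear()

    def add_cold_start_metric(self, context: LambdaContext) -> None:
        # A real provider could also record context.function_name as a dimension
        self.add_metric(name="ColdStart", value=1)


provider = InMemoryProvider()


# The inherited decorator flushes after the handler returns and emits ColdStart once
@provider.log_metrics(capture_cold_start_metric=True)
def handler(event, context):
    provider.add_metric(name="SuccessfulBooking", value=1)
    return {"statusCode": 200}
```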
+logger = logging.getLogger(__name__)
+
+
+class AmazonCloudWatchEMFProvider(BaseProvider):
+    """
+    AmazonCloudWatchEMFProvider creates metrics asynchronously via CloudWatch Embedded Metric Format (EMF).
+
+    CloudWatch EMF can hold up to 100 metrics per EMF object. Metrics, dimensions,
+    and namespaces created via AmazonCloudWatchEMFProvider adhere to the schema
+    and are serialized and validated against the EMF schema.
+
+    **Use `aws_lambda_powertools.Metrics` or
+    `aws_lambda_powertools.single_metric` to create EMF metrics.**
+
+    Environment variables
+    ---------------------
+    POWERTOOLS_METRICS_NAMESPACE : str
+        metric namespace to be set for all metrics
+    POWERTOOLS_SERVICE_NAME : str
+        service name used for default dimension
+
+    Raises
+    ------
+    MetricUnitError
+        When metric unit isn't supported by CloudWatch
+    MetricResolutionError
+        When metric resolution isn't supported by CloudWatch
+    MetricValueError
+        When metric value isn't a number
+    SchemaValidationError
+        When metric object fails EMF schema validation
+    """
+
+    def __init__(
+        self,
+        metric_set: Dict[str, Any] | None = None,
+        dimension_set: Dict | None = None,
+        namespace: str | None = None,
+        metadata_set: Dict[str, Any] | None = None,
+        service: str | None = None,
+        default_dimensions: Dict[str, Any] | None = None,
+    ):
+        self.metric_set = metric_set if metric_set is not None else {}
+        self.dimension_set = dimension_set if dimension_set is not None else {}
+        self.default_dimensions = default_dimensions or {}
+        self.namespace = resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV))
+        self.service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV))
+        self.metadata_set = metadata_set if metadata_set is not None else {}
+
+        self._metric_units = [unit.value for unit in MetricUnit]
+        self._metric_unit_valid_options = list(MetricUnit.__members__)
+        self._metric_resolutions = [resolution.value for resolution in MetricResolution]
+
+        self.dimension_set.update(**self.default_dimensions)
+
+    def add_metric(
+        self,
+        name: str,
+        unit: MetricUnit | str,
+        value: float,
+        resolution: MetricResolution | int = 60,
+    ) -> None:
+        """Adds given metric
+
+        Example
+        -------
+        **Add given metric using MetricUnit enum**
+
+            metric.add_metric(name="BookingConfirmation", unit=MetricUnit.Count, value=1)
+
+        **Add given metric using plain string as value unit**
+
+            metric.add_metric(name="BookingConfirmation", unit="Count", value=1)
+
+        **Add given metric with a non-default MetricResolution value**
+
+            metric.add_metric(name="BookingConfirmation", unit="Count", value=1, resolution=MetricResolution.High)
+
+        Parameters
+        ----------
+        name : str
+            Metric name
+        unit : Union[MetricUnit, str]
+            `aws_lambda_powertools.helper.models.MetricUnit`
+        value : float
+            Metric value
+        resolution : Union[MetricResolution, int]
+            `aws_lambda_powertools.helper.models.MetricResolution`
+
+        Raises
+        ------
+        MetricUnitError
+            When metric unit is not supported by CloudWatch
+        MetricResolutionError
+            When metric resolution is not supported by CloudWatch
+        """
+        if not isinstance(value, numbers.Number):
+            raise MetricValueError(f"{value} is not a valid number")
+
+        unit = extract_cloudwatch_metric_unit_value(
+            metric_units=self._metric_units,
+            metric_valid_options=self._metric_unit_valid_options,
+            unit=unit,
+        )
+        resolution = extract_cloudwatch_metric_resolution_value(
+            metric_resolutions=self._metric_resolutions,
+            resolution=resolution,
+        )
+        metric: Dict = self.metric_set.get(name, 
defaultdict(list))
+        metric["Unit"] = unit
+        metric["StorageResolution"] = resolution
+        metric["Value"].append(float(value))
+        logger.debug(f"Adding metric: {name} with {metric}")
+        self.metric_set[name] = metric
+
+        if len(self.metric_set) == MAX_METRICS or len(metric["Value"]) == MAX_METRICS:
+            logger.debug(f"Exceeded maximum of {MAX_METRICS} metrics - Publishing existing metric set")
+            metrics = self.serialize_metric_set()
+            print(json.dumps(metrics))
+
+            # clear metric set only as opposed to metrics and dimensions set
+            # since we could have more than 100 metrics
+            self.metric_set.clear()
+
+    def serialize_metric_set(
+        self,
+        metrics: Dict | None = None,
+        dimensions: Dict | None = None,
+        metadata: Dict | None = None,
+    ) -> CloudWatchEMFOutput:
+        """Serializes metric and dimensions set
+
+        Parameters
+        ----------
+        metrics : Dict, optional
+            Dictionary of metrics to serialize, by default None
+        dimensions : Dict, optional
+            Dictionary of dimensions to serialize, by default None
+        metadata: Dict, optional
+            Dictionary of metadata to serialize, by default None
+
+        Example
+        -------
+        **Serialize metrics into EMF format**
+
+            metrics = MetricManager()
+            # ...add metrics, dimensions, namespace
+            ret = metrics.serialize_metric_set()
+
+        Returns
+        -------
+        Dict
+            Serialized metrics following EMF specification
+
+        Raises
+        ------
+        SchemaValidationError
+            Raised when serialization fails schema validation
+        """
+        if metrics is None:  # pragma: no cover
+            metrics = self.metric_set
+
+        if dimensions is None:  # pragma: no cover
+            dimensions = self.dimension_set
+
+        if metadata is None:  # pragma: no cover
+            metadata = self.metadata_set
+
+        if self.service and not self.dimension_set.get("service"):
+            # self.service won't be a float
+            self.add_dimension(name="service", value=self.service)
+
+        if len(metrics) == 0:
+            raise SchemaValidationError("Must contain at least one metric.")
+
+        if self.namespace is None:
+            raise SchemaValidationError("Must contain a metric namespace.")
+
+        logger.debug({"details": "Serializing metrics", "metrics": metrics, "dimensions": dimensions})
+
+        # For standard resolution metrics, don't add StorageResolution field to avoid unnecessary ingestion of data into CloudWatch # noqa E501
+        # Example: [ { "Name": "metric_name", "Unit": "Count"} ]  # noqa ERA001
+        #
+        # In case using high-resolution metrics, add StorageResolution field
+        # Example: [ { "Name": "metric_name", "Unit": "Count", "StorageResolution": 1 } ]  # noqa ERA001
+        metric_definition: List[MetricNameUnitResolution] = []
+        metric_names_and_values: Dict[str, float] = {}  # { "metric_name": 1.0 }
+
+        for metric_name in metrics:
+            metric: dict = metrics[metric_name]
+            metric_value: int = metric.get("Value", 0)
+            metric_unit: str = metric.get("Unit", "")
+            metric_resolution: int = metric.get("StorageResolution", 60)
+
+            metric_definition_data: MetricNameUnitResolution = {"Name": metric_name, "Unit": metric_unit}
+
+            # high-resolution metrics
+            if metric_resolution == 1:
+                metric_definition_data["StorageResolution"] = metric_resolution
+
+            metric_definition.append(metric_definition_data)
+
+            metric_names_and_values.update({metric_name: metric_value})
+
+        return {
+            "_aws": {
+                "Timestamp": int(datetime.datetime.now().timestamp() * 1000),  # epoch
+                "CloudWatchMetrics": [
+                    {
+                        "Namespace": self.namespace,  # "test_namespace"
+                        "Dimensions": [list(dimensions.keys())],  # [ "service" ]
+                        "Metrics": metric_definition,
+                    },
+                ],
+            },
+            # NOTE: Mypy doesn't recognize splats '** syntax' in TypedDict
+            **dimensions,  # type: 
ignore[misc]  # "service": "test_service"
+            **metadata,  # "username": "test"
+            **metric_names_and_values,  # "single_metric": 1.0
+        }
+
+    def add_dimension(self, name: str, value: str) -> None:
+        """Adds given dimension to all metrics
+
+        Example
+        -------
+        **Add a metric dimension**
+
+            metric.add_dimension(name="operation", value="confirm_booking")
+
+        Parameters
+        ----------
+        name : str
+            Dimension name
+        value : str
+            Dimension value
+        """
+        logger.debug(f"Adding dimension: {name}:{value}")
+        if len(self.dimension_set) == MAX_DIMENSIONS:
+            raise SchemaValidationError(
+                f"Maximum number of dimensions exceeded ({MAX_DIMENSIONS}): Unable to add dimension {name}.",
+            )
+        # Cast value to str according to EMF spec
+        # Majority of values are expected to be string already, so
+        # checking before casting improves performance in most cases
+        self.dimension_set[name] = value if isinstance(value, str) else str(value)
+
+    def add_metadata(self, key: str, value: Any) -> None:
+        """Adds high-cardinality metadata for metrics object
+
+        This will not be available during metrics visualization.
+        Instead, this will be searchable through logs.
+
+        If you're looking to add metadata to filter metrics, then
+        use the add_dimension method.
+
+        Example
+        -------
+        **Add metrics metadata**
+
+            metric.add_metadata(key="booking_id", value="booking_id")
+
+        Parameters
+        ----------
+        key : str
+            Metadata key
+        value : any
+            Metadata value
+        """
+        logger.debug(f"Adding metadata: {key}:{value}")
+
+        # Cast key to str according to EMF spec
+        # Majority of keys are expected to be string already, so
+        # checking before casting improves performance in most cases
+        if isinstance(key, str):
+            self.metadata_set[key] = value
+        else:
+            self.metadata_set[str(key)] = value
+
+    def clear_metrics(self) -> None:
+        logger.debug("Clearing out existing metric set from memory")
+        self.metric_set.clear()
+        self.dimension_set.clear()
+        self.metadata_set.clear()
+        self.set_default_dimensions(**self.default_dimensions)
+
+    def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
+        """Manually flushes the metrics. This is normally not necessary,
+        unless you're running on other runtimes besides Lambda, where the @log_metrics
+        decorator already handles things for you.
+
+        Parameters
+        ----------
+        raise_on_empty_metrics : bool, optional
+            raise exception if no metrics are emitted, by default False
+        """
+        if not raise_on_empty_metrics and not self.metric_set:
+            warnings.warn(
+                "No application metrics to publish. The cold-start metric may be published if enabled. "
+                "If application metrics should never be empty, consider using 'raise_on_empty_metrics'",
+                stacklevel=2,
+            )
+        else:
+            logger.debug("Flushing existing metrics")
+            metrics = self.serialize_metric_set()
+            print(json.dumps(metrics, separators=(",", ":")))
+            self.clear_metrics()
+
+    def log_metrics(
+        self,
+        lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None,
+        capture_cold_start_metric: bool = False,
+        raise_on_empty_metrics: bool = False,
+        **kwargs,
+    ):
+        """Decorator to serialize and publish metrics at the end of a function execution.
+
+        Be aware that log_metrics **does call** the decorated function (e.g. lambda_handler).
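+
+        Metrics are flushed in a `finally` block, so they are serialized and emitted
+        even when the decorated handler raises an exception.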
+ + Example + ------- + **Lambda function using tracer and metrics decorators** + + from aws_lambda_powertools import Metrics, Tracer + + metrics = Metrics(service="payment") + tracer = Tracer(service="payment") + + @tracer.capture_lambda_handler + @metrics.log_metrics + def handler(event, context): + ... + + Parameters + ---------- + lambda_handler : Callable[[Any, Any], Any], optional + lambda function handler, by default None + capture_cold_start_metric : bool, optional + captures cold start metric, by default False + raise_on_empty_metrics : bool, optional + raise exception if no metrics are emitted, by default False + **kwargs + + Raises + ------ + e + Propagate error received + """ + + default_dimensions = kwargs.get("default_dimensions") + + if default_dimensions: + self.set_default_dimensions(**default_dimensions) + + return super().log_metrics( + lambda_handler=lambda_handler, + capture_cold_start_metric=capture_cold_start_metric, + raise_on_empty_metrics=raise_on_empty_metrics, + **kwargs, + ) + + def add_cold_start_metric(self, context: LambdaContext) -> None: + """Add cold start metric and function_name dimension + + Parameters + ---------- + context : Any + Lambda context + """ + logger.debug("Adding cold start metric and function_name dimension") + with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace=self.namespace) as metric: + metric.add_dimension(name="function_name", value=context.function_name) + if self.service: + metric.add_dimension(name="service", value=str(self.service)) + + def set_default_dimensions(self, **dimensions) -> None: + """Persist dimensions across Lambda invocations + + Parameters + ---------- + dimensions : Dict[str, Any], optional + metric dimensions as key=value + + Example + ------- + **Sets some default dimensions that will always be present across metrics and invocations** + + from aws_lambda_powertools import Metrics + + metrics = Metrics(namespace="ServerlessAirline", service="payment") + metrics.set_default_dimensions(environment="demo", another="one") + + @metrics.log_metrics() + def lambda_handler(): + return True + """ + for name, value in dimensions.items(): + self.add_dimension(name, value) + + self.default_dimensions.update(**dimensions) diff --git a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/constants.py b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/constants.py new file mode 100644 index 00000000000..d8f5da0cec8 --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/constants.py @@ -0,0 +1,2 @@ +MAX_DIMENSIONS = 29 +MAX_METRICS = 100 diff --git a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/exceptions.py b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/exceptions.py new file mode 100644 index 00000000000..6ac2d932ea7 --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/exceptions.py @@ -0,0 +1,10 @@ +class MetricUnitError(Exception): + """When metric unit is not supported by CloudWatch""" + + pass + + +class MetricResolutionError(Exception): + """When metric resolution is not supported by CloudWatch""" + + pass diff --git a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/metric_properties.py b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/metric_properties.py new file mode 100644 index 00000000000..ea11bb997bb --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/metric_properties.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from enum import Enum + + +class MetricUnit(Enum): + Seconds = 
"Seconds" + Microseconds = "Microseconds" + Milliseconds = "Milliseconds" + Bytes = "Bytes" + Kilobytes = "Kilobytes" + Megabytes = "Megabytes" + Gigabytes = "Gigabytes" + Terabytes = "Terabytes" + Bits = "Bits" + Kilobits = "Kilobits" + Megabits = "Megabits" + Gigabits = "Gigabits" + Terabits = "Terabits" + Percent = "Percent" + Count = "Count" + BytesPerSecond = "Bytes/Second" + KilobytesPerSecond = "Kilobytes/Second" + MegabytesPerSecond = "Megabytes/Second" + GigabytesPerSecond = "Gigabytes/Second" + TerabytesPerSecond = "Terabytes/Second" + BitsPerSecond = "Bits/Second" + KilobitsPerSecond = "Kilobits/Second" + MegabitsPerSecond = "Megabits/Second" + GigabitsPerSecond = "Gigabits/Second" + TerabitsPerSecond = "Terabits/Second" + CountPerSecond = "Count/Second" + + +class MetricResolution(Enum): + Standard = 60 + High = 1 diff --git a/aws_lambda_powertools/metrics/provider/cloudwatch_emf/types.py b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/types.py new file mode 100644 index 00000000000..bf3a48ea13f --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/cloudwatch_emf/types.py @@ -0,0 +1,24 @@ +from typing import List + +from typing_extensions import NotRequired, TypedDict + + +class CloudWatchEMFMetric(TypedDict): + Name: str + Unit: str + StorageResolution: NotRequired[int] + + +class CloudWatchEMFMetrics(TypedDict): + Namespace: str + Dimensions: List[List[str]] # [ [ 'test_dimension' ] ] + Metrics: List[CloudWatchEMFMetric] + + +class CloudWatchEMFRoot(TypedDict): + Timestamp: int + CloudWatchMetrics: List[CloudWatchEMFMetrics] + + +class CloudWatchEMFOutput(TypedDict): + _aws: CloudWatchEMFRoot diff --git a/aws_lambda_powertools/metrics/provider/cold_start.py b/aws_lambda_powertools/metrics/provider/cold_start.py new file mode 100644 index 00000000000..c6ef67bd787 --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/cold_start.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +is_cold_start = True + + +def reset_cold_start_flag(): + global is_cold_start + if not is_cold_start: + is_cold_start = True diff --git a/aws_lambda_powertools/metrics/provider/datadog/__init__.py b/aws_lambda_powertools/metrics/provider/datadog/__init__.py new file mode 100644 index 00000000000..23cb35d31eb --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/datadog/__init__.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools.metrics.provider.datadog.datadog import DatadogProvider +from aws_lambda_powertools.metrics.provider.datadog.metrics import DatadogMetrics + +__all__ = [ + "DatadogMetrics", + "DatadogProvider", +] diff --git a/aws_lambda_powertools/metrics/provider/datadog/datadog.py b/aws_lambda_powertools/metrics/provider/datadog/datadog.py new file mode 100644 index 00000000000..6195589cd1b --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/datadog/datadog.py @@ -0,0 +1,391 @@ +from __future__ import annotations + +import json +import logging +import numbers +import os +import re +import time +import warnings +from typing import Any, Callable, Dict, List, Optional + +from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError +from aws_lambda_powertools.metrics.provider import BaseProvider +from aws_lambda_powertools.metrics.provider.datadog.warnings import DatadogDataValidationWarning +from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.shared.functions import resolve_env_var_choice +from aws_lambda_powertools.utilities.typing import LambdaContext + +METRIC_NAME_REGEX = 
re.compile(r"^[a-zA-Z0-9_.]+$")
+
+logger = logging.getLogger(__name__)
+
+# Check if using datadog layer
+try:
+    from datadog_lambda.metric import lambda_metric  # type: ignore
+except ImportError:  # pragma: no cover
+    lambda_metric = None  # pragma: no cover
+
+DEFAULT_NAMESPACE = "default"
+
+
+class DatadogProvider(BaseProvider):
+    """
+    DatadogProvider creates metrics asynchronously via Datadog extension or exporter.
+
+    **Use `aws_lambda_powertools.DatadogMetrics` to create and send metrics to Datadog.**
+
+    Environment variables
+    ---------------------
+    POWERTOOLS_METRICS_NAMESPACE : str
+        metric namespace to be set for all metrics
+
+    Raises
+    ------
+    MetricValueError
+        When metric value isn't a number
+    SchemaValidationError
+        When metric object fails Datadog schema validation
+    """
+
+    def __init__(
+        self,
+        metric_set: List | None = None,
+        namespace: str | None = None,
+        flush_to_log: bool | None = None,
+        default_tags: Dict[str, Any] | None = None,
+    ):
+        self.metric_set = metric_set if metric_set is not None else []
+        self.namespace = (
+            resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV))
+            or DEFAULT_NAMESPACE
+        )
+        self.default_tags = default_tags or {}
+        self.flush_to_log = resolve_env_var_choice(choice=flush_to_log, env=os.getenv(constants.DATADOG_FLUSH_TO_LOG))
+
+    # add a metric with name, value, timestamp, and tags
+    def add_metric(
+        self,
+        name: str,
+        value: float,
+        timestamp: int | None = None,
+        **tags,
+    ) -> None:
+        """
+        Add a metric to the metric set; used by the metrics class.
+
+        Parameters
+        ----------
+        name: str
+            Name/Key for the metrics
+        value: float
+            Value for the metrics
+        timestamp: int
+            Unix epoch timestamp for the metric, by default int(time.time())
+        **tags
+            Extra keyword arguments are converted into tags,
+            e.g., add_metric(sales='sam') -> tags=['sales:sam']
+
+        Examples
+        --------
+        >>> provider = DatadogProvider()
+        >>>
+        >>> provider.add_metric(
+        >>>     name='coffee_house.order_value',
+        >>>     value=12.45,
+        >>>     product='latte',
+        >>>     order='online',
+        >>>     sales='sam'
+        >>> )
+        """
+
+        # validating metric name
+        if not self._validate_datadog_metric_name(name):
+            docs = "https://docs.datadoghq.com/metrics/custom_metrics/#naming-custom-metrics"
+            raise SchemaValidationError(
+                f"Invalid metric name. Please ensure the metric {name} follows the requirements. 
\n"
+                f"See Datadog documentation here: \n {docs}",
+            )
+
+        # validating metric tag
+        self._validate_datadog_tags_name(tags)
+
+        if not isinstance(value, numbers.Real):
+            raise MetricValueError(f"{value} is not a valid number")
+
+        if not timestamp:
+            timestamp = int(time.time())
+
+        logger.debug({"details": "Appending metric", "metrics": name})
+        self.metric_set.append({"m": name, "v": value, "e": timestamp, "t": tags})
+
+    def serialize_metric_set(self, metrics: List | None = None) -> List:
+        """Serializes metrics
+
+        Example
+        -------
+        **Serialize metrics into Datadog format**
+
+            metrics = DatadogMetrics()
+            # ...add metrics, tags, namespace
+            ret = metrics.serialize_metric_set()
+
+        Returns
+        -------
+        List
+            Serialized metrics following Datadog specification
+
+        Raises
+        ------
+        SchemaValidationError
+            Raised when serialization fails schema validation
+        """
+
+        if metrics is None:  # pragma: no cover
+            metrics = self.metric_set
+
+        if len(metrics) == 0:
+            raise SchemaValidationError("Must contain at least one metric.")
+
+        output_list: List = []
+
+        logger.debug({"details": "Serializing metrics", "metrics": metrics})
+
+        for single_metric in metrics:
+            if self.namespace != DEFAULT_NAMESPACE:
+                metric_name = f"{self.namespace}.{single_metric['m']}"
+            else:
+                metric_name = single_metric["m"]
+
+            output_list.append(
+                {
+                    "m": metric_name,
+                    "v": single_metric["v"],
+                    "e": single_metric["e"],
+                    "t": self._serialize_datadog_tags(metric_tags=single_metric["t"], default_tags=self.default_tags),
+                },
+            )
+
+        return output_list
+
+    # flush serialized data to output
+    def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
+        """Manually flushes the metrics. This is normally not necessary,
+        unless you're running on other runtimes besides Lambda, where the @log_metrics
+        decorator already handles things for you.
+
+        Parameters
+        ----------
+        raise_on_empty_metrics : bool, optional
+            raise exception if no metrics are emitted, by default False
+        """
+        if not raise_on_empty_metrics and len(self.metric_set) == 0:
+            warnings.warn(
+                "No application metrics to publish. The cold-start metric may be published if enabled. "
+                "If application metrics should never be empty, consider using 'raise_on_empty_metrics'",
+                stacklevel=2,
+            )
+
+        else:
+            logger.debug("Flushing existing metrics")
+            metrics = self.serialize_metric_set()
+            # submit through datadog extension
+            if lambda_metric and not self.flush_to_log:
+                # use lambda_metric function from datadog package, submit metrics to datadog
+                for metric_item in metrics:  # pragma: no cover
+                    lambda_metric(  # pragma: no cover
+                        metric_name=metric_item["m"],
+                        value=metric_item["v"],
+                        timestamp=metric_item["e"],
+                        tags=metric_item["t"],
+                    )
+            else:
+                # datadog-lambda package not found: flush to log; this format is recognized by the Datadog log forwarder
+                # https://github.com/Datadog/datadog-lambda-python/blob/main/datadog_lambda/metric.py#L77
+                for metric_item in metrics:
+                    print(json.dumps(metric_item, separators=(",", ":")))
+
+            self.clear_metrics()
+
+    def clear_metrics(self):
+        logger.debug("Clearing out existing metric set from memory")
+        self.metric_set.clear()
+
+    def add_cold_start_metric(self, context: LambdaContext) -> None:
+        """Add cold start metric and function_name tag
+
+        Parameters
+        ----------
+        context : Any
+            Lambda context
+        """
+        logger.debug("Adding cold start metric and function_name tagging")
+        self.add_metric(name="ColdStart", value=1, function_name=context.function_name)
+
+    def log_metrics(
+        self,
+        lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None,
+        capture_cold_start_metric: bool = False,
+        raise_on_empty_metrics: bool = False,
+        **kwargs,
+    ):
+        """Decorator to serialize and publish metrics at the end of a function execution.
+
+        Be aware that log_metrics **does call** the decorated function (e.g. lambda_handler).
+
+        Example
+        -------
+        **Lambda function using tracer and metrics decorators**
+
+            from aws_lambda_powertools import Tracer
+            from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+
+            metrics = DatadogMetrics(namespace="powertools")
+            tracer = Tracer(service="payment")
+
+            @tracer.capture_lambda_handler
+            @metrics.log_metrics
+            def handler(event, context): ...
+
+        Parameters
+        ----------
+        lambda_handler : Callable[[Any, Any], Any], optional
+            lambda function handler, by default None
+        capture_cold_start_metric : bool, optional
+            captures cold start metric, by default False
+        raise_on_empty_metrics : bool, optional
+            raise exception if no metrics are emitted, by default False
+        **kwargs
+
+        Raises
+        ------
+        e
+            Propagate error received
+        """
+
+        default_tags = kwargs.get("default_tags")
+
+        if default_tags:
+            self.set_default_tags(**default_tags)
+
+        return super().log_metrics(
+            lambda_handler=lambda_handler,
+            capture_cold_start_metric=capture_cold_start_metric,
+            raise_on_empty_metrics=raise_on_empty_metrics,
+            **kwargs,
+        )
+
+    def set_default_tags(self, **tags) -> None:
+        """Persist tags across Lambda invocations
+
+        Parameters
+        ----------
+        tags : **kwargs
+            tags as key=value
+
+        Example
+        -------
+        **Sets some default tags that will always be present across metrics and invocations**
+
+            from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+
+            metrics = DatadogMetrics(namespace="ServerlessAirline")
+            metrics.set_default_tags(environment="demo", another="one")
+
+            @metrics.log_metrics()
+            def lambda_handler(event, context):
+                return True
+        """
+        self._validate_datadog_tags_name(tags)
+        self.default_tags.update(**tags)
+
+    @staticmethod
+    def _serialize_datadog_tags(metric_tags: Dict[str, Any], default_tags: Dict[str, Any]) -> List[str]:
+        """
+        Serialize metric tags into a list of formatted strings for Datadog integration.
+
+        This function takes a dictionary of metric-specific tags or default tags.
+        It parses these tags and converts them into a list of strings in the format "tag_key:tag_value".
+
+        Parameters
+        ----------
+        metric_tags: Dict[str, Any]
+            A dictionary containing metric-specific tags.
+        default_tags: Dict[str, Any]
+            A dictionary containing default tags applicable to all metrics.
+
+        Returns
+        -------
+        List[str]
+            A list of formatted tag strings, each in the "tag_key:tag_value" format.
+
+        Example
+        -------
+        >>> metric_tags = {'environment': 'production', 'service': 'web'}
+        >>> DatadogProvider._serialize_datadog_tags(metric_tags, None)
+        ['environment:production', 'service:web']
+        """
+        tags = metric_tags or default_tags
+
+        return [f"{tag_key}:{tag_value}" for tag_key, tag_value in tags.items()]
+
+    @staticmethod
+    def _validate_datadog_tags_name(tags: Dict):
+        """
+        Validate a metric tag according to specific requirements.
+
+        Metric tags must start with a letter.
+        Metric tags must not exceed 200 characters. Fewer than 100 is preferred from a UI perspective.
+
+        More information here: https://docs.datadoghq.com/getting_started/tagging/#define-tags
+
+        Parameters
+        ----------
+        tags: Dict
+            The metric tags to be validated.
+        """
+        for tag_key, tag_value in tags.items():
+            tag = f"{tag_key}:{tag_value}"
+            if not tag[0].isalpha() or len(tag) > 200:
+                docs = "https://docs.datadoghq.com/getting_started/tagging/#define-tags"
+                warnings.warn(
+                    f"Invalid tag value. Please ensure the specific tag {tag} follows the requirements. \n"
+                    f"May incur data loss for metrics. \n"
+                    f"See Datadog documentation here: \n {docs}",
+                    DatadogDataValidationWarning,
+                    stacklevel=2,
+                )
+
+    @staticmethod
+    def _validate_datadog_metric_name(metric_name: str) -> bool:
+        """
+        Validate a metric name according to specific requirements.
+
+        Metric names must start with a letter.
+        Metric names must only contain ASCII alphanumerics, underscores, and periods.
+        Other characters, including spaces, are converted to underscores.
+        Unicode is not supported.
+
+        Metric names must not exceed 200 characters. Fewer than 100 is preferred from a UI perspective.
+
+        More information here: https://docs.datadoghq.com/metrics/custom_metrics/#naming-custom-metrics
+
+        Parameters
+        ----------
+        metric_name: str
+            The metric name to be validated.
+
+        Returns
+        -------
+        bool
+            True if the metric name is valid, False otherwise.
+        """
+
+        # Check if the metric name starts with a letter
+        # Check if the metric name contains more than 200 characters
+        # Check if the resulting metric name only contains ASCII alphanumerics, underscores, and periods
+        if not metric_name[0].isalpha() or len(metric_name) > 200 or not METRIC_NAME_REGEX.match(metric_name):
+            return False
+
+        return True
diff --git a/aws_lambda_powertools/metrics/provider/datadog/metrics.py b/aws_lambda_powertools/metrics/provider/datadog/metrics.py
new file mode 100644
index 00000000000..3ee4dc2f835
--- /dev/null
+++ b/aws_lambda_powertools/metrics/provider/datadog/metrics.py
@@ -0,0 +1,126 @@
+# NOTE: kept for compatibility
+from __future__ import annotations
+
+from typing import Any, Callable, Dict, List, Optional
+
+from aws_lambda_powertools.metrics.provider.datadog.datadog import DatadogProvider
+
+
+class DatadogMetrics:
+    """
+    DatadogMetrics creates metrics asynchronously via Datadog extension or exporter.
+
+    **Use `aws_lambda_powertools.DatadogMetrics` to create and send metrics to Datadog.**
+
+    Example
+    -------
+    **Creates a few metrics and publishes them at the end of a function execution**
+
+        from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics
+
+        metrics = DatadogMetrics(namespace="ServerlessAirline")
+
+        @metrics.log_metrics(capture_cold_start_metric=True)
+        def lambda_handler(event, context):
+            metrics.add_metric(name="item_sold", value=1, product="latte", order="online")
+            return True
+
+    Environment variables
+    ---------------------
+    POWERTOOLS_METRICS_NAMESPACE : str
+        metric namespace
+
+    Parameters
+    ----------
+    flush_to_log : bool, optional
+        Used when using export instead of Lambda Extension
+    namespace : str, optional
+        Namespace for metrics
+    provider: DatadogProvider, optional
+        Pre-configured DatadogProvider instance
+
+    Raises
+    ------
+    MetricValueError
+        When metric value isn't a number
+    SchemaValidationError
+        When metric object fails Datadog schema validation
+    """
+
+    # NOTE: We use class attrs to share metrics data across instances
+    # this allows customers to initialize Metrics() throughout their code base (and middlewares)
+    # and not get caught by accident with metrics data loss, or data deduplication
+    # e.g., m1 and m2 add metric ProductCreated, however m1 has 'version' tag but m2 doesn't
+    # Result: ProductCreated is created twice as we now have 2 different metric payloads
+    _metrics: List = []
+    _default_tags: Dict[str, Any] = {}
+
+    def __init__(
+        self,
+        namespace: str | None = None,
+        flush_to_log: bool | None = None,
+        provider: DatadogProvider | None = None,
+    ):
+        self.metric_set = self._metrics
+        self.default_tags = self._default_tags
+
+        if provider is None:
+            self.provider = DatadogProvider(
+                namespace=namespace,
+                flush_to_log=flush_to_log,
+                metric_set=self.metric_set,
+            )
+        else:
+            self.provider = provider
+
+    def add_metric(
+        self,
+        name: str,
+        value: float,
+        timestamp: int | None = None,
+        **tags: Any,
+    ) -> None:
+        self.provider.add_metric(name=name, value=value, timestamp=timestamp, **tags)
+
+    def serialize_metric_set(self, metrics: List | None = None) -> List:
+        return self.provider.serialize_metric_set(metrics=metrics)
+
+    def 
flush_metrics(self, raise_on_empty_metrics: bool = False) -> None:
+        self.provider.flush_metrics(raise_on_empty_metrics=raise_on_empty_metrics)
+
+    def log_metrics(
+        self,
+        lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None,
+        capture_cold_start_metric: bool = False,
+        raise_on_empty_metrics: bool = False,
+        default_tags: Dict[str, Any] | None = None,
+    ):
+        return self.provider.log_metrics(
+            lambda_handler=lambda_handler,
+            capture_cold_start_metric=capture_cold_start_metric,
+            raise_on_empty_metrics=raise_on_empty_metrics,
+            default_tags=default_tags,
+        )
+
+    def set_default_tags(self, **tags) -> None:
+        self.provider.set_default_tags(**tags)
+        self.default_tags.update(**tags)
+
+    def clear_metrics(self) -> None:
+        self.provider.clear_metrics()
+
+    def clear_default_tags(self) -> None:
+        self.provider.default_tags.clear()
+        self.default_tags.clear()
+
+    # We now allow customers to bring their own instance of DatadogProvider,
+    # so we need to define a getter/setter for the namespace property
+    # to access the attribute on the provider instance.
+    @property
+    def namespace(self):
+        return self.provider.namespace
+
+    @namespace.setter
+    def namespace(self, namespace):
+        self.provider.namespace = namespace
diff --git a/aws_lambda_powertools/metrics/provider/datadog/warnings.py b/aws_lambda_powertools/metrics/provider/datadog/warnings.py
new file mode 100644
index 00000000000..accf19526e7
--- /dev/null
+++ b/aws_lambda_powertools/metrics/provider/datadog/warnings.py
@@ -0,0 +1,8 @@
+class DatadogDataValidationWarning(Warning):
+    message: str
+
+    def __init__(self, message: str):
+        self.message = message
+
+    def __str__(self) -> str:
+        return self.message
diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py
index 0cde7582976..20a7fbf47d2 100644
--- a/aws_lambda_powertools/shared/constants.py
+++ b/aws_lambda_powertools/shared/constants.py
@@ -10,6 +10,8 @@
 METRICS_NAMESPACE_ENV: str = "POWERTOOLS_METRICS_NAMESPACE"
 
+DATADOG_FLUSH_TO_LOG: str = "DD_FLUSH_TO_LOG"
+
 SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME"
 XRAY_TRACE_ID_ENV: str = "_X_AMZN_TRACE_ID"
 LAMBDA_TASK_ROOT_ENV: str = "LAMBDA_TASK_ROOT"
diff --git a/aws_lambda_powertools/shared/version.py b/aws_lambda_powertools/shared/version.py
index f22bb031e1d..e1a3c9425d1 100644
--- a/aws_lambda_powertools/shared/version.py
+++ b/aws_lambda_powertools/shared/version.py
@@ -1,3 +1,3 @@
 """Exposes version constant to avoid circular dependencies."""
 
-VERSION = "2.21.0"
+VERSION = "2.23.0"
diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py
index 7eb14c38de4..7a3fc8ab404 100644
--- a/aws_lambda_powertools/utilities/data_classes/common.py
+++ b/aws_lambda_powertools/utilities/data_classes/common.py
@@ -30,7 +30,7 @@ def __init__(self, data: Dict[str, Any], json_deserializer: Optional[Callable] =
     def __getitem__(self, key: str) -> Any:
         return self._data[key]
 
-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         if not isinstance(other, DictWrapper):
             return False
diff --git a/aws_lambda_powertools/utilities/parser/models/apigwv2.py b/aws_lambda_powertools/utilities/parser/models/apigwv2.py
index 3be793dd951..8f0f8dbf50c 100644
--- a/aws_lambda_powertools/utilities/parser/models/apigwv2.py
+++ b/aws_lambda_powertools/utilities/parser/models/apigwv2.py
@@ -25,7 +25,7 @@ class RequestContextV2AuthorizerIam(BaseModel):
 
 class 
RequestContextV2AuthorizerJwt(BaseModel):
     claims: Dict[str, Any]
-    scopes: List[str]
+    scopes: Optional[List[str]] = None
 
 
 class RequestContextV2Authorizer(BaseModel):
diff --git a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py
index 7ef98fe4bb2..867cd996fa0 100644
--- a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py
+++ b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py
@@ -39,7 +39,7 @@ class S3ObjectSessionContext(BaseModel):
 
 
 class S3ObjectUserIdentity(BaseModel):
-    type: str  # noqa003
+    type: str  # noqa: A003
     accountId: str
     accessKeyId: str
     userName: Optional[str] = None
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 4d24b69c3f0..eaa3942f01e 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -1,5 +1,5 @@
 # v9.1.18
-FROM squidfunk/mkdocs-material@sha256:33e28bdae302bc1aa9c6783dd863742416cb1174bae4ad9d7bcc5b2efe685639
+FROM squidfunk/mkdocs-material@sha256:cd3a522b3282071586552499611f71206db0aec145d4d53822bc6fde76792cf8
 # pip-compile --generate-hashes --output-file=requirements.txt requirements.in
 COPY requirements.txt /tmp/
 RUN pip install --require-hashes -r /tmp/requirements.txt
diff --git a/docs/automation.md b/docs/automation.md
index d0aacb89977..467df2b9803 100644
--- a/docs/automation.md
+++ b/docs/automation.md
@@ -7,7 +7,7 @@ description: Automation practices and processes for Powertools for AWS Lambda (P
 
 ## Continuous integration practices
 
-!!! note "We adhere to industry recommendations from the [OSSF Scorecard project](https://bestpractices.coreinfrastructure.org/en/criteria){target="_blank"}, among [others](https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions){target="_blank"}."
+!!! note "We adhere to industry recommendations from the [OSSF Scorecard project](https://bestpractices.coreinfrastructure.org/en/criteria){target="_blank" rel="nofollow"}, among [others](https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions){target="_blank" rel="nofollow"}."
 
 Since all code changes require a pull request (PR) along with one or more reviewers, we automate quality and security checks **before**, **during**, and **after** a PR is merged to trunk (`develop`).
 
@@ -29,11 +29,11 @@ To prevent scenarios where these checks are intentionally omitted at the client
 
 !!! note "These run locally only for changed files"
 
-* [**Merge conflict check**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.pre-commit-config.yaml#L10){target="_blank"}. Checks for merge strings in each individual change accidentally left unresolved to prevent breakage.
+* [**Merge conflict check**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.pre-commit-config.yaml#L10){target="_blank"}. Checks for merge strings accidentally left unresolved in each individual change to prevent breakage.
 * [**Code linting**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/Makefile#L22){target="_blank"}. Linter checks for industry quality standards and known bad practices that could lead to abuse.
 * [**CloudFormation linting**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.pre-commit-config.yaml#L34){target="_blank"}. 
`cfn-lint` ensures [best practices](https://github.com/aws-cloudformation/cfn-lint/blob/86f0370bd43b400ed4c485180dbc2697f73367b2/docs/rules.md){target="_blank"} at our documentation examples.
 * [**Markdown linting**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.pre-commit-config.yaml#L25){target="_blank"}. Primarily [industry markdown practices](https://github.com/DavidAnson/markdownlint/blob/d01180ec5a014083ee9d574b693a8d7fbc1e566d/README.md#rules--aliases){target="_blank"} at this stage.
-* [**GitHub Actions linting**](https://github.com/rhysd/actionlint/blob/main/docs/checks.md){target="_blank"}. `actionlint` ensures workflows follow [GitHub Actions security practices](https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions){target="_blank"}. It guards against numerous [leading practices](https://github.com/rhysd/actionlint/blob/main/docs/checks.md){target="_blank"} to prevent common configuration mistakes, insecure inline scripts, among many others.
+* [**GitHub Actions linting**](https://github.com/rhysd/actionlint/blob/main/docs/checks.md){target="_blank" rel="nofollow"}. `actionlint` ensures workflows follow [GitHub Actions security practices](https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions){target="_blank" rel="nofollow"}. It guards against numerous [leading practices](https://github.com/rhysd/actionlint/blob/main/docs/checks.md){target="_blank" rel="nofollow"} to prevent common configuration mistakes, insecure inline scripts, among many others.
 * [**Terraform linting**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.pre-commit-config.yaml#L43){target="_blank"}. As of now, largely formatting until we increase our Terraform coverage in documentation examples.
 * [**Secrets linting**](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/.pre-commit-config.yaml#L49){target="_blank"}. Detects industry credentials that might be accidentally leaked in source code.
 
@@ -43,10 +43,10 @@ For an improved contributing experience, most of our checks can run locally. For
 
 !!! note "These are in addition to [pre-commit checks](#pre-commit-checks)."
 
-* [**Static typing analysis**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/Makefile#L108). `mypy` checks for static typing annotations to prevent common bugs in Python that may or may not lead to abuse.
+* [**Static typing analysis**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/Makefile#L108){target="_blank"}. `mypy` checks for static typing annotations to prevent common bugs in Python that may or may not lead to abuse.
 * [**Tests**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/Makefile#L31){target="_blank"}. We run `unit`, `functional`, and `performance` tests ([_see our definition_](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/CONTRIBUTING.md#testing-definition){target="_blank"}). Besides breaking changes, we are investing in mutation testing to find additional sources of bugs and potential abuse.
 * [**Security baseline**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/CONTRIBUTING.md#testing-definition){target="_blank"}. 
`bandit` detects common security issues defined by Python Code Quality Authority (PyCQA).
-* [**Complexity baseline**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/Makefile#L79){target="_blank"}. We run a series of maintenability and cyclomatic checks to reduce code and logic complexity. This aids reviewers' cognitive overhead and long-term maintainers revisiting legacy code at a later date.
+* [**Complexity baseline**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/Makefile#L79){target="_blank"}. We run a series of maintainability and cyclomatic checks to reduce code and logic complexity. This aids reviewers' cognitive overhead and long-term maintainers revisiting legacy code at a later date.
 
 ### Pull Request checks
 
@@ -54,7 +54,7 @@ While we trust contributors and maintainers do go through pre-commit and pre-pul
 
 !!! note "Checks described earlier are omitted to improve reading experience."
 
-* [**Semantic PR title**](https://github.com/Ezard/semantic-prs){target="_blank"}. We enforce PR titles follow semantic naming, for example `chore(category): change`. This benefits contributors with a lower entry bar, no need for semantic commits. It also benefits everyone looking for an [useful changelog message](https://docs.powertools.aws.dev/lambda/python/latest/changelog/){target="_blank"} on **what** changed and **where**.
+* [**Semantic PR title**](https://github.com/Ezard/semantic-prs){target="_blank" rel="nofollow"}. We enforce PR titles follow semantic naming, for example `chore(category): change`. This benefits contributors with a lower entry bar, with no need for semantic commits. It also benefits everyone looking for a [useful changelog message](https://docs.powertools.aws.dev/lambda/python/latest/changelog/){target="_blank"} on **what** changed and **where**.
 * [**Related issue check**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/on_opened_pr.yml#L44){target="_blank"}. [Every change requires an issue](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/CONTRIBUTING.md#contributing-via-pull-requests){target="_blank"} describing its needs. This enforces a PR has a related issue by blocking merge operations if missing.
 * [**Acknowledgment check**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/on_opened_pr.yml#L63){target="_blank"}. [Ensures PR template](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/PULL_REQUEST_TEMPLATE.md#L36){target="_blank"} is used and every contributor is aware of code redistribution.
 * [**Code coverage diff**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/quality_check.yml#L73){target="_blank"}. Educates contributors and maintainers about code coverage differences for a given change.
@@ -70,14 +70,14 @@ We strike a balance in security and contribution experience. These automated che
 
 * [**End-to-end tests**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/run-e2e-tests.yml#L41){target="_blank"}. 
We run E2E with a [high degree of parallelization](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/MAINTAINERS.md#test-runner-parallelization). While it is designed to also run locally, it may incur AWS charges to contributors. For additional security, all infrastructure is ephemeral per change and per Python version.
 * [**SAST check**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/codeql-analysis.yml#L20){target="_blank"}. GitHub CodeQL runs ~30m static analysis in the entire codebase.
-* [**Security posture check**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/ossf_scorecard.yml#L14){target="_blank"}. OSSF Scorecard runs numerous automated checks upon changes, and raises security alerts if [OSSF security practices](https://bestpractices.coreinfrastructure.org/en/criteria){target="_blank"} are no longer followed.
-* [**Rebuild Changelog**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/build_changelog.yml#L23){target="_blank"}. We rebuild our entire changelog upon changes and create a PR for maintainers. This has the added benefit in keeping a [protected branch](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/managing-protected-branches/about-protected-branches){target="_blank"} while keeping removing error-prone tasks from maintainers.
+* [**Security posture check**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/ossf_scorecard.yml#L14){target="_blank"}. OSSF Scorecard runs numerous automated checks upon changes, and raises security alerts if [OSSF security practices](https://bestpractices.coreinfrastructure.org/en/criteria){target="_blank" rel="nofollow"} are no longer followed.
+* [**Rebuild Changelog**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/build_changelog.yml#L23){target="_blank"}. We rebuild our entire changelog upon changes and create a PR for maintainers. This has the added benefit of keeping a [protected branch](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/managing-protected-branches/about-protected-branches){target="_blank" rel="nofollow"} while removing error-prone tasks from maintainers.
 * [**Stage documentation**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/on_push_docs.yml#L27C16-L27C16){target="_blank"}. We rebuild and deploy changes to the documentation to a [staged version](https://docs.powertools.aws.dev/lambda/python/stage/){target="_blank"}. This gives us safety that our docs can always be rebuilt, and ready to release to production when needed.
-* [**Update draft release**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/release-drafter.yml#L25){target="_blank"}. We use [Release Drafter](https://github.com/release-drafter/release-drafter){target="_blank"} to generate a portion of our release notes and to always keep a fresh draft upon changes. 
You can read our [thoughts on a good quality release notes here](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/MAINTAINERS.md#drafting-release-notes){target="_blank"} (human readable changes + automation).
+* [**Update draft release**](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/.github/workflows/release-drafter.yml#L25){target="_blank"}. We use [Release Drafter](https://github.com/release-drafter/release-drafter){target="_blank" rel="nofollow"} to generate a portion of our release notes and to always keep a fresh draft upon changes. You can read our [thoughts on good quality release notes here](https://github.com/aws-powertools/powertools-lambda-python/blob/0523ff64606514ea3e59c07c8c69c83d751f61fa/MAINTAINERS.md#drafting-release-notes){target="_blank"} (human readable changes + automation).
 
 ## Continuous deployment practices
 
-!!! note "We adhere to industry recommendations from the [OSSF Scorecard project](https://bestpractices.coreinfrastructure.org/en/criteria){target="_blank"}, among [others](https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions){target="_blank"}."
+!!! note "We adhere to industry recommendations from the [OSSF Scorecard project](https://bestpractices.coreinfrastructure.org/en/criteria){target="_blank" rel="nofollow"}, among [others](https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions){target="_blank" rel="nofollow"}."
 
 Releases are triggered by maintainers along with a reviewer - [detailed info here](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/MAINTAINERS.md#releasing-a-new-version){target="_blank"}. In addition to [checks that run for every code change](#continuous-integration-practices), our pipeline requires a manual approval before releasing.
diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md
index 0e9c050ff4c..dcfa38f6f9a 100644
--- a/docs/core/event_handler/api_gateway.md
+++ b/docs/core/event_handler/api_gateway.md
@@ -18,6 +18,7 @@ Event handler for Amazon API Gateway REST and HTTP APIs, Application Loader Bala
 
 ### Required resources
 
+
 If you're using any API Gateway integration, you must have an existing [API Gateway Proxy integration](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html){target="_blank"} or [ALB](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html){target="_blank"} configured to invoke your Lambda function.
 
 In case of using [VPC Lattice](https://docs.aws.amazon.com/lambda/latest/dg/services-vpc-lattice.html){target="_blank"}, you must have a service network configured to invoke your Lambda function.
@@ -163,7 +164,7 @@ Each dynamic route you set must be part of your function signature. This allows
 
 ???+ note
     We recommend having explicit routes whenever possible; use catch-all routes sparingly.
 
-You can use a [regex](https://docs.python.org/3/library/re.html#regular-expression-syntax){target="_blank"} string to handle an arbitrary number of paths within a request, for example `.+`.
+You can use a [regex](https://docs.python.org/3/library/re.html#regular-expression-syntax){target="_blank" rel="nofollow"} string to handle an arbitrary number of paths within a request, for example `.+`.
 
 You can also combine nested paths with greedy regex to catch in-between routes.
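
For instance, a minimal sketch of a greedy catch-all route (the `.+` pattern comes from the docs above; the handler name and response body are invented for illustration):

```python
from aws_lambda_powertools.event_handler import APIGatewayRestResolver

app = APIGatewayRestResolver()

@app.get(".+")  # greedy regex: handles any GET path without an explicit route
def catch_any_route():
    return {"path_received": app.current_event.path}

def lambda_handler(event, context):
    return app.resolve(event, context)
```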
@@ -271,7 +272,7 @@ When using [Custom Domain API Mappings feature](https://docs.aws.amazon.com/apig **Scenario**: You have a custom domain `api.mydomain.dev`. Then you set `/payment` API Mapping to forward any payment requests to your Payments API. -**Challenge**: This means your `path` value for any API requests will always contain `/payment/`, leading to HTTP 404 as Event Handler is trying to match what's after `payment/`. This gets further complicated with an [arbitrary level of nesting](https://github.com/aws-powertools/powertools-lambda-roadmap/issues/34){target="_blank"}. +**Challenge**: This means your `path` value for any API requests will always contain `/payment/`, leading to HTTP 404 as Event Handler is trying to match what's after `payment/`. This gets further complicated with an [arbitrary level of nesting](https://github.com/aws-powertools/powertools-lambda/issues/34){target="_blank"}. To address this API Gateway behavior, we use `strip_prefixes` parameter to account for these prefixes that are now injected into the path regardless of which type of API Gateway you're using. @@ -292,6 +293,14 @@ To address this API Gateway behavior, we use `strip_prefixes` parameter to accou For example, when using `strip_prefixes` value of `/pay`, there is no difference between a request path of `/pay` and `/pay/`; and the path argument would be defined as `/`. +For added flexibility, you can use regexes to strip a prefix. This is helpful when you have many options due to different combinations of prefixes (e.g: multiple environments, multiple versions). + +=== "strip_route_prefix_regex.py" + + ```python hl_lines="12" + --8<-- "examples/event_handler_rest/src/strip_route_prefix_regex.py" + ``` + ## Advanced ### CORS @@ -346,12 +355,12 @@ For convenience, these are the default values when using `CORSConfig` to enable | Key | Value | Note | | -------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **[allow_origin](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin){target="_blank"}**: `str` | `*` | Only use the default value for development. **Never use `*` for production** unless your use case requires it | -| **[extra_origins](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin){target="_blank"}**: `List[str]` | `[]` | Additional origins to be allowed, in addition to the one specified in `allow_origin` | -| **[allow_headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Headers){target="_blank"}**: `List[str]` | `[Authorization, Content-Type, X-Amz-Date, X-Api-Key, X-Amz-Security-Token]` | Additional headers will be appended to the default list for your convenience | -| **[expose_headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Headers){target="_blank"}**: `List[str]` | `[]` | Any additional header beyond the [safe listed by CORS specification](https://developer.mozilla.org/en-US/docs/Glossary/CORS-safelisted_response_header){target="_blank"}. 
| -| **[max_age](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age){target="_blank"}**: `int` | `` | Only for pre-flight requests if you choose to have your function to handle it instead of API Gateway | -| **[allow_credentials](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Credentials){target="_blank"}**: `bool` | `False` | Only necessary when you need to expose cookies, authorization headers or TLS client certificates. | +| **[allow_origin](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin){target="_blank" rel="nofollow"}**: `str` | `*` | Only use the default value for development. **Never use `*` for production** unless your use case requires it | +| **[extra_origins](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin){target="_blank" rel="nofollow"}**: `List[str]` | `[]` | Additional origins to be allowed, in addition to the one specified in `allow_origin` | +| **[allow_headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Headers){target="_blank" rel="nofollow"}**: `List[str]` | `[Authorization, Content-Type, X-Amz-Date, X-Api-Key, X-Amz-Security-Token]` | Additional headers will be appended to the default list for your convenience | +| **[expose_headers](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Headers){target="_blank" rel="nofollow"}**: `List[str]` | `[]` | Any additional header beyond the [safe listed by CORS specification](https://developer.mozilla.org/en-US/docs/Glossary/CORS-safelisted_response_header){target="_blank" rel="nofollow"}. | +| **[max_age](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age){target="_blank" rel="nofollow"}**: `int` | `` | Only for pre-flight requests if you choose to have your function to handle it instead of API Gateway | +| **[allow_credentials](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Credentials){target="_blank" rel="nofollow"}**: `bool` | `False` | Only necessary when you need to expose cookies, authorization headers or TLS client certificates. | ### Fine grained responses @@ -693,7 +702,7 @@ You can test your routes by passing a proxy event request with required params. Chalice is a full featured microframework that manages application and infrastructure. This utility, however, is largely focused on routing to reduce boilerplate and expects you to setup and manage infrastructure with your framework of choice. -That said, [Chalice has native integration with Lambda Powertools](https://aws.github.io/chalice/topics/middleware.html){target="_blank"} if you're looking for a more opinionated and web framework feature set. +That said, [Chalice has native integration with Lambda Powertools](https://aws.github.io/chalice/topics/middleware.html){target="_blank" rel="nofollow"} if you're looking for a more opinionated and web framework feature set. 
**What happened to `ApiGatewayResolver`?** diff --git a/docs/core/logger.md b/docs/core/logger.md index 94aa1a71a6b..6fc48e8898f 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -24,6 +24,8 @@ Logger requires two settings: | **Logging level** | Sets how verbose Logger should be (INFO, by default) | `LOG_LEVEL` | `level` | | **Service** | Sets **service** key that will be present across all log statements | `POWERTOOLS_SERVICE_NAME` | `service` | +There are some [other environment variables](#environment-variables) which can be set to modify Logger's settings at a global scope. + ```yaml hl_lines="12-13" title="AWS Serverless Application Model (SAM) example" --8<-- "examples/logger/sam/template.yaml" ``` @@ -83,7 +85,7 @@ When debugging in non-production environments, you can instruct Logger to log th ### Setting a Correlation ID -You can set a Correlation ID using `correlation_id_path` param by passing a [JMESPath expression](https://jmespath.org/tutorial.html){target="_blank"}. +You can set a Correlation ID using `correlation_id_path` param by passing a [JMESPath expression](https://jmespath.org/tutorial.html){target="_blank" rel="nofollow"}. ???+ tip You can retrieve correlation IDs via `get_correlation_id` method @@ -301,7 +303,7 @@ Logger can optionally log uncaught exceptions by setting `log_uncaught_exception ??? question "What are uncaught exceptions?" - It's any raised exception that wasn't handled by the [`except` statement](https://docs.python.org/3.9/tutorial/errors.html#handling-exceptions){target="_blank"}, leading a Python program to a non-successful exit. + It's any raised exception that wasn't handled by the [`except` statement](https://docs.python.org/3.9/tutorial/errors.html#handling-exceptions){target="_blank" rel="nofollow"}, leading a Python program to a non-successful exit. They are typically raised intentionally to signal a problem (`raise ValueError`), or a propagated exception from elsewhere in your code that you didn't handle it willingly or not (`KeyError`, `jsonDecoderError`, etc.). @@ -323,10 +325,10 @@ Logger uses Python's standard logging date format with the addition of timezone: You can easily change the date format using one of the following parameters: -* **`datefmt`**. You can pass any [strftime format codes](https://strftime.org/){target="_blank"}. Use `%F` if you need milliseconds. +* **`datefmt`**. You can pass any [strftime format codes](https://strftime.org/){target="_blank" rel="nofollow"}. Use `%F` if you need milliseconds. * **`use_rfc3339`**. This flag will use a format compliant with both RFC3339 and ISO8601: `2022-10-27T16:27:43.738+02:00` -???+ tip "Prefer using [datetime string formats](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes){target="_blank"}?" +???+ tip "Prefer using [datetime string formats](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes){target="_blank" rel="nofollow"}?" Use `use_datetime_directive` flag along with `datefmt` to instruct Logger to use `datetime` instead of `time.strftime`. 
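
A rough sketch of these two parameters (the service name and format string below are invented; the official, tested examples follow in the tabs):

```python
from aws_lambda_powertools import Logger

# strftime format codes; Logger additionally supports %F for milliseconds
logger = Logger(service="payment", datefmt="%Y-%m-%d %H:%M:%S.%F%z")

# Alternatively, RFC3339/ISO8601-compliant timestamps, e.g. 2022-10-27T16:27:43.738+02:00
logger_rfc3339 = Logger(service="payment", use_rfc3339=True)
```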
=== "date_formatting.py" @@ -341,6 +343,18 @@ You can easily change the date format using one of the following parameters: --8<-- "examples/logger/src/date_formatting_output.json" ``` +### Environment variables + +The following environment variables are available to configure Logger at a global scope: + +| Setting | Description | Environment variable | Default | +|---------------------------|------------------------------------------------------------------------------|-----------------------------------------|---------| +| **Event Logging** | Whether to log the incoming event. | `POWERTOOLS_LOGGER_LOG_EVENT` | `false` | +| **Debug Sample Rate** | Sets the debug log sampling. | `POWERTOOLS_LOGGER_SAMPLE_RATE` | `0` | +| **Disable Deduplication** | Disables log deduplication filter protection to use Pytest Live Log feature. | `POWERTOOLS_LOG_DEDUPLICATION_DISABLED` | `false` | + +[`POWERTOOLS_LOGGER_LOG_EVENT`](#logging-incoming-event) can also be set on a per-method basis, and [`POWERTOOLS_LOGGER_SAMPLE_RATE`](#sampling-debug-logs) on a per-instance basis. These parameter values will override the environment variable value. + ## Advanced ### Built-in Correlation ID expressions @@ -447,7 +461,7 @@ If you prefer configuring it separately, or you'd want to bring this JSON Format ### Observability providers -!!! note "In this context, an observability provider is an [AWS Lambda Partner](https://go.aws/3HtU6CZ){target="_blank"} offering a platform for logging, metrics, traces, etc." +!!! note "In this context, an observability provider is an [AWS Lambda Partner](https://go.aws/3HtU6CZ){target="_blank" rel="nofollow"} offering a platform for logging, metrics, traces, etc." You can send logs to the observability provider of your choice via [Lambda Extensions](https://aws.amazon.com/blogs/compute/using-aws-lambda-extensions-to-send-logs-to-custom-destinations/){target="_blank"}. In most cases, you shouldn't need any custom Logger configuration, and logs will be shipped async without any performance impact. @@ -634,7 +648,7 @@ For exceptional cases where you want to completely replace our formatter logic, #### Bring your own JSON serializer -By default, Logger uses `json.dumps` and `json.loads` as serializer and deserializer respectively. There could be scenarios where you are making use of alternative JSON libraries like [orjson](https://github.com/ijl/orjson){target="_blank"}. +By default, Logger uses `json.dumps` and `json.loads` as serializer and deserializer respectively. There could be scenarios where you are making use of alternative JSON libraries like [orjson](https://github.com/ijl/orjson){target="_blank" rel="nofollow"}. As parameters don't always translate well between them, you can pass any callable that receives a `dict` and return a `str`: @@ -664,7 +678,7 @@ This is a Pytest sample that provides the minimum information necessary for Logg ``` ???+ tip - Check out the built-in [Pytest caplog fixture](https://docs.pytest.org/en/latest/how-to/logging.html){target="_blank"} to assert plain log messages + Check out the built-in [Pytest caplog fixture](https://docs.pytest.org/en/latest/how-to/logging.html){target="_blank" rel="nofollow"} to assert plain log messages ### Pytest live log feature @@ -703,7 +717,7 @@ By default all registered loggers will be modified. You can change this behavior ### How can I add standard library logging attributes to a log record? 
-The Python standard library log records contains a [large set of attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes){target="_blank"}, however only a few are included in Powertools for AWS Lambda (Python) Logger log record by default. +The Python standard library log records contain a [large set of attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes){target="_blank" rel="nofollow"}; however, only a few are included in the Powertools for AWS Lambda (Python) Logger log record by default. You can include any of these logging attributes as key value arguments (`kwargs`) when instantiating `Logger` or `LambdaPowertoolsFormatter`. diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 052406300dc..31b4ea99ce7 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -1,5 +1,5 @@ --- -title: Metrics +title: Amazon CloudWatch EMF Metrics description: Core utility --- @@ -16,7 +16,7 @@ These metrics can be visualized through [Amazon CloudWatch Console](https://cons ## Terminologies -If you're new to Amazon CloudWatch, there are two terminologies you must be aware of before using this utility: +If you're new to Amazon CloudWatch, there are five terminologies you must be aware of before using this utility: * **Namespace**. It's the highest level container that will group multiple metrics from multiple services for a given application, for example `ServerlessEcommerce`. * **Dimensions**. Metrics metadata in key-value format. They help you slice and dice metrics visualization, for example `ColdStart` metric by Payment `service`. @@ -193,6 +193,16 @@ This has the advantage of keeping cold start metric separate from your applicati ???+ info We do not emit 0 as a value for ColdStart metric for cost reasons. [Let us know](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=){target="_blank"} if you'd prefer a flag to override it. +### Environment variables + +The following environment variable is available to configure Metrics at a global scope: + +| Setting | Description | Environment variable | Default | +| ------------------ | -------------------------------- | ------------------------------ | ------- | +| **Namespace Name** | Sets namespace used for metrics. | `POWERTOOLS_METRICS_NAMESPACE` | `None` | + +`POWERTOOLS_METRICS_NAMESPACE` is also available on a per-instance basis with the `namespace` parameter, which overrides the environment variable value. + ## Advanced ### Adding metadata @@ -251,7 +261,7 @@ By default it will skip all previously defined dimensions including default dime ### Flushing metrics manually -If you are using the AWS Lambda Web Adapter project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize, print metrics available to standard output, and clear in-memory metrics data. +If you are using the [AWS Lambda Web Adapter](https://github.com/awslabs/aws-lambda-web-adapter){target="_blank"} project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize and print available metrics to standard output, and clear in-memory metrics data. ???+ warning This does not capture Cold Start metrics, and metric data validation still applies.
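As a rough sketch of that manual flush flow, assuming the standard `Metrics` API described in this page (namespace, service, and metric names are placeholders):

```python
from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

# Placeholder namespace/service for illustration
metrics = Metrics(namespace="ServerlessAirline", service="booking")


def record_success() -> None:
    metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1)
    # Serializes to EMF, prints to standard output, and clears in-memory data.
    # Unlike log_metrics, you are responsible for calling this when exceptions occur.
    metrics.flush_metrics()
```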
@@ -276,14 +286,15 @@ You can use `EphemeralMetrics` class when looking to isolate multiple instances `EphemeralMetrics` has only one difference while keeping nearly the exact same set of features: -| Feature | Metrics | EphemeralMetrics | -| ----------------------------------------------------------------------------------------------------------- | ------- | ---------------- | -| **Share data across instances** (metrics, dimensions, metadata, etc.) | Yes | - | +| Feature | Metrics | EphemeralMetrics | +| --------------------------------------------------------------------- | ------- | ---------------- | +| **Share data across instances** (metrics, dimensions, metadata, etc.) | Yes | - | !!! question "Why not changing the default `Metrics` behaviour to not share data across instances?" This is an intentional design to prevent accidental data deduplication or data loss issues due to [CloudWatch EMF](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format_Specification.html){target="_blank"} metric dimension constraint. + In CloudWatch, there are two metric ingestion mechanisms: [EMF (async)](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format_Specification.html){target="_blank"} and [`PutMetricData` API (sync)](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudwatch.html#CloudWatch.Client.put_metric_data){target="_blank"}. The former creates metrics asynchronously via CloudWatch Logs, and the latter uses a synchronous and more flexible ingestion API. @@ -316,9 +327,23 @@ These issues are exacerbated when you create **(A)** metric dimensions condition That is why `Metrics` shares data across instances by default, as that covers 80% of use cases and different personas using Powertools. This allows them to instantiate `Metrics` in multiple places throughout their code - be a separate file, a middleware, or an abstraction that sets default dimensions. +### Observability providers + +> An observability provider is an [AWS Lambda Partner](https://docs.aws.amazon.com/lambda/latest/dg/extensions-api-partners.html){target="_blank" rel="nofollow"} offering a platform for logging, metrics, traces, etc. + +We provide a thin wrapper on top of the most requested observability providers. We strive to keep the UX as similar as possible while keeping our value-add features. + +!!! tip "Missing your preferred provider? Please create a [feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&projects=&template=feature_request.yml&title=Feature+request%3A+TITLE){target="_blank"}." + +Current providers: + +| Provider | Notes | | ------------------------------------- | -------------------------------------------------------- | +| [Datadog](./datadog){target="_blank"} | Uses Datadog SDK and Datadog Lambda Extension by default | + ## Testing your code -### Environment variables +### Setting environment variables ???+ tip Ignore this section, if: @@ -328,7 +353,7 @@ That is why `Metrics` shares data across instances by default, as that covers 80 For example, `Metrics(namespace="ServerlessAirline", service="booking")` -Make sure to set `POWERTOOLS_METRICS_NAMESPACE` and `POWERTOOLS_SERVICE_NAME` before running your tests to prevent failing on `SchemaValidation` exception. You can set it before you run tests or via pytest plugins like [dotenv](https://pypi.org/project/pytest-dotenv/){target="_blank"}.
+Make sure to set `POWERTOOLS_METRICS_NAMESPACE` and `POWERTOOLS_SERVICE_NAME` before running your tests to prevent failing on `SchemaValidation` exception. You can set it before you run tests or via pytest plugins like [dotenv](https://pypi.org/project/pytest-dotenv/){target="_blank" rel="nofollow"}. ```bash title="Injecting dummy Metric Namespace before running tests" --8<-- "examples/metrics/src/run_tests_env_var.sh" ``` @@ -373,4 +398,4 @@ You can read standard output and assert whether metrics have been flushed. Here' ``` ???+ tip - For more elaborate assertions and comparisons, check out [our functional testing for Metrics utility.](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/functional/test_metrics.py){target="_blank"} + For more elaborate assertions and comparisons, check out [our functional testing for Metrics utility.](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/functional/metrics/test_metrics_cloudwatch_emf.py){target="_blank"} diff --git a/docs/core/metrics/datadog.md b/docs/core/metrics/datadog.md new file mode 100644 index 00000000000..eb036fd3270 --- /dev/null +++ b/docs/core/metrics/datadog.md @@ -0,0 +1,260 @@ +--- +title: Datadog +description: Metrics provider +--- + + +This observability provider creates custom metrics by flushing metrics to [Datadog Lambda extension](https://docs.datadoghq.com/serverless/installation/python/?tab=datadogcli){target="_blank" rel="nofollow"}, or to standard output via [Datadog Forwarder](https://docs.datadoghq.com/logs/guide/forwarder/?tab=cloudformation){target="_blank" rel="nofollow"}. These metrics can be visualized in the [Datadog console](https://app.datadoghq.com/metric/explore){target="_blank" rel="nofollow"}. + + +```mermaid stateDiagram-v2 direction LR LambdaFn: Your Lambda function LambdaCode: DatadogMetrics DatadogSDK: Datadog SDK DatadogExtension: Datadog Lambda Extension Datadog: Datadog Dashboard LambdaExtension: Lambda Extension + LambdaFn --> LambdaCode LambdaCode --> DatadogSDK DatadogSDK --> DatadogExtension DatadogExtension --> Datadog: async + state LambdaExtension { DatadogExtension } + ``` + +## Key features + +* Flush metrics to Datadog extension or standard output +* Validate against common metric definition mistakes +* Support to add default tags + +## Terminologies + +If you're new to Datadog Metrics, there are three terminologies you must be aware of before using this utility: + +* **Namespace**. It's the highest level container that will group multiple metrics from multiple services for a given application, for example `ServerlessEcommerce`. +* **Metric**. It's the name of the metric, for example: SuccessfulBooking or UpdatedBooking. +* **Tags**. Metrics metadata in key-value pair format. They help provide contextual information, and filter or organize metrics. + +You can read more details in the [Datadog official documentation](https://docs.datadoghq.com/metrics/custom_metrics/){target="_blank" rel="nofollow"}. + +## Getting started + +???+ tip All examples shared in this documentation are available within the [project repository](https://github.com/aws-powertools/powertools-lambda-python/tree/develop/examples){target="_blank"}. + +### Install + +> **Using Datadog Forwarder?** You can skip this step. + +We recommend using [Datadog SDK](https://docs.datadoghq.com/serverless/installation/python/){target="_blank" rel="nofollow"} and Datadog Lambda Extension with this feature for optimal results.
+ +For Datadog SDK, you can add `aws-lambda-powertools[datadog]` as a dependency in your preferred tool, or as a Lambda Layer in the following example: + +```yaml hl_lines="15-16 28 32" title="AWS Serverless Application Model (SAM) example" +--8<-- "examples/metrics_datadog/sam/template.yaml" +``` + +### Creating metrics + +You can create metrics using `add_metric`. + +By default, we will generate the current timestamp for you. Alternatively, you can use the `timestamp` parameter to set a custom one in epoch time. + +=== "add_datadog_metrics.py" + + ```python hl_lines="4 7 9" + --8<-- "examples/metrics_datadog/src/add_datadog_metrics.py" + ``` + +=== "add_metrics_with_timestamp.py" + + ```python hl_lines="11" + --8<-- "examples/metrics_datadog/src/add_metrics_with_timestamp.py" + ``` + +???+ warning "Warning: Do not create metrics outside the handler" Metrics added in the global scope will only be added during cold start. Disregard if that's the intended behavior. + +### Adding tags + +You can add any number of tags to your metrics via keyword arguments (`key=value`). They are helpful to filter, organize, and aggregate your metrics later. + +!!! info "We will emit a warning for tags [beyond the 200 chars limit](https://docs.datadoghq.com/getting_started/tagging/){target="_blank" rel="nofollow"}." + +=== "add_metrics_with_tags.py" + + ```python hl_lines="9" + --8<-- "examples/metrics_datadog/src/add_metrics_with_tags.py" + ``` + +### Adding default tags + +You can persist tags across Lambda invocations and `DatadogMetrics` instances via `set_default_tags` method, or `default_tags` parameter in the `log_metrics` decorator. + +If you'd like to remove them at some point, you can use the `clear_default_tags` method. + +???+ note "Metric tag takes precedence over default tags of the same name" When adding tags with the same name via `add_metric` and `set_default_tags`, `add_metric` takes precedence. + +=== "set_default_tags.py" + + ```python hl_lines="5" + --8<-- "examples/metrics_datadog/src/set_default_tags.py" + ``` + +=== "set_default_tags_log_metrics.py" + + ```python hl_lines="6 9" + --8<-- "examples/metrics_datadog/src/set_default_tags_log_metrics.py" + ``` + +### Flushing metrics + +Use `log_metrics` decorator to automatically serialize and flush your metrics (SDK or Forwarder) at the end of your invocation. + +This decorator also ensures metrics are flushed in the event of an exception, including warning you in case you forgot to add metrics. + +=== "add_metrics.py" + + ```python hl_lines="7" + --8<-- "examples/metrics_datadog/src/add_metrics_with_tags.py" + ``` + +=== "log_metrics_output.json" + + ```json hl_lines="2 6 7" + --8<-- "examples/metrics_datadog/src/log_metrics_output.json" + ``` + +#### Raising SchemaValidationError on empty metrics + +Use `raise_on_empty_metrics=True` if you want to ensure at least one metric is always emitted. + +```python hl_lines="7" title="Failing fast if no metrics are added" +--8<-- "examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py" +``` + +???+ tip "Suppressing warning messages on empty metrics" If you expect your function to execute without publishing metrics every time, you can suppress the warning with **`warnings.filterwarnings("ignore", "No metrics to publish*")`**. + +### Capturing cold start metric + +You can optionally capture cold start metrics with `log_metrics` decorator via `capture_cold_start_metric` param.
+ +=== "capture_cold_start_metric.py" + + ```python hl_lines="7" + --8<-- "examples/metrics_datadog/src/capture_cold_start_datadog_metric.py" + ``` + +=== "capture_cold_start_metric_output.json" + + ```json hl_lines="2 6" + --8<-- "examples/metrics_datadog/src/capture_cold_start_metric_output.json" + ``` + +If it's a cold start invocation, this feature will: + +* Create a separate Datadog metric solely containing a metric named `ColdStart` +* Add `function_name` metric tag + +This has the advantage of keeping cold start metric separate from your application metrics, where you might have unrelated tags. + +???+ info + We do not emit 0 as a value for ColdStart metric for cost reasons. [Let us know](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=){target="_blank"} if you'd prefer a flag to override it. + +### Environment variables + +You can use any of the following environment variables to configure `DatadogMetrics`: + +| Setting | Description | Environment variable | Constructor parameter | +| -------------------- | -------------------------------------------------------------------------------- | ------------------------------ | --------------------- | +| **Metric namespace** | Logical container where all metrics will be placed e.g. `ServerlessAirline` | `POWERTOOLS_METRICS_NAMESPACE` | `namespace` | +| **Flush to log** | Use this when you want to flush metrics to be exported through Datadog Forwarder | `DD_FLUSH_TO_LOG` | `flush_to_log` | + +## Advanced + +### Flushing metrics manually + +If you are using the [AWS Lambda Web Adapter](https://github.com/awslabs/aws-lambda-web-adapter){target="_blank"} project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize and print available metrics to standard output, and clear in-memory metrics data. + +???+ warning + This does not capture Cold Start metrics, and metric data validation still applies. + +Contrary to the `log_metrics` decorator, you are now also responsible for flushing metrics in the event of an exception. + +```python hl_lines="17" title="Manually flushing and clearing metrics from memory" +--8<-- "examples/metrics_datadog/src/flush_datadog_metrics.py" +``` + +### Integrating with Datadog Forwarder + +Use `flush_to_log=True` in `DatadogMetrics` to integrate with the legacy [Datadog Forwarder](https://docs.datadoghq.com/logs/guide/forwarder/?tab=cloudformation){target="_blank" rel="nofollow"}. + +This will serialize and flush metrics to standard output. + +=== "flush_metrics_to_standard_output.py" + + ```python hl_lines="4" + --8<-- "examples/metrics_datadog/src/flush_metrics_to_standard_output.py" + ``` + +=== "log_metrics_standard_output.json" + + ```json + --8<-- "examples/metrics_datadog/src/log_metrics_standard_output.json" + ``` + +## Testing your code + +### Setting environment variables + +???+ tip + Ignore this section, if: + + * You are explicitly setting namespace via `namespace` parameter + * You're not instantiating `DatadogMetrics` in the global namespace + + For example, `DatadogMetrics(namespace="ServerlessAirline")` + +Make sure to set `POWERTOOLS_METRICS_NAMESPACE` before running your tests to prevent failing on `SchemaValidation` exception. You can set it before you run tests or via pytest plugins like [dotenv](https://pypi.org/project/pytest-dotenv/){target="_blank" rel="nofollow"}.
+ +```bash title="Injecting dummy metric namespace before running tests" +--8<-- "examples/metrics_datadog/src/run_tests_env_var.sh" +``` + +1. **`DD_FLUSH_TO_LOG=True`** makes it easier to test by flushing final metrics to standard output. + +### Clearing metrics + +`DatadogMetrics` keeps metrics in memory across multiple instances. If you need to test this behavior, you can use the following Pytest fixture to ensure metrics are reset incl. cold start: + +```python title="Clearing metrics between tests" +--8<-- "examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py" +``` + +### Functional testing + +You can read standard output and assert whether metrics have been flushed. Here's an example using `pytest` with `capsys` built-in fixture: + +=== "assert_single_datadog_metric.py" + + ```python hl_lines="7" + --8<-- "examples/metrics_datadog/src/assert_single_datadog_metric.py" + ``` + +=== "add_datadog_metrics.py" + + ```python + --8<-- "examples/metrics_datadog/src/add_datadog_metrics.py" + ``` + +???+ tip + For more elaborate assertions and comparisons, check out [our functional testing for DatadogMetrics utility.](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/functional/metrics/test_metrics_datadog.py){target="_blank"} diff --git a/docs/core/metrics/index.md b/docs/core/metrics/index.md new file mode 100644 index 00000000000..359ce28eb33 --- /dev/null +++ b/docs/core/metrics/index.md @@ -0,0 +1,6 @@ +--- +title: Metrics +description: Core utility +--- + +--8<-- "docs/core/metrics.md" diff --git a/docs/core/tracer.md b/docs/core/tracer.md index 349a868dc83..f7163564e6b 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -3,7 +3,7 @@ title: Tracer description: Core utility --- -Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github.com/aws/aws-xray-sdk-python/){target="_blank"}. +Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github.com/aws/aws-xray-sdk-python/){target="_blank" rel="nofollow"}. ![Tracer showcase](../media/tracer_utility_showcase.png) @@ -19,7 +19,7 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github. ???+ tip All examples shared in this documentation are available within the [project repository](https://github.com/aws-powertools/powertools-lambda-python/tree/develop/examples){target="_blank"}. -!!! note "Tracer relies on AWS X-Ray SDK over [OpenTelememetry Distro (ADOT)](https://aws-otel.github.io/docs/getting-started/lambda){target="_blank"} for optimal cold start (lower latency)." +!!! note "Tracer relies on AWS X-Ray SDK over [OpenTelemetry Distro (ADOT)](https://aws-otel.github.io/docs/getting-started/lambda){target="_blank" rel="nofollow"} for optimal cold start (lower latency)." ### Install @@ -103,6 +103,18 @@ You can trace asynchronous functions and generator functions (including context --8<-- "examples/tracer/src/capture_method_generators.py" ``` +### Environment variables + +The following environment variables are available to configure Tracer at a global scope: + +| Setting | Description | Environment variable | Default | +|-----------------------|--------------------------------------------------|--------------------------------------|---------| +| **Disable Tracing** | Explicitly disables all tracing. | `POWERTOOLS_TRACE_DISABLED` | `false` | +| **Response Capture** | Captures Lambda or method return as metadata.
| `POWERTOOLS_TRACER_CAPTURE_RESPONSE` | `true` | +| **Exception Capture** | Captures Lambda or method exception as metadata. | `POWERTOOLS_TRACER_CAPTURE_ERROR` | `true` | + +Both [`POWERTOOLS_TRACER_CAPTURE_RESPONSE`](#disabling-response-auto-capture) and [`POWERTOOLS_TRACER_CAPTURE_ERROR`](#disabling-exception-auto-capture) can be set on a per-method basis, consequently overriding the environment variable value. + ## Advanced ### Patching modules @@ -162,7 +174,7 @@ You can use `ignore_endpoint` method with the hostname and/or URLs you'd like it ???+ info This snippet assumes you have aiohttp as a dependency -You can use `aiohttp_trace_config` function to create a valid [aiohttp trace_config object](https://docs.aiohttp.org/en/stable/tracing_reference.html){target="_blank"}. This is necessary since X-Ray utilizes [aiohttp](https://docs.aiohttp.org/en/stable/){target="_blank"} trace hooks to capture requests end-to-end. +You can use `aiohttp_trace_config` function to create a valid [aiohttp trace_config object](https://docs.aiohttp.org/en/stable/tracing_reference.html){target="_blank" rel="nofollow"}. This is necessary since X-Ray utilizes [aiohttp](https://docs.aiohttp.org/en/stable/){target="_blank" rel="nofollow"} trace hooks to capture requests end-to-end. ```python hl_lines="7 17" title="Tracing aiohttp requests" --8<-- "examples/tracer/src/tracing_aiohttp.py" diff --git a/docs/index.md b/docs/index.md index 210462bc5b6..4ea82dd127c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -8,7 +8,7 @@ description: Powertools for AWS Lambda (Python) Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity. ???+ tip - Powertools for AWS Lambda (Python) is also available for [Java](https://docs.powertools.aws.dev/lambda/java/){target="_blank"}, [TypeScript](https://docs.powertools.aws.dev/lambda/typescript/latest/){target="_blank"}, and [.NET](https://docs.powertools.aws.dev/lambda/dotnet/){target="_blank"} + Powertools for AWS Lambda (Python) is also available for [Java](https://docs.powertools.aws.dev/lambda/java/){target="_blank"}, [TypeScript](https://docs.powertools.aws.dev/lambda/typescript/latest/){target="_blank" }, and [.NET](https://docs.powertools.aws.dev/lambda/dotnet/){target="_blank"} ??? hint "Support this project by becoming a reference customer, sharing your work, or using Layers/SAR :heart:" @@ -26,8 +26,8 @@ Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverles You can install Powertools for AWS Lambda (Python) using one of the following options: -* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:39**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard: -* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard: +* **Lambda Layer (x86_64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard: +* **Lambda Layer (arm64)**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40**](# "Replace {region} with your AWS region, e.g., eu-west-1"){: .copyMe}:clipboard: * **Pip**: **[`pip install "aws-lambda-powertools"`](#){: .copyMe}:clipboard:** !!! question "Looking for Pip signed releases? 
[Learn more about verifying signed builds](./security.md#verifying-signed-builds)" @@ -80,60 +80,61 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd | Region | Layer ARN | | ---------------- | ---------------------------------------------------------------------------------------------------------- | - | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `us-west-1` | 
[arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:39](#){: .copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-south-2` | [arn:aws:lambda:ap-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ap-southeast-4` | [arn:aws:lambda:ap-southeast-4:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `eu-central-2` | [arn:aws:lambda:eu-central-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `eu-south-2` | [arn:aws:lambda:eu-south-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `il-central-1` | [arn:aws:lambda:il-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `me-central-1` | [arn:aws:lambda:me-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `us-west-1` | 
[arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2:40](#){: .copyMe}:clipboard: | === "arm64" | Region | Layer ARN | | ---------------- | ---------------------------------------------------------------------------------------------------------------- | - | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39](#){: .copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + 
| `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40](#){: .copyMe}:clipboard: | ??? 
note "Note: Click to expand and copy code snippets for popular frameworks" @@ -146,7 +147,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:39 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 ``` === "Serverless framework" @@ -156,7 +157,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:39 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 ``` === "CDK" @@ -172,7 +173,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:39" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -221,7 +222,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:39"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -274,7 +275,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 โฏ amplify push -y @@ -285,7 +286,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:39 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 ? Do you want to edit the local lambda function now? 
No ``` @@ -299,7 +300,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd Properties: Architectures: [arm64] Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40 ``` === "Serverless framework" @@ -310,7 +311,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd handler: lambda_function.lambda_handler architecture: arm64 layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40 ``` === "CDK" @@ -326,7 +327,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -376,7 +377,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40"] architectures = ["arm64"] source_code_hash = filebase64sha256("lambda_function_payload.zip") @@ -432,7 +433,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40 โฏ amplify push -y @@ -443,7 +444,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:39 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPythonV2-Arm64:40 ? Do you want to edit the local lambda function now? No ``` @@ -451,7 +452,7 @@ You can include Powertools for AWS Lambda (Python) Lambda Layer using [AWS Lambd Change {region} to your AWS region, e.g. `eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:39 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key. @@ -549,7 +550,7 @@ Compared with the [public Layer ARN](#lambda-layer) option, SAR allows you to ch === "Terraform" - > Credits to [Dani Comnea](https://github.com/DanyC97){target="_blank"} for providing the Terraform equivalent. 
+ > Credits to [Dani Comnea](https://github.com/DanyC97){target="_blank" rel="nofollow"} for providing the Terraform equivalent. ```terraform hl_lines="12-13 15-20 23-25 40" terraform { @@ -597,7 +598,7 @@ Compared with the [public Layer ARN](#lambda-layer) option, SAR allows you to ch ??? example "Example: Least-privileged IAM permissions to deploy Layer" - > Credits to [mwarkentin](https://github.com/mwarkentin){target="_blank"} for providing the scoped down IAM permissions. + > Credits to [mwarkentin](https://github.com/mwarkentin){target="_blank" rel="nofollow"} for providing the scoped down IAM permissions. The region and the account id for `CloudFormationTransform` and `GetCfnTemplate` are fixed. @@ -672,7 +673,7 @@ Compared with the [public Layer ARN](#lambda-layer) option, SAR allows you to ch ## Quick getting started ```bash title="Hello world example using SAM CLI" -sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.10 --no-tracing +sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.11 --no-tracing ``` ## Features @@ -705,18 +706,18 @@ Core utilities such as Tracing, Logging, Metrics, and Event Handler will be avai | Environment variable | Description | Utility | Default | | ----------------------------------------- | -------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | --------------------- | | **POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | All | `"service_undefined"` | -| **POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | [Metrics](./core/metrics){target="_blank"} | `None` | -| **POWERTOOLS_TRACE_DISABLED** | Explicitly disables tracing | [Tracing](./core/tracer){target="_blank"} | `false` | -| **POWERTOOLS_TRACER_CAPTURE_RESPONSE** | Captures Lambda or method return as metadata. | [Tracing](./core/tracer){target="_blank"} | `true` | -| **POWERTOOLS_TRACER_CAPTURE_ERROR** | Captures Lambda or method exception as metadata. | [Tracing](./core/tracer){target="_blank"} | `true` | -| **POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [Middleware factory](./utilities/middleware_factory){target="_blank"} | `false` | -| **POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger){target="_blank"} | `false` | -| **POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger){target="_blank"} | `0` | -| **POWERTOOLS_LOG_DEDUPLICATION_DISABLED** | Disables log deduplication filter protection to use Pytest Live Log feature | [Logging](./core/logger){target="_blank"} | `false` | -| **POWERTOOLS_PARAMETERS_MAX_AGE** | Adjust how long values are kept in cache (in seconds) | [Parameters](./utilities/parameters/#adjusting-cache-ttl){target="_blank"} | `5` | -| **POWERTOOLS_PARAMETERS_SSM_DECRYPT** | Sets whether to decrypt or not values retrieved from AWS SSM Parameters Store | [Parameters](./utilities/parameters/#ssmprovider){target="_blank"} | `false` | +| **POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | [Metrics](./core/metrics.md){target="_blank"} | `None` | +| **POWERTOOLS_TRACE_DISABLED** | Explicitly disables tracing | [Tracing](./core/tracer.md){target="_blank"} | `false` | +| **POWERTOOLS_TRACER_CAPTURE_RESPONSE** | Captures Lambda or method return as metadata. 
| [Tracing](./core/tracer.md){target="_blank"} | `true` | +| **POWERTOOLS_TRACER_CAPTURE_ERROR** | Captures Lambda or method exception as metadata. | [Tracing](./core/tracer.md){target="_blank"} | `true` | +| **POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [Middleware factory](./utilities/middleware_factory.md){target="_blank"} | `false` | +| **POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger.md){target="_blank"} | `false` | +| **POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger.md){target="_blank"} | `0` | +| **POWERTOOLS_LOG_DEDUPLICATION_DISABLED** | Disables log deduplication filter protection to use Pytest Live Log feature | [Logging](./core/logger.md){target="_blank"} | `false` | +| **POWERTOOLS_PARAMETERS_MAX_AGE** | Adjust how long values are kept in cache (in seconds) | [Parameters](./utilities/parameters.md#adjusting-cache-ttl){target="_blank"} | `5` | +| **POWERTOOLS_PARAMETERS_SSM_DECRYPT** | Sets whether to decrypt or not values retrieved from AWS SSM Parameters Store | [Parameters](./utilities/parameters.md#ssmprovider){target="_blank"} | `false` | | **POWERTOOLS_DEV** | Increases verbosity across utilities | Multiple; see [POWERTOOLS_DEV effect below](#optimizing-for-non-production-environments) | `false` | -| **LOG_LEVEL** | Sets logging level | [Logging](./core/logger){target="_blank"} | `INFO` | +| **LOG_LEVEL** | Sets logging level | [Logging](./core/logger.md){target="_blank"} | `INFO` | ### Optimizing for non-production environments @@ -747,16 +748,16 @@ Knowing which companies are using this library is important to help prioritize t The following companies, among others, use Powertools: -* [Capital One](https://www.capitalone.com/){target="_blank"} -* [CPQi (Exadel Financial Services)](https://cpqi.com/){target="_blank"} -* [CloudZero](https://www.cloudzero.com/){target="_blank"} -* [CyberArk](https://www.cyberark.com/){target="_blank"} -* [globaldatanet](https://globaldatanet.com/){target="_blank"} -* [IMS](https://ims.tech/){target="_blank"} -* [Jit Security](https://www.jit.io/){target="_blank"} -* [Propellor.ai](https://www.propellor.ai/){target="_blank"} -* [TopSport](https://www.topsport.com.au/){target="_blank"} -* [Trek10](https://www.trek10.com/){target="_blank"} +* [Capital One](https://www.capitalone.com/){target="_blank" rel="nofollow"} +* [CPQi (Exadel Financial Services)](https://cpqi.com/){target="_blank" rel="nofollow"} +* [CloudZero](https://www.cloudzero.com/){target="_blank" rel="nofollow"} +* [CyberArk](https://www.cyberark.com/){target="_blank" rel="nofollow"} +* [globaldatanet](https://globaldatanet.com/){target="_blank" rel="nofollow"} +* [IMS](https://ims.tech/){target="_blank" rel="nofollow"} +* [Jit Security](https://www.jit.io/){target="_blank" rel="nofollow"} +* [Propellor.ai](https://www.propellor.ai/){target="_blank" rel="nofollow"} +* [TopSport](https://www.topsport.com.au/){target="_blank" rel="nofollow"} +* [Trek10](https://www.trek10.com/){target="_blank" rel="nofollow"} ### Sharing your work diff --git a/docs/maintainers.md b/docs/maintainers.md index 6fc00c83cd9..4fd4f109a33 100644 --- a/docs/maintainers.md +++ b/docs/maintainers.md @@ -9,27 +9,27 @@ description: Playbook for active maintainers in Powertools for AWS Lambda (Pytho !!! note "Please treat this content as a living document." -This is document explains who the maintainers are, their responsibilities, and how they should be doing it. 
If you're interested in contributing, see [CONTRIBUTING](CONTRIBUTING.md). +This document explains who the maintainers are, their responsibilities, and how they should carry them out. If you're interested in contributing, see [CONTRIBUTING](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/CONTRIBUTING.md){target="_blank"}. ## Current Maintainers -| Maintainer | GitHub ID | Affiliation | -| ----------------- | ------------------------------------------------------- | ----------- | -| Heitor Lessa | [heitorlessa](https://github.com/heitorlessa) | Amazon | -| Simon Thulbourn | [sthulb](https://github.com/sthulb) | Amazon | -| Ruben Fonseca | [rubenfonseca](https://github.com/rubenfonseca) | Amazon | -| Leandro Damascena | [leandrodamascena](https://github.com/leandrodamascena) | Amazon | +| Maintainer | GitHub ID | Affiliation | +| ----------------- | --------------------------------------------------------------------------------------- | ----------- | +| Heitor Lessa | [heitorlessa](https://github.com/heitorlessa){target="_blank" rel="nofollow"} | Amazon | +| Simon Thulbourn | [sthulb](https://github.com/sthulb){target="_blank" rel="nofollow"} | Amazon | +| Ruben Fonseca | [rubenfonseca](https://github.com/rubenfonseca){target="_blank" rel="nofollow"} | Amazon | +| Leandro Damascena | [leandrodamascena](https://github.com/leandrodamascena){target="_blank" rel="nofollow"} | Amazon | ## Emeritus Previous active maintainers who contributed to this project. -| Maintainer | GitHub ID | Affiliation | -| ----------------- | ----------------------------------------------- | ----------- | -| Tom McCarthy | [cakepietoast](https://github.com/cakepietoast) | MongoDB | -| Nicolas Moutschen | [nmoutschen](https://github.com/nmoutschen) | Apollo | -| Alexander Melnyk | [am29d](https://github.com/am29d) | Amazon | -| Michal Ploski | [mploski](https://github.com/mploski) | Amazon | +| Maintainer | GitHub ID | Affiliation | +| ----------------- | ------------------------------------------------------------------------------- | ----------- | +| Tom McCarthy | [cakepietoast](https://github.com/cakepietoast){target="_blank" rel="nofollow"} | MongoDB | +| Nicolas Moutschen | [nmoutschen](https://github.com/nmoutschen){target="_blank" rel="nofollow"} | Apollo | +| Alexander Melnyk | [am29d](https://github.com/am29d){target="_blank" rel="nofollow"} | Amazon | +| Michal Ploski | [mploski](https://github.com/mploski){target="_blank" rel="nofollow"} | Amazon | ## Labels @@ -74,13 +74,14 @@ These are the most common labels used by maintainers to triage issues, pull requ ## Maintainer Responsibilities -Maintainers are active and visible members of the community, and have [maintain-level permissions on a repository](https://docs.github.com/en/organizations/managing-access-to-your-organizations-repositories/repository-permission-levels-for-an-organization). Use those privileges to serve the community and evolve code as follows. +Maintainers are active and visible members of the community, and have [maintain-level permissions on a repository](https://docs.github.com/en/organizations/managing-access-to-your-organizations-repositories/repository-permission-levels-for-an-organization){target="_blank" rel="nofollow"}. Use those privileges to serve the community and evolve code as follows. Be aware of recurring ambiguous situations and [document them](#common-scenarios) to help your fellow maintainers.
### Uphold Code of Conduct

-Model the behavior set forward by the [Code of Conduct](CODE_OF_CONDUCT.md) and raise any violations to other maintainers and admins. There could be unusual circumstances where inappropriate behavior does not immediately fall within the [Code of Conduct](CODE_OF_CONDUCT.md).
+
+Model the behavior set forth by the [Code of Conduct](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/CODE_OF_CONDUCT.md){target="_blank"} and raise any violations to other maintainers and admins. There could be unusual circumstances where inappropriate behavior does not immediately fall within the [Code of Conduct](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/CODE_OF_CONDUCT.md){target="_blank"}.

 These might be nuanced and should be handled with extra care - when in doubt, do not engage and reach out to other maintainers and admins.

@@ -88,7 +89,7 @@ These might be nuanced and should be handled with extra care - when in doubt, do

 Security is your number one priority. Maintainer's Github keys must be password protected securely and any reported security vulnerabilities are addressed before features or bugs.

-Note that this repository is monitored and supported 24/7 by Amazon Security, see [Reporting a Vulnerability](SECURITY.md) for details.
+Note that this repository is monitored and supported 24/7 by Amazon Security; see [Reporting a Vulnerability](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/SECURITY.md){target="_blank"} for details.

 ### Review Pull Requests

 Review pull requests regularly, comment, suggest, reject, merge and close. Accep

 PRs are [labeled](#labels) based on file changes and semantic title. Pay attention to whether labels reflect the current state of the PR and correct accordingly.

-Use and enforce [semantic versioning](https://semver.org/) pull request titles, as these will be used for [CHANGELOG](CHANGELOG.md) and [Release notes](https://github.com/aws-powertools/powertools-lambda-python/releases) - make sure they communicate their intent at the human level.
+Use and enforce [semantic versioning](https://semver.org/) pull request titles, as these will be used for [CHANGELOG](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/CHANGELOG.md){target="_blank"} and [Release notes](https://github.com/aws-powertools/powertools-lambda-python/releases) - make sure they communicate their intent at the human level.

 > TODO: This is an area we want to automate using the new GitHub GraphQL API.

@@ -250,7 +251,7 @@ These are some questions to keep in mind when drafting your first or future rele

 - Can customers understand at a high level what changed in this release?
 - Is there a link to the documentation where they can read more about each main change?
-- Are there any graphics or [code snippets](carbon.now.sh/) that can enhance readability?
+- Are there any graphics or [code snippets](https://carbon.now.sh/) that can enhance readability?
 - Are we calling out any key contributor(s) to this release?
   - All contributors are automatically credited, use this as an exceptional case to feature them

@@ -288,13 +289,15 @@ Add integration checks that validate pull requests and pushes to ease the burden

 ### Negative Impact on the Project

-Actions that negatively impact the project will be handled by the admins, in coordination with other maintainers, in balance with the urgency of the issue.
Examples would be [Code of Conduct](CODE_OF_CONDUCT.md) violations, deliberate harmful or malicious actions, spam, monopolization, and security risks. + +Actions that negatively impact the project will be handled by the admins, in coordination with other maintainers, in balance with the urgency of the issue. Examples would be [Code of Conduct](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/CODE_OF_CONDUCT.md){target="_blank"} violations, deliberate harmful or malicious actions, spam, monopolization, and security risks. + ### Becoming a maintainer In 2023, we will revisit this. We need to improve our understanding of how other projects are doing, their mechanisms to promote key contributors, and how they interact daily. -We suspect this process might look similar to the [OpenSearch project](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md#becoming-a-maintainer). +We suspect this process might look similar to the [OpenSearch project](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md#becoming-a-maintainer){target="_blank" rel="nofollow"}. ## Common scenarios @@ -314,7 +317,7 @@ When in doubt, use `need-more-information` or `need-customer-feedback` labels to ### Crediting contributions -We credit all contributions as part of each [release note](https://github.com/aws-powertools/powertools-lambda-python/releases) as an automated process. If you find contributors are missing from the release note you're producing, please add them manually. +We credit all contributions as part of each [release note](https://github.com/aws-powertools/powertools-lambda-python/releases){target="_blank"} as an automated process. If you find contributors are missing from the release note you're producing, please add them manually. ### Is that a bug? @@ -338,7 +341,7 @@ In the rare cases where both parties don't have the bandwidth or expertise to co ### Structure -Our E2E framework relies on [Pytest fixtures](https://docs.pytest.org/en/6.2.x/fixture.html) to coordinate infrastructure and test parallelization - see [Test Parallelization](#test-runner-parallelization) and [CDK CLI Parallelization](#cdk-cli-parallelization). +Our E2E framework relies on [Pytest fixtures](https://docs.pytest.org/en/6.2.x/fixture.html){target="_blank" rel="nofollow"} to coordinate infrastructure and test parallelization - see [Test Parallelization](#test-runner-parallelization) and [CDK CLI Parallelization](#cdk-cli-parallelization). **tests/e2e structure** @@ -387,7 +390,7 @@ Where: ### Mechanics -Under [`BaseInfrastructure`](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/e2e/utils/infrastructure.py), we hide the complexity of deployment and delete coordination under `deploy`, `delete`, and `create_lambda_functions` methods. +Under [`BaseInfrastructure`](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/e2e/utils/infrastructure.py){target="_blank"}, we hide the complexity of deployment and delete coordination under `deploy`, `delete`, and `create_lambda_functions` methods. This allows us to benefit from test and deployment parallelization, use IDE step-through debugging for a single test, run one, subset, or all tests and only deploy their related infrastructure, without any custom configuration. 
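To make the fixture coordination above concrete, here is a minimal sketch of how a test suite might wire `BaseInfrastructure` into a session-scoped Pytest fixture. The `MyFeatureStack` subclass, its `create_resources` hook, and the constructor call are illustrative assumptions rather than the exact code in `tests/e2e`.

```python
# Minimal sketch: deploy a suite's stack once per session, then tear it down.
# MyFeatureStack and create_resources() are illustrative assumptions.
import pytest

from tests.e2e.utils.infrastructure import BaseInfrastructure


class MyFeatureStack(BaseInfrastructure):  # hypothetical suite stack
    def create_resources(self) -> None:
        # Builds and registers one Lambda function per handler in this suite
        self.create_lambda_functions()


@pytest.fixture(scope="session", autouse=True)
def infrastructure():
    stack = MyFeatureStack()
    try:
        # deploy() synthesizes and deploys the stack, returning its outputs
        yield stack.deploy()
    finally:
        stack.delete()
```

Because the fixture is session-scoped, every test worker in the suite shares one deployed stack, which is what enables the deploy-once, test-in-parallel behaviour described above.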
@@ -589,7 +592,7 @@ graph TD #### CDK CLI parallelization -For CDK CLI to work with [independent CDK Apps](https://docs.aws.amazon.com/cdk/v2/guide/apps.html), we specify an output directory when synthesizing our stack and deploy from said output directory. +For CDK CLI to work with [independent CDK Apps](https://docs.aws.amazon.com/cdk/v2/guide/apps.html){target="_blank"}, we specify an output directory when synthesizing our stack and deploy from said output directory. ```mermaid flowchart TD @@ -657,6 +660,6 @@ Where: - **`layer_build`**. Contains our Lambda Layer source code built once, used by all stacks independently - **`layer_build.diff`**. Contains a hash on whether our source code has changed to speed up further deployments and E2E tests -Together, all of this allows us to use Pytest like we would for any project, use CDK CLI and its [context methods](https://docs.aws.amazon.com/cdk/v2/guide/context.html#context_methods) (`from_lookup`), and use step-through debugging for a single E2E test without any extra configuration. +Together, all of this allows us to use Pytest like we would for any project, use CDK CLI and its [context methods](https://docs.aws.amazon.com/cdk/v2/guide/context.html#context_methods){target="_blank"} (`from_lookup`), and use step-through debugging for a single E2E test without any extra configuration. -> NOTE: VSCode doesn't support debugging processes spawning sub-processes (like CDK CLI does w/ shell and CDK App). Maybe [this works](https://stackoverflow.com/a/65339352). PyCharm works just fine. +> NOTE: VSCode doesn't support debugging processes spawning sub-processes (like CDK CLI does w/ shell and CDK App). Maybe [this works](https://stackoverflow.com/a/65339352){target="_blank" rel="nofollow"}. PyCharm works just fine. 
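As a rough sketch of the flow above, each CDK App can be synthesized into its own output directory and then deployed from that cloud assembly, so parallel workers never share `cdk.out`. The paths, stack name, and helper function below are illustrative assumptions, not the framework's exact invocation.

```python
# Hedged sketch: isolate each CDK App in its own output directory so test
# workers can synth/deploy in parallel without clobbering each other.
import subprocess


def deploy_isolated(stack_name: str, output_dir: str) -> None:
    # Synthesize the cloud assembly into a per-stack directory
    subprocess.run(["cdk", "synth", "--output", output_dir], check=True)
    # Deploy straight from the pre-synthesized assembly, skipping re-synth
    subprocess.run(
        ["cdk", "deploy", "--app", output_dir, "--require-approval", "never", stack_name],
        check=True,
    )


deploy_isolated("LoggerStack-worker-1", "cdk.out/logger-worker-1")  # illustrative names
```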
diff --git a/docs/requirements.txt b/docs/requirements.txt index 5030fc70306..42d3c2ab26e 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -16,16 +16,10 @@ gitdb==4.0.10 \ --hash=sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a \ --hash=sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7 # via gitpython -gitpython==3.1.31 \ - --hash=sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573 \ - --hash=sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d +gitpython==3.1.32 \ + --hash=sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6 \ + --hash=sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f # via mkdocs-git-revision-date-plugin -importlib-metadata==6.7.0 \ - --hash=sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4 \ - --hash=sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5 - # via - # markdown - # mkdocs jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 @@ -192,7 +186,3 @@ watchdog==3.0.0 \ --hash=sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44 \ --hash=sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33 # via mkdocs -zipp==3.15.0 \ - --hash=sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b \ - --hash=sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556 - # via importlib-metadata diff --git a/docs/roadmap.md b/docs/roadmap.md index 5e9515b9252..c2de6829fb4 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -2,25 +2,56 @@ ## Overview -This is our public roadmap that outlines the high level direction we are working towards, namely [Themes](#themes). We update this document when our priorities change: security and stability is our top priority. +Our public roadmap outlines the high level direction we are working towards, namely [Themes](#themes). We update this document when our priorities change: security and stability is our top priority. -[See our latest list of activities ยป](https://github.com/orgs/awslabs/projects/51/views/1?query=is%3Aopen+sort%3Aupdated-desc){target="_blank"} +!!! info "For most up-to-date information, see our [board of activities](https://github.com/orgs/aws-powertools/projects/3/views/2?query=is%3Aopen+sort%3Aupdated-desc){target="_blank"}." ## Themes -!!! info "Operational Excellence is priority number 1." +Operational Excellence is priority number 1. This means bug fixing, stability, security, customer's support, and governance will take precedence above all else. -Themes are key activities maintainers are focusing on, besides bug reports. These are updated periodically and you can find the latest [under Epics in our public board](https://github.com/orgs/awslabs/projects/51/views/11?query=is%3Aopen+sort%3Aupdated-desc){target="_blank"}. +**What are themes?** -### Increased end-to-end coverage +They are key activities maintainers are focusing on. These are updated periodically and you can find the latest [under Themes in our public board](https://github.com/orgs/aws-powertools/projects/3/views/11?query=is%3Aopen+sort%3Aupdated-desc){target="_blank"}. -We continue to work on increasing end-to-end coverage for all features. Our main challenge is testing contracts for Lambda Event Sources (Parser, Event Source Data Classes) due to the lack of an official JSON schema. 
+### Observability providers

-Some Lambda Event Sources require clusters (e.g., MSK) leading to additional delays of up to 30m in the end-to-end feedback loop. We need a RFC to start discussing viable options, and whether we should publish JSON Schemas from identified contracts.

-### Observability providers
+We want to extend Tracer, Metrics, and Logger to support any [AWS Lambda certified observability partner](https://go.aws/3HtU6CZ){target="_blank"}, along with OpenTelemetry.

+At launch, we will support Datadog since it's the [most requested observability provider](https://github.com/aws-powertools/powertools-lambda-python/issues/1433). OpenTelemetry will be a fast follow-up as we need to decide on a stable solution to the cold start penalty.
+
+!!! tip "Help us identify which observability providers we should integrate next. Open a [feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&projects=&template=feature_request.yml&title=Feature+request%3A+TITLE){target="_blank"} or vote `+1` in existing issues"
+
+**Major updates**
+
+- [x] [Document how customers can use any provider with Logger](https://docs.powertools.aws.dev/lambda/python/latest/core/logger/#observability-providers)
+- [x] [Extend Metrics to add support for any Provider](https://github.com/aws-powertools/powertools-lambda-python/pull/2194)
+- [ ] [Extend Tracer to add support for any Provider](https://github.com/aws-powertools/powertools-lambda-python/issues/2030)
+- [ ] Investigate alternative solution to OpenTelemetry cold start performance
+
+### Sensitive Data Masking
+
+Data Masking will be a new utility to mask/unmask sensitive data using encryption providers. It's the second most voted feature request (behind [Observability Providers](#observability-providers)).
+
+**Major updates**
+
+- [x] [RFC to agree on design and MVP](https://github.com/aws-powertools/powertools-lambda-python/issues/1858)
+- [ ] [POC with AWS KMS as the default provider](https://github.com/aws-powertools/powertools-lambda-python/pull/2197)
+- [ ] Documentation to guide customers on how to bring their own provider (e.g., `ItsDangerous`)
+
+### Revamp Event Handler

-We want to extend Tracer, Metrics, and Logger to support any [observability provider](https://github.com/aws-powertools/powertools-lambda-python/issues/1433){target="_blank"}. We need a RFC to define a contract and to identify two most requested observability providers that we can work with as an initial step.
+Event Handler provides lightweight routing for both [**REST**: Amazon API Gateway, Amazon Elastic Load Balancer and AWS Lambda Function URL](./core/event_handler/api_gateway.md), and [**GraphQL**: AWS AppSync](./core/event_handler/appsync.md).
+
+Based on customer feedback, we want to provide middleware authoring support for cross-cutting concerns. For REST APIs, we are also looking into auto-generating OpenAPI Schemas and a SwaggerUI route. For GraphQL, we are working on supporting batch invocations (N+1 problem) along with partial failure support.
+
+**Major updates**
+
+- [x] [Agree on experience for middleware support](https://github.com/aws-powertools/powertools-lambda-python/issues/953#issuecomment-1450223155)
+- [x] [RFC to outline initial thoughts on OpenAPI integration](https://github.com/aws-powertools/powertools-lambda-python/issues/2421)
+- [ ] MVP for REST middleware
+- [ ] MVP for OpenAPI and SwaggerUI
+- [ ] [MVP for AppSync Batch invoke and partial failure support](https://github.com/aws-powertools/powertools-lambda-python/pull/1998)

 ### Lambda Layer in release notes

@@ -30,19 +61,82 @@ As of V2, we prioritize Lambda Layers being available before release notes are o

 This means we have room to include a JSON map for Lambda Layers and facilitate automation for customers wanting the latest version as soon as it's available.

-### Strict typing
+**Major updates**
+
+- [x] Create secure mechanism to upload signed assets to GitHub Release Notes
+- [ ] Create feature request to agree on JSON structure and asset name
+
+### Office hours
+
+We heard from [customers](https://github.com/aws-powertools/powertools-lambda-python#connect){target="_blank"} that Powertools for AWS Lambda and its community move faster than they are able to keep up with. While documentation and release notes take this into account, customers notice they don't always know advanced tricks, or what other customers tend to do in similar situations.
+
+We want to run monthly office hours to start addressing that, and learn from customers how they're using Powertools and whether they need closer support.
+
+Timezones being tricky, we plan to experiment with an afternoon slot in Central European Time that would also cover the Middle East, the US east coast, and South America. Depending on attendance, we plan to A/B test an Asia-friendly one too.
+
+**Major updates**
+
+- [ ] Decide whether to use Amazon Chime or Zoom (we had audio setup issues on Discord)
+
+### Authentication (SigV4)
+
+[During customer interviews](https://github.com/aws-powertools/powertools-lambda-python#connect){target="_blank"}, we heard that signing requests using [AWS SigV4](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html){target="_blank"} could be easier.
+
+Since JWT is a close second, this new utility would cover higher-level functions to sign and verify requests more easily.
+
+**Major updates**
+
+- [ ] RFC to outline challenges, alternative solutions and desired experience
+- [ ] MVP based off RFC
+
+### Enhanced operational metrics
+
+[Through customer interviews](https://github.com/aws-powertools/powertools-lambda-python#connect){target="_blank"}, [Discord](https://discord.gg/B8zZKbbyET){target="_blank" rel="nofollow"}, and [1:1 customer enablement](https://github.com/aws-powertools/powertools-lambda-python#connect){target="_blank"}, we noticed customers often create the same set of custom operational metrics.
+
+We want to make this easier by extending certain utilities to accept a `metrics` instance and metrics configuration (what metrics to create). It would be opt-in due to costs associated with creating metrics.
+
+!!! question "Got ideas for custom metrics?
Open up a [feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&projects=&template=feature_request.yml&title=Feature+request%3A+TITLE)"
+
+**Major updates**
+
+- [ ] RFC to outline metrics for Batch (_e.g., Failed items, Batch size_)
+- [ ] RFC to outline metrics for Feature flags (_e.g., matched rules_)
+- [ ] RFC to outline metrics for Event Handler (_e.g., validation errors_ )
+- [ ] RFC to outline metrics for Idempotency (_e.g., cache hit_)
+
+### Lambda Layer in GovCloud and China region
+
+We want to investigate security and scaling requirements for these special regions, so they're in sync for every release.
+
+!!! note "Help us prioritize it by reaching out to your AWS representatives or [via email](mailto:aws-lambda-powertools-feedback@amazon.com)."
+
+**Major updates**
+
+- [ ] Gather agency and customer names to prioritize it
+- [ ] Investigate security requirements for special regions
+- [ ] Update CDK Layer construct to include regions
+- [ ] Create additional infrastructure for special regions
+
+### V3

-We want to enable MyPy strict mode against the code base. We need a RFC to identify most critical areas to start, and do so gradually as to not impact new features and enhancements in parallel.
+With Python 3.7 reaching [end-of-life in AWS Lambda by the end of the year](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html), we want to plan some breaking changes. As always, we plan on having ample notice, a detailed upgrade guide, and keeping breaking changes to a minimum to ease the transition (e.g., it took ~7 months from v2 to surpass v1 downloads).

-This also means bringing `typing-extensions` as a runtime dependency to ensure complete coverage across all Python versions. Future wise, we might be able to experiment with [MyPyC](https://github.com/mypyc/mypyc){target="_blank"} to compile less performing parts of the code base as a C-Extension.
+For example, these are on our mind but not settled until we have a public tracker to discuss what they mean in detail.

-### New utilities
-
-With V2 launched, we want to resume working on new utilities, specifically but not limited to the most commonly asked: **(1)** [Sensitive Data Masking](https://github.com/aws-powertools/powertools-lambda-python/issues/1173){target="_blank"}, **(2)** [Integration/End-to-end Testing](https://github.com/aws-powertools/powertools-lambda-python/issues/1169){target="_blank"}, and **(3)** [Event Bridge](https://github.com/aws-powertools/powertools-lambda-python/issues/1168){target="_blank"}.
+- **Parser**: Drop Pydantic v1
+- **Parser**: Deserialize Amazon DynamoDB data types automatically (like Event Source Data Classes)
+- **Parameters**: Increase default `max_age` for `get_secret`
+- **Event Source Data Classes**: Return sane defaults for any property that has `Optional[]` returns
+- **Python 3.7 EOL**: Update PyPi and Layers to only support 3.8
+- **Upgrade tool**: Consider building a CST (Concrete Syntax Tree) tool to ease certain upgrade actions like `pyupgrade` and `django-upgrade`
+- **Batch**: Stop at first error for Amazon DynamoDB Streams and Amazon Kinesis Data Streams (e.g., `stop_on_failure=True`)

-### Open iteration planning
+**Major updates**

-We want to experiment running a bi-weekly audio channel on [Discord](https://discord.gg/B8zZKbbyET){target="_blank"} to help us prioritize backlog in real-time. Depending on attendance, we might switch to run an office hours instead.
+
+- [ ] Create an issue to track breaking changes we consider making
+- [ ] Create a v3 branch to allow early experimentation
+- [ ] Create workflows to allow pre-releases
+- [ ] Create a mechanism to keep ideas for breaking changes somewhere, regardless of v3

 ## Roadmap status definition

@@ -54,13 +148,13 @@ graph LR

 Visual representation

-Within our [public board](https://github.com/orgs/awslabs/projects/51/views/1?query=is%3Aopen+sort%3Aupdated-desc){target="_blank"}, you'll see the following values in the `Status` column:
+Within our [public board](https://github.com/orgs/aws-powertools/projects/3/views/1?query=is%3Aopen+sort%3Aupdated-desc){target="_blank"}, you'll see the following values in the `Status` column:

-* **Ideas**. Incoming and existing feature requests that are not being actively considered yet. These will be reviewed when bandwidth permits.
-* **Backlog**. Accepted feature requests or enhancements that we want to work on.
-* **Working on it**. Features or enhancements we're currently either researching or implementing it.
-* **Coming soon**. Any feature, enhancement, or bug fixes that have been merged and are coming in the next release.
-* **Shipped**. Features or enhancements that are now available in the most recent release.
+- **Ideas**. Incoming and existing feature requests that are not being actively considered yet. These will be reviewed when bandwidth permits.
+- **Backlog**. Accepted feature requests or enhancements that we want to work on.
+- **Working on it**. Features or enhancements we're currently researching or implementing.
+- **Coming soon**. Any features, enhancements, or bug fixes that have been merged and are coming in the next release.
+- **Shipped**. Features or enhancements that are now available in the most recent release.

 > Tasks or issues with empty `Status` will be categorized in upcoming review cycles.

@@ -82,12 +176,12 @@ graph LR

 Our end-to-end mechanism follows four major steps:

-* **Feature Request**. Ideas start with a [feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&template=feature_request.yml&title=Feature+request%3A+TITLE){target="_blank"} to outline their use case at a high level. For complex use cases, maintainers might ask for/write a RFC.
-    * Maintainers review requests based on [project tenets](index.md#tenets){target="_blank"}, customers reaction (👍), and use cases.
-* **Request-for-comments (RFC)**. Design proposals use our [RFC issue template](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=RFC%2Ctriage&template=rfc.yml&title=RFC%3A+TITLE){target="_blank"} to describe its implementation, challenges, developer experience, dependencies, and alternative solutions.
-    * This helps refine the initial idea with community feedback before a decision is made.
-* **Decision**. After carefully reviewing and discussing them, maintainers make a final decision on whether to start implementation, defer or reject it, and update everyone with the next steps.
-* **Implementation**. For approved features, maintainers give priority to the original authors for implementation unless it is a sensitive task that is best handled by maintainers.
+- **Feature Request**.
Ideas start with a [feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&template=feature_request.yml&title=Feature+request%3A+TITLE){target="_blank"} to outline their use case at a high level. For complex use cases, maintainers might ask for/write an RFC.
+    - Maintainers review requests based on [project tenets](index.md#tenets){target="_blank"}, customers' reactions (👍), and use cases.
+- **Request-for-comments (RFC)**. Design proposals use our [RFC issue template](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=RFC%2Ctriage&template=rfc.yml&title=RFC%3A+TITLE){target="_blank"} to describe its implementation, challenges, developer experience, dependencies, and alternative solutions.
+    - This helps refine the initial idea with community feedback before a decision is made.
+- **Decision**. After carefully reviewing and discussing them, maintainers make a final decision on whether to start implementation, defer or reject it, and update everyone with the next steps.
+- **Implementation**. For approved features, maintainers give priority to the original authors for implementation unless it is a sensitive task that is best handled by maintainers.

 ???+ info "See [Maintainers](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/MAINTAINERS.md){target="_blank"} document to understand how we triage issues and pull requests, labels and governance."

diff --git a/docs/security.md b/docs/security.md
index b3475e2c0c0..e8900366f3b 100644
--- a/docs/security.md
+++ b/docs/security.md
@@ -19,7 +19,7 @@ This page describes our security processes and supply chain practices.

 ### Verifying signed builds

-!!! note "Starting from v2.20.0 releases, builds are [reproducible](https://slsa.dev/spec/v0.1/faq#q-what-about-reproducible-builds){target="_blank"} and signed publicly."
+!!! note "Starting from v2.20.0 releases, builds are [reproducible](https://slsa.dev/spec/v0.1/faq#q-what-about-reproducible-builds){target="_blank" rel="nofollow"} and signed publicly."
![SLSA Supply Chain Threats](https://slsa.dev/images/v1.0/supply-chain-threats.svg) @@ -29,7 +29,7 @@ This page describes our security processes and supply chain practices. #### Terminology -We use [SLSA](https://slsa.dev/spec/v1.0/about){target="_blank"} to ensure our builds are reproducible and to adhere to [supply chain security practices](https://slsa.dev/spec/v1.0/threats-overview). +We use [SLSA](https://slsa.dev/spec/v1.0/about){target="_blank" rel="nofollow"} to ensure our builds are reproducible and to adhere to [supply chain security practices](https://slsa.dev/spec/v1.0/threats-overview). Within our [releases page](https://github.com/aws-powertools/powertools-lambda-python/releases), you will notice a new metadata file: `multiple.intoto.jsonl`. It's metadata to describe **where**, **when**, and **how** our build artifacts were produced - or simply, **attestation** in SLSA terminology. @@ -79,7 +79,7 @@ You can do this manually or automated via a shell script. We maintain the latter 5. **Runs SLSA Verifier against attestation**, GitHub Source, and release binary 6. **Cleanup** by removing downloaded files to keep your current directory tidy - ??? info "Expand or [click here](https://github.com/heitorlessa/aws-lambda-powertools-python/blob/refactor/ci-seal/.github/actions/verify-provenance/verify_provenance.sh#L95){target="_blank"} to see the script source code" + ??? info "Expand or [click here](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/.github/actions/verify-provenance/verify_provenance.sh){target="_blank"} to see the script source code" ```bash title=".github/actions/verify-provenance/verify_provenance.sh" ---8<-- ".github/actions/verify-provenance/verify_provenance.sh" diff --git a/docs/tutorial/index.md b/docs/tutorial/index.md index 6674b5bcf4e..bf9afdaf70b 100644 --- a/docs/tutorial/index.md +++ b/docs/tutorial/index.md @@ -20,11 +20,11 @@ Let's clone our sample project before we add one feature at a time. Bootstrap directly via SAM CLI: ```shell - sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.10 --no-tracing + sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.11 --no-tracing ``` ```bash title="Use SAM CLI to initialize the sample project" -sam init --runtime python3.10 --dependency-manager pip --app-template hello-world --name powertools-quickstart +sam init --runtime python3.11 --dependency-manager pip --app-template hello-world --name powertools-quickstart ``` ### Project structure @@ -103,7 +103,7 @@ AWS SAM allows you to execute a serverless application locally by running `sam b 2021-11-26 17:43:08 * Running on http://127.0.0.1:3000/ (Press CTRL+C to quit) ``` -As a result, a local API endpoint will be exposed and you can invoke it using your browser, or your preferred HTTP API client e.g., [Postman](https://www.postman.com/downloads/){target="_blank"}, [httpie](https://httpie.io/){target="_blank"}, etc. +As a result, a local API endpoint will be exposed and you can invoke it using your browser, or your preferred HTTP API client e.g., [Postman](https://www.postman.com/downloads/){target="_blank" rel="nofollow"}, [httpie](https://httpie.io/){target="_blank" rel="nofollow"}, etc. 
```bash title="Invoking our function locally via curl" > curl http://127.0.0.1:3000/hello @@ -226,7 +226,7 @@ For this to work, we could create a new Lambda function to handle incoming reque We could group similar routes and intents, separate read and write operations resulting in fewer functions. It doesn't address the boilerplate routing code, but maybe it will be easier to add additional URLs. ???+ info "Info: You might be already asking yourself about mono vs micro-functions" - If you want a more detailed explanation of these two approaches, head over to the [trade-offs on each approach](../core/event_handler/api_gateway/#considerations){target="_blank"} later. + If you want a more detailed explanation of these two approaches, head over to the [trade-offs on each approach](../core/event_handler/api_gateway.md#considerations){target="_blank"} later. A first attempt at the routing logic might look similar to the following code snippet. @@ -355,7 +355,7 @@ Let's include Powertools for AWS Lambda (Python) as a dependency in `requirement Use `sam build && sam local start-api` and try run it locally again. ???+ note - If you're coming from [Flask](https://flask.palletsprojects.com/en/2.0.x/){target="_blank"}, you will be familiar with this experience already. [Event Handler for API Gateway](../core/event_handler/api_gateway.md){target="_blank"} uses `APIGatewayRestResolver` to give a Flask-like experience while staying true to our tenet `Keep it lean`. + If you're coming from [Flask](https://flask.palletsprojects.com/en/2.0.x/){target="_blank" rel="nofollow"}, you will be familiar with this experience already. [Event Handler for API Gateway](../core/event_handler/api_gateway.md){target="_blank"} uses `APIGatewayRestResolver` to give a Flask-like experience while staying true to our tenet `Keep it lean`. We have added the route annotation as the decorator for our methods. It enables us to use the parameters passed in the request directly, and our responses are simply dictionaries. @@ -364,7 +364,7 @@ Lastly, we used `return app.resolve(event, context)` so Event Handler can resolv From here, we could handle [404 routes](../core/event_handler/api_gateway.md#handling-not-found-routes){target="_blank"}, [error handling](../core/event_handler/api_gateway.md#exception-handling){target="_blank"}, [access query strings, payload](../core/event_handler/api_gateway.md#accessing-request-details){target="_blank"}, etc. ???+ tip - If you'd like to learn how python decorators work under the hood, you can follow [Real Python](https://realpython.com/primer-on-python-decorators/){target="_blank"}'s article. + If you'd like to learn how python decorators work under the hood, you can follow [Real Python](https://realpython.com/primer-on-python-decorators/){target="_blank" rel="nofollow"}'s article. ## Structured Logging @@ -509,7 +509,7 @@ This is how the logs would look like now: ``` We can now search our logs by the request ID to find a specific operation. Additionally, we can also search our logs for function name, Lambda request ID, Lambda function ARN, find out whether an operation was a cold start, etc. - + From here, we could [set specific keys](../core/logger.md#append_keys-method){target="_blank"} to add additional contextual information about a given operation, [log exceptions](../core/logger.md#logging-exceptions){target="_blank"} to easily enumerate them later, [sample debug logs](../core/logger.md#sampling-debug-logs){target="_blank"}, etc. 
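To make these logging steps concrete, here is a minimal sketch using Logger; the service name, appended keys, and handler body are illustrative.

```python
# Minimal sketch of structured logging with Logger; names are illustrative.
from aws_lambda_powertools import Logger

logger = Logger(service="APP")


@logger.inject_lambda_context(log_event=True)  # adds Lambda context keys and logs the incoming event
def lambda_handler(event, context):
    logger.append_keys(order_id=event.get("order_id"))  # emitted on every subsequent log line
    try:
        logger.info("Processing order")
        return {"statusCode": 200}
    except Exception:
        logger.exception("Order processing failed")  # logs the full stack trace, still as JSON
        raise
```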
By having structured logs like this, we can easily search and analyse them in [CloudWatch Logs Insight](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/AnalyzingLogData.html){target="_blank"}. @@ -773,7 +773,7 @@ Another subtle difference is that you can now run your Lambda functions and unit Powertools for AWS Lambda (Python) optimizes for Lambda compute environment. As such, we add these and other common approaches to accelerate your development, so you don't worry about implementing every cross-cutting concern. ???+ tip - You can [opt-out some of these behaviours](../core/tracer/#advanced){target="_blank"} like disabling response capturing, explicitly patching only X modules, etc. + You can [opt-out some of these behaviours](../core/tracer.md#advanced){target="_blank"} like disabling response capturing, explicitly patching only X modules, etc. Repeat the process of building, deploying, and invoking your application via the API endpoint. Within the [AWS X-Ray Console](https://console.aws.amazon.com/xray/home#/traces/){target="_blank"}, you should see a similar view: @@ -794,7 +794,7 @@ From here, you can browse to specific logs in CloudWatch Logs Insight, Metrics D ### Creating metrics Let's add custom metrics to better understand our application and business behavior (e.g. number of reservations, etc.). - + By default, AWS Lambda adds [invocation and performance metrics](https://docs.aws.amazon.com/lambda/latest/dg/monitoring-metrics.html#monitoring-metrics-types){target="_blank"}, and Amazon API Gateway adds [latency and some HTTP metrics](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-metrics-and-dimensions.html#api-gateway-metrics){target="_blank"}. ???+ tip @@ -993,7 +993,7 @@ That's a lot less boilerplate code! Let's break this down: * **L33**: We use `@metrics.log_metrics` decorator to ensure that our metrics are aligned with the EMF output and validated before-hand, like in case we forget to set namespace, or accidentally use a metric unit as a string that doesn't exist in CloudWatch. * **L33**: We also use `capture_cold_start_metric=True` so we don't have to handle that logic either. Note that [Metrics](../core/metrics.md){target="_blank"} does not publish a warm invocation metric (ColdStart=0) for cost reasons. As such, treat the absence (sparse metric) as a non-cold start invocation. -Repeat the process of building, deploying, and invoking your application via the API endpoint a few times to generate metrics - [Artillery](https://www.artillery.io/){target="_blank"} and [K6.io](https://k6.io/open-source){target="_blank"} are quick ways to generate some load. +Repeat the process of building, deploying, and invoking your application via the API endpoint a few times to generate metrics - [Artillery](https://www.artillery.io/){target="_blank" rel="nofollow"} and [K6.io](https://k6.io/open-source){target="_blank" rel="nofollow"} are quick ways to generate some load. Within [CloudWatch Metrics view](https://console.aws.amazon.com/cloudwatch/home#metricsV2:graph=~()){target="_blank}, you should see `MyApp` custom namespace with your custom metrics there and `SuccessfulGreetings` available to graph. @@ -1034,7 +1034,7 @@ If you're curious about how the EMF portion of your function logs look like, you ## Final considerations We covered a lot of ground here and we only scratched the surface of the feature set available within Powertools for AWS Lambda (Python). 
-
+
 When it comes to the observability features ([Tracer](../core/tracer.md){target="_blank"}, [Metrics](../core/metrics.md){target="_blank"}, [Logging](../core/logger.md){target="_blank"}), don't stop there! The goal here is to ensure you can ask arbitrary questions to assess your system's health; these features are only part of the wider story!

 This requires a change in mindset to ensure operational excellence is part of the software development lifecycle.
diff --git a/docs/upgrade.md b/docs/upgrade.md
index 064c6a9657a..d9602da1a53 100644
--- a/docs/upgrade.md
+++ b/docs/upgrade.md
@@ -44,7 +44,7 @@ You can migrate to `BatchProcessor` with the following changes:

 1. If you use **`sqs_batch_decorator`**, change to **`batch_processor`** decorator
 2. If you use **`PartialSQSProcessor`**, change to **`BatchProcessor`**
-3. [Enable **`ReportBatchItemFailures`** in your Lambda Event Source](../utilities/batch#required-resources){target="_blank"}
+3. [Enable **`ReportBatchItemFailures`** in your Lambda Event Source](./utilities/batch.md#required-resources){target="_blank"}
 4. Change your Lambda Handler to return the new response format

 === "[Before] Decorator"
diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md
index 1b9894a0256..ada05766ab4 100644
--- a/docs/utilities/batch.md
+++ b/docs/utilities/batch.md
@@ -5,6 +5,30 @@ description: Utility

 The batch processing utility handles partial failures when processing batches from Amazon SQS, Amazon Kinesis Data Streams, and Amazon DynamoDB Streams.

+```mermaid
+stateDiagram-v2
+    direction LR
+    BatchSource: Amazon SQS <br/><br/> Amazon Kinesis Data Streams <br/><br/> Amazon DynamoDB Streams <br/><br/>
+    LambdaInit: Lambda invocation
+    BatchProcessor: Batch Processor
+    RecordHandler: Record Handler function
+    YourLogic: Your logic to process each batch item
+    LambdaResponse: Lambda response
+
+    BatchSource --> LambdaInit
+
+    LambdaInit --> BatchProcessor
+    BatchProcessor --> RecordHandler
+
+    state BatchProcessor {
+        [*] --> RecordHandler: Your function
+        RecordHandler --> YourLogic
+    }
+
+    RecordHandler --> BatchProcessor: Collect results
+    BatchProcessor --> LambdaResponse: Report items that failed processing
+```
+
 ## Key features

 * Reports batch item failures to reduce number of retries for a record upon errors

@@ -16,12 +40,21 @@ The batch processing utility handles partial failures when processing batches fr

 When using SQS, Kinesis Data Streams, or DynamoDB Streams as a Lambda event source, your Lambda functions are triggered with a batch of messages.

-If your function fails to process any message from the batch, the entire batch returns to your queue or stream. This same batch is then retried until either condition happens first: **a)** your Lambda function returns a successful response, **b)** record reaches maximum retry attempts, or **c)** when records expire.
+If your function fails to process any message from the batch, the entire batch returns to your queue or stream. This same batch is then retried until either condition happens first: **a)** your Lambda function returns a successful response, **b)** record reaches maximum retry attempts, or **c)** records expire.
+
+```mermaid
+journey
+  section Conditions
+    Successful response: 5: Success
+    Maximum retries: 3: Failure
+    Records expired: 1: Failure
+```

-With this utility, batch records are processed individually – only messages that failed to be processed return to the queue or stream for a further retry. This works when two mechanisms are in place:
+This behavior changes when you enable the Report Batch Item Failures feature in your Lambda function event source configuration:

-1. `ReportBatchItemFailures` is set in your SQS, Kinesis, or DynamoDB event source properties
-2. [A specific response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#services-sqs-batchfailurereporting){target="_blank"} is returned so Lambda knows which records should not be deleted during partial responses
+
+* [**SQS queues**](#sqs-standard). Only messages reported as failure will return to the queue for a retry, while successful ones will be deleted.
+* [**Kinesis data streams**](#kinesis-and-dynamodb-streams) and [**DynamoDB streams**](#kinesis-and-dynamodb-streams). A single reported failure will use its sequence number as the stream checkpoint. Multiple reported failures will use the lowest sequence number as the checkpoint.

@@ -32,14 +65,16 @@ With this utility, batch records are processed individually – only messages th

 ## Getting started

-Regardless whether you're using SQS, Kinesis Data Streams or DynamoDB Streams, you must configure your Lambda function event source to use `ReportBatchItemFailures`.
+For this feature to work, you need to **(1)** configure your Lambda function event source to use `ReportBatchItemFailures`, and **(2)** return [a specific response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#services-sqs-batchfailurereporting){target="_blank" rel="nofollow"} to report which records failed to be processed.

-You do not need any additional IAM permissions to use this utility, except for what each event source requires.
+You use your preferred deployment framework to set the correct configuration, while this utility handles returning the correct response.

 ### Required resources

 The remaining sections of the documentation will rely on these samples. For completeness, this demonstrates IAM permissions and Dead Letter Queue where batch records will be sent after 2 retries were attempted.

+!!! note "You do not need any additional IAM permissions to use this utility, except for what each event source requires."
+
 === "SQS"

     ```yaml title="template.yaml" hl_lines="30-31"

@@ -66,15 +101,18 @@ Processing batches from SQS works in three stages:

 2. Define your function to handle each batch record, and use [`SQSRecord`](data_classes.md#sqs){target="_blank"} type annotation for autocompletion
 3. Use **`process_partial_response`** to kick off processing

-???+ info
-    This code example optionally uses Tracer and Logger for completion.
+!!! info "This code example uses Tracer and Logger for completeness."

 === "Recommended"

-    ```python hl_lines="4-9 12 18 28"
+    ```python hl_lines="2-7 10 16 24"
     --8<-- "examples/batch_processing/src/getting_started_sqs.py"
     ```

+    1. **Step 1**. Creates a partial failure batch processor for SQS queues. See [partial failure mechanics for details](#partial-failure-mechanics)
+    2. **Step 2**. Defines a function to receive one record at a time from the batch
+    3. **Step 3**. Kicks off processing
+
 === "As a context manager"

     ```python hl_lines="4-5 8 14 25-26 29"

@@ -103,15 +141,17 @@

 #### FIFO queues

-When using [SQS FIFO queues](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html){target="_blank"}, we will stop processing messages after the first failure, and return all failed and unprocessed messages in `batchItemFailures`.
+When using [SQS FIFO queues](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html){target="_blank" rel="nofollow"}, we will stop processing messages after the first failure, and return all failed and unprocessed messages in `batchItemFailures`.
 This helps preserve the ordering of messages in your queue.

 === "Recommended"

-    ```python hl_lines="5-6 11 27"
+    ```python hl_lines="2-6 9 23"
     --8<-- "examples/batch_processing/src/getting_started_sqs_fifo.py"
     ```

+    1. **Step 1**. Creates a partial failure batch processor for SQS FIFO queues. See [partial failure mechanics for details](#partial-failure-mechanics)
+
 === "As a context manager"

     ```python hl_lines="4 8"

@@ -132,8 +172,7 @@

 Processing batches from Kinesis works in three stages:

 2. Define your function to handle each batch record, and use [`KinesisStreamRecord`](data_classes.md#kinesis-streams){target="_blank"} type annotation for autocompletion
 3. Use **`process_partial_response`** to kick off processing

-???+ info
-    This code example optionally uses Tracer and Logger for completion.
+!!! info "This code example uses Tracer and Logger for completeness."

 === "Recommended"

@@ -141,6 +180,8 @@
     --8<-- "examples/batch_processing/src/getting_started_kinesis.py"
     ```

+    1. **Step 1**. Creates a partial failure batch processor for Kinesis Data Streams. See [partial failure mechanics for details](#partial-failure-mechanics)
+
 === "As a context manager"

     ```python hl_lines="3-5 8 14 23-25 28"

@@ -175,8 +216,7 @@ Processing batches from DynamoDB Streams works in three stages:

 2.
Define your function to handle each batch record, and use [`DynamoDBRecord`](data_classes.md#dynamodb-streams){target="_blank"} type annotation for autocompletion
 3. Use **`process_partial_response`** to kick off processing

-???+ info
-    This code example optionally uses Tracer and Logger for completion.
+!!! info "This code example uses Tracer and Logger for completeness."

 === "Recommended"

@@ -184,6 +224,8 @@
     --8<-- "examples/batch_processing/src/getting_started_dynamodb.py"
     ```

+    1. **Step 1**. Creates a partial failure batch processor for DynamoDB Streams. See [partial failure mechanics for details](#partial-failure-mechanics)
+
 === "As a context manager"

     ```python hl_lines="5-7 10 16 28-30 33"

@@ -210,24 +252,157 @@ Processing batches from DynamoDB Streams works in three stages:

     --8<-- "examples/batch_processing/src/getting_started_dynamodb_event.json"
     ```

+### Error handling
+
+By default, we catch any exception raised by your record handler function. This allows us to **(1)** continue processing the batch, **(2)** collect each batch item that failed processing, and **(3)** return the appropriate response correctly without failing your Lambda function execution.
+
+=== "Sample error handling with custom exception"

+    ```python title="" hl_lines="24"
+    --8<-- "examples/batch_processing/src/getting_started_error_handling.py"
+    ```
+
+    1. Any exception works here. See the [extending BatchProcessor section](#extending-batchprocessor) if you want to override this behavior.
+
+    2. Exceptions raised in `record_handler` will propagate to `process_partial_response`. <br/><br/>
We catch them and include each failed batch item identifier in the response dictionary (see `Sample response` tab).
+
+=== "Sample response"
+
+    ```json
+    --8<-- "examples/batch_processing/src/getting_started_sqs_response.json"
+    ```
+
 ### Partial failure mechanics

-All records in the batch will be passed to this handler for processing, even if exceptions are thrown - Here's the behaviour after completing the batch:
+All batch items will be passed to the record handler for processing, even if exceptions are thrown - Here's the behavior after completing the batch:

 * **All records successfully processed**. We will return an empty list of item failures `{'batchItemFailures': []}`
 * **Partial success with some exceptions**. We will return a list of all item IDs/sequence numbers that failed processing
 * **All records failed to be processed**. We will raise `BatchProcessingError` exception with a list of all exceptions raised when processing

+The following sequence diagrams explain how each Batch processor behaves under different scenarios.
+
+#### SQS Standard
+
+> Read more about [Batch Failure Reporting feature in AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#services-sqs-batchfailurereporting){target="_blank"}.
+
+Sequence diagram to explain how [`BatchProcessor` works](#processing-messages-from-sqs) with SQS Standard queues.
+
+<center>
+```mermaid
+sequenceDiagram
+    autonumber
+    participant SQS queue
+    participant Lambda service
+    participant Lambda function
+    Lambda service->>SQS queue: Poll
+    Lambda service->>Lambda function: Invoke (batch event)
+    Lambda function->>Lambda service: Report some failed messages
+    activate SQS queue
+    Lambda service->>SQS queue: Delete successful messages
+    SQS queue-->>SQS queue: Failed messages return
+    Note over SQS queue,Lambda service: Process repeat
+    deactivate SQS queue
+```
+<i>SQS mechanism with Batch Item Failures</i>
+</center>
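To ground the diagram above in code, here is a minimal sketch of an SQS handler using `BatchProcessor`; the payload handling is illustrative. `process_partial_response` assembles the `batchItemFailures` response shown in the diagram for you.

```python
# Hedged sketch: report failed SQS messages only, so successes are deleted.
from aws_lambda_powertools.utilities.batch import (
    BatchProcessor,
    EventType,
    process_partial_response,
)
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord

processor = BatchProcessor(event_type=EventType.SQS)


def record_handler(record: SQSRecord):
    payload = record.json_body  # assumes a JSON body; raising here marks this item as failed
    ...


def lambda_handler(event, context):
    # Returns e.g. {"batchItemFailures": [{"itemIdentifier": "<messageId>"}]} on partial failure
    return process_partial_response(
        event=event, record_handler=record_handler, processor=processor, context=context
    )
```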
+
+#### SQS FIFO
+
+> Read more about [Batch Failure Reporting feature in AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#services-sqs-batchfailurereporting){target="_blank"}.
+
+Sequence diagram to explain how [`SqsFifoPartialProcessor` works](#fifo-queues) with SQS FIFO queues.
+
+<center>
+```mermaid
+sequenceDiagram
+    autonumber
+    participant SQS queue
+    participant Lambda service
+    participant Lambda function
+    Lambda service->>SQS queue: Poll
+    Lambda service->>Lambda function: Invoke (batch event)
+    activate Lambda function
+    Lambda function-->Lambda function: Process 2 out of 10 batch items
+    Lambda function--xLambda function: Fail on 3rd batch item
+    Lambda function->>Lambda service: Report 3rd batch item and unprocessed messages as failure
+    deactivate Lambda function
+    activate SQS queue
+    Lambda service->>SQS queue: Delete successful messages (1-2)
+    SQS queue-->>SQS queue: Failed messages return (3-10)
+    deactivate SQS queue
+```
+<i>SQS FIFO mechanism with Batch Item Failures</i>
+</center>
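A minimal sketch of the FIFO flow above; swapping in `SqsFifoPartialProcessor` is the only change from the standard queue example, and the handler body is illustrative.

```python
# Hedged sketch: FIFO processing stops at the first failure to keep ordering.
from aws_lambda_powertools.utilities.batch import (
    SqsFifoPartialProcessor,
    process_partial_response,
)
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord

processor = SqsFifoPartialProcessor()  # remaining messages are reported as failures too


def record_handler(record: SQSRecord):
    ...  # raising here fails this message and short-circuits the rest of the batch


def lambda_handler(event, context):
    return process_partial_response(
        event=event, record_handler=record_handler, processor=processor, context=context
    )
```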
+
+#### Kinesis and DynamoDB Streams
+
+> Read more about [Batch Failure Reporting feature](https://docs.aws.amazon.com/lambda/latest/dg/with-kinesis.html#services-kinesis-batchfailurereporting){target="_blank"}.
+
+Sequence diagram to explain how `BatchProcessor` works with both [Kinesis Data Streams](#processing-messages-from-kinesis) and [DynamoDB Streams](#processing-messages-from-dynamodb).
+
+For brevity, we will use `Streams` to refer to either service. For theory on stream checkpoints, see this [blog post](https://aws.amazon.com/blogs/compute/optimizing-batch-processing-with-custom-checkpoints-in-aws-lambda/){target="_blank"}.
+
+<center>
+```mermaid
+sequenceDiagram
+    autonumber
+    participant Streams
+    participant Lambda service
+    participant Lambda function
+    Lambda service->>Streams: Poll latest records
+    Lambda service->>Lambda function: Invoke (batch event)
+    activate Lambda function
+    Lambda function-->Lambda function: Process 2 out of 10 batch items
+    Lambda function--xLambda function: Fail on 3rd batch item
+    Lambda function-->Lambda function: Continue processing batch items (4-10)
+    Lambda function->>Lambda service: Report batch item as failure (3)
+    deactivate Lambda function
+    activate Streams
+    Lambda service->>Streams: Checkpoints to sequence number from 3rd batch item
+    Lambda service->>Streams: Poll records starting from updated checkpoint
+    deactivate Streams
+```
+<i>Kinesis and DynamoDB streams mechanism with single batch item failure</i>
+</center>
+
+The behavior changes slightly when there are multiple item failures. Stream checkpoint is updated to the lowest sequence number reported.
+
+!!! important "Note that the batch item sequence number could be different from batch item number in the illustration."
+
+<center>
+```mermaid
+sequenceDiagram
+    autonumber
+    participant Streams
+    participant Lambda service
+    participant Lambda function
+    Lambda service->>Streams: Poll latest records
+    Lambda service->>Lambda function: Invoke (batch event)
+    activate Lambda function
+    Lambda function-->Lambda function: Process 2 out of 10 batch items
+    Lambda function--xLambda function: Fail on 3-5 batch items
+    Lambda function-->Lambda function: Continue processing batch items (6-10)
+    Lambda function->>Lambda service: Report batch items as failure (3-5)
+    deactivate Lambda function
+    activate Streams
+    Lambda service->>Streams: Checkpoints to lowest sequence number
+    Lambda service->>Streams: Poll records starting from updated checkpoint
+    deactivate Streams
+```
+<i>Kinesis and DynamoDB streams mechanism with multiple batch item failures</i>
+</center>
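For completeness, a minimal sketch of the Streams variant discussed above; only the event type and record class change, and the decoding logic is illustrative.

```python
# Hedged sketch: failed Kinesis records set the checkpoint via their sequence number.
from aws_lambda_powertools.utilities.batch import (
    BatchProcessor,
    EventType,
    process_partial_response,
)
from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import (
    KinesisStreamRecord,
)

processor = BatchProcessor(event_type=EventType.KinesisDataStreams)


def record_handler(record: KinesisStreamRecord):
    data = record.kinesis.data_as_text()  # base64-decoded record payload
    ...


def lambda_handler(event, context):
    return process_partial_response(
        event=event, record_handler=record_handler, processor=processor, context=context
    )
```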
+ ### Processing messages asynchronously -!!! tip "New to AsyncIO? Read this [comprehensive guide first](https://realpython.com/async-io-python/){target="_blank"}." +> New to AsyncIO? Read this [comprehensive guide first](https://realpython.com/async-io-python/){target="_blank" rel="nofollow"}. You can use `AsyncBatchProcessor` class and `async_process_partial_response` function to process messages concurrently. ???+ question "When is this useful?" Your use case might be able to process multiple records at the same time without conflicting with one another. - For example, imagine you need to process multiple loyalty points and incrementally save in a database. While you await the database to confirm your records are saved, you could start processing another request concurrently. + For example, imagine you need to process multiple loyalty points and incrementally save them in the database. While you await the database to confirm your records are saved, you could start processing another request concurrently. The reason this is not the default behaviour is that not all use cases can handle concurrency safely (e.g., loyalty points must be updated in order). @@ -236,9 +411,7 @@ You can use `AsyncBatchProcessor` class and `async_process_partial_response` fun ``` ???+ warning "Using tracer?" - `AsyncBatchProcessor` uses `asyncio.gather` which can cause side effects and reach trace limits at high concurrency. - - See [Tracing concurrent asynchronous functions](../core/tracer.md#concurrent-asynchronous-functions){target="_blank"}. + `AsyncBatchProcessor` uses `asyncio.gather`. This might cause [side effects and reach trace limits at high concurrency](../core/tracer.md#concurrent-asynchronous-functions){target="_blank"}. ## Advanced @@ -250,7 +423,7 @@ Inheritance is importance because we need to access message IDs and sequence num === "SQS" - ```python hl_lines="8 17 27 35" + ```python hl_lines="8 17 21 27 35" --8<-- "examples/batch_processing/src/pydantic_sqs.py" ``` @@ -288,18 +461,42 @@ Inheritance is importance because we need to access message IDs and sequence num Use the context manager to access a list of all returned values from your `record_handler` function. -* **When successful**. We will include a tuple with `success`, the result of `record_handler`, and the batch record -* **When failed**. We will include a tuple with `fail`, exception as a string, and the batch record +* **When successful**. We include a tuple with **1/** `success`, **2/** the result of `record_handler`, and **3/** the batch item +* **When failed**. We include a tuple with **1/** `fail`, **2/** exception as a string, and **3/** the batch item serialized as Event Source Data Class or Pydantic model. -```python hl_lines="28-33" title="Accessing processed messages via context manager" ---8<-- "examples/batch_processing/src/context_manager_access.py" -``` +!!! note "If a Pydantic model fails validation early, we serialize its failure record as Event Source Data Class to be able to collect message ID/sequence numbers etc." + +=== "Accessing raw processed messages" + + ```python hl_lines="29-36" + --8<-- "examples/batch_processing/src/context_manager_access.py" + ``` + + 1. Context manager requires the records list. This is typically handled by `process_partial_response`. + 2. Cause contains `exception` str if failed, or `success` otherwise. + +=== "Sample processed messages" + + ```python + --8<-- "examples/batch_processing/src/context_manager_access_output.txt" + ``` + + 1. 
Sample exception could have been raised from within the `record_handler` function.
+
+=== "Sample processed messages (Pydantic)"
+
+    ```python
+    --8<-- "examples/batch_processing/src/context_manager_access_output_pydantic.txt"
+    ```
+
+    1. Sample when a model fails validation early. <br/><br/>
Batch item (3rd item) is serialized to the respective Event Source Data Class event type.
+    2. Sample when the model validated successfully but another exception was raised during processing.

### Accessing Lambda Context

Within your `record_handler` function, you might need access to the Lambda context to determine how much time you have left before your function times out.

-We can automatically inject the [Lambda context](https://docs.aws.amazon.com/lambda/latest/dg/python-context.html){target="_blank"} into your `record_handler` if your function signature has a parameter named `lambda_context`. When using a context manager, you also need to pass the Lambda context object like in the example below.
+We can automatically inject the [Lambda context](https://docs.aws.amazon.com/lambda/latest/dg/python-context.html){target="_blank" rel="nofollow"} into your `record_handler` if your function signature has a parameter named `lambda_context`. When using a context manager, you also need to pass the Lambda context object, as in the example below.

=== "Recommended"

@@ -325,20 +522,50 @@ You might want to bring custom logic to the existing `BatchProcessor` to slightl

For these scenarios, you can subclass `BatchProcessor` and quickly override `success_handler` and `failure_handler` methods:

-* **`success_handler()`** – Keeps track of successful batch records
-* **`failure_handler()`** – Keeps track of failed batch records
+* **`success_handler()`** is called for each successfully processed record
+* **`failure_handler()`** is called for each failed record
+
+???+ note
+    These functions have a common `record` argument. For backward compatibility reasons, their type is not the same:

-???+ example
-    Let's suppose you'd like to add a metric named `BatchRecordFailures` for each batch record that failed processing

+    - `success_handler`: `record` type is `dict[str, Any]`, the raw record data.
+    - `failure_handler`: `record` type can be an Event Source Data Class or your [Pydantic model](#pydantic-integration). During Pydantic validation errors, we fall back and serialize `record` to Event Source Data Class so we don't break the processing pipeline.

-```python hl_lines="8 9 16-19 22 38" title="Extending failure handling mechanism in BatchProcessor"
---8<-- "examples/batch_processing/src/extending_failure.py"
+Let's suppose you'd like to add metrics to track successes and failures of your batch records.
+
+```python hl_lines="8-10 18-25 28 44" title="Extending success and failure handling in BatchProcessor"
+--8<-- "examples/batch_processing/src/extending_processor_handlers.py"
```

### Create your own partial processor

You can create your own partial batch processor from scratch by inheriting the `BasePartialProcessor` class, and implementing `_prepare()`, `_clean()`, `_process_record()` and `_async_process_record()`, as sketched after the diagram below.
+
+
+
```mermaid
classDiagram
    direction LR
    class BasePartialProcessor {
        <<interface>>
        +_prepare()
        +_clean()
        +_process_record(record: Dict)
        +_async_process_record()
    }

    class YourCustomProcessor {
        +_prepare()
        +_clean()
        +_process_record(record: Dict)
        +_async_process_record()
    }

    BasePartialProcessor <|-- YourCustomProcessor : implement
```
Visual representation to bring your own processor
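To make the contract concrete, here is a minimal, self-contained sketch of a custom processor. The class name `MyPartialProcessor`, the empty response shape, and the sync-only behaviour are illustrative assumptions for this sketch, not library requirements:

```python
import sys
from typing import Any, Dict, List, Optional, Tuple

from aws_lambda_powertools.utilities.batch import BasePartialProcessor


class MyPartialProcessor(BasePartialProcessor):
    # Shape expected by Lambda's partial batch failure reporting;
    # this sketch never reports failures back, which is an assumption.
    DEFAULT_RESPONSE: Dict[str, List[Optional[dict]]] = {"batchItemFailures": []}

    def _prepare(self):
        # Called once before the batch is processed; reset state between invocations
        self.success_messages.clear()

    def _clean(self):
        # Called once after the batch is processed; e.g., flush buffers or close connections
        ...

    def _process_record(self, record: Dict[str, Any]) -> Tuple:
        # Run the registered record handler and route the outcome to the
        # success/failure handlers provided by BasePartialProcessor
        try:
            result = self.handler(record)
            return self.success_handler(record, result)
        except Exception:
            return self.failure_handler(record, sys.exc_info())

    async def _async_process_record(self, record: Dict[str, Any]):
        raise NotImplementedError()  # this sketch is synchronous-only

    def response(self) -> Dict[str, List[Optional[dict]]]:
        return self.DEFAULT_RESPONSE
```

You would then pass an instance of it to `process_partial_response`, exactly like the built-in `BatchProcessor` shown earlier.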
+
* **`_process_record()`** – handles all processing logic for each individual message of a batch, including calling the `record_handler` (self.handler)
* **`_prepare()`** – called once as part of the processor initialization
* **`_clean()`** – teardown logic called once after `_process_record` completes

diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md
index 890f4f0f8c2..b68fcc594fb 100644
--- a/docs/utilities/feature_flags.md
+++ b/docs/utilities/feature_flags.md
@@ -32,9 +32,9 @@ Feature flags are used to modify behaviour without changing the application's co

If you want to learn more about feature flags, their variations and trade-offs, check these articles:

-* [Feature Toggles (aka Feature Flags) - Pete Hodgson](https://martinfowler.com/articles/feature-toggles.html){target="_blank"}
-* [AWS Lambda Feature Toggles Made Simple - Ran Isenberg](https://isenberg-ran.medium.com/aws-lambda-feature-toggles-made-simple-580b0c444233){target="_blank"}
-* [Feature Flags Getting Started - CloudBees](https://www.cloudbees.com/blog/ultimate-feature-flag-guide){target="_blank"}
+* [Feature Toggles (aka Feature Flags) - Pete Hodgson](https://martinfowler.com/articles/feature-toggles.html){target="_blank" rel="nofollow"}
+* [AWS Lambda Feature Toggles Made Simple - Ran Isenberg](https://isenberg-ran.medium.com/aws-lambda-feature-toggles-made-simple-580b0c444233){target="_blank" rel="nofollow"}
+* [Feature Flags Getting Started - CloudBees](https://www.cloudbees.com/blog/ultimate-feature-flag-guide){target="_blank" rel="nofollow"}

???+ note
    AWS AppConfig requires two API calls to fetch configuration for the first time. You can improve latency by consolidating your feature settings in a single [Configuration](https://docs.aws.amazon.com/appconfig/latest/userguide/appconfig-creating-configuration-and-profile.html){target="_blank"}.

@@ -255,8 +255,8 @@ You can also have features enabled only at specific days, for example: enable ch
 ```

???+ info "How should I use timezones?"
-    You can use any [IANA time zone](https://www.iana.org/time-zones){target="_blank"} (as originally specified
-    in [PEP 615](https://peps.python.org/pep-0615/){target="_blank"}) as part of your rules definition.
+    You can use any [IANA time zone](https://www.iana.org/time-zones){target="_blank" rel="nofollow"} (as originally specified
+    in [PEP 615](https://peps.python.org/pep-0615/){target="_blank" rel="nofollow"}) as part of your rules definition.
    Powertools for AWS Lambda (Python) takes care of converting and calculating the correct timestamps for you.

    When using `SCHEDULE_BETWEEN_DATETIME_RANGE`, use timestamps without timezone information, and

@@ -448,7 +448,7 @@ The `action` configuration can have the following values, where the expressions

    | Key                 | Meaning                                                                                   |
    | ------------------- | ----------------------------------------------------------------------------------------- |
    | CURRENT_TIME        | The current time, 24 hour format (HH:mm)                                                  |
-   | CURRENT_DATETIME    | The current datetime ([ISO8601](https://en.wikipedia.org/wiki/ISO_8601){target="_blank"}) |
+   | CURRENT_DATETIME    | The current datetime ([ISO8601](https://en.wikipedia.org/wiki/ISO_8601){target="_blank" rel="nofollow"}) |
    | CURRENT_DAY_OF_WEEK | The current day of the week (Monday-Sunday)                                               |

    If not specified, the timezone used for calculations will be UTC.

@@ -501,7 +501,7 @@ These are the available options for further customization.
| **envelope** | `None` | JMESPath expression to use to extract feature flags configuration from AWS AppConfig configuration | | **max_age** | `5` | Number of seconds to cache feature flags configuration fetched from AWS AppConfig | | **sdk_config** | `None` | [Botocore Config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html){target="_blank"} | -| **jmespath_options** | `None` | For advanced use cases when you want to bring your own [JMESPath functions](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} | +| **jmespath_options** | `None` | For advanced use cases when you want to bring your own [JMESPath functions](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank" rel="nofollow"} | | **logger** | `logging.Logger` | Logger to use for debug. You can optionally supply an instance of Powertools for AWS Lambda (Python) Logger. | === "appconfig_provider_options.py" diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index bfd65d0a1f6..6e5c47af6fc 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -74,7 +74,7 @@ If you're not [changing the default configuration for the DynamoDB persistence l | TTL attribute name | `expiration` | This can only be configured after your table is created if you're using AWS Console | ???+ tip "Tip: You can share a single state table for all functions" - You can reuse the same DynamoDB table to store idempotency state. We add `module_name` and [qualified name for classes and functions](https://peps.python.org/pep-3155/){target="_blank"} in addition to the idempotency key as a hash key. + You can reuse the same DynamoDB table to store idempotency state. We add `module_name` and [qualified name for classes and functions](https://peps.python.org/pep-3155/){target="_blank" rel="nofollow"} in addition to the idempotency key as a hash key. === "AWS Serverless Application Model (SAM) example" @@ -135,7 +135,7 @@ Similar to [idempotent decorator](#idempotent-decorator), you can use `idempoten When using `idempotent_function`, you must tell us which keyword parameter in your function signature has the data we should use via **`data_keyword_argument`**. -!!! tip "We support JSON serializable data, [Python Dataclasses](https://docs.python.org/3.7/library/dataclasses.html){target="_blank"}, [Parser/Pydantic Models](parser.md){target="_blank"}, and our [Event Source Data Classes](./data_classes.md){target="_blank"}." +!!! tip "We support JSON serializable data, [Python Dataclasses](https://docs.python.org/3.7/library/dataclasses.html){target="_blank" rel="nofollow"}, [Parser/Pydantic Models](parser.md){target="_blank"}, and our [Event Source Data Classes](./data_classes.md){target="_blank"}." ???+ warning "Limitation" Make sure to call your decorated function using keyword arguments. @@ -496,15 +496,15 @@ When using DynamoDB as a persistence layer, you can alter the attribute names by Idempotent decorator can be further configured with **`IdempotencyConfig`** as seen in the previous example. 
These are the available options for further configuration

-| Parameter                       | Default | Description                                                                                                                                                |
-| ------------------------------- | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| **event_key_jmespath**          | `""`    | JMESPath expression to extract the idempotency key from the event record using [built-in functions](/utilities/jmespath_functions){target="_blank"}       |
-| **payload_validation_jmespath** | `""`    | JMESPath expression to validate whether certain parameters have changed in the event while the event payload                                              |
-| **raise_on_no_idempotency_key** | `False` | Raise exception if no idempotency key was found in the request                                                                                             |
-| **expires_after_seconds**       | 3600    | The number of seconds to wait before a record is expired                                                                                                   |
-| **use_local_cache**             | `False` | Whether to locally cache idempotency results                                                                                                               |
-| **local_cache_max_items**       | 256     | Max number of items to store in local cache                                                                                                                |
-| **hash_function**               | `md5`   | Function to use for calculating hashes, as provided by [hashlib](https://docs.python.org/3/library/hashlib.html){target="_blank"} in the standard library. |
+| Parameter                       | Default | Description                                                                                                                                                               |
+| ------------------------------- | ------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| **event_key_jmespath**          | `""`    | JMESPath expression to extract the idempotency key from the event record using [built-in functions](./jmespath_functions.md#built-in-jmespath-functions){target="_blank"} |
+| **payload_validation_jmespath** | `""`    | JMESPath expression to validate whether certain parameters have changed in the event while the idempotency key remains the same                                           |
+| **raise_on_no_idempotency_key** | `False` | Raise exception if no idempotency key was found in the request                                                                                                            |
+| **expires_after_seconds**       | 3600    | The number of seconds to wait before a record is expired                                                                                                                  |
+| **use_local_cache**             | `False` | Whether to locally cache idempotency results                                                                                                                              |
+| **local_cache_max_items**       | 256     | Max number of items to store in local cache                                                                                                                               |
+| **hash_function**               | `md5`   | Function to use for calculating hashes, as provided by [hashlib](https://docs.python.org/3/library/hashlib.html){target="_blank" rel="nofollow"} in the standard library. |

### Handling concurrent executions with the same payload

@@ -635,7 +635,7 @@ This means that we will raise **`IdempotencyKeyError`** if the evaluation of **`
 ```

### Customizing boto configuration
-
+
The **`boto_config`** and **`boto3_session`** parameters enable you to pass in a custom [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html){target="_blank"} or a custom [boto3 session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html){target="_blank"} when constructing the persistence store.

=== "Custom session"

@@ -744,7 +744,7 @@ The idempotency utility provides several routes to test your code.

### Disabling the idempotency utility
When testing your code, you may wish to disable the idempotency logic altogether and focus on testing your business logic. To do this, you can set the environment variable `POWERTOOLS_IDEMPOTENCY_DISABLED`
-with a truthy value. If you prefer setting this for specific tests, and are using Pytest, you can use [monkeypatch](https://docs.pytest.org/en/latest/monkeypatch.html){target="_blank"} fixture:
+with a truthy value. 
If you prefer setting this for specific tests, and are using Pytest, you can use [monkeypatch](https://docs.pytest.org/en/latest/monkeypatch.html){target="_blank" rel="nofollow"} fixture: === "test_disabling_idempotency_utility.py" diff --git a/docs/utilities/jmespath_functions.md b/docs/utilities/jmespath_functions.md index 41b6b240adc..881b4c926f7 100644 --- a/docs/utilities/jmespath_functions.md +++ b/docs/utilities/jmespath_functions.md @@ -8,7 +8,7 @@ description: Utility ???+ tip JMESPath is a query language for JSON used by AWS CLI, AWS Python SDK, and Powertools for AWS Lambda (Python). -Built-in [JMESPath](https://jmespath.org/){target="_blank"} Functions to easily deserialize common encoded JSON payloads in Lambda functions. +Built-in [JMESPath](https://jmespath.org/){target="_blank" rel="nofollow"} Functions to easily deserialize common encoded JSON payloads in Lambda functions. ## Key features @@ -30,7 +30,7 @@ Powertools for AWS Lambda (Python) also have utilities like [validation](validat ### Extracting data -You can use the `extract_data_from_envelope` function with any [JMESPath expression](https://jmespath.org/tutorial.html){target="_blank"}. +You can use the `extract_data_from_envelope` function with any [JMESPath expression](https://jmespath.org/tutorial.html){target="_blank" rel="nofollow"}. ???+ tip Another common use case is to fetch deeply nested data, filter, flatten, and more. @@ -187,9 +187,9 @@ This sample will decompress and decode base64 data from Cloudwatch Logs, then us ???+ warning This should only be used for advanced use cases where you have special formats not covered by the built-in functions. -For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} and any additional option via `jmespath_options` param. To keep Powertools for AWS Lambda (Python) built-in functions, you can subclass from `PowertoolsFunctions`. +For special binary formats that you want to decode before applying JSON Schema validation, you can bring your own [JMESPath function](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank" rel="nofollow"} and any additional option via `jmespath_options` param. To keep Powertools for AWS Lambda (Python) built-in functions, you can subclass from `PowertoolsFunctions`. -Here is an example of how to decompress messages using [zlib](https://docs.python.org/3/library/zlib.html){target="_blank"}: +Here is an example of how to decompress messages using [zlib](https://docs.python.org/3/library/zlib.html){target="_blank" rel="nofollow"}: === "powertools_custom_jmespath_function.py" diff --git a/docs/utilities/middleware_factory.md b/docs/utilities/middleware_factory.md index c726d939e0a..35e5453af65 100644 --- a/docs/utilities/middleware_factory.md +++ b/docs/utilities/middleware_factory.md @@ -68,6 +68,16 @@ You can also have your own keyword arguments after the mandatory arguments. 
--8<-- "examples/middleware_factory/src/getting_started_middleware_with_params_payload.json"
    ```

+### Environment variables
+
+The following environment variable is available to configure the middleware factory at a global scope:
+
+| Setting              | Description                                       | Environment variable           | Default |
+|----------------------|---------------------------------------------------|--------------------------------|---------|
+| **Middleware Trace** | Creates a sub-segment for each custom middleware. | `POWERTOOLS_TRACE_MIDDLEWARES` | `false` |
+
+You can also enable this on a per-middleware basis via the [`trace_execution` parameter](#tracing-middleware-execution), which takes precedence over the environment variable value.
+
## Advanced

For advanced use cases, you can instantiate [Tracer](../core/tracer.md){target="_blank"} inside your middleware, and add annotations as well as metadata for additional operational insights.

@@ -92,7 +102,7 @@ If you are making use of [Tracer](../core/tracer.md){target="_blank"}, you can t
 This makes use of an existing Tracer instance that you may have initialized anywhere in your code.

???+ warning
-    You must [enable Active Tracing](../core/tracer/#permissions){target="_blank"} in your Lambda function when using this feature, otherwise Lambda cannot send traces to XRay.
+    You must [enable Active Tracing](../core/tracer.md#permissions){target="_blank"} in your Lambda function when using this feature, otherwise Lambda cannot send traces to X-Ray.

=== "getting_started_middleware_tracer_function.py"
    ```python hl_lines="8 14 15 36"

@@ -147,5 +157,5 @@ In the example below, we create a Middleware with the following features:

* Use `trace_execution` to quickly understand the performance impact of your middlewares, and reduce or merge tasks when necessary
* When nesting multiple middlewares, always return the handler with event and context, or response
-* Keep in mind [Python decorators execution order](https://realpython.com/primer-on-python-decorators/#nesting-decorators){target="_blank"}. Lambda handler is actually called once (top-down)
+* Keep in mind [Python decorators execution order](https://realpython.com/primer-on-python-decorators/#nesting-decorators){target="_blank" rel="nofollow"}. The Lambda handler is actually called once (top-down)
* Async middlewares are not supported

diff --git a/docs/utilities/parameters.md b/docs/utilities/parameters.md
index 97a7ab3156d..d2d80230c77 100644
--- a/docs/utilities/parameters.md
+++ b/docs/utilities/parameters.md
@@ -107,6 +107,17 @@ The following will retrieve the latest version and store it in the cache.
 --8<-- "examples/parameters/src/getting_started_appconfig.py"
 ```

+### Environment variables
+
+The following environment variables are available to configure the parameter utility at a global scope:
+
+| Setting         | Description                                                                 | Environment variable                | Default |
+|-----------------|-----------------------------------------------------------------------------|-------------------------------------|---------|
+| **Max Age**     | Adjusts how long values are kept in cache (in seconds).                     | `POWERTOOLS_PARAMETERS_MAX_AGE`     | `5`     |
+| **SSM Decrypt** | Sets whether to decrypt values retrieved from the AWS SSM Parameter Store. 
| `POWERTOOLS_PARAMETERS_SSM_DECRYPT` | `false` |
+
+You can also override these values per call: the [`max_age` parameter](#adjusting-cache-ttl) takes precedence over `POWERTOOLS_PARAMETERS_MAX_AGE`, and the [`decrypt` parameter](#ssmprovider) over `POWERTOOLS_PARAMETERS_SSM_DECRYPT`.
+
## Advanced

### Adjusting cache TTL

@@ -304,7 +315,7 @@ You can create your own custom parameter store provider by inheriting the `BaseP

All transformation and caching logic is handled by the `get()` and `get_multiple()` methods from the base provider class.

-Here are two examples of implementing a custom parameter store. One using an external service like [Hashicorp Vault](https://www.vaultproject.io/){target="_blank"}, a widely popular key-value and secret storage and the other one using [Amazon S3](https://aws.amazon.com/s3/?nc1=h_ls){target="_blank"}, a popular object storage.
+Here are two examples of implementing a custom parameter store: one using an external service like [HashiCorp Vault](https://www.vaultproject.io/){target="_blank" rel="nofollow"}, a widely popular key-value and secrets store, and the other using [Amazon S3](https://aws.amazon.com/s3/?nc1=h_ls){target="_blank"}, a popular object store.

=== "working_with_own_provider_vault.py"
    ```python hl_lines="5 13 20 24"

@@ -458,7 +469,7 @@ The **`config`** , **`boto3_session`**, and **`boto3_client`** parameters enabl

### Mocking parameter values

-For unit testing your applications, you can mock the calls to the parameters utility to avoid calling AWS APIs. This can be achieved in a number of ways - in this example, we use the [pytest monkeypatch fixture](https://docs.pytest.org/en/latest/how-to/monkeypatch.html){target="_blank"} to patch the `parameters.get_parameter` method:
+For unit testing your applications, you can mock the calls to the parameters utility to avoid calling AWS APIs. This can be achieved in a number of ways - in this example, we use the [pytest monkeypatch fixture](https://docs.pytest.org/en/latest/how-to/monkeypatch.html){target="_blank" rel="nofollow"} to patch the `parameters.get_parameter` method:

=== "test_single_mock.py"
    ```python hl_lines="4 8"

@@ -478,8 +489,8 @@ If we need to use this pattern across multiple tests, we can avoid repetition by
 ```

Alternatively, if we need more fully featured mocking (for example checking the arguments passed to `get_parameter`), we
-can use [unittest.mock](https://docs.python.org/3/library/unittest.mock.html){target="_blank"} from the python stdlib instead of pytest's `monkeypatch` fixture. In this example, we use the
-[patch](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.patch){target="_blank"} decorator to replace the `aws_lambda_powertools.utilities.parameters.get_parameter` function with a [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock){target="_blank"}
+can use [unittest.mock](https://docs.python.org/3/library/unittest.mock.html){target="_blank" rel="nofollow"} from the Python stdlib instead of pytest's `monkeypatch` fixture. In this example, we use the
+[patch](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.patch){target="_blank" rel="nofollow"} decorator to replace the `aws_lambda_powertools.utilities.parameters.get_parameter` function with a [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock){target="_blank" rel="nofollow"}
object named `get_parameter_mock`. 
=== "test_with_monkeypatch.py" diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index d98835a8381..846460e43d2 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -4,7 +4,7 @@ description: Utility --- -This utility provides data parsing and deep validation using [Pydantic](https://pydantic-docs.helpmanual.io/){target="_blank"}. +This utility provides data parsing and deep validation using [Pydantic](https://pydantic-docs.helpmanual.io/){target="_blank" rel="nofollow"}. ## Key features @@ -29,7 +29,7 @@ Add `aws-lambda-powertools[parser]` as a dependency in your preferred tool: _e.g This will increase the compressed package size by >10MB due to the Pydantic dependency. To reduce the impact on the package size at the expense of 30%-50% of its performance [Pydantic can also be - installed without binary files](https://pydantic-docs.helpmanual.io/install/#performance-vs-package-size-trade-off){target="_blank"}: + installed without binary files](https://pydantic-docs.helpmanual.io/install/#performance-vs-package-size-trade-off){target="_blank" rel="nofollow"}: Pip example: `SKIP_CYTHON=1 pip install --no-binary pydantic aws-lambda-powertools[parser]` @@ -260,13 +260,13 @@ for order_item in ret.detail.items: ???+ tip When extending a `string` field containing JSON, you need to wrap the field - with [Pydantic's Json Type](https://pydantic-docs.helpmanual.io/usage/types/#json-type){target="_blank"}: + with [Pydantic's Json Type](https://pydantic-docs.helpmanual.io/usage/types/#json-type){target="_blank" rel="nofollow"}: ```python hl_lines="14 18-19" --8<-- "examples/parser/src/extending_built_in_models_with_json_mypy.py" ``` - Alternatively, you could use a [Pydantic validator](https://pydantic-docs.helpmanual.io/usage/validators/){target="_blank"} to transform the JSON string into a dict before the mapping: + Alternatively, you could use a [Pydantic validator](https://pydantic-docs.helpmanual.io/usage/validators/){target="_blank" rel="nofollow"} to transform the JSON string into a dict before the mapping: ```python hl_lines="18-20 24-25" --8<-- "examples/parser/src/extending_built_in_models_with_json_validator.py" @@ -511,14 +511,14 @@ parse(model=UserModel, event=payload) ### Advanced use cases ???+ tip "Tip: Looking to auto-generate models from JSON, YAML, JSON Schemas, OpenApi, etc?" - Use Koudai Aono's [data model code generation tool for Pydantic](https://github.com/koxudaxi/datamodel-code-generator){target="_blank"} + Use Koudai Aono's [data model code generation tool for Pydantic](https://github.com/koxudaxi/datamodel-code-generator){target="_blank" rel="nofollow"} -There are number of advanced use cases well documented in Pydantic's doc such as creating [immutable models](https://pydantic-docs.helpmanual.io/usage/models/#faux-immutability){target="_blank"}, [declaring fields with dynamic values](https://pydantic-docs.helpmanual.io/usage/models/#field-with-dynamic-default-value){target="_blank"}. +There are number of advanced use cases well documented in Pydantic's doc such as creating [immutable models](https://pydantic-docs.helpmanual.io/usage/models/#faux-immutability){target="_blank" rel="nofollow"}, [declaring fields with dynamic values](https://pydantic-docs.helpmanual.io/usage/models/#field-with-dynamic-default-value){target="_blank" rel="nofollow"}. 
???+ tip "Pydantic helper functions" - Pydantic also offers [functions](https://pydantic-docs.helpmanual.io/usage/models/#helper-functions){target="_blank"} to parse models from files, dicts, string, etc. + Pydantic also offers [functions](https://pydantic-docs.helpmanual.io/usage/models/#helper-functions){target="_blank" rel="nofollow"} to parse models from files, dicts, string, etc. -Two possible unknown use cases are Models and exception' serialization. Models have methods to [export them](https://pydantic-docs.helpmanual.io/usage/exporting_models/){target="_blank"} as `dict`, `JSON`, `JSON Schema`, and Validation exceptions can be exported as JSON. +Two possible unknown use cases are Models and exception' serialization. Models have methods to [export them](https://pydantic-docs.helpmanual.io/usage/exporting_models/){target="_blank" rel="nofollow"} as `dict`, `JSON`, `JSON Schema`, and Validation exceptions can be exported as JSON. ```python hl_lines="21 28-31" title="Converting data models in various formats" from aws_lambda_powertools.utilities import Logger diff --git a/docs/utilities/streaming.md b/docs/utilities/streaming.md index c89c0cf8b86..49a114235af 100644 --- a/docs/utilities/streaming.md +++ b/docs/utilities/streaming.md @@ -87,15 +87,15 @@ We provide popular built-in transformations that you can apply against your stre | Name | Description | Class name | | -------- | ----------------------------------------------------------------------------------------------------------------- | ------------- | -| **Gzip** | Gunzips the stream of data using the [gzip library](https://docs.python.org/3/library/gzip.html){target="_blank"} | GzipTransform | -| **Zip** | Exposes the stream as a [ZipFile object](https://docs.python.org/3/library/zipfile.html){target="_blank"} | ZipTransform | +| **Gzip** | Gunzips the stream of data using the [gzip library](https://docs.python.org/3/library/gzip.html){target="_blank" rel="nofollow"} | GzipTransform | +| **Zip** | Exposes the stream as a [ZipFile object](https://docs.python.org/3/library/zipfile.html){target="_blank" rel="nofollow"} | ZipTransform | | **CSV** | Parses each CSV line as a CSV object, returning dictionary objects | CsvTransform | ## Advanced ### Skipping or reading backwards -`S3Object` implements [Python I/O interface](https://docs.python.org/3/tutorial/inputoutput.html){target="_blank"}. This means you can use `seek` to start reading contents of your file from any particular position, saving you processing time. +`S3Object` implements [Python I/O interface](https://docs.python.org/3/tutorial/inputoutput.html){target="_blank" rel="nofollow"}. This means you can use `seek` to start reading contents of your file from any particular position, saving you processing time. 
#### Reading backwards

@@ -133,9 +133,9 @@ We will propagate additional options to the underlying implementation for each t

| Name              | Available options                                                                                      |
| ----------------- | -------------------------------------------------------------------------------------------------------- |
-| **GzipTransform** | [GzipFile constructor](https://docs.python.org/3/library/gzip.html#gzip.GzipFile){target="_blank"}    |
-| **ZipTransform**  | [ZipFile constructor](https://docs.python.org/3/library/zipfile.html#zipfile.ZipFile){target="_blank"} |
-| **CsvTransform**  | [DictReader constructor](https://docs.python.org/3/library/csv.html#csv.DictReader){target="_blank"}  |
+| **GzipTransform** | [GzipFile constructor](https://docs.python.org/3/library/gzip.html#gzip.GzipFile){target="_blank" rel="nofollow"}    |
+| **ZipTransform**  | [ZipFile constructor](https://docs.python.org/3/library/zipfile.html#zipfile.ZipFile){target="_blank" rel="nofollow"} |
+| **CsvTransform**  | [DictReader constructor](https://docs.python.org/3/library/csv.html#csv.DictReader){target="_blank" rel="nofollow"}  |

For instance, take `ZipTransform`. You can use the `compression` parameter if you want to unzip an S3 object compressed with `LZMA`.

diff --git a/docs/utilities/validation.md b/docs/utilities/validation.md
index 1eead47a970..1b569ddc14c 100644
--- a/docs/utilities/validation.md
+++ b/docs/utilities/validation.md
@@ -23,9 +23,9 @@ You can validate inbound and outbound events using [`validator` decorator](#vali

You can also use the standalone `validate` function, if you want more control over the validation process such as handling a validation error.

???+ tip "Tip: Using JSON Schemas for the first time?"
-    Check this [step-by-step tour in the official JSON Schema website](https://json-schema.org/learn/getting-started-step-by-step.html){target="_blank"}.
+    Check this [step-by-step tour in the official JSON Schema website](https://json-schema.org/learn/getting-started-step-by-step.html){target="_blank" rel="nofollow"}.

-    We support any JSONSchema draft supported by [fastjsonschema](https://horejsek.github.io/python-fastjsonschema/){target="_blank"} library.
+    We support any JSONSchema draft supported by the [fastjsonschema](https://horejsek.github.io/python-fastjsonschema/){target="_blank" rel="nofollow"} library.

???+ warning
    Both the `validator` decorator and the `validate` standalone function expect your JSON Schema to be a **dictionary**, not a filename.

@@ -91,7 +91,7 @@ You can also gracefully handle schema validation errors by catching `SchemaValid

You might want to validate only a portion of your event - this is what the `envelope` parameter is for.

-Envelopes are [JMESPath expressions](https://jmespath.org/tutorial.html){target="_blank"} to extract a portion of JSON you want before applying JSON Schema validation.
+Envelopes are [JMESPath expressions](https://jmespath.org/tutorial.html){target="_blank" rel="nofollow"} to extract a portion of JSON you want before applying JSON Schema validation.
Here is a sample custom EventBridge event, where we only validate what's inside the `detail` key: @@ -112,8 +112,9 @@ Here is a sample custom EventBridge event, where we only validate what's inside ```json --8<-- "examples/validation/src/getting_started_validator_unwrapping_payload.json" ``` - -This is quite powerful because you can use JMESPath Query language to extract records from [arrays](https://jmespath.org/tutorial.html#list-and-slice-projections){target="_blank"}, combine [pipe](https://jmespath.org/tutorial.html#pipe-expressions){target="_blank"} and [function expressions](https://jmespath.org/tutorial.html#functions){target="_blank"}. + +This is quite powerful because you can use JMESPath Query language to extract records from [arrays](https://jmespath.org/tutorial.html#list-and-slice-projections){target="_blank" rel="nofollow"}, combine [pipe](https://jmespath.org/tutorial.html#pipe-expressions){target="_blank" rel="nofollow"} and [function expressions](https://jmespath.org/tutorial.html#functions){target="_blank" rel="nofollow"}. + When combined, these features allow you to extract what you need before validating the actual payload. @@ -141,23 +142,23 @@ We provide built-in envelopes to easily extract the payload from popular event s Here is a handy table with built-in envelopes along with their JMESPath expressions in case you want to build your own. -| Envelope | JMESPath expression | -| --------------------------------- | ------------------------------------------------------------------------ | -| **`API_GATEWAY_HTTP`** | `powertools_json(body)` | -| **`API_GATEWAY_REST`** | `powertools_json(body)` | -| **`CLOUDWATCH_EVENTS_SCHEDULED`** | `detail` | -| **`CLOUDWATCH_LOGS`** | `awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]` | -| **`EVENTBRIDGE`** | `detail` | -| **`KINESIS_DATA_STREAM`** | `Records[*].kinesis.powertools_json(powertools_base64(data))` | -| **`SNS`** | `Records[0].Sns.Message | powertools_json(@)` | -| **`SQS`** | `Records[*].powertools_json(body)` | +| Envelope | JMESPath expression | +| --------------------------------- | ------------------------------------------------------------- | +| **`API_GATEWAY_HTTP`** | `powertools_json(body)` | +| **`API_GATEWAY_REST`** | `powertools_json(body)` | +| **`CLOUDWATCH_EVENTS_SCHEDULED`** | `detail` | +| **`CLOUDWATCH_LOGS`** | `awslogs.powertools_base64_gzip(data) | powertools_json(@).logEvents[*]` | +| **`EVENTBRIDGE`** | `detail` | +| **`KINESIS_DATA_STREAM`** | `Records[*].kinesis.powertools_json(powertools_base64(data))` | +| **`SNS`** | `Records[0].Sns.Message | powertools_json(@)` | +| **`SQS`** | `Records[*].powertools_json(body)` | ## Advanced ### Validating custom formats ???+ note - JSON Schema DRAFT 7 [has many new built-in formats](https://json-schema.org/understanding-json-schema/reference/string.html#format){target="_blank"} such as date, time, and specifically a regex format which might be a better replacement for a custom format, if you do have control over the schema. + JSON Schema DRAFT 7 [has many new built-in formats](https://json-schema.org/understanding-json-schema/reference/string.html#format){target="_blank" rel="nofollow"} such as date, time, and specifically a regex format which might be a better replacement for a custom format, if you do have control over the schema. JSON Schemas with custom formats like `awsaccountid` will fail validation. 
If you have these, you can pass them using the `formats` parameter:

@@ -193,7 +194,7 @@ For each format defined in a dictionary key, you must use a regex, or a function

### Built-in JMESPath functions

You might have events or responses that contain non-encoded JSON, which you need to decode before validating them.
-
+
You can use our built-in [JMESPath functions](./jmespath_functions.md){target="_blank"} within your expressions to do exactly that to [deserialize JSON Strings](./jmespath_functions.md#powertools_json-function){target="_blank"}, [decode base64](./jmespath_functions.md#powertools_base64-function){target="_blank"}, and [decompress gzip data](./jmespath_functions.md#powertools_base64_gzip-function){target="_blank"}.

???+ info

diff --git a/docs/we_made_this.md b/docs/we_made_this.md
index 3f3257f40e5..a1229ec99aa 100644
--- a/docs/we_made_this.md
+++ b/docs/we_made_this.md
@@ -7,19 +7,19 @@ description: Blog posts, tutorials, and videos about Powertools for AWS Lambda (

This space is dedicated to highlighting our awesome community content featuring Powertools for AWS Lambda (Python) 🙏!

-!!! info "[Get your content featured here](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=community-content&template=share_your_work.yml&title=%5BI+Made+This%5D%3A+%3CTITLE%3E){target="_blank"}!"
+!!! info "[Get your content featured here](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=community-content&template=share_your_work.yml&title=%5BI+Made+This%5D%3A+%3CTITLE%3E){target="_blank" rel="nofollow"}!"

## Connect

-[![Join our Discord](https://dcbadge.vercel.app/api/server/B8zZKbbyET)](https://discord.gg/B8zZKbbyET){target="_blank"}
+[![Join our Discord](https://dcbadge.vercel.app/api/server/B8zZKbbyET)](https://discord.gg/B8zZKbbyET){target="_blank" rel="nofollow"}

-Join us on [Discord](https://discord.gg/B8zZKbbyET){target="_blank"} to connect with the Powertools for AWS Lambda (Python) community 👋. Ask questions, learn from each other, contribute, hang out with key contributors, and more!
+Join us on [Discord](https://discord.gg/B8zZKbbyET){target="_blank" rel="nofollow"} to connect with the Powertools for AWS Lambda (Python) community 👋. Ask questions, learn from each other, contribute, hang out with key contributors, and more!

## Blog posts

### AWS Lambda Cookbook — Following best practices with Lambda Powertools

-> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank"}**
+> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank" rel="nofollow"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank" rel="nofollow"}**

A collection of articles explaining in detail how Lambda Powertools helps with a Serverless adoption strategy and its challenges.

@@ -41,66 +41,66 @@ A collection of articles explaining in detail how Lambda Powertools helps with a

### Making all your APIs idempotent

-> **Author: [Michael Walmsley](https://twitter.com/walmsles){target="_blank"}** :material-twitter:
+> **Author: [Michael Walmsley](https://twitter.com/walmsles){target="_blank" rel="nofollow"}** :material-twitter:

This article dives into what idempotency means for APIs, their use cases, and how to implement them.
-* [blog.walmsles.io/making-all-your-apis-idempotent](https://blog.walmsles.io/making-all-your-apis-idempotent){target="_blank"}
+* [blog.walmsles.io/making-all-your-apis-idempotent](https://blog.walmsles.io/making-all-your-apis-idempotent){target="_blank" rel="nofollow"}

### Deep dive on Lambda Powertools Idempotency feature

-> **Author: [Michael Walmsley](https://twitter.com/walmsles){target="_blank"}** :material-twitter:
+> **Author: [Michael Walmsley](https://twitter.com/walmsles){target="_blank" rel="nofollow"}** :material-twitter:

This article describes how to best calculate your idempotency token, implementation details, and how to handle idempotency in RESTful APIs.

-* [blog.walmsles.io/aws-lambda-powertools-idempotency-a-deeper-dive](https://blog.walmsles.io/aws-lambda-powertools-idempotency-a-deeper-dive){target="_blank"}
+* [blog.walmsles.io/aws-lambda-powertools-idempotency-a-deeper-dive](https://blog.walmsles.io/aws-lambda-powertools-idempotency-a-deeper-dive){target="_blank" rel="nofollow"}

### Developing AWS Lambda functions with AWS Lambda Powertools

-> **Author: [Stephan Huber](https://linkedin.com/in/sthuber90){target="_blank"}** :material-linkedin:
+> **Author: [Stephan Huber](https://linkedin.com/in/sthuber90){target="_blank" rel="nofollow"}** :material-linkedin:

This article walks through how to add Powertools to an existing project, covering Tracer, Logger, Metrics, and JSON Schema Validation.

-* [globaldatanet.com/tech-blog/develop-lambda-functions-with-aws-lambda-powertools](https://globaldatanet.com/tech-blog/develop-lambda-functions-with-aws-lambda-powertools){target="_blank"}
+* [globaldatanet.com/tech-blog/develop-lambda-functions-with-aws-lambda-powertools](https://globaldatanet.com/tech-blog/develop-lambda-functions-with-aws-lambda-powertools){target="_blank" rel="nofollow"}

### Speed-up event-driven projects

-> **Author: [Joris Conijn](https://www.linkedin.com/in/jorisconijn){target="_blank"}** :material-linkedin:
+> **Author: [Joris Conijn](https://www.linkedin.com/in/jorisconijn){target="_blank" rel="nofollow"}** :material-linkedin:

This article walks through a sample AWS EventBridge cookiecutter template presented at the AWS Community Day Netherlands 2022.
-* [binx.io/2022/10/11/speedup-event-driven-projects/](https://binx.io/2022/10/11/speedup-event-driven-projects/){target="_blank"}
-* [Slides](https://www.slideshare.net/JorisConijn/let-codecommit-work-for-you){target="_blank"}
+* [binx.io/2022/10/11/speedup-event-driven-projects/](https://binx.io/2022/10/11/speedup-event-driven-projects/){target="_blank" rel="nofollow"}
+* [Slides](https://www.slideshare.net/JorisConijn/let-codecommit-work-for-you){target="_blank" rel="nofollow"}

### Implementing Feature Flags with AWS AppConfig and AWS Lambda Powertools

-> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank"}**
+> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank" rel="nofollow"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank" rel="nofollow"}**

This article walks through how CyberArk uses Powertools to implement Feature Flags with AWS AppConfig.

-* [aws.amazon.com/blogs/mt/how-cyberark-implements-feature-flags-with-aws-appconfig](https://aws.amazon.com/blogs/mt/how-cyberark-implements-feature-flags-with-aws-appconfig){target="_blank"}
+* [aws.amazon.com/blogs/mt/how-cyberark-implements-feature-flags-with-aws-appconfig](https://aws.amazon.com/blogs/mt/how-cyberark-implements-feature-flags-with-aws-appconfig){target="_blank" rel="nofollow"}

### Designing for Idempotency

-> **Author: [Valentin Dreismann](linkedin.com/in/valentin-dreismann-69694b16a){target="_blank"}** :material-linkedin:
+> **Author: [Valentin Dreismann](https://linkedin.com/in/valentin-dreismann-69694b16a){target="_blank" rel="nofollow"}** :material-linkedin:

This article outlines the importance of idempotency, and key considerations and trade-offs when implementing it in your systems.

-* [Idempotency the right way](https://engineering.cloudflight.io/idempotency-the-right-way){target="_blank"}
+* [Idempotency the right way](https://engineering.cloudflight.io/idempotency-the-right-way){target="_blank" rel="nofollow"}

### Implementing Idempotency in Serverless Architectures

-> **Author: [Seongwoo Choi](https://www.linkedin.com/in/%EC%84%B1%EC%9A%B0-%EC%B5%9C-44b12b1ab/){target="_blank"}** :material-linkedin:
+> **Author: [Seongwoo Choi](https://www.linkedin.com/in/%EC%84%B1%EC%9A%B0-%EC%B5%9C-44b12b1ab/){target="_blank" rel="nofollow"}** :material-linkedin:

This blog post focuses on the importance of idempotency in distributed services and explores streamlined idempotent request flows. It provides guidance on idempotency tests using duplicate requests.
-* [Implementing Idempotency in Serverless Architectures](https://medium.com/@nuatmochoi/implementing-idempotency-in-serverless-architectures-f9079ef1c7da){target="_blank"}
+* [Implementing Idempotency in Serverless Architectures](https://medium.com/@nuatmochoi/implementing-idempotency-in-serverless-architectures-f9079ef1c7da){target="_blank" rel="nofollow"}

## Videos

#### Building a resilient input handling with Parser

-> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank"}**
+> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank" rel="nofollow"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank" rel="nofollow"}**

When building applications with AWS Lambda, it is critical to verify the data structure and validate the input due to the multiple different sources that can trigger them. In this session Ran Isenberg (CyberArk) will present one of the interesting features of AWS Lambda Powertools for Python: the parser.

@@ -110,15 +110,15 @@ In this session you will learn how to increase code quality, extensibility and t

#### Talk DEV to me | Feature Flags with AWS Lambda Powertools

-> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank"}**
+> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank" rel="nofollow"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank" rel="nofollow"}**

-A deep dive in the [Feature Flags](./utilities/feature_flags.md){target="_blank"} feature along with tips and tricks.
+A deep dive into the [Feature Flags](./utilities/feature_flags.md){target="_blank" rel="nofollow"} feature, along with tips and tricks.

#### Level Up Your CI/CD With Smart AWS Feature Flags

-> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank"}**
+> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank" rel="nofollow"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank" rel="nofollow"}**

Feature flags can improve your CI/CD process by enabling capabilities otherwise not possible, thus making them an enabler of DevOps and a crucial part of continuous integration. Partial rollouts, A/B testing, and the ability to quickly change a configuration without redeploying code are advantages you gain by using feature flags.

@@ -130,13 +130,13 @@ In this talk, you will learn the added value of using feature flags as part of y

### Introduction to Lambda Powertools

-> **Author: [Michael Walmsley](https://twitter.com/walmsles){target="_blank"}** :material-twitter:
+> **Author: [Michael Walmsley](https://twitter.com/walmsles){target="_blank" rel="nofollow"}** :material-twitter:

This repo contains documentation for a live coding workshop for the AWS Programming and Tools Meetup in Melbourne. The workshop will start with the SAM CLI "Hello World" example API project.
Throughout the labs, we will introduce each of the AWS Lambda Powertools Core utilities to showcase how simple they are to use and adopt for all your projects, and how powerful they are at bringing you closer to the Well-Architected Serverless Lens.

-* :material-github: [github.com/walmsles/lambda-powertools-coding-workshop](https://github.com/walmsles/lambda-powertools-coding-workshop){target="_blank"}
+* :material-github: [github.com/walmsles/lambda-powertools-coding-workshop](https://github.com/walmsles/lambda-powertools-coding-workshop){target="_blank" rel="nofollow"}

**Walk-through video**

@@ -146,9 +146,9 @@ Throughout the labs we will introduce each of the AWS Lambda Powertools Core uti

### Complete Lambda Handler Cookbook

-> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank"}**
+> **Author: [Ran Isenberg](mailto:ran.isenberg@ranthebuilder.cloud) [:material-twitter:](https://twitter.com/IsenbergRan){target="_blank" rel="nofollow"} [:material-linkedin:](https://www.linkedin.com/in/ranisenberg/){target="_blank" rel="nofollow"}**

-This repository provides a working, deployable, open source based, AWS Lambda handler and [AWS CDK](https://aws.amazon.com/cdk/){target="_blank"} Python code.
+This repository provides a working, deployable, open-source AWS Lambda handler and [AWS CDK](https://aws.amazon.com/cdk/){target="_blank" rel="nofollow"} Python code.

This handler embodies Serverless best practices and has all the bells and whistles for a proper production-ready handler. It uses many of the AWS Lambda Powertools utilities for Python.

@@ -156,7 +156,7 @@ This handler embodies Serverless best practices and has all the bells and whistl

### Serverless Transactional Message App

-> **Author: [Santiago Garcia Arango](mailto:san99tiago@gmail.com) [:material-web:](https://san99tiago.com/){target="_blank"} [:material-linkedin:](https://www.linkedin.com/in/san99tiago/){target="_blank"}**
+> **Author: [Santiago Garcia Arango](mailto:san99tiago@gmail.com) [:material-web:](https://san99tiago.com/){target="_blank" rel="nofollow"} [:material-linkedin:](https://www.linkedin.com/in/san99tiago/){target="_blank" rel="nofollow"}**

This repository contains a well-documented example of a Transactional Messages App that illustrates how to use Lambda Powertools to process SQS messages in batches (with IaC on top of CDK).
diff --git a/examples/batch_processing/sam/dynamodb_batch_processing.yaml b/examples/batch_processing/sam/dynamodb_batch_processing.yaml
index 2ed70d65a86..4e436c083e5 100644
--- a/examples/batch_processing/sam/dynamodb_batch_processing.yaml
+++ b/examples/batch_processing/sam/dynamodb_batch_processing.yaml
@@ -6,7 +6,7 @@ Globals:
  Function:
    Timeout: 5
    MemorySize: 256
-    Runtime: python3.10
+    Runtime: python3.11
    Tracing: Active
    Environment:
      Variables:
diff --git a/examples/batch_processing/sam/kinesis_batch_processing.yaml b/examples/batch_processing/sam/kinesis_batch_processing.yaml
index 314d4f8c98f..6c80bd2f333 100644
--- a/examples/batch_processing/sam/kinesis_batch_processing.yaml
+++ b/examples/batch_processing/sam/kinesis_batch_processing.yaml
@@ -6,7 +6,7 @@ Globals:
  Function:
    Timeout: 5
    MemorySize: 256
-    Runtime: python3.10
+    Runtime: python3.11
    Tracing: Active
    Environment:
      Variables:
diff --git a/examples/batch_processing/sam/sqs_batch_processing.yaml b/examples/batch_processing/sam/sqs_batch_processing.yaml
index 77871c3478b..2dd827107d4 100644
--- a/examples/batch_processing/sam/sqs_batch_processing.yaml
+++ b/examples/batch_processing/sam/sqs_batch_processing.yaml
@@ -6,7 +6,7 @@ Globals:
  Function:
    Timeout: 5
    MemorySize: 256
-    Runtime: python3.10
+    Runtime: python3.11
    Tracing: Active
    Environment:
      Variables:
diff --git a/examples/batch_processing/src/context_manager_access.py b/examples/batch_processing/src/context_manager_access.py
index 9882092bd83..dea3f881a48 100644
--- a/examples/batch_processing/src/context_manager_access.py
+++ b/examples/batch_processing/src/context_manager_access.py
@@ -26,14 +26,15 @@ def record_handler(record: SQSRecord):
@logger.inject_lambda_context
@tracer.capture_lambda_handler
def lambda_handler(event, context: LambdaContext):
-    batch = event["Records"]
+    batch = event["Records"]  # (1)!
    with processor(records=batch, handler=record_handler):
        processed_messages: List[Tuple] = processor.process()

    for message in processed_messages:
-        status: Literal["success"] | Literal["fail"] = message[0]
+        status: Literal["success", "fail"] = message[0]
+        cause: str = message[1]  # (2)!
        record: SQSRecord = message[2]

-        logger.info(status, record=record)
+        logger.info(status, record=record, cause=cause)

    return processor.response()

diff --git a/examples/batch_processing/src/context_manager_access_output.txt b/examples/batch_processing/src/context_manager_access_output.txt
new file mode 100644
index 00000000000..cf3d6267f4d
--- /dev/null
+++ b/examples/batch_processing/src/context_manager_access_output.txt
@@ -0,0 +1,12 @@
+[
+    (
+        "fail",
+        "<class 'Exception'>:Failed to process record.",
+        <aws_lambda_powertools.utilities.data_classes.sqs_event.SQSRecord object at 0x...>
+    ),
+    (
+        "success",
+        "success",
+        {'messageId': '88891c36-32eb-4a25-9905-654a32916893', 'receiptHandle': 'AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a', 'body': 'success', 'attributes': {'ApproximateReceiveCount': '1', 'SentTimestamp': '1545082649183', 'SenderId': 'AIDAIENQZJOLO23YVJ4VO', 'ApproximateFirstReceiveTimestamp': '1545082649185'}, 'messageAttributes': {}, 'md5OfBody': 'e4e68fb7bd0e697a0ae8f1bb342846b3', 'eventSource': 'aws:sqs', 'eventSourceARN': 'arn:aws:sqs:us-east-2:123456789012:my-queue', 'awsRegion': 'us-east-1'}
+    )
+]
diff --git a/examples/batch_processing/src/context_manager_access_output_pydantic.txt b/examples/batch_processing/src/context_manager_access_output_pydantic.txt
new file mode 100644
index 00000000000..748a6e61aa0
--- /dev/null
+++ b/examples/batch_processing/src/context_manager_access_output_pydantic.txt
@@ -0,0 +1,17 @@
+[
+    (
+        "fail",  # (1)!
+        "<class 'pydantic.error_wrappers.ValidationError'>:1 validation error for OrderSqs\nbody\n  JSON object must be str, bytes or bytearray (type=type_error.json)",
+        <aws_lambda_powertools.utilities.data_classes.sqs_event.SQSRecord object at 0x...>
+    ),
+    (
+        "success",
+        "success",
+        {'messageId': '88891c36-32eb-4a25-9905-654a32916893', 'receiptHandle': 'AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a', 'body': 'success', 'attributes': {'ApproximateReceiveCount': '1', 'SentTimestamp': '1545082649183', 'SenderId': 'AIDAIENQZJOLO23YVJ4VO', 'ApproximateFirstReceiveTimestamp': '1545082649185'}, 'messageAttributes': {}, 'md5OfBody': 'e4e68fb7bd0e697a0ae8f1bb342846b3', 'eventSource': 'aws:sqs', 'eventSourceARN': 'arn:aws:sqs:us-east-2:123456789012:my-queue', 'awsRegion': 'us-east-1'}
+    ),
+    (
+        "fail",  # (2)!
+        "<class 'Exception'>:Failed to process record.",
+        OrderSqs(messageId='9d0bfba5-d213-4b64-89bd-f4fbd7e58358', receiptHandle='AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a', body=Order(item={'type': 'fail'}), attributes=SqsAttributesModel(ApproximateReceiveCount='1', ApproximateFirstReceiveTimestamp=datetime.datetime(2018, 12, 17, 21, 37, 29, 185000, tzinfo=datetime.timezone.utc), MessageDeduplicationId=None, MessageGroupId=None, SenderId='AIDAIENQZJOLO23YVJ4VO', SentTimestamp=datetime.datetime(2018, 12, 17, 21, 37, 29, 183000, tzinfo=datetime.timezone.utc), SequenceNumber=None, AWSTraceHeader=None), messageAttributes={}, md5OfBody='e4e68fb7bd0e697a0ae8f1bb342846b3', md5OfMessageAttributes=None, eventSource='aws:sqs', eventSourceARN='arn:aws:sqs:us-east-2:123456789012:my-queue', awsRegion='us-east-1')
+    )
+]
diff --git a/examples/batch_processing/src/extending_failure.py b/examples/batch_processing/src/extending_processor_handlers.py
similarity index 78%
rename from examples/batch_processing/src/extending_failure.py
rename to examples/batch_processing/src/extending_processor_handlers.py
index 424c9a5189b..1e50c406c09 100644
--- a/examples/batch_processing/src/extending_failure.py
+++ b/examples/batch_processing/src/extending_processor_handlers.py
@@ -1,4 +1,5 @@
import json
+from typing import Any, Dict

from aws_lambda_powertools import Logger, Metrics, Tracer
from aws_lambda_powertools.metrics import MetricUnit
@@ -9,11 +10,16 @@
    FailureResponse,
    process_partial_response,
)
+from aws_lambda_powertools.utilities.batch.base import SuccessResponse
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
from aws_lambda_powertools.utilities.typing import LambdaContext


class MyProcessor(BatchProcessor):
+    def success_handler(self, record: Dict[str, Any], result: Any) -> SuccessResponse:
+        metrics.add_metric(name="BatchRecordSuccesses", unit=MetricUnit.Count, value=1)
+        return super().success_handler(record, result)
+
    def failure_handler(self, record: SQSRecord, exception: ExceptionInfo) -> FailureResponse:
        metrics.add_metric(name="BatchRecordFailures", unit=MetricUnit.Count, value=1)
        return super().failure_handler(record, exception)
diff --git a/examples/batch_processing/src/getting_started_dynamodb.py b/examples/batch_processing/src/getting_started_dynamodb.py
index 61990e2bd26..f56f0324bad 100644
--- a/examples/batch_processing/src/getting_started_dynamodb.py
+++ b/examples/batch_processing/src/getting_started_dynamodb.py
@@ -11,7 +11,7 @@
)
from aws_lambda_powertools.utilities.typing import LambdaContext

-processor = BatchProcessor(event_type=EventType.DynamoDBStreams)
+processor = BatchProcessor(event_type=EventType.DynamoDBStreams)  # (1)!
tracer = Tracer() logger = Logger() diff --git a/examples/batch_processing/src/getting_started_error_handling.py b/examples/batch_processing/src/getting_started_error_handling.py new file mode 100644 index 00000000000..7307f0d0d09 --- /dev/null +++ b/examples/batch_processing/src/getting_started_error_handling.py @@ -0,0 +1,35 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + process_partial_response, +) +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +class InvalidPayload(Exception): + ... + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + logger.info(payload) + if not payload: + raise InvalidPayload("Payload does not contain minimum information to be processed.") # (1)! + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + return process_partial_response( # (2)! + event=event, + record_handler=record_handler, + processor=processor, + context=context, + ) diff --git a/examples/batch_processing/src/getting_started_kinesis.py b/examples/batch_processing/src/getting_started_kinesis.py index 179154e3b1f..a3410fa57a2 100644 --- a/examples/batch_processing/src/getting_started_kinesis.py +++ b/examples/batch_processing/src/getting_started_kinesis.py @@ -9,7 +9,7 @@ ) from aws_lambda_powertools.utilities.typing import LambdaContext -processor = BatchProcessor(event_type=EventType.KinesisDataStreams) +processor = BatchProcessor(event_type=EventType.KinesisDataStreams) # (1)! tracer = Tracer() logger = Logger() diff --git a/examples/batch_processing/src/getting_started_sqs.py b/examples/batch_processing/src/getting_started_sqs.py index 8b6fe4c4266..c8c2facb09a 100644 --- a/examples/batch_processing/src/getting_started_sqs.py +++ b/examples/batch_processing/src/getting_started_sqs.py @@ -1,5 +1,3 @@ -import json - from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.utilities.batch import ( BatchProcessor, @@ -9,20 +7,23 @@ from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord from aws_lambda_powertools.utilities.typing import LambdaContext -processor = BatchProcessor(event_type=EventType.SQS) +processor = BatchProcessor(event_type=EventType.SQS) # (1)! tracer = Tracer() logger = Logger() @tracer.capture_method -def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - logger.info(item) +def record_handler(record: SQSRecord): # (2)! + payload: str = record.json_body # if json string data, otherwise record.body for str + logger.info(payload) @logger.inject_lambda_context @tracer.capture_lambda_handler def lambda_handler(event, context: LambdaContext): - return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context) + return process_partial_response( # (3)! 
+ event=event, + record_handler=record_handler, + processor=processor, + context=context, + ) diff --git a/examples/batch_processing/src/getting_started_sqs_fifo.py b/examples/batch_processing/src/getting_started_sqs_fifo.py index d30fb319c85..95d7463eb18 100644 --- a/examples/batch_processing/src/getting_started_sqs_fifo.py +++ b/examples/batch_processing/src/getting_started_sqs_fifo.py @@ -1,5 +1,3 @@ -import json - from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.utilities.batch import ( SqsFifoPartialProcessor, @@ -8,17 +6,15 @@ from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord from aws_lambda_powertools.utilities.typing import LambdaContext -processor = SqsFifoPartialProcessor() +processor = SqsFifoPartialProcessor() # (1)! tracer = Tracer() logger = Logger() @tracer.capture_method def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - logger.info(item) + payload: str = record.json_body # if json string data, otherwise record.body for str + logger.info(payload) @logger.inject_lambda_context diff --git a/examples/event_handler_graphql/src/assert_graphql_response.py b/examples/event_handler_graphql/src/assert_graphql_response.py index 548aece15e0..d78698e109b 100644 --- a/examples/event_handler_graphql/src/assert_graphql_response.py +++ b/examples/event_handler_graphql/src/assert_graphql_response.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from dataclasses import dataclass from pathlib import Path diff --git a/examples/event_handler_rest/src/strip_route_prefix_regex.py b/examples/event_handler_rest/src/strip_route_prefix_regex.py new file mode 100644 index 00000000000..4ea4b4249f4 --- /dev/null +++ b/examples/event_handler_rest/src/strip_route_prefix_regex.py @@ -0,0 +1,21 @@ +import re + +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.utilities.typing import LambdaContext + +# This will support: +# /v1/dev/subscriptions/ +# /v1/stg/subscriptions/ +# /v1/qa/subscriptions/ +# /v2/dev/subscriptions/ +# ... 
+app = APIGatewayRestResolver(strip_prefixes=[re.compile(r"/v[1-3]+/(dev|stg|qa)")]) + + +@app.get("/subscriptions/") +def get_subscription(subscription): + return {"subscription_id": subscription} + + +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/feature_flags/src/getting_all_enabled_features.py b/examples/feature_flags/src/getting_all_enabled_features.py index 49877512578..088176687a9 100644 --- a/examples/feature_flags/src/getting_all_enabled_features.py +++ b/examples/feature_flags/src/getting_all_enabled_features.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from aws_lambda_powertools.event_handler import APIGatewayRestResolver from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags from aws_lambda_powertools.utilities.typing import LambdaContext diff --git a/examples/idempotency/templates/sam.yaml b/examples/idempotency/templates/sam.yaml index 8443a0914d7..7c2f65a6a4d 100644 --- a/examples/idempotency/templates/sam.yaml +++ b/examples/idempotency/templates/sam.yaml @@ -17,7 +17,7 @@ Resources: HelloWorldFunction: Type: AWS::Serverless::Function Properties: - Runtime: python3.10 + Runtime: python3.11 Handler: app.py Policies: - Statement: diff --git a/examples/logger/sam/template.yaml b/examples/logger/sam/template.yaml index 358696def9a..ddaa2f16407 100644 --- a/examples/logger/sam/template.yaml +++ b/examples/logger/sam/template.yaml @@ -5,7 +5,7 @@ Description: Powertools for AWS Lambda (Python) version Globals: Function: Timeout: 5 - Runtime: python3.10 + Runtime: python3.11 Tracing: Active Environment: Variables: @@ -14,7 +14,7 @@ Globals: Layers: # Find the latest Layer version in the official documentation # https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:39 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 Resources: LoggerLambdaHandlerExample: diff --git a/examples/metrics/sam/template.yaml b/examples/metrics/sam/template.yaml index 3bb19572774..ace4c71f2e1 100644 --- a/examples/metrics/sam/template.yaml +++ b/examples/metrics/sam/template.yaml @@ -5,7 +5,7 @@ Description: Powertools for AWS Lambda (Python) version Globals: Function: Timeout: 5 - Runtime: python3.10 + Runtime: python3.11 Tracing: Active Environment: Variables: @@ -15,7 +15,7 @@ Globals: Layers: # Find the latest Layer version in the official documentation # https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:39 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 Resources: CaptureLambdaHandlerExample: diff --git a/examples/metrics/src/clear_metrics_in_tests.py b/examples/metrics/src/clear_metrics_in_tests.py index cea3879af83..a5462d3d9e1 100644 --- a/examples/metrics/src/clear_metrics_in_tests.py +++ b/examples/metrics/src/clear_metrics_in_tests.py @@ -1,7 +1,7 @@ import pytest from aws_lambda_powertools import Metrics -from aws_lambda_powertools.metrics import metrics as metrics_global +from aws_lambda_powertools.metrics.provider import cold_start @pytest.fixture(scope="function", autouse=True) @@ -9,6 +9,6 @@ def reset_metric_set(): # Clear out every metric data prior to every test metrics = Metrics() metrics.clear_metrics() - metrics_global.is_cold_start = True # ensure each test has cold start + 
cold_start.is_cold_start = True # ensure each test has cold start metrics.clear_default_dimensions() # remove persisted default dimensions, if any yield diff --git a/examples/metrics_datadog/sam/template.yaml b/examples/metrics_datadog/sam/template.yaml new file mode 100644 index 00000000000..39c8883c150 --- /dev/null +++ b/examples/metrics_datadog/sam/template.yaml @@ -0,0 +1,39 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Powertools for AWS Lambda (Python) version + +Globals: + Function: + Timeout: 5 + Runtime: python3.11 + Tracing: Active + Environment: + Variables: + POWERTOOLS_METRICS_NAMESPACE: ServerlessAirline + # [Production setup] + # DATADOG_API_KEY_SECRET_ARN: "" + # [Development only] + DD_API_KEY: "" + # Configuration details: https://docs.datadoghq.com/serverless/installation/python/?tab=datadogcli + DD_SITE: datadoghq.com + + Layers: + # Find the latest Layer version in the official documentation + # https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 + # Find the latest Layer version in the Datadog official documentation + + # Datadog SDK + # Latest versions: https://github.com/DataDog/datadog-lambda-python/releases + - !Sub arn:aws:lambda:${AWS::Region}:464622532012:layer:Datadog-Python310:78 + + # Datadog Lambda Extension + # Latest versions: https://github.com/DataDog/datadog-lambda-extension/releases + - !Sub arn:aws:lambda:${AWS::Region}:464622532012:layer:Datadog-Extension:45 + +Resources: + CaptureLambdaHandlerExample: + Type: AWS::Serverless::Function + Properties: + CodeUri: ../src + Handler: capture_lambda_handler.handler diff --git a/examples/metrics_datadog/src/add_datadog_metrics.py b/examples/metrics_datadog/src/add_datadog_metrics.py new file mode 100644 index 00000000000..6fe6774152e --- /dev/null +++ b/examples/metrics_datadog/src/add_datadog_metrics.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1) diff --git a/examples/metrics_datadog/src/add_metrics_with_tags.py b/examples/metrics_datadog/src/add_metrics_with_tags.py new file mode 100644 index 00000000000..9ebb0680c13 --- /dev/null +++ b/examples/metrics_datadog/src/add_metrics_with_tags.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1, tag1="powertools", tag2="python") diff --git a/examples/metrics_datadog/src/add_metrics_with_timestamp.py b/examples/metrics_datadog/src/add_metrics_with_timestamp.py new file mode 100644 index 00000000000..b2bef65e9ab --- /dev/null +++ b/examples/metrics_datadog/src/add_metrics_with_timestamp.py @@ -0,0 +1,11 @@ +import time + +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics # ensures metrics are 
flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1, timestamp=int(time.time())) diff --git a/examples/metrics_datadog/src/assert_single_datadog_metric.py b/examples/metrics_datadog/src/assert_single_datadog_metric.py new file mode 100644 index 00000000000..7b6ebf0909b --- /dev/null +++ b/examples/metrics_datadog/src/assert_single_datadog_metric.py @@ -0,0 +1,9 @@ +import add_datadog_metrics + + +def test_log_metrics(capsys): + add_datadog_metrics.lambda_handler({}, {}) + + log = capsys.readouterr().out.strip() # remove any extra line + + assert "SuccessfulBooking" in log # basic string assertion in JSON str diff --git a/examples/metrics_datadog/src/capture_cold_start_datadog_metric.py b/examples/metrics_datadog/src/capture_cold_start_datadog_metric.py new file mode 100644 index 00000000000..ec8c2fc1e19 --- /dev/null +++ b/examples/metrics_datadog/src/capture_cold_start_datadog_metric.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics(capture_cold_start_metric=True) +def lambda_handler(event: dict, context: LambdaContext): + return diff --git a/examples/metrics_datadog/src/capture_cold_start_metric_output.json b/examples/metrics_datadog/src/capture_cold_start_metric_output.json new file mode 100644 index 00000000000..ee7da985f66 --- /dev/null +++ b/examples/metrics_datadog/src/capture_cold_start_metric_output.json @@ -0,0 +1,8 @@ +{ + "m":"ColdStart", + "v":1, + "e":1691707488, + "t":[ + "function_name:HelloWorldFunction" + ] + } diff --git a/examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py b/examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py new file mode 100644 index 00000000000..e80552eba83 --- /dev/null +++ b/examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py @@ -0,0 +1,13 @@ +import pytest + +from aws_lambda_powertools.metrics.provider import cold_start +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics + + +@pytest.fixture(scope="function", autouse=True) +def reset_metric_set(): + # Clear out every metric data prior to every test + metrics = DatadogMetrics() + metrics.clear_metrics() + cold_start.is_cold_start = True # ensure each test has cold start + yield diff --git a/examples/metrics_datadog/src/flush_datadog_metrics.py b/examples/metrics_datadog/src/flush_datadog_metrics.py new file mode 100644 index 00000000000..89e02fc2f3f --- /dev/null +++ b/examples/metrics_datadog/src/flush_datadog_metrics.py @@ -0,0 +1,17 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +def book_flight(flight_id: str, **kwargs): + # logic to book flight + ... 
+ metrics.add_metric(name="SuccessfulBooking", value=1) + + +def lambda_handler(event: dict, context: LambdaContext): + try: + book_flight(flight_id=event.get("flight_id", "")) + finally: + metrics.flush_metrics() diff --git a/examples/metrics_datadog/src/flush_metrics_to_standard_output.py b/examples/metrics_datadog/src/flush_metrics_to_standard_output.py new file mode 100644 index 00000000000..a58fe877925 --- /dev/null +++ b/examples/metrics_datadog/src/flush_metrics_to_standard_output.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics(flush_to_log=True) + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1) diff --git a/examples/metrics_datadog/src/log_metrics_output.json b/examples/metrics_datadog/src/log_metrics_output.json new file mode 100644 index 00000000000..782cea9dc4f --- /dev/null +++ b/examples/metrics_datadog/src/log_metrics_output.json @@ -0,0 +1,9 @@ +{ + "m":"SuccessfulBooking", + "v":1, + "e":1691707076, + "t":[ + "tag1:powertools", + "tag2:python" + ] +} diff --git a/examples/metrics_datadog/src/log_metrics_standard_output.json b/examples/metrics_datadog/src/log_metrics_standard_output.json new file mode 100644 index 00000000000..35fcb8a096a --- /dev/null +++ b/examples/metrics_datadog/src/log_metrics_standard_output.json @@ -0,0 +1,8 @@ +{ + "m":"SuccessfulBooking", + "v":1, + "e":1691768022, + "t":[ + + ] +} diff --git a/examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py b/examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py new file mode 100644 index 00000000000..2242b1dfe06 --- /dev/null +++ b/examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics(raise_on_empty_metrics=True) # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + # no metrics being created will now raise SchemaValidationError + return diff --git a/examples/metrics_datadog/src/run_tests_env_var.sh b/examples/metrics_datadog/src/run_tests_env_var.sh new file mode 100644 index 00000000000..5663afd3ba4 --- /dev/null +++ b/examples/metrics_datadog/src/run_tests_env_var.sh @@ -0,0 +1 @@ +POWERTOOLS_METRICS_NAMESPACE="ServerlessAirline" DD_FLUSH_TO_LOG="True" python -m pytest # (1)! 
diff --git a/examples/metrics_datadog/src/set_default_tags.py b/examples/metrics_datadog/src/set_default_tags.py new file mode 100644 index 00000000000..94d4335b212 --- /dev/null +++ b/examples/metrics_datadog/src/set_default_tags.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() +metrics.set_default_tags(tag1="powertools", tag2="python") + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1) diff --git a/examples/metrics_datadog/src/set_default_tags_log_metrics.py b/examples/metrics_datadog/src/set_default_tags_log_metrics.py new file mode 100644 index 00000000000..c276c1d53ff --- /dev/null +++ b/examples/metrics_datadog/src/set_default_tags_log_metrics.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + +default_tags = {"tag1": "powertools", "tag2": "python"} + + +@metrics.log_metrics(default_tags=default_tags) # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1) diff --git a/examples/parameters/src/get_parameter_by_name_error_handling.py b/examples/parameters/src/get_parameter_by_name_error_handling.py index 7cae4525e83..a150beaae22 100644 --- a/examples/parameters/src/get_parameter_by_name_error_handling.py +++ b/examples/parameters/src/get_parameter_by_name_error_handling.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any from aws_lambda_powertools.utilities.parameters.ssm import get_parameters_by_name diff --git a/examples/parameters/src/getting_started_parameter_by_name.py b/examples/parameters/src/getting_started_parameter_by_name.py index 95d63937ab7..a7f6e2107a2 100644 --- a/examples/parameters/src/getting_started_parameter_by_name.py +++ b/examples/parameters/src/getting_started_parameter_by_name.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any from aws_lambda_powertools.utilities.parameters.ssm import get_parameters_by_name diff --git a/examples/tracer/sam/template.yaml b/examples/tracer/sam/template.yaml index e76f47c0f1c..d9e7d8a29da 100644 --- a/examples/tracer/sam/template.yaml +++ b/examples/tracer/sam/template.yaml @@ -5,7 +5,7 @@ Description: Powertools for AWS Lambda (Python) version Globals: Function: Timeout: 5 - Runtime: python3.10 + Runtime: python3.11 Tracing: Active Environment: Variables: @@ -13,7 +13,7 @@ Globals: Layers: # Find the latest Layer version in the official documentation # https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:39 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 Resources: CaptureLambdaHandlerExample: diff --git a/includes/abbreviations.md b/includes/abbreviations.md new file mode 100644 index 00000000000..ed52b93fe64 --- /dev/null +++ b/includes/abbreviations.md @@ -0,0 +1 @@ +*[observability provider]: An AWS Lambda Observability Partner diff --git a/layer/scripts/layer-balancer/go.mod b/layer/scripts/layer-balancer/go.mod index ce67f824a6c..cfb05ca1bfd 100644 --- 
a/layer/scripts/layer-balancer/go.mod +++ b/layer/scripts/layer-balancer/go.mod @@ -3,24 +3,24 @@ module layerbalancer go 1.18 require ( - github.com/aws/aws-sdk-go-v2 v1.19.0 - github.com/aws/aws-sdk-go-v2/config v1.18.28 - github.com/aws/aws-sdk-go-v2/service/lambda v1.37.1 + github.com/aws/aws-sdk-go-v2 v1.20.2 + github.com/aws/aws-sdk-go-v2/config v1.18.34 + github.com/aws/aws-sdk-go-v2/service/lambda v1.39.3 golang.org/x/exp v0.0.0-20230321023759-10a507213a29 golang.org/x/sync v0.3.0 ) require ( - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect - github.com/aws/aws-sdk-go-v2/credentials v1.13.27 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.3.36 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.12.13 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.19.3 // indirect - github.com/aws/smithy-go v1.13.5 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.12 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.13.33 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.9 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.39 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.33 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.3.40 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.33 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.13.3 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.3 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.21.3 // indirect + github.com/aws/smithy-go v1.14.1 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect ) diff --git a/layer/scripts/layer-balancer/go.sum b/layer/scripts/layer-balancer/go.sum index 38d0b70db29..e2db967f5a8 100644 --- a/layer/scripts/layer-balancer/go.sum +++ b/layer/scripts/layer-balancer/go.sum @@ -1,31 +1,31 @@ -github.com/aws/aws-sdk-go-v2 v1.19.0 h1:klAT+y3pGFBU/qVf1uzwttpBbiuozJYWzNLHioyDJ+k= -github.com/aws/aws-sdk-go-v2 v1.19.0/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 h1:dK82zF6kkPeCo8J1e+tGx4JdvDIQzj7ygIoLg8WMuGs= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10/go.mod h1:VeTZetY5KRJLuD/7fkQXMU6Mw7H5m/KP2J5Iy9osMno= -github.com/aws/aws-sdk-go-v2/config v1.18.28 h1:TINEaKyh1Td64tqFvn09iYpKiWjmHYrG1fa91q2gnqw= -github.com/aws/aws-sdk-go-v2/config v1.18.28/go.mod h1:nIL+4/8JdAuNHEjn/gPEXqtnS02Q3NXB/9Z7o5xE4+A= -github.com/aws/aws-sdk-go-v2/credentials v1.13.27 h1:dz0yr/yR1jweAnsCx+BmjerUILVPQ6FS5AwF/OyG1kA= -github.com/aws/aws-sdk-go-v2/credentials v1.13.27/go.mod h1:syOqAek45ZXZp29HlnRS/BNgMIW6uiRmeuQsz4Qh2UE= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5 h1:kP3Me6Fy3vdi+9uHd7YLr6ewPxRL+PU6y15urfTaamU= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5/go.mod h1:Gj7tm95r+QsDoN2Fhuz/3npQvcZbkEf5mL70n3Xfluc= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35 h1:hMUCiE3Zi5AHrRNGf5j985u0WyqI6r2NULhUfo0N/No= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35/go.mod h1:ipR5PvpSPqIqL5Mi82BxLnfMkHVbmco8kUwO2xrCi0M= 
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29 h1:yOpYx+FTBdpk/g+sBU6Cb1H0U/TLEcYYp66mYqsPpcc= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29/go.mod h1:M/eUABlDbw2uVrdAn+UsI6M727qp2fxkp8K0ejcBDUY= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.36 h1:8r5m1BoAWkn0TDC34lUculryf7nUF25EgIMdjvGCkgo= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.36/go.mod h1:Rmw2M1hMVTwiUhjwMoIBFWFJMhvJbct06sSidxInkhY= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29 h1:IiDolu/eLmuB18DRZibj77n1hHQT7z12jnGO7Ze3pLc= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29/go.mod h1:fDbkK4o7fpPXWn8YAPmTieAMuB9mk/VgvW64uaUqxd4= -github.com/aws/aws-sdk-go-v2/service/lambda v1.37.1 h1:BRdW2JcxZSsen77Y0WoWIWY4+H9EXT55uEPWZKIcDHY= -github.com/aws/aws-sdk-go-v2/service/lambda v1.37.1/go.mod h1:zmdE2b9ZX8milexhZc3SeC3LwJRJpJ0k0fsuMBOSCEI= -github.com/aws/aws-sdk-go-v2/service/sso v1.12.13 h1:sWDv7cMITPcZ21QdreULwxOOAmE05JjEsT6fCDtDA9k= -github.com/aws/aws-sdk-go-v2/service/sso v1.12.13/go.mod h1:DfX0sWuT46KpcqbMhJ9QWtxAIP1VozkDWf8VAkByjYY= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13 h1:BFubHS/xN5bjl818QaroN6mQdjneYQ+AOx44KNXlyH4= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13/go.mod h1:BzqsVVFduubEmzrVtUFQQIQdFqvUItF8XUq2EnS8Wog= -github.com/aws/aws-sdk-go-v2/service/sts v1.19.3 h1:e5mnydVdCVWxP+5rPAGi2PYxC7u2OZgH1ypC114H04U= -github.com/aws/aws-sdk-go-v2/service/sts v1.19.3/go.mod h1:yVGZA1CPkmUhBdA039jXNJJG7/6t+G+EBWmFq23xqnY= -github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8= -github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= +github.com/aws/aws-sdk-go-v2 v1.20.2 h1:0Aok9u/HVTk7RtY6M1KDcthbaMKGhhS0eLPxIdSIzRI= +github.com/aws/aws-sdk-go-v2 v1.20.2/go.mod h1:NU06lETsFm8fUC6ZjhgDpVBcGZTFQ6XM+LZWZxMI4ac= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.12 h1:lN6L3LrYHeZ6xCxaIYtoWCx4GMLk4nRknsh29OMSqHY= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.12/go.mod h1:TDCkEAkMTXxTs0oLBGBKpBZbk3NLh8EvAfF0Q3x8/0c= +github.com/aws/aws-sdk-go-v2/config v1.18.34 h1:bFf7CtSgwz/vE4tl0cNbWbf6EDQ2TZR5VrsrO9ardoY= +github.com/aws/aws-sdk-go-v2/config v1.18.34/go.mod h1:uJ/keVhwR8vsSaErMu2Vb3dArUZZKLVTcOsKXIFfvjs= +github.com/aws/aws-sdk-go-v2/credentials v1.13.33 h1:esA1X5Eti1xSGCF0W0LYpHH/r6p+MqT0DiKXsfDEPxs= +github.com/aws/aws-sdk-go-v2/credentials v1.13.33/go.mod h1:jNC10ZEYuLlt9IOowix60yNiO6vGA14RVK3oUfX5KgI= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.9 h1:DnNHcClgyFV5suHJ4axqhmG3YeRGgIu6yv29IEWR9aE= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.9/go.mod h1:kz0hzQXlc/5Y5mkbwTKX8A+aTRA45t8Aavly60bQzAQ= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.39 h1:OBokd2jreL7ItwqRRcN5QiSt24/i2r742aRsd2qMyeg= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.39/go.mod h1:OLmjwglQh90dCcFJDGD+T44G0ToLH+696kRwRhS1KOU= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.33 h1:gcRN6PXAo8w3HYFp2wFyr+WYEP4n/a25/IOhzJl36Yw= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.33/go.mod h1:S/zgOphghZAIvrbtvsVycoOncfqh1Hc4uGDIHqDLwTU= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.40 h1:glWaI8WyeYqQN4zh4zqogzSpNPj8rf11Nj+oE3ghQPw= +github.com/aws/aws-sdk-go-v2/internal/ini v1.3.40/go.mod h1:OCnFHzgaBY2PuGiHSzLlfqV4j5rJrky7YMfBXcx2Uk0= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.33 h1:cr70Hw6Lq9cqRst1y4YOHLiaVWaWtBPiqdloinNkfis= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.33/go.mod 
h1:kcNtzCcEoflp+6e2CDTmm2h3xQGZOBZqYA/8DhYx/S8= +github.com/aws/aws-sdk-go-v2/service/lambda v1.39.3 h1:8T6YpLdpu7wqPr9RZALRJWEm+NbkQykzN6Mdy2lOIQw= +github.com/aws/aws-sdk-go-v2/service/lambda v1.39.3/go.mod h1:PxfJo3p3ze0lFI8Zsu0tqjB2edJu2ZAEzQzT2LQUY3o= +github.com/aws/aws-sdk-go-v2/service/sso v1.13.3 h1:nceOkYE0jmaG9CoyXHJJm00FAQ8JE+/LCKJJ06hH/Nc= +github.com/aws/aws-sdk-go-v2/service/sso v1.13.3/go.mod h1:DApEBnZzexe+LDLaNrGOJA8xtRMCpikLW1gX7jZhHxc= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.3 h1:90qW9puxI7LgmiYKSPhx6wz4XqgVauTxCyS3185+JpA= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.3/go.mod h1:kKpyLjToIS7E3z0672lBhxIPD+uoQ9V0MYRYCVGIkO0= +github.com/aws/aws-sdk-go-v2/service/sts v1.21.3 h1:s3wBkMxfA/u2EJJl6KRsPcWv858lDHkhinqXyN6fkZI= +github.com/aws/aws-sdk-go-v2/service/sts v1.21.3/go.mod h1:b+y9zL57mwCRy6ftp9Nc7CONGHX3sZ50ZCLTrI5xpCc= +github.com/aws/smithy-go v1.14.1 h1:EFKMUmH/iHMqLiwoEDx2rRjRQpI1YCn5jTysoaDujFs= +github.com/aws/smithy-go v1.14.1/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= diff --git a/layer/scripts/layer-balancer/main.go b/layer/scripts/layer-balancer/main.go index 0a800a5c524..a4b28fd0545 100644 --- a/layer/scripts/layer-balancer/main.go +++ b/layer/scripts/layer-balancer/main.go @@ -79,6 +79,7 @@ var singleArchitectureRegions = []string{ "eu-central-2", "eu-south-2", "me-central-1", + "il-central-1", } // getLayerVersion returns the latest version of a layer in a region diff --git a/mkdocs.yml b/mkdocs.yml index 77322a535be..1b9f4545239 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -11,10 +11,14 @@ nav: - API reference: api/" target="_blank - Upgrade guide: upgrade.md - We Made This (Community): we_made_this.md + - Roadmap: roadmap.md - Features: - core/tracer.md - core/logger.md - - core/metrics.md + - Metrics: + - core/metrics/index.md + - Amazon CloudWatch EMF: core/metrics.md + - Datadog: core/metrics/datadog.md - Event Handler: - core/event_handler/api_gateway.md - core/event_handler/appsync.md @@ -56,7 +60,6 @@ theme: features: - header.autohide - navigation.sections - - navigation.expand - navigation.top - navigation.instant - navigation.indexes @@ -72,6 +75,7 @@ theme: markdown_extensions: - admonition + - abbr - pymdownx.tabbed: alternate_style: true - pymdownx.highlight: @@ -81,6 +85,8 @@ markdown_extensions: base_path: "." 
check_paths: true restrict_base_path: false + auto_append: + - includes/abbreviations.md - meta - toc: permalink: true @@ -95,6 +101,8 @@ markdown_extensions: - name: mermaid class: mermaid format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tasklist: + custom_checkbox: true copyright: Copyright © 2023 Amazon Web Services diff --git a/package-lock.json b/package-lock.json index 64ddf8ac968..755bd95890c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,13 +11,13 @@ "package-lock.json": "^1.0.0" }, "devDependencies": { - "aws-cdk": "^2.88.0" + "aws-cdk": "^2.92.0" } }, "node_modules/aws-cdk": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.88.0.tgz", - "integrity": "sha512-7Tj0uusA2nsEOsqkd4kB5vmzciz7l/eGBN5a+Ce4/CCcoe4ZCvT85L+T6tK0aohUTLZTAlTPBceH34RN5iMYpA==", + "version": "2.92.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.92.0.tgz", + "integrity": "sha512-9aAWJvZWSBJQxcsDopXYUAm6/pGz6vOQy2zfkn+YBuBkNelvW+ok15KPY4xn5m76tYnN79W03Gnfp/nxZUlcww==", "dev": true, "bin": { "cdk": "bin/cdk" @@ -51,9 +51,9 @@ }, "dependencies": { "aws-cdk": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.88.0.tgz", - "integrity": "sha512-7Tj0uusA2nsEOsqkd4kB5vmzciz7l/eGBN5a+Ce4/CCcoe4ZCvT85L+T6tK0aohUTLZTAlTPBceH34RN5iMYpA==", + "version": "2.92.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.92.0.tgz", + "integrity": "sha512-9aAWJvZWSBJQxcsDopXYUAm6/pGz6vOQy2zfkn+YBuBkNelvW+ok15KPY4xn5m76tYnN79W03Gnfp/nxZUlcww==", "dev": true, "requires": { "fsevents": "2.3.2" diff --git a/package.json b/package.json index 396ea6c7921..a08922eb2bb 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.88.0" + "aws-cdk": "^2.92.0" }, "dependencies": { "package-lock.json": "^1.0.0" diff --git a/poetry.lock b/poetry.lock index 3b196373e24..d225f187002 100644 --- a/poetry.lock +++ b/poetry.lock @@ -93,17 +93,17 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-alpha" -version = "2.88.0a0" +version = "2.91.0a0" description = "The CDK Construct Library for AWS::APIGatewayv2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-alpha-2.88.0a0.tar.gz", hash = "sha256:87f746336687ba519263ced055912eff8bef643629d5f1eb17f3cdef2408d441"}, - {file = "aws_cdk.aws_apigatewayv2_alpha-2.88.0a0-py3-none-any.whl", hash = "sha256:be0da36994bedbbc4b82db245fa5f3fae59f95665c9d60527f7ece5e9b4df6a4"}, + {file = "aws-cdk.aws-apigatewayv2-alpha-2.91.0a0.tar.gz", hash = "sha256:a7b0e78862f3dd81cf13740df2ecda1c877545500872dc476f2dbf3807632a32"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:e3d606055c2fe268d80f96052b583060a25fadcdee79d89a75f2eac4354f2e69"}, ] [package.dependencies] -aws-cdk-lib = "2.88.0" +aws-cdk-lib = "2.91.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.85.0,<2.0.0" publication = ">=0.0.3" @@ -111,18 +111,18 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-authorizers-alpha" -version = "2.88.0a0" +version = "2.91.0a0" description = "Authorizers for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.88.0a0.tar.gz", hash = "sha256:60f3a0ac560b6f3ff729b50110841f134eecd842bf69d02602e750589a35cbff"}, - {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.88.0a0-py3-none-any.whl", 
hash = "sha256:85e57c5a7a86829a594634f82448c95443d4d29c30baf361257e57fd4df24efc"}, + {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.91.0a0.tar.gz", hash = "sha256:cafd747af66f92755f188172f0e892503bc73c26f0d6d95e5f733c67b0307fa8"}, + {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:972393ad1c220708616322946ba3f8936cbe143a69e543762295c1ea02d69849"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.88.0.a0" -aws-cdk-lib = "2.88.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.91.0.a0" +aws-cdk-lib = "2.91.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.85.0,<2.0.0" publication = ">=0.0.3" @@ -130,18 +130,18 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-integrations-alpha" -version = "2.88.0a0" +version = "2.91.0a0" description = "Integrations for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.88.0a0.tar.gz", hash = "sha256:a604acb1dde9840ccc24c23aba542b42764c826c8100b787e16198113d6b6e89"}, - {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.88.0a0-py3-none-any.whl", hash = "sha256:ff06fc8192bece85f82f7b008c93e5ada8af1466612b0b76287edce8c5415c47"}, + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.91.0a0.tar.gz", hash = "sha256:db607df2563f0b839795a41218a59e3ebc29e906dd08aed7b0b59aceba0bde02"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:34d0f103846613a72cfae8419be2e4302863a1e8f6e81951b0a51c2f62ab80b3"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.88.0.a0" -aws-cdk-lib = "2.88.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.91.0.a0" +aws-cdk-lib = "2.91.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.85.0,<2.0.0" publication = ">=0.0.3" @@ -149,19 +149,19 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.88.0" +version = "2.91.0" description = "Version 2 of the AWS Cloud Development Kit library" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.88.0.tar.gz", hash = "sha256:6514217e6485133b30848f11b6c78ec955d41bed4e39e5ba4bea49c379830e56"}, - {file = "aws_cdk_lib-2.88.0-py3-none-any.whl", hash = "sha256:db4716689cb94e249b8c672139221f1d7866b7d6caca314b52552fa3bacab22c"}, + {file = "aws-cdk-lib-2.91.0.tar.gz", hash = "sha256:1163926527a8b7da931cddea77a4824b929b3f775447c3b7427ecdef7701ce74"}, + {file = "aws_cdk_lib-2.91.0-py3-none-any.whl", hash = "sha256:ec2cadeb5727ea8259ad8a54ac9ff40502032cd2572c81f4594df93365da39da"}, ] [package.dependencies] "aws-cdk.asset-awscli-v1" = ">=2.2.200,<3.0.0" "aws-cdk.asset-kubectl-v20" = ">=2.1.2,<3.0.0" -"aws-cdk.asset-node-proxy-agent-v5" = ">=2.0.165,<3.0.0" +"aws-cdk.asset-node-proxy-agent-v5" = ">=2.0.166,<3.0.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.85.0,<2.0.0" publication = ">=0.0.3" @@ -183,13 +183,13 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" -version = "1.71.0" +version = "1.73.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.71.0.tar.gz", hash = "sha256:a3ea80aeb116d7978b26ac916d2a5a24d012b742bf28262b17769c4b886e8fba"}, - {file = "aws_sam_translator-1.71.0-py3-none-any.whl", hash = "sha256:17fb87c8137d8d49e7a978396b2b3b279211819dee44618415aab1e99c2cb659"}, + {file = "aws-sam-translator-1.73.0.tar.gz", hash = 
"sha256:bfa7cad3a78f002edeec5e39fd61b616cf84f34f61010c5dc2f7a76845fe7a02"}, + {file = "aws_sam_translator-1.73.0-py3-none-any.whl", hash = "sha256:c0132b065d743773fcd2573ed1ae60e0129fa46043fad76430261b098a811924"}, ] [package.dependencies] @@ -291,17 +291,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.28.9" +version = "1.28.24" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.9-py3-none-any.whl", hash = "sha256:01f078047eb4d238c6b9c6cc623f2af33b4ae67980c5326691e35cb5493ff6c7"}, - {file = "boto3-1.28.9.tar.gz", hash = "sha256:4cc0c6005be910e52077227e670930ab55a41ba86cdb6d1c052571d08cd4d32c"}, + {file = "boto3-1.28.24-py3-none-any.whl", hash = "sha256:0300ca6ec8bc136eb316b32cc1e30c66b85bc497f5a5fe42e095ae4280569708"}, + {file = "boto3-1.28.24.tar.gz", hash = "sha256:9d1b4713c888e53a218648ad71522bee9bec9d83f2999fff2494675af810b632"}, ] [package.dependencies] -botocore = ">=1.31.9,<1.32.0" +botocore = ">=1.31.24,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -310,13 +310,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.9" +version = "1.31.24" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.9-py3-none-any.whl", hash = "sha256:e56ccd3536a90094ea5b176b5dd33bfe4f049efdf71af468ea1661bd424c787d"}, - {file = "botocore-1.31.9.tar.gz", hash = "sha256:bd849d3ac95f1781385ed831d753a04a3ec870a59d6598175aaedd71dc2baf5f"}, + {file = "botocore-1.31.24-py3-none-any.whl", hash = "sha256:8c7ba9b09e9104e2d473214e1ffcf84b77e04cf6f5f2344942c1eed9e299f947"}, + {file = "botocore-1.31.24.tar.gz", hash = "sha256:2d8f412c67f9285219f52d5dbbb6ef0dfa9f606da29cbdd41b6d6474bcc4bbd4"}, ] [package.dependencies] @@ -327,6 +327,31 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.16.26)"] +[[package]] +name = "bytecode" +version = "0.13.0" +description = "Python module to generate and modify bytecode" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bytecode-0.13.0-py3-none-any.whl", hash = "sha256:e69f92e7d27f99d5d7d76e6a824bd3d9ff857c72b59927aaf87e1a620f67fe50"}, + {file = "bytecode-0.13.0.tar.gz", hash = "sha256:6af3c2f0a31ce05dce41f7eea5cc380e33f5e8fbb7dcee3b52467a00acd52fcd"}, +] + +[[package]] +name = "bytecode" +version = "0.14.2" +description = "Python module to generate and modify bytecode" +optional = false +python-versions = ">=3.8" +files = [ + {file = "bytecode-0.14.2-py3-none-any.whl", hash = "sha256:e368a2b9bbd7c986133c951250db94fb32f774cfc49752a9db9073bcf9899762"}, + {file = "bytecode-0.14.2.tar.gz", hash = "sha256:386378d9025d68ddb144870ae74330a492717b11b8c9164c4034e88add808f0c"}, +] + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + [[package]] name = "cattrs" version = "23.1.2" @@ -365,13 +390,13 @@ files = [ [[package]] name = "cfn-lint" -version = "0.78.2" +version = "0.79.7" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "cfn-lint-0.78.2.tar.gz", hash = "sha256:363787653b71cd5dc4e2de30aaab835afd48b64e2257fe1b4b78efac01635e8c"}, - {file = "cfn_lint-0.78.2-py3-none-any.whl", hash = "sha256:5db9306faf9368ccbbd1ca1272b2b311cecc48499fa98303510d5c4bce478109"}, + {file = "cfn-lint-0.79.7.tar.gz", hash = 
"sha256:fcdc195a89810482af93a335b57500fc928111998d8389087f85fd59155fc904"}, + {file = "cfn_lint-0.79.7-py3-none-any.whl", hash = "sha256:05d819e47f74f4d4862b874b0cc911b4ff9773862601ffec8a4f8791df0eab3a"}, ] [package.dependencies] @@ -598,6 +623,154 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "datadog" +version = "0.46.0" +description = "The Datadog Python library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "datadog-0.46.0-py2.py3-none-any.whl", hash = "sha256:3d7bcda6177b43be4cdb52e16b4bdd4f9005716c0dd7cfea009e018c36bb7a3d"}, + {file = "datadog-0.46.0.tar.gz", hash = "sha256:e4fbc92a85e2b0919a226896ae45fc5e4b356c0c57f1c2659659dfbe0789c674"}, +] + +[package.dependencies] +requests = ">=2.6.0" + +[[package]] +name = "datadog-lambda" +version = "4.78.0" +description = "The Datadog AWS Lambda Library" +optional = false +python-versions = ">=3.7.0,<4" +files = [ + {file = "datadog_lambda-4.78.0-py3-none-any.whl", hash = "sha256:660bae6057f3b2033b0c035e9d542af491e40f9ce57b97b4891c491262b9148c"}, + {file = "datadog_lambda-4.78.0.tar.gz", hash = "sha256:3e57faa8f80ddd43b595355b92045fde8f9ed87efe8619133e82cebb87cbe434"}, +] + +[package.dependencies] +datadog = ">=0.41.0,<1.0.0" +ddtrace = "1.15.2" +importlib_metadata = {version = "*", markers = "python_version < \"3.8\""} +typing_extensions = {version = ">=4.0,<5.0", markers = "python_version < \"3.8\""} +urllib3 = "<2.0.0" +wrapt = ">=1.11.2,<2.0.0" + +[package.extras] +dev = ["boto3 (>=1.10.33,<2.0.0)", "flake8 (>=3.7.9,<4.0.0)", "httpretty (>=0.9.7,<0.10.0)", "nose2 (>=0.9.1,<0.10.0)", "requests (>=2.22.0,<3.0.0)"] + +[[package]] +name = "ddsketch" +version = "2.0.4" +description = "Distributed quantile sketches" +optional = false +python-versions = ">=2.7" +files = [ + {file = "ddsketch-2.0.4-py3-none-any.whl", hash = "sha256:3227a270fd686a29d3a7128f9352ccf852314410380fc11384356f1ae2a75938"}, + {file = "ddsketch-2.0.4.tar.gz", hash = "sha256:32f7314077fec8747d4faebaec2c854b5ffc399c5f552f73fa94024f48d74d64"}, +] + +[package.dependencies] +protobuf = {version = ">=3.0.0", markers = "python_version >= \"3.7\""} +six = "*" + +[[package]] +name = "ddtrace" +version = "1.15.2" +description = "Datadog APM client library" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "ddtrace-1.15.2-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:ca0411333fbdb0fafa06d412bbd76ab8d2647cc9dcb8a7833952ce4fe09eb421"}, + {file = "ddtrace-1.15.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e2603749f97a5191b32f710c8ec5248bb58f4f9a1cb337559f93c5f0f8cea33b"}, + {file = "ddtrace-1.15.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8392087809e047f701e38ecc4f2990bcfe399a22c516a1dbcbdff50fb7382a79"}, + {file = "ddtrace-1.15.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2f7649c24a7463be9b86d5f11ac6eaa2014896eaf409e67f3dc813a6bb0ed8b6"}, + {file = "ddtrace-1.15.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:dbdbc5bf3b2b56b8e61b241ee372d897b295344e269475f38e837c9bfe03ae2c"}, + {file = "ddtrace-1.15.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:23d39c72ad1844977a80d79206d773c3ec1f1346816b9e45427c25ef88597b4e"}, + {file = "ddtrace-1.15.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:da458bbbc4de14dd8e8f60aefe42a66c551a9f50c69c6e361acc7edab579a3e4"}, + {file = 
"ddtrace-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d07bb0c50d2df7ff9281bea83534db5127cee8ac2f94111c9544d03d49f60613"}, + {file = "ddtrace-1.15.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:499b3e3d0359e492792ecf8ab6efcf4b1991fbaa523338774333e9a2a66d9d37"}, + {file = "ddtrace-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedd0937f83e0d7b261960365fec5771f39ced599c90f589548a1738a586799d"}, + {file = "ddtrace-1.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7643d20991cd7e1c21e65d8b5c292a9dca8d124f69f9e96cc2b5fb8d47802c3a"}, + {file = "ddtrace-1.15.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3f6bb76fe33c2e4842236036f78b1bbdd4da0f2b846627ca7d72b01ac49b3076"}, + {file = "ddtrace-1.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ebc9b89501c8a557dab43170e4a12e90358130413a87a0276ccaa0038b0636a4"}, + {file = "ddtrace-1.15.2-cp310-cp310-win32.whl", hash = "sha256:c10ca0e3a63310d314ec7fa55d53f4b4434f06c4d321d64d757814679161bf5d"}, + {file = "ddtrace-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:6208635442ea52ff3f97b9fc64ac25772cda8f105a607a385e55bf524bceefc5"}, + {file = "ddtrace-1.15.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8524f460be02b402f63b11ad3b1177955c8608f814e1758b87f53f15bf9a7599"}, + {file = "ddtrace-1.15.2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5a2dd127a65e12189055818ab72d44d80587acaaf450c65624e0482d63ff9970"}, + {file = "ddtrace-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3350c647120fbc3355eb35ce054c88e63bc073d71949f377d59b1152a2ed0f4"}, + {file = "ddtrace-1.15.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adb76713207f0ef688f68a539f9cb63e19cd149d48d36befb835f67f49395ed7"}, + {file = "ddtrace-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8ff5c250c5abfbbbd76a7d3167308a2373ad7e55ecf3c7c26a62fcd2be8a57"}, + {file = "ddtrace-1.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:264bed998870b63f548b96f57dd771014cd02ef0b21bb382e745900a7b72ef28"}, + {file = "ddtrace-1.15.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:20bfd8db155167d2ccfddc25b50649338534b12cb00f7ed08514af1eb6a4956e"}, + {file = "ddtrace-1.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72479172bd10f5998188a05b0b4a109ccb2a93467a0aa1e6656d5396c83fb253"}, + {file = "ddtrace-1.15.2-cp311-cp311-win32.whl", hash = "sha256:23bee3d0eb971cc1565caa429620b82f2d69ef648e9c792046b9481188dba9ab"}, + {file = "ddtrace-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:65a29d23ecfbc7cc4ca1069a5586aa836ae3978e64251414933432078bc29bc2"}, + {file = "ddtrace-1.15.2-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:3a2852da4a76503211ca8b77a50fc86df36ba15fab04b45a6a17faa386f53839"}, + {file = "ddtrace-1.15.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:244180c6decb244c7fda929dc5969b3a510e5a4857239063de1fae139fac9837"}, + {file = "ddtrace-1.15.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:46f9ba0c282a62953f03d1add8eae8c80613244bb93a1ff997dad71d07ce6c72"}, + {file = "ddtrace-1.15.2-cp35-cp35m-win32.whl", hash = "sha256:a39dbf1ca657cc3a876143301e5d775e2f9bcf2ed1e9b4366fb3cf9d6a345a82"}, + {file = "ddtrace-1.15.2-cp35-cp35m-win_amd64.whl", hash = "sha256:7cfd9514e82871321e86897fe567c7548fc45da523df591f3e5adc6633a5781c"}, + {file = "ddtrace-1.15.2-cp36-cp36m-macosx_11_0_x86_64.whl", hash = 
"sha256:3a2978b07d19d4ebf936fde1e455c61b3d88f103f1f9e360b9269fe1a1dc608a"}, + {file = "ddtrace-1.15.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e7795a7f65a6e844ab57a0b31d400e79c4a1f69d174fab8edc69e6d2db56962"}, + {file = "ddtrace-1.15.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aae5306b3b0ec48cb8ade3362629c31bd25999244addff0f4a2f6f3934509894"}, + {file = "ddtrace-1.15.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14fb33bd6d9fa70638d43de7b5170e1c9961d3fbc277314609941e108c45716d"}, + {file = "ddtrace-1.15.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:745ce3c9559fa331ef30208ff1ccaafe3ab3c02f2e01177c560c94acd6f4de27"}, + {file = "ddtrace-1.15.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7706d35215d2cca0a89581ec11da56e25742914ae0865b928034ee9ad7278cf3"}, + {file = "ddtrace-1.15.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0bc18e900d1495deb61093f1af38d94af6a1ca66dd541fd47bd093c3f3b80b4a"}, + {file = "ddtrace-1.15.2-cp36-cp36m-win32.whl", hash = "sha256:b13f4042ef3f391714aca5ca1f03ff3c24c1d201ab5af02f0405335aa5602ff5"}, + {file = "ddtrace-1.15.2-cp36-cp36m-win_amd64.whl", hash = "sha256:eb32e3b3d0f472447b3d427a075007135b3c39488c1fe0f1e097863f326a439b"}, + {file = "ddtrace-1.15.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:0953fd9a2555801d68674bb4905f64805efe1e02b3f11def21eb7655be046717"}, + {file = "ddtrace-1.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9436ec9cc00b70730d2c1777f11aca7f4863a49ddd27d0b1478e84c1a7667b6f"}, + {file = "ddtrace-1.15.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7215b21c1eaf56b38bf46c66193db3736ecadeb9ae1b9ca780a91addbaa9853"}, + {file = "ddtrace-1.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a5f7155b99fe9393bfa4f0e4ef2610ddf59e70aefcf99a95acae8b31e29cc4"}, + {file = "ddtrace-1.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:df103a600c2901dc54929ef58dee41887a0bb558efbf7e41a7489bd6264fcf44"}, + {file = "ddtrace-1.15.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d51a73238ad8ceff4232ffa94b860d61187b325e7fab746044dafa312d6bc415"}, + {file = "ddtrace-1.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bfc5777204c0c34465fc4ce38d8d1268d9f95ffcbf7e4025e9a5d3e87d3e17c3"}, + {file = "ddtrace-1.15.2-cp37-cp37m-win32.whl", hash = "sha256:9516dbfc974af9632d75e9c32b38e695b88ea18ebfa4580dd0f768bc05272fba"}, + {file = "ddtrace-1.15.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a510252a3d5be6c29db2c69cbd2535268532e8d568fae06b295a06041e1b969d"}, + {file = "ddtrace-1.15.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:82995243719c87aefc85d7df0e1ae61bba8ae1f805d48cbaf2132beb215f1968"}, + {file = "ddtrace-1.15.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:ca5dd51910a81451d236fccdbf5d3ca8e284aa3be56f08db92644f85ef88c56e"}, + {file = "ddtrace-1.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d76f485769c035df3ede4ad9830bac06aa8b69ac4617f2eb1251b1094468009"}, + {file = "ddtrace-1.15.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4dd5f5e477021b8810b2b685e1e16ba5a99f31239e22abc71794688b7f3e6e4d"}, + {file = "ddtrace-1.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ec73676c60cc3cf08430f19a59daccbbb5770edc74ad15a99bf4237a40d0fb"}, + {file = "ddtrace-1.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:6b140f11b89d902174df05e8b9c1eb1b522a63e6c60c5d68ccac8913bb371bbb"}, + {file = "ddtrace-1.15.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c7c8a0e140d28e49cf8cd96cdec8e17232c5525ed5c154729b8afb6cb93a8e2b"}, + {file = "ddtrace-1.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0112d258c3a681a63e5f057b9e3ee8504b60d773d95baf195462d9ff4096caa9"}, + {file = "ddtrace-1.15.2-cp38-cp38-win32.whl", hash = "sha256:6ea7b80eb8019a70c999ef8cfd34fd6078a2ae154007d124d5e642531bf1a9d6"}, + {file = "ddtrace-1.15.2-cp38-cp38-win_amd64.whl", hash = "sha256:282b8c9b46d7a8450325299cf348a0f1d8f9f34d174a0ea402bc1a1df4ad7cf3"}, + {file = "ddtrace-1.15.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:53b171404b59c1e030ea614e194d1483fb42437a02ffdd7f4a45175613dd7cb4"}, + {file = "ddtrace-1.15.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9ba06236dd8bd64776b7b734dd9421709670fef090857448e75c97acb30cdce7"}, + {file = "ddtrace-1.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6abe5ba4396c9f7633cab68d0e81c5fd94f7c77b046b3ee969eded068a522d7"}, + {file = "ddtrace-1.15.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61a1b48f97a07e2f422ec01bb23861716300cebe4afd917ab36bb4db68904da4"}, + {file = "ddtrace-1.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86e186dc66802e2d71b94330c1635fd4c3f881a1bb71747be162a57b7602daaa"}, + {file = "ddtrace-1.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:762b5a0777454299c4ac62177578969ed551c973063f87a8825d9d073e5250ce"}, + {file = "ddtrace-1.15.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:889d359f4382fde41893ba5c00b412cbea8502e1b6bb6c83bf87fa6e63cbfabe"}, + {file = "ddtrace-1.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c4d3c9ce3456181e535b9da42bde7c850dc7224039fd083e95b05010c2ff9748"}, + {file = "ddtrace-1.15.2-cp39-cp39-win32.whl", hash = "sha256:69e47d28327a7afb263c16cc6bf1227e1b2bf1fdb2d559dce913a138a3f36807"}, + {file = "ddtrace-1.15.2-cp39-cp39-win_amd64.whl", hash = "sha256:da780fbfe6dd749ee571a468b8e86f1fd4f51626d35626c2356f8a440efe0dfa"}, + {file = "ddtrace-1.15.2.tar.gz", hash = "sha256:e5c1a5965ea8d8260586769102d79522bc7d9758a271252bb58ee05d6c5cd9a8"}, +] + +[package.dependencies] +attrs = {version = ">=20", markers = "python_version > \"2.7\""} +bytecode = [ + {version = ">=0.13.0,<0.14.0", markers = "python_version == \"3.7\""}, + {version = "*", markers = "python_version >= \"3.8\""}, +] +cattrs = {version = "*", markers = "python_version >= \"3.7\""} +ddsketch = ">=2.0.1" +envier = "*" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +opentelemetry-api = {version = ">=1", markers = "python_version >= \"3.7\""} +protobuf = {version = ">=3", markers = "python_version >= \"3.7\""} +six = ">=1.12.0" +typing-extensions = "*" +xmltodict = ">=0.12" + +[package.extras] +opentracing = ["opentracing (>=2.0.0)"] + [[package]] name = "decorator" version = "5.1.1" @@ -609,6 +782,37 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "envier" +version = "0.4.0" +description = "Python application configuration via the environment" +optional = false +python-versions = ">=2.7" +files = [ + {file = "envier-0.4.0-py3-none-any.whl", hash = "sha256:7b91af0f16ea3e56d91ec082f038987e81b441fc19c657a8b8afe0909740a706"}, + {file = "envier-0.4.0.tar.gz", hash = "sha256:e68dcd1ed67d8b6313883e27dff3e701b7fba944d2ed4b7f53d0cc2e12364a82"}, +] + +[package.extras] +mypy = ["mypy"] + [[package]] name = "exceptiongroup" version = "1.1.2" @@ -666,16 +870,6 @@ files = [ docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] -[[package]] -name = "future" -version = "0.18.3" -description = "Clean single-source support for Python 3 and 2" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, -] - [[package]] name = "ghp-import" version = "2.1.0" @@ -1005,13 +1199,13 @@ pbr = "*" [[package]] name = "jsii" -version = "1.85.0" +version = "1.86.1" description = "Python client for jsii runtime" optional = false python-versions = "~=3.7" files = [ - {file = "jsii-1.85.0-py3-none-any.whl", hash = "sha256:379feb1a1a3c4e449307564f42a7cddef05e43760cbfbbfe8434f6448cd668a0"}, - {file = "jsii-1.85.0.tar.gz", hash = "sha256:b77194cf053c06c6bdffc887a4d1d2a41113c6f4780a7d78d70a780a70998008"}, + {file = "jsii-1.86.1-py3-none-any.whl", hash = "sha256:32eb46ed4c9a35bc92b892ef049ed1996f13be38ffef964d607e8fe930471b3e"}, + {file = "jsii-1.86.1.tar.gz", hash = "sha256:44f9a820eea92c9508693f72d3129b5a080421c949c32303f4f7b2cc98a81f59"}, ] [package.dependencies] @@ -1143,19 +1337,20 @@ restructuredtext = ["rst2ansi"] [[package]] name = "markdown" -version = "3.3.7" -description = "Python implementation of Markdown." +version = "3.4.4" +description = "Python implementation of John Gruber's Markdown." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, - {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, + {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, + {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, ] [package.dependencies] importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"] testing = ["coverage", "pyyaml"] [[package]] @@ -1287,13 +1482,13 @@ test = ["coverage", "flake8 (>=3.0)", "shtab"] [[package]] name = "mkdocs" -version = "1.4.3" +version = "1.5.2" description = "Project documentation with Markdown." optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs-1.4.3-py3-none-any.whl", hash = "sha256:6ee46d309bda331aac915cd24aab882c179a933bd9e77b80ce7d2eaaa3f689dd"}, - {file = "mkdocs-1.4.3.tar.gz", hash = "sha256:5955093bbd4dd2e9403c5afaf57324ad8b04f16886512a3ee6ef828956481c57"}, + {file = "mkdocs-1.5.2-py3-none-any.whl", hash = "sha256:60a62538519c2e96fe8426654a67ee177350451616118a41596ae7c876bb7eac"}, + {file = "mkdocs-1.5.2.tar.gz", hash = "sha256:70d0da09c26cff288852471be03c23f0f521fc15cf16ac89c7a3bfb9ae8d24f9"}, ] [package.dependencies] @@ -1302,9 +1497,12 @@ colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} jinja2 = ">=2.11.1" -markdown = ">=3.2.1,<3.4" +markdown = ">=3.2.1" +markupsafe = ">=2.0.1" mergedeep = ">=1.3.4" packaging = ">=20.5" +pathspec = ">=0.11.1" +platformdirs = ">=2.2.0" pyyaml = ">=5.1" pyyaml-env-tag = ">=0.1" typing-extensions = {version = ">=3.10", markers = "python_version < \"3.8\""} @@ -1312,7 +1510,7 @@ watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] -min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] [[package]] name = "mkdocs-git-revision-date-plugin" @@ -1331,20 +1529,20 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "9.1.19" +version = "9.1.21" description = "Documentation that simply works" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.1.19-py3-none-any.whl", hash = "sha256:fb0a149294b319aedf36983919d8c40c9e566db21ead16258e20ebd2e6c0961c"}, - {file = "mkdocs_material-9.1.19.tar.gz", hash = "sha256:73b94b08c765e92a80645aac58d6a741fc5f587deec2b715489c714827b15a6f"}, + {file = "mkdocs_material-9.1.21-py3-none-any.whl", hash = "sha256:58bb2f11ef240632e176d6f0f7d1cff06be1d11c696a5a1b553b808b4280ed47"}, + {file = 
"mkdocs_material-9.1.21.tar.gz", hash = "sha256:71940cdfca84ab296b6362889c25395b1621273fb16c93deda257adb7ff44ec8"}, ] [package.dependencies] colorama = ">=0.4" jinja2 = ">=3.0" markdown = ">=3.2" -mkdocs = ">=1.4.2" +mkdocs = ">=1.5.0" mkdocs-material-extensions = ">=1.1" pygments = ">=2.14" pymdown-extensions = ">=9.9.1" @@ -1428,13 +1626,13 @@ reports = ["lxml"] [[package]] name = "mypy-boto3-appconfig" -version = "1.28.0" -description = "Type annotations for boto3.AppConfig 1.28.0 service generated with mypy-boto3-builder 7.14.5" +version = "1.28.16" +description = "Type annotations for boto3.AppConfig 1.28.16 service generated with mypy-boto3-builder 7.17.1" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-appconfig-1.28.0.tar.gz", hash = "sha256:753044339ce1da00e0b60f387ed957013712ab69ca51a9b56859a4ae502c806a"}, - {file = "mypy_boto3_appconfig-1.28.0-py3-none-any.whl", hash = "sha256:5708545675610ceb686339e90f4d6f8276f9e2ad2f15db2833ebc593185708f5"}, + {file = "mypy-boto3-appconfig-1.28.16.tar.gz", hash = "sha256:42354a56461db07ffb743066a1c008efa9804c378913051780a4aef583425556"}, + {file = "mypy_boto3_appconfig-1.28.16-py3-none-any.whl", hash = "sha256:12a6a4ab6a1fc30d7a24415506dcd69eae4742160286910242cefc15722ac532"}, ] [package.dependencies] @@ -1442,13 +1640,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-appconfigdata" -version = "1.28.0" -description = "Type annotations for boto3.AppConfigData 1.28.0 service generated with mypy-boto3-builder 7.14.5" +version = "1.28.16" +description = "Type annotations for boto3.AppConfigData 1.28.16 service generated with mypy-boto3-builder 7.17.1" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-appconfigdata-1.28.0.tar.gz", hash = "sha256:1f0331fcb6642c44d335d08250ee5abcd840030304d480adf85ea017ba941bb4"}, - {file = "mypy_boto3_appconfigdata-1.28.0-py3-none-any.whl", hash = "sha256:c01ecbbda949497dbb650cc761915a7e14b621db5bc20cc1fd3f49620e74c9bc"}, + {file = "mypy-boto3-appconfigdata-1.28.16.tar.gz", hash = "sha256:4f736f907c3da1f63ce03d121dab4c0cd35f3875176db86d9ba87ee0907adcd9"}, + {file = "mypy_boto3_appconfigdata-1.28.16-py3-none-any.whl", hash = "sha256:68a26f891b94fe45894c8d7d9773faafa45d70dd2c5fe85085998673e6b0bbc4"}, ] [package.dependencies] @@ -1456,13 +1654,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-cloudformation" -version = "1.28.10" -description = "Type annotations for boto3.CloudFormation 1.28.10 service generated with mypy-boto3-builder 7.15.1" +version = "1.28.19" +description = "Type annotations for boto3.CloudFormation 1.28.19 service generated with mypy-boto3-builder 7.17.2" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-cloudformation-1.28.10.tar.gz", hash = "sha256:81632665c3c9a648a665af390c555e5a8ad6bf0d4a48e4729aa1ead11b643aef"}, - {file = "mypy_boto3_cloudformation-1.28.10-py3-none-any.whl", hash = "sha256:c2623baf32c3c47976a4454d5812367ee27cee7f3ab0fbe98818ac9020db54c0"}, + {file = "mypy-boto3-cloudformation-1.28.19.tar.gz", hash = "sha256:efb08a2a6d7c744d0d8d60f04514c531355aa7972b53f025d9e08e3adf3a5504"}, + {file = "mypy_boto3_cloudformation-1.28.19-py3-none-any.whl", hash = "sha256:aadf78eb2f2e3b2e83a4844a80d0c5d0d72ad11c453a11efdd28b0c309b05bf6"}, ] [package.dependencies] @@ -1470,13 +1668,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = 
"mypy-boto3-cloudwatch" -version = "1.28.0" -description = "Type annotations for boto3.CloudWatch 1.28.0 service generated with mypy-boto3-builder 7.14.5" +version = "1.28.28" +description = "Type annotations for boto3.CloudWatch 1.28.28 service generated with mypy-boto3-builder 7.17.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-cloudwatch-1.28.0.tar.gz", hash = "sha256:c34cc45c8a57702e11cf38de590af447d90cd3ea68328ea2908452d8a09d471a"}, - {file = "mypy_boto3_cloudwatch-1.28.0-py3-none-any.whl", hash = "sha256:8812c6120111798f84b2e1fe5808aae1f5766c183746ea336dec14f9bdf3308b"}, + {file = "mypy-boto3-cloudwatch-1.28.28.tar.gz", hash = "sha256:6dfad8f0f5fffbe1350c6e0f2fab4a0e184d1714f3de644937ad0bc458e7229c"}, + {file = "mypy_boto3_cloudwatch-1.28.28-py3-none-any.whl", hash = "sha256:102a5f5c63ec2654f6446e6ae12705de4bebc1a599fe35608e953ce151cc29a9"}, ] [package.dependencies] @@ -1484,13 +1682,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-dynamodb" -version = "1.28.0" -description = "Type annotations for boto3.DynamoDB 1.28.0 service generated with mypy-boto3-builder 7.14.5" +version = "1.28.27" +description = "Type annotations for boto3.DynamoDB 1.28.27 service generated with mypy-boto3-builder 7.17.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-dynamodb-1.28.0.tar.gz", hash = "sha256:d12ed66edd7ded7089297b533d77e8b8ed8844da4e097cd912b61a08bfe4948b"}, - {file = "mypy_boto3_dynamodb-1.28.0-py3-none-any.whl", hash = "sha256:622f3d14dc1835a17ca511672d2f8fd08c03c4930f2845d06d1632b9f0c92aaf"}, + {file = "mypy-boto3-dynamodb-1.28.27.tar.gz", hash = "sha256:b6786cf953e65293ec25c791e7efcd8ededceb6bda2e04910785b0f62584417d"}, + {file = "mypy_boto3_dynamodb-1.28.27-py3-none-any.whl", hash = "sha256:218f7bcb04010058aea5a735d52b87c4f70e8c5feb44e64ab6baf377ebb4e22a"}, ] [package.dependencies] @@ -1498,13 +1696,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-lambda" -version = "1.28.0" -description = "Type annotations for boto3.Lambda 1.28.0 service generated with mypy-boto3-builder 7.14.5" +version = "1.28.19" +description = "Type annotations for boto3.Lambda 1.28.19 service generated with mypy-boto3-builder 7.17.2" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-lambda-1.28.0.tar.gz", hash = "sha256:a3abaf98e514f2cabed2cf0b1e77c94c81f0fbbba8a549b3c073577a9f9ac196"}, - {file = "mypy_boto3_lambda-1.28.0-py3-none-any.whl", hash = "sha256:5fd38df6418829b2f3a184918fc8ad1d49b6738509f1832cdc71f33588efbdfb"}, + {file = "mypy-boto3-lambda-1.28.19.tar.gz", hash = "sha256:955b7702f02f2037ba4c058f6dcebfcce50090ac13c9d031a0052fa9136ec59e"}, + {file = "mypy_boto3_lambda-1.28.19-py3-none-any.whl", hash = "sha256:88582f8ca71bd7a6bbcf8b05155476f0a9dea79630a4da36d367482925241710"}, ] [package.dependencies] @@ -1512,13 +1710,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-logs" -version = "1.28.1" -description = "Type annotations for boto3.CloudWatchLogs 1.28.1 service generated with mypy-boto3-builder 7.14.5" +version = "1.28.16" +description = "Type annotations for boto3.CloudWatchLogs 1.28.16 service generated with mypy-boto3-builder 7.17.1" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-logs-1.28.1.tar.gz", hash = "sha256:e0278a977d68c15120c0f2f4a85b46d0ca3e17c676c7d218dbbb3cfa1b4c8ef1"}, - {file = 
"mypy_boto3_logs-1.28.1-py3-none-any.whl", hash = "sha256:0ab3b7b39f1c3b0d530096fe2bfb9df09b989b0ea718e5ecbf823c32b016e319"}, + {file = "mypy-boto3-logs-1.28.16.tar.gz", hash = "sha256:2d6c613f17ecafff8d56ccdadc6642d1abdbd4674434a683ca8966304e201220"}, + {file = "mypy_boto3_logs-1.28.16-py3-none-any.whl", hash = "sha256:f8998bf7df00f712d507e6f4a830841e8b3806a865871dafdd03e4d06072e658"}, ] [package.dependencies] @@ -1526,13 +1724,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-s3" -version = "1.28.8" -description = "Type annotations for boto3.S3 1.28.8 service generated with mypy-boto3-builder 7.15.1" +version = "1.28.27" +description = "Type annotations for boto3.S3 1.28.27 service generated with mypy-boto3-builder 7.17.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-s3-1.28.8.tar.gz", hash = "sha256:c9ed17fee2c0e2edeb2966b3796af7b349dcc4eeee54dbd59a269fdb9418eb55"}, - {file = "mypy_boto3_s3-1.28.8-py3-none-any.whl", hash = "sha256:75b929c517c5ad8f97c14dfba5f8521db569157dc4ac76a07a178805777cff8c"}, + {file = "mypy-boto3-s3-1.28.27.tar.gz", hash = "sha256:f1094344f68d1ffe2b998404e2e4ff9aa4239438692187fa83ad7b734739991c"}, + {file = "mypy_boto3_s3-1.28.27-py3-none-any.whl", hash = "sha256:f4fdefbfe084c92a6b3d000689e61ab12a985a72b07c5ff157f8a66bcbdb83ba"}, ] [package.dependencies] @@ -1540,13 +1738,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-secretsmanager" -version = "1.28.3.post2" -description = "Type annotations for boto3.SecretsManager 1.28.3 service generated with mypy-boto3-builder 7.15.0" +version = "1.28.24" +description = "Type annotations for boto3.SecretsManager 1.28.24 service generated with mypy-boto3-builder 7.17.2" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-secretsmanager-1.28.3.post2.tar.gz", hash = "sha256:f359f6446ac856d0887e40cb0f5bc6e0a60873524be5dd4b68be1d0fc4ac513e"}, - {file = "mypy_boto3_secretsmanager-1.28.3.post2-py3-none-any.whl", hash = "sha256:3a5e5619ee945f244d2dfefcb382c85874171a18b46f75403465622095284d25"}, + {file = "mypy-boto3-secretsmanager-1.28.24.tar.gz", hash = "sha256:13461d8d2891ec0e430437dbb71c0879ee431ddfedb6b21c265878642faeb2a7"}, + {file = "mypy_boto3_secretsmanager-1.28.24-py3-none-any.whl", hash = "sha256:e224809e28d99c1360bfe6428e8b567bb4a43c38a71263eba0ff4de7fa321142"}, ] [package.dependencies] @@ -1554,13 +1752,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-ssm" -version = "1.28.0" -description = "Type annotations for boto3.SSM 1.28.0 service generated with mypy-boto3-builder 7.14.5" +version = "1.28.16" +description = "Type annotations for boto3.SSM 1.28.16 service generated with mypy-boto3-builder 7.17.1" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-ssm-1.28.0.tar.gz", hash = "sha256:15482d2bff7995230549d145547f0ea92d01b68716aa25297e2a2da015922309"}, - {file = "mypy_boto3_ssm-1.28.0-py3-none-any.whl", hash = "sha256:e6ac60818c807baeeb0ef6714832c23904f2ed463fc40133059e2f63abd432fa"}, + {file = "mypy-boto3-ssm-1.28.16.tar.gz", hash = "sha256:736d11cb8def9a7c7206cbd64ff6b81fc3e74acb02b63985418937a0d2758d88"}, + {file = "mypy_boto3_ssm-1.28.16-py3-none-any.whl", hash = "sha256:75d94def8b8752bc84705d967d7fa25427ba5dbe3a5efc08e0579b4b074246f7"}, ] [package.dependencies] @@ -1568,13 +1766,13 @@ typing-extensions = {version = ">=4.1.0", markers = 
"python_version < \"3.9\""} [[package]] name = "mypy-boto3-xray" -version = "1.28.0" -description = "Type annotations for boto3.XRay 1.28.0 service generated with mypy-boto3-builder 7.14.5" +version = "1.28.16" +description = "Type annotations for boto3.XRay 1.28.16 service generated with mypy-boto3-builder 7.17.1" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-xray-1.28.0.tar.gz", hash = "sha256:8ce07598f7eeabe66e8dc8cb7e906efb96198b9102f58e9315e6daf166abf3e7"}, - {file = "mypy_boto3_xray-1.28.0-py3-none-any.whl", hash = "sha256:64cd601a829c274665b977853f85b27464986e9eec1ebc03f5bc4530a400b2f6"}, + {file = "mypy-boto3-xray-1.28.16.tar.gz", hash = "sha256:6ddd4acccf272bf663522c5fcd31b9b7dacbed4a01c91e44e4e8c0abb2343c0a"}, + {file = "mypy_boto3_xray-1.28.16-py3-none-any.whl", hash = "sha256:4bc7dfd46cb71ca6a9d46b47453d59a08ac80c872cc429dacb45a93abd737172"}, ] [package.dependencies] @@ -1609,6 +1807,21 @@ doc = ["nb2plots (>=0.6)", "numpydoc (>=1.1)", "pillow (>=8.2)", "pydata-sphinx- extra = ["lxml (>=4.5)", "pydot (>=1.4.1)", "pygraphviz (>=1.7)"] test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"] +[[package]] +name = "opentelemetry-api" +version = "1.19.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_api-1.19.0-py3-none-any.whl", hash = "sha256:dcd2a0ad34b691964947e1d50f9e8c415c32827a1d87f0459a72deb9afdf5597"}, + {file = "opentelemetry_api-1.19.0.tar.gz", hash = "sha256:db374fb5bea00f3c7aa290f5d94cea50b659e6ea9343384c5f6c2bb5d5e8db65"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<7.0" + [[package]] name = "packaging" version = "23.1" @@ -1622,13 +1835,13 @@ files = [ [[package]] name = "pathspec" -version = "0.11.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] @@ -1670,21 +1883,21 @@ files = [ [[package]] name = "platformdirs" -version = "3.9.1" +version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.9.1-py3-none-any.whl", hash = "sha256:ad8291ae0ae5072f66c16945166cb11c63394c7a3ad1b1bc9828ca3162da8c2f"}, - {file = "platformdirs-3.9.1.tar.gz", hash = "sha256:1b42b450ad933e981d56e59f1b97495428c9bd60698baab9f3eb3d00d5822421"}, + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, ] [package.dependencies] -typing-extensions = {version = ">=4.6.3", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.8\""} [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" @@ -1704,6 +1917,28 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "protobuf" +version = "4.24.0" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-4.24.0-cp310-abi3-win32.whl", hash = "sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52"}, + {file = "protobuf-4.24.0-cp310-abi3-win_amd64.whl", hash = "sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3"}, + {file = "protobuf-4.24.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5"}, + {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d"}, + {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7"}, + {file = "protobuf-4.24.0-cp37-cp37m-win32.whl", hash = "sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04"}, + {file = "protobuf-4.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61"}, + {file = "protobuf-4.24.0-cp38-cp38-win32.whl", hash = "sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653"}, + {file = "protobuf-4.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109"}, + {file = "protobuf-4.24.0-cp39-cp39-win32.whl", hash = "sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e"}, + {file = "protobuf-4.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf"}, + {file = "protobuf-4.24.0-py3-none-any.whl", hash = "sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201"}, + {file = "protobuf-4.24.0.tar.gz", hash = "sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85"}, +] + [[package]] name = "publication" version = "0.0.3" @@ -1728,47 +1963,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.11" +version = "1.10.12" description = "Data validation and settings management using 
python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, - {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, - {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, - {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, - {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, - {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, - {file = 
"pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, - {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, - {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, - {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, - {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, + {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, + {file = 
"pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, + {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, + {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, + {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, + {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, + {file = 
"pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, + {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, + {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, + {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, ] [package.dependencies] @@ -1780,13 +2015,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -2049,115 +2284,117 @@ pyyaml = "*" [[package]] name = "radon" -version = "5.1.0" +version = "6.0.1" description = "Code Metrics in Python" optional = false python-versions = "*" files = [ - {file = "radon-5.1.0-py2.py3-none-any.whl", hash = "sha256:fa74e018197f1fcb54578af0f675d8b8e2342bd8e0b72bef8197bc4c9e645f36"}, - {file = "radon-5.1.0.tar.gz", hash = "sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"}, + {file = "radon-6.0.1-py2.py3-none-any.whl", hash = "sha256:632cc032364a6f8bb1010a2f6a12d0f14bc7e5ede76585ef29dc0cecf4cd8859"}, + {file = "radon-6.0.1.tar.gz", hash = "sha256:d1ac0053943a893878940fedc8b19ace70386fc9c9bf0a09229a44125ebf45b5"}, ] [package.dependencies] colorama = {version = ">=0.4.1", markers = "python_version > \"3.4\""} -future = "*" -mando = ">=0.6,<0.7" +mando = ">=0.6,<0.8" + +[package.extras] +toml = ["tomli (>=2.0.1)"] [[package]] name = "regex" -version = "2023.6.3" +version = "2023.8.8" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.6" files = [ - {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, - {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, - {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, - {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, - {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, - {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, - {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, - {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, - {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, - {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, - {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, - {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, - {file = 
"regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, - {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, - {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, - {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, - {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, - {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, + {file = "regex-2023.8.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88900f521c645f784260a8d346e12a1590f79e96403971241e64c3a265c8ecdb"}, + {file = "regex-2023.8.8-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:3611576aff55918af2697410ff0293d6071b7e00f4b09e005d614686ac4cd57c"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a0ccc8f2698f120e9e5742f4b38dc944c38744d4bdfc427616f3a163dd9de5"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c662a4cbdd6280ee56f841f14620787215a171c4e2d1744c9528bed8f5816c96"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf0633e4a1b667bfe0bb10b5e53fe0d5f34a6243ea2530eb342491f1adf4f739"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551ad543fa19e94943c5b2cebc54c73353ffff08228ee5f3376bd27b3d5b9800"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54de2619f5ea58474f2ac211ceea6b615af2d7e4306220d4f3fe690c91988a61"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ec4b3f0aebbbe2fc0134ee30a791af522a92ad9f164858805a77442d7d18570"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ae646c35cb9f820491760ac62c25b6d6b496757fda2d51be429e0e7b67ae0ab"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca339088839582d01654e6f83a637a4b8194d0960477b9769d2ff2cfa0fa36d2"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d9b6627408021452dcd0d2cdf8da0534e19d93d070bfa8b6b4176f99711e7f90"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:bd3366aceedf274f765a3a4bc95d6cd97b130d1dda524d8f25225d14123c01db"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7aed90a72fc3654fba9bc4b7f851571dcc368120432ad68b226bd593f3f6c0b7"}, + {file = "regex-2023.8.8-cp310-cp310-win32.whl", hash = "sha256:80b80b889cb767cc47f31d2b2f3dec2db8126fbcd0cff31b3925b4dc6609dcdb"}, + {file = "regex-2023.8.8-cp310-cp310-win_amd64.whl", hash = "sha256:b82edc98d107cbc7357da7a5a695901b47d6eb0420e587256ba3ad24b80b7d0b"}, + {file = "regex-2023.8.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1e7d84d64c84ad97bf06f3c8cb5e48941f135ace28f450d86af6b6512f1c9a71"}, + {file = "regex-2023.8.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce0f9fbe7d295f9922c0424a3637b88c6c472b75eafeaff6f910494a1fa719ef"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06c57e14ac723b04458df5956cfb7e2d9caa6e9d353c0b4c7d5d54fcb1325c46"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7a9aaa5a1267125eef22cef3b63484c3241aaec6f48949b366d26c7250e0357"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b7408511fca48a82a119d78a77c2f5eb1b22fe88b0d2450ed0756d194fe7a9a"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14dc6f2d88192a67d708341f3085df6a4f5a0c7b03dec08d763ca2cd86e9f559"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48c640b99213643d141550326f34f0502fedb1798adb3c9eb79650b1ecb2f177"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0085da0f6c6393428bf0d9c08d8b1874d805bb55e17cb1dfa5ddb7cfb11140bf"}, + {file = 
"regex-2023.8.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:964b16dcc10c79a4a2be9f1273fcc2684a9eedb3906439720598029a797b46e6"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7ce606c14bb195b0e5108544b540e2c5faed6843367e4ab3deb5c6aa5e681208"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:40f029d73b10fac448c73d6eb33d57b34607f40116e9f6e9f0d32e9229b147d7"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3b8e6ea6be6d64104d8e9afc34c151926f8182f84e7ac290a93925c0db004bfd"}, + {file = "regex-2023.8.8-cp311-cp311-win32.whl", hash = "sha256:942f8b1f3b223638b02df7df79140646c03938d488fbfb771824f3d05fc083a8"}, + {file = "regex-2023.8.8-cp311-cp311-win_amd64.whl", hash = "sha256:51d8ea2a3a1a8fe4f67de21b8b93757005213e8ac3917567872f2865185fa7fb"}, + {file = "regex-2023.8.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e951d1a8e9963ea51efd7f150450803e3b95db5939f994ad3d5edac2b6f6e2b4"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704f63b774218207b8ccc6c47fcef5340741e5d839d11d606f70af93ee78e4d4"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22283c769a7b01c8ac355d5be0715bf6929b6267619505e289f792b01304d898"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91129ff1bb0619bc1f4ad19485718cc623a2dc433dff95baadbf89405c7f6b57"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de35342190deb7b866ad6ba5cbcccb2d22c0487ee0cbb251efef0843d705f0d4"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b993b6f524d1e274a5062488a43e3f9f8764ee9745ccd8e8193df743dbe5ee61"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3026cbcf11d79095a32d9a13bbc572a458727bd5b1ca332df4a79faecd45281c"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:293352710172239bf579c90a9864d0df57340b6fd21272345222fb6371bf82b3"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d909b5a3fff619dc7e48b6b1bedc2f30ec43033ba7af32f936c10839e81b9217"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3d370ff652323c5307d9c8e4c62efd1956fb08051b0e9210212bc51168b4ff56"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:b076da1ed19dc37788f6a934c60adf97bd02c7eea461b73730513921a85d4235"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e9941a4ada58f6218694f382e43fdd256e97615db9da135e77359da257a7168b"}, + {file = "regex-2023.8.8-cp36-cp36m-win32.whl", hash = "sha256:a8c65c17aed7e15a0c824cdc63a6b104dfc530f6fa8cb6ac51c437af52b481c7"}, + {file = "regex-2023.8.8-cp36-cp36m-win_amd64.whl", hash = "sha256:aadf28046e77a72f30dcc1ab185639e8de7f4104b8cb5c6dfa5d8ed860e57236"}, + {file = "regex-2023.8.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:423adfa872b4908843ac3e7a30f957f5d5282944b81ca0a3b8a7ccbbfaa06103"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ae594c66f4a7e1ea67232a0846649a7c94c188d6c071ac0210c3e86a5f92109"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e51c80c168074faa793685656c38eb7a06cbad7774c8cbc3ea05552d615393d8"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b7f4c66aa9d1522b06e31a54f15581c37286237208df1345108fcf4e050c18"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e73e5243af12d9cd6a9d6a45a43570dbe2e5b1cdfc862f5ae2b031e44dd95a8"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941460db8fe3bd613db52f05259c9336f5a47ccae7d7def44cc277184030a116"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f0ccf3e01afeb412a1a9993049cb160d0352dba635bbca7762b2dc722aa5742a"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2e9216e0d2cdce7dbc9be48cb3eacb962740a09b011a116fd7af8c832ab116ca"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:5cd9cd7170459b9223c5e592ac036e0704bee765706445c353d96f2890e816c8"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4873ef92e03a4309b3ccd8281454801b291b689f6ad45ef8c3658b6fa761d7ac"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:239c3c2a339d3b3ddd51c2daef10874410917cd2b998f043c13e2084cb191684"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1005c60ed7037be0d9dea1f9c53cc42f836188227366370867222bda4c3c6bd7"}, + {file = "regex-2023.8.8-cp37-cp37m-win32.whl", hash = "sha256:e6bd1e9b95bc5614a7a9c9c44fde9539cba1c823b43a9f7bc11266446dd568e3"}, + {file = "regex-2023.8.8-cp37-cp37m-win_amd64.whl", hash = "sha256:9a96edd79661e93327cfeac4edec72a4046e14550a1d22aa0dd2e3ca52aec921"}, + {file = "regex-2023.8.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f2181c20ef18747d5f4a7ea513e09ea03bdd50884a11ce46066bb90fe4213675"}, + {file = "regex-2023.8.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a2ad5add903eb7cdde2b7c64aaca405f3957ab34f16594d2b78d53b8b1a6a7d6"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9233ac249b354c54146e392e8a451e465dd2d967fc773690811d3a8c240ac601"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920974009fb37b20d32afcdf0227a2e707eb83fe418713f7a8b7de038b870d0b"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd2b6c5dfe0929b6c23dde9624483380b170b6e34ed79054ad131b20203a1a63"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96979d753b1dc3b2169003e1854dc67bfc86edf93c01e84757927f810b8c3c93"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ae54a338191e1356253e7883d9d19f8679b6143703086245fb14d1f20196be9"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2162ae2eb8b079622176a81b65d486ba50b888271302190870b8cc488587d280"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c884d1a59e69e03b93cf0dfee8794c63d7de0ee8f7ffb76e5f75be8131b6400a"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf9273e96f3ee2ac89ffcb17627a78f78e7516b08f94dc435844ae72576a276e"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:83215147121e15d5f3a45d99abeed9cf1fe16869d5c233b08c56cdf75f43a504"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f7454aa427b8ab9101f3787eb178057c5250478e39b99540cfc2b889c7d0586"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0640913d2c1044d97e30d7c41728195fc37e54d190c5385eacb52115127b882"}, + {file = "regex-2023.8.8-cp38-cp38-win32.whl", hash = "sha256:0c59122ceccb905a941fb23b087b8eafc5290bf983ebcb14d2301febcbe199c7"}, + {file = "regex-2023.8.8-cp38-cp38-win_amd64.whl", hash = "sha256:c12f6f67495ea05c3d542d119d270007090bad5b843f642d418eb601ec0fa7be"}, + {file = "regex-2023.8.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:82cd0a69cd28f6cc3789cc6adeb1027f79526b1ab50b1f6062bbc3a0ccb2dbc3"}, + {file = "regex-2023.8.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bb34d1605f96a245fc39790a117ac1bac8de84ab7691637b26ab2c5efb8f228c"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:987b9ac04d0b38ef4f89fbc035e84a7efad9cdd5f1e29024f9289182c8d99e09"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dd6082f4e2aec9b6a0927202c85bc1b09dcab113f97265127c1dc20e2e32495"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7eb95fe8222932c10d4436e7a6f7c99991e3fdd9f36c949eff16a69246dee2dc"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7098c524ba9f20717a56a8d551d2ed491ea89cbf37e540759ed3b776a4f8d6eb"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b694430b3f00eb02c594ff5a16db30e054c1b9589a043fe9174584c6efa8033"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2aeab3895d778155054abea5238d0eb9a72e9242bd4b43f42fd911ef9a13470"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:988631b9d78b546e284478c2ec15c8a85960e262e247b35ca5eaf7ee22f6050a"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:67ecd894e56a0c6108ec5ab1d8fa8418ec0cff45844a855966b875d1039a2e34"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:14898830f0a0eb67cae2bbbc787c1a7d6e34ecc06fbd39d3af5fe29a4468e2c9"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f2200e00b62568cfd920127782c61bc1c546062a879cdc741cfcc6976668dfcf"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9691a549c19c22d26a4f3b948071e93517bdf86e41b81d8c6ac8a964bb71e5a6"}, + {file = "regex-2023.8.8-cp39-cp39-win32.whl", hash = "sha256:6ab2ed84bf0137927846b37e882745a827458689eb969028af8032b1b3dac78e"}, + {file = "regex-2023.8.8-cp39-cp39-win_amd64.whl", hash = "sha256:5543c055d8ec7801901e1193a51570643d6a6ab8751b1f7dd9af71af467538bb"}, + {file = "regex-2023.8.8.tar.gz", hash = "sha256:fcbdc5f2b0f1cd0f6a56cdb46fe41d2cce1e644e3b68832f3eeebc5fb0f7712e"}, ] [[package]] @@ -2196,13 +2433,13 @@ decorator = ">=3.4.2" [[package]] name = "rich" -version = "13.4.2" +version = "13.5.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"}, - {file = 
"rich-13.4.2.tar.gz", hash = "sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"}, + {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, + {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, ] [package.dependencies] @@ -2215,28 +2452,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.0.280" +version = "0.0.284" description = "An extremely fast Python linter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.280-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:48ed5aca381050a4e2f6d232db912d2e4e98e61648b513c350990c351125aaec"}, - {file = "ruff-0.0.280-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:ef6ee3e429fd29d6a5ceed295809e376e6ece5b0f13c7e703efaf3d3bcb30b96"}, - {file = "ruff-0.0.280-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d878370f7e9463ac40c253724229314ff6ebe4508cdb96cb536e1af4d5a9cd4f"}, - {file = "ruff-0.0.280-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:83e8f372fa5627eeda5b83b5a9632d2f9c88fc6d78cead7e2a1f6fb05728d137"}, - {file = "ruff-0.0.280-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7008fc6ca1df18b21fa98bdcfc711dad5f94d0fc3c11791f65e460c48ef27c82"}, - {file = "ruff-0.0.280-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:fe7118c1eae3fda17ceb409629c7f3b5a22dffa7caf1f6796776936dca1fe653"}, - {file = "ruff-0.0.280-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:37359cd67d2af8e09110a546507c302cbea11c66a52d2a9b6d841d465f9962d4"}, - {file = "ruff-0.0.280-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd58af46b0221efb95966f1f0f7576df711cb53e50d2fdb0e83c2f33360116a4"}, - {file = "ruff-0.0.280-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e7c15828d09f90e97bea8feefcd2907e8c8ce3a1f959c99f9b4b3469679f33c"}, - {file = "ruff-0.0.280-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2dae8f2d9c44c5c49af01733c2f7956f808db682a4193180dedb29dd718d7bbe"}, - {file = "ruff-0.0.280-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5f972567163a20fb8c2d6afc60c2ea5ef8b68d69505760a8bd0377de8984b4f6"}, - {file = "ruff-0.0.280-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8ffa7347ad11643f29de100977c055e47c988cd6d9f5f5ff83027600b11b9189"}, - {file = "ruff-0.0.280-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7a37dab70114671d273f203268f6c3366c035fe0c8056614069e90a65e614bfc"}, - {file = "ruff-0.0.280-py3-none-win32.whl", hash = "sha256:7784e3606352fcfb193f3cd22b2e2117c444cb879ef6609ec69deabd662b0763"}, - {file = "ruff-0.0.280-py3-none-win_amd64.whl", hash = "sha256:4a7d52457b5dfcd3ab24b0b38eefaead8e2dca62b4fbf10de4cd0938cf20ce30"}, - {file = "ruff-0.0.280-py3-none-win_arm64.whl", hash = "sha256:b7de5b8689575918e130e4384ed9f539ce91d067c0a332aedef6ca7188adac2d"}, - {file = "ruff-0.0.280.tar.gz", hash = "sha256:581c43e4ac5e5a7117ad7da2120d960a4a99e68ec4021ec3cd47fe1cf78f8380"}, + {file = "ruff-0.0.284-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:8b949084941232e2c27f8d12c78c5a6a010927d712ecff17231ee1a8371c205b"}, + {file = "ruff-0.0.284-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:a3930d66b35e4dc96197422381dff2a4e965e9278b5533e71ae8474ef202fab0"}, + {file = 
"ruff-0.0.284-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1f7096038961d8bc3b956ee69d73826843eb5b39a5fa4ee717ed473ed69c95"}, + {file = "ruff-0.0.284-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bcaf85907fc905d838f46490ee15f04031927bbea44c478394b0bfdeadc27362"}, + {file = "ruff-0.0.284-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3660b85a9d84162a055f1add334623ae2d8022a84dcd605d61c30a57b436c32"}, + {file = "ruff-0.0.284-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0a3218458b140ea794da72b20ea09cbe13c4c1cdb7ac35e797370354628f4c05"}, + {file = "ruff-0.0.284-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2fe880cff13fffd735387efbcad54ba0ff1272bceea07f86852a33ca71276f4"}, + {file = "ruff-0.0.284-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1d098ea74d0ce31478765d1f8b4fbdbba2efc532397b5c5e8e5ea0c13d7e5ae"}, + {file = "ruff-0.0.284-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c79ae3308e308b94635cd57a369d1e6f146d85019da2fbc63f55da183ee29b"}, + {file = "ruff-0.0.284-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f86b2b1e7033c00de45cc176cf26778650fb8804073a0495aca2f674797becbb"}, + {file = "ruff-0.0.284-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e37e086f4d623c05cd45a6fe5006e77a2b37d57773aad96b7802a6b8ecf9c910"}, + {file = "ruff-0.0.284-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d29dfbe314e1131aa53df213fdfea7ee874dd96ea0dd1471093d93b59498384d"}, + {file = "ruff-0.0.284-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:88295fd649d0aa1f1271441df75bf06266a199497afd239fd392abcfd75acd7e"}, + {file = "ruff-0.0.284-py3-none-win32.whl", hash = "sha256:735cd62fccc577032a367c31f6a9de7c1eb4c01fa9a2e60775067f44f3fc3091"}, + {file = "ruff-0.0.284-py3-none-win_amd64.whl", hash = "sha256:f67ed868d79fbcc61ad0fa034fe6eed2e8d438d32abce9c04b7c4c1464b2cf8e"}, + {file = "ruff-0.0.284-py3-none-win_arm64.whl", hash = "sha256:1292cfc764eeec3cde35b3a31eae3f661d86418b5e220f5d5dba1c27a6eccbb6"}, + {file = "ruff-0.0.284.tar.gz", hash = "sha256:ebd3cc55cd499d326aac17a331deaea29bea206e01c08862f9b5c6e93d77a491"}, ] [[package]] @@ -2273,13 +2510,13 @@ pbr = "*" [[package]] name = "sentry-sdk" -version = "1.28.1" +version = "1.29.2" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.28.1.tar.gz", hash = "sha256:dcd88c68aa64dae715311b5ede6502fd684f70d00a7cd4858118f0ba3153a3ae"}, - {file = "sentry_sdk-1.28.1-py2.py3-none-any.whl", hash = "sha256:6bdb25bd9092478d3a817cb0d01fa99e296aea34d404eac3ca0037faa5c2aa0a"}, + {file = "sentry-sdk-1.29.2.tar.gz", hash = "sha256:a99ee105384788c3f228726a88baf515fe7b5f1d2d0f215a03d194369f158df7"}, + {file = "sentry_sdk-1.29.2-py2.py3-none-any.whl", hash = "sha256:3e17215d8006612e2df02b0e73115eb8376c37e3f586d8436fa41644e605074d"}, ] [package.dependencies] @@ -2571,7 +2808,7 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
-optional = true +optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, @@ -2653,20 +2890,31 @@ files = [ [[package]] name = "xenon" -version = "0.9.0" +version = "0.9.1" description = "Monitor code metrics for Python on your CI server" optional = false python-versions = "*" files = [ - {file = "xenon-0.9.0-py2.py3-none-any.whl", hash = "sha256:994c80c7f1c6d40596b600b93734d85a5739208f31895ef99f1e4d362caf9e35"}, - {file = "xenon-0.9.0.tar.gz", hash = "sha256:d2b9cb6c6260f771a432c1e588e51fddb17858f88f73ef641e7532f7a5f58fb8"}, + {file = "xenon-0.9.1-py2.py3-none-any.whl", hash = "sha256:b2888a5764ebd57a1f9f1624fde86e8303cb30c686e492f19d98867c458f7870"}, + {file = "xenon-0.9.1.tar.gz", hash = "sha256:d6745111c3e258b749a4fd424b1b899d99ea183cea232365ee2f88fe7d80c03b"}, ] [package.dependencies] PyYAML = ">=4.2b1,<7.0" -radon = ">=4,<6" +radon = ">=4,<7" requests = ">=2.0,<3.0" +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] + [[package]] name = "zipp" version = "3.15.0" @@ -2685,6 +2933,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [extras] all = ["aws-xray-sdk", "fastjsonschema", "pydantic"] aws-sdk = ["boto3"] +datadog = [] parser = ["pydantic"] tracer = ["aws-xray-sdk"] validation = ["fastjsonschema"] @@ -2692,4 +2941,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "4d7f3cf0fdeb0a018ce3f0ac0b39ec0f04894eab8367d3c05efbd3d02642e0f6" +content-hash = "58807838331ba630a2669d8ea9dd3e745fb86f6de03b340dec16a494dc984c46" diff --git a/pyproject.toml b/pyproject.toml index bcc3bb9610f..bf310af0983 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "2.21.0" +version = "2.23.0" description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." 
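Editor's aside on the dependency changes above: the lock file now registers a `datadog` extra (wired to `datadog-lambda` in the pyproject.toml changes that follow), and `wrapt` flips from optional to required, presumably because `datadog-lambda` depends on it. A minimal consumer-facing sketch, assuming the `DatadogMetrics` API exercised by the new tests later in this diff; the install command and Forwarder note are assumptions, not part of the diff:

# Minimal sketch of the new Datadog metrics support; assumes the package is
# installed with the new extra, e.g. pip install "aws_lambda_powertools[datadog]"
from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics

# flush_to_log=True serializes metrics to stdout, typically for the
# Datadog Forwarder/extension to pick up
metrics = DatadogMetrics(flush_to_log=True)

# keyword arguments beyond name/value become Datadog tags, e.g. "product:latte"
metrics.add_metric(name="item_sold", value=1, product="latte", order="online")
metrics.flush_metrics()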
authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] @@ -46,8 +46,8 @@ pytest-mock = "^3.11.1" pdoc3 = "^0.10.0" pytest-asyncio = "^0.21.1" bandit = "^1.7.5" -radon = "^5.1.0" -xenon = "^0.9.0" +radon = "^6.0.1" +xenon = "^0.9.1" mkdocs-git-revision-date-plugin = "^0.3.2" mike = "^1.1.2" pytest-xdist = "^3.3.1" @@ -56,26 +56,27 @@ aws-cdk-lib = "^2.88.0" "aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-authorizers-alpha" = "^2.38.1-alpha.0" pytest-benchmark = "^4.0.0" -mypy-boto3-appconfig = "^1.28.0" -mypy-boto3-cloudformation = "^1.28.10" -mypy-boto3-cloudwatch = "^1.28.0" -mypy-boto3-dynamodb = "^1.28.0" -mypy-boto3-lambda = "^1.28.0" -mypy-boto3-logs = "^1.28.1" -mypy-boto3-secretsmanager = "^1.28.3" -mypy-boto3-ssm = "^1.28.0" -mypy-boto3-s3 = "^1.28.8" -mypy-boto3-xray = "^1.28.0" +mypy-boto3-appconfig = "^1.28.16" +mypy-boto3-cloudformation = "^1.28.19" +mypy-boto3-cloudwatch = "^1.28.28" +mypy-boto3-dynamodb = "^1.28.27" +mypy-boto3-lambda = "^1.28.19" +mypy-boto3-logs = "^1.28.16" +mypy-boto3-secretsmanager = "^1.28.24" +mypy-boto3-ssm = "^1.28.16" +mypy-boto3-s3 = "^1.28.27" +mypy-boto3-xray = "^1.28.16" types-requests = "^2.31.0" typing-extensions = "^4.6.2" -mkdocs-material = "^9.1.19" +mkdocs-material = "^9.1.21" filelock = "^3.12.2" checksumdir = "^1.2.0" -mypy-boto3-appconfigdata = "^1.28.0" +mypy-boto3-appconfigdata = "^1.28.16" ijson = "^3.2.2" typed-ast = { version = "^1.5.5", python = "< 3.8"} hvac = "^1.1.1" aws-requests-auth = "^0.4.3" +datadog-lambda = "^4.77.0" [tool.poetry.extras] parser = ["pydantic"] @@ -84,14 +85,15 @@ tracer = ["aws-xray-sdk"] all = ["pydantic", "aws-xray-sdk", "fastjsonschema"] # allow customers to run code locally without emulators (SAM CLI, etc.) 
aws-sdk = ["boto3"] +datadog=["datadog-lambda"] [tool.poetry.group.dev.dependencies] -cfn-lint = "0.78.2" +cfn-lint = "0.79.7" mypy = "^1.1.1" types-python-dateutil = "^2.8.19.6" httpx = ">=0.23.3,<0.25.0" sentry-sdk = "^1.22.2" -ruff = ">=0.0.272,<0.0.281" +ruff = ">=0.0.272,<0.0.285" retry2 = "^0.9.5" [tool.coverage.run] diff --git a/ruff.toml b/ruff.toml index 424040ede1f..83910e44ebf 100644 --- a/ruff.toml +++ b/ruff.toml @@ -35,6 +35,8 @@ ignore = [ "PLW0603", #https://beta.ruff.rs/docs/rules/global-statement/ "B904", # raise-without-from-inside-except - disabled temporarily "PLC1901", # Compare-to-empty-string - disabled temporarily + "PYI024", + "FA100" # Enable this rule when drop support to Python 3.7 ] # Exclude files and directories @@ -69,3 +71,5 @@ split-on-trailing-comma = true "tests/e2e/utils/data_fetcher/__init__.py" = ["F401"] "aws_lambda_powertools/utilities/data_classes/s3_event.py" = ["A003"] "aws_lambda_powertools/utilities/parser/models/__init__.py" = ["E402"] +# Maintenance: we're keeping EphemeralMetrics code in case of Hyrum's law so we can quickly revert it +"aws_lambda_powertools/metrics/metrics.py" = ["ERA001"] diff --git a/tests/e2e/idempotency/test_idempotency_dynamodb.py b/tests/e2e/idempotency/test_idempotency_dynamodb.py index a374f763bd4..1d61cb69f9f 100644 --- a/tests/e2e/idempotency/test_idempotency_dynamodb.py +++ b/tests/e2e/idempotency/test_idempotency_dynamodb.py @@ -4,7 +4,7 @@ import pytest from tests.e2e.utils import data_fetcher -from tests.e2e.utils.functions import execute_lambdas_in_parallel +from tests.e2e.utils.data_fetcher.common import GetLambdaResponseOptions, get_lambda_response_in_parallel @pytest.fixture @@ -73,14 +73,21 @@ def test_ttl_caching_expiration_idempotency(ttl_cache_expiration_handler_fn_arn: @pytest.mark.xdist_group(name="idempotency") def test_ttl_caching_timeout_idempotency(ttl_cache_timeout_handler_fn_arn: str): # GIVEN - payload_timeout_execution = json.dumps({"sleep": 5, "message": "Powertools for AWS Lambda (Python) - TTL 1s"}) - payload_working_execution = json.dumps({"sleep": 0, "message": "Powertools for AWS Lambda (Python) - TTL 1s"}) + payload_timeout_execution = json.dumps( + {"sleep": 5, "message": "Powertools for AWS Lambda (Python) - TTL 1s"}, + sort_keys=True, + ) + payload_working_execution = json.dumps( + {"sleep": 0, "message": "Powertools for AWS Lambda (Python) - TTL 1s"}, + sort_keys=True, + ) # WHEN # first call should fail due to timeout execution_with_timeout, _ = data_fetcher.get_lambda_response( lambda_arn=ttl_cache_timeout_handler_fn_arn, payload=payload_timeout_execution, + raise_on_error=False, ) execution_with_timeout_response = execution_with_timeout["Payload"].read().decode("utf-8") @@ -99,12 +106,15 @@ def test_ttl_caching_timeout_idempotency(ttl_cache_timeout_handler_fn_arn: str): @pytest.mark.xdist_group(name="idempotency") def test_parallel_execution_idempotency(parallel_execution_handler_fn_arn: str): # GIVEN - arguments = json.dumps({"message": "Powertools for AWS Lambda (Python) - Parallel execution"}) + payload = json.dumps({"message": "Powertools for AWS Lambda (Python) - Parallel execution"}) - # WHEN - # executing Lambdas in parallel - lambdas_arn = [parallel_execution_handler_fn_arn, parallel_execution_handler_fn_arn] - execution_result_list = execute_lambdas_in_parallel("data_fetcher.get_lambda_response", lambdas_arn, arguments) + invocation_options = [ + GetLambdaResponseOptions(lambda_arn=parallel_execution_handler_fn_arn, payload=payload, raise_on_error=False), + 
GetLambdaResponseOptions(lambda_arn=parallel_execution_handler_fn_arn, payload=payload, raise_on_error=False), + ] + + # WHEN executing Lambdas in parallel + execution_result_list = get_lambda_response_in_parallel(invocation_options) timeout_execution_response = execution_result_list[0][0]["Payload"].read().decode("utf-8") error_idempotency_execution_response = execution_result_list[1][0]["Payload"].read().decode("utf-8") diff --git a/tests/e2e/utils/data_builder/traces.py b/tests/e2e/utils/data_builder/traces.py index 59350c8ff68..e6356582a30 100644 --- a/tests/e2e/utils/data_builder/traces.py +++ b/tests/e2e/utils/data_builder/traces.py @@ -2,7 +2,7 @@ def build_trace_default_query(function_name: str) -> str: - return f'service("{function_name}")' + return f'service(id(name: "{function_name}"))' def build_put_annotations_input(**annotations: str) -> List[Dict]: diff --git a/tests/e2e/utils/data_fetcher/common.py b/tests/e2e/utils/data_fetcher/common.py index 3bbdaa24e6a..9c251cd6ed2 100644 --- a/tests/e2e/utils/data_fetcher/common.py +++ b/tests/e2e/utils/data_fetcher/common.py @@ -1,24 +1,76 @@ +import functools +import time +from concurrent.futures import Future, ThreadPoolExecutor from datetime import datetime -from typing import Optional, Tuple +from typing import List, Optional, Tuple import boto3 import requests from mypy_boto3_lambda import LambdaClient from mypy_boto3_lambda.type_defs import InvocationResponseTypeDef +from pydantic import BaseModel from requests import Request, Response from requests.exceptions import RequestException from retry import retry +GetLambdaResponse = Tuple[InvocationResponseTypeDef, datetime] + + +class GetLambdaResponseOptions(BaseModel): + lambda_arn: str + payload: Optional[str] = None + client: Optional[LambdaClient] = None + raise_on_error: bool = True + + # Maintenance: Pydantic v2 deprecated it; we should update in v3 + class Config: + arbitrary_types_allowed = True + def get_lambda_response( lambda_arn: str, payload: Optional[str] = None, client: Optional[LambdaClient] = None, -) -> Tuple[InvocationResponseTypeDef, datetime]: + raise_on_error: bool = True, +) -> GetLambdaResponse: + """Invoke function synchronously + + Parameters + ---------- + lambda_arn : str + Lambda function ARN to invoke + payload : Optional[str], optional + JSON payload for Lambda invocation, by default None + client : Optional[LambdaClient], optional + Boto3 Lambda SDK client, by default None + raise_on_error : bool, optional + Whether to raise exception upon invocation error, by default True + + Returns + ------- + Tuple[InvocationResponseTypeDef, datetime] + Function response and approximate execution time + + Raises + ------ + RuntimeError + Function invocation error details + """ client = client or boto3.client("lambda") payload = payload or "" execution_time = datetime.utcnow() - return client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse", Payload=payload), execution_time + response: InvocationResponseTypeDef = client.invoke( + FunctionName=lambda_arn, + InvocationType="RequestResponse", + Payload=payload, + ) + + has_error = response.get("FunctionError", "") == "Unhandled" + if has_error and raise_on_error: + error_payload = response["Payload"].read().decode() + raise RuntimeError(f"Function failed invocation: {error_payload}") + + return response, execution_time @retry(RequestException, delay=2, jitter=1.5, tries=5) @@ -27,3 +79,39 @@ def get_http_response(request: Request) -> Response: result = session.send(request.prepare()) 
result.raise_for_status() return result + + +def get_lambda_response_in_parallel( + get_lambda_response_options: List[GetLambdaResponseOptions], +) -> List[GetLambdaResponse]: + """Invoke functions in parallel + + Parameters + ---------- + get_lambda_response_options : List[GetLambdaResponseOptions] + List of options to call get_lambda_response with + + Returns + ------- + List[GetLambdaResponse] + Function responses and approximate execution time + """ + result_list = [] + with ThreadPoolExecutor() as executor: + running_tasks: List[Future] = [] + for options in get_lambda_response_options: + # Sleep 0.5, 1, 1.5, ... seconds between each invocation. This way + # we can guarantee that lambdas are executed in parallel, but they are + # called in the same "order" as they are passed in, thus guaranteeing that + # we can assert on the correct output. + time.sleep(0.5 * len(running_tasks)) + + get_lambda_response_callback = functools.partial(get_lambda_response, **options.dict()) + running_tasks.append( + executor.submit(get_lambda_response_callback), + ) + + executor.shutdown(wait=True) + result_list.extend(running_task.result() for running_task in running_tasks) + + return result_list diff --git a/tests/e2e/utils/functions.py b/tests/e2e/utils/functions.py deleted file mode 100644 index 64135c96aa3..00000000000 --- a/tests/e2e/utils/functions.py +++ /dev/null @@ -1,32 +0,0 @@ -import time -from concurrent.futures import Future, ThreadPoolExecutor -from typing import List - -from tests.e2e.utils import data_fetcher # noqa F401 - - -def execute_lambdas_in_parallel(function_name: str, lambdas_arn: list, arguments: str): - result_list = [] - with ThreadPoolExecutor() as executor: - running_tasks: List[Future] = [] - for arn in lambdas_arn: - # Sleep 0.5, 1, 1.5, ... seconds between each invocation. This way - # we can guarantee that lambdas are executed in parallel, but they are - # called in the same "order" as they are passed in, thus guaranteeing that - # we can assert on the correct output. - time.sleep(0.5 * len(running_tasks)) - running_tasks.append( - executor.submit( - lambda lname, larn, largs: eval(lname)(larn, largs), - function_name, - arn, - arguments, - ), - ) - - executor.shutdown(wait=True) - - for running_task in running_tasks: - result_list.append(running_task.result()) - - return result_list diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index 26c71e1f27d..2afd1241bed 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -1,5 +1,6 @@ import base64 import json +import re import zlib from copy import deepcopy from decimal import Decimal @@ -1077,6 +1078,38 @@ def foo(): assert response["statusCode"] == 200 +@pytest.mark.parametrize( + "path", + [ + pytest.param("/stg/foo", id="path matched stg prefix"), + pytest.param("/dev/foo", id="path matched dev prefix"), + pytest.param("/foo", id="path does not start with any of the prefixes"), + ], +) +def test_remove_prefix_by_regex(path: str): + app = ApiGatewayResolver(strip_prefixes=[re.compile(r"/(dev|stg)")]) + + @app.get("/foo") + def foo(): + ... + + response = app({"httpMethod": "GET", "path": path}, None) + + assert response["statusCode"] == 200 + + +def test_empty_path_when_using_regexes(): + app = ApiGatewayResolver(strip_prefixes=[re.compile(r"/(dev|stg)")]) + + @app.get("/") + def foo(): + ...
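An aside for reviewers on the feature these event handler tests pin down: `strip_prefixes` now accepts compiled regex patterns alongside plain strings, and a matching prefix is removed before route resolution (an empty remainder resolves as "/", per the test that follows). A minimal sketch of the consumer-facing usage; the import path is assumed from the library's public API, not shown in this diff:

import re

from aws_lambda_powertools.event_handler import ApiGatewayResolver

# Requests to "/dev/foo" or "/stg/foo" are routed as if the path were "/foo";
# a path equal to the prefix alone (e.g. "/dev") resolves to "/"
app = ApiGatewayResolver(strip_prefixes=[re.compile(r"/(dev|stg)")])


@app.get("/foo")
def get_foo():
    return {"ok": True}


def lambda_handler(event, context):
    return app.resolve(event, context)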
+ + response = app({"httpMethod": "GET", "path": "/dev"}, None) + + assert response["statusCode"] == 200 + + @pytest.mark.parametrize( "prefix", [ diff --git a/tests/functional/metrics/conftest.py b/tests/functional/metrics/conftest.py new file mode 100644 index 00000000000..2de3a0087c2 --- /dev/null +++ b/tests/functional/metrics/conftest.py @@ -0,0 +1,96 @@ +from typing import Any, Dict, List, Union + +import pytest + +from aws_lambda_powertools.metrics import ( + MetricResolution, + Metrics, + MetricUnit, +) +from aws_lambda_powertools.metrics.provider.cold_start import reset_cold_start_flag + + +@pytest.fixture(scope="function", autouse=True) +def reset_metric_set(): + metrics = Metrics() + metrics.clear_metrics() + metrics.clear_default_dimensions() + reset_cold_start_flag() # ensure each test has cold start + yield + + +@pytest.fixture +def metric_with_resolution() -> Dict[str, Union[str, int]]: + return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1, "resolution": MetricResolution.High} + + +@pytest.fixture +def metric() -> Dict[str, str]: + return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1} + + +@pytest.fixture +def metric_datadog() -> Dict[str, str]: + return {"name": "single_metric", "value": 1, "timestamp": 1691678198, "powertools": "datadog"} + + +@pytest.fixture +def metrics() -> List[Dict[str, str]]: + return [ + {"name": "metric_one", "unit": MetricUnit.Count, "value": 1}, + {"name": "metric_two", "unit": MetricUnit.Count, "value": 1}, + ] + + +@pytest.fixture +def metrics_same_name() -> List[Dict[str, str]]: + return [ + {"name": "metric_one", "unit": MetricUnit.Count, "value": 1}, + {"name": "metric_one", "unit": MetricUnit.Count, "value": 5}, + ] + + +@pytest.fixture +def dimension() -> Dict[str, str]: + return {"name": "test_dimension", "value": "test"} + + +@pytest.fixture +def dimensions() -> List[Dict[str, str]]: + return [ + {"name": "test_dimension", "value": "test"}, + {"name": "test_dimension_2", "value": "test"}, + ] + + +@pytest.fixture +def non_str_dimensions() -> List[Dict[str, Any]]: + return [ + {"name": "test_dimension", "value": True}, + {"name": "test_dimension_2", "value": 3}, + ] + + +@pytest.fixture +def namespace() -> str: + return "test_namespace" + + +@pytest.fixture +def service() -> str: + return "test_service" + + +@pytest.fixture +def metadata() -> Dict[str, str]: + return {"key": "username", "value": "test"} + + +@pytest.fixture +def a_hundred_metrics() -> List[Dict[str, str]]: + return [{"name": f"metric_{i}", "unit": "Count", "value": 1} for i in range(100)] + + +@pytest.fixture +def a_hundred_metric_values() -> List[Dict[str, str]]: + return [{"name": "metric", "unit": "Count", "value": i} for i in range(100)] diff --git a/tests/functional/test_metrics.py b/tests/functional/metrics/test_metrics_cloudwatch_emf.py similarity index 93% rename from tests/functional/test_metrics.py rename to tests/functional/metrics/test_metrics_cloudwatch_emf.py index 5a6222f248d..5c4a1de1128 100644 --- a/tests/functional/test_metrics.py +++ b/tests/functional/metrics/test_metrics_cloudwatch_emf.py @@ -1,116 +1,42 @@ +from __future__ import annotations + import json import warnings from collections import namedtuple -from typing import Any, Dict, List, Union +from typing import Dict, List import pytest -from aws_lambda_powertools import Metrics, single_metric from aws_lambda_powertools.metrics import ( EphemeralMetrics, MetricResolution, MetricResolutionError, + Metrics, MetricUnit, MetricUnitError, MetricValueError, 
SchemaValidationError, + single_metric, +) +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.cloudwatch import ( + AmazonCloudWatchEMFProvider, ) -from aws_lambda_powertools.metrics.base import ( +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.constants import ( MAX_DIMENSIONS, - MetricManager, - reset_cold_start_flag, ) - - -@pytest.fixture(scope="function", autouse=True) -def reset_metric_set(): - metrics = Metrics() - metrics.clear_metrics() - metrics.clear_default_dimensions() - reset_cold_start_flag() # ensure each test has cold start - yield - - -@pytest.fixture -def metric_with_resolution() -> Dict[str, Union[str, int]]: - return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1, "resolution": MetricResolution.High} - - -@pytest.fixture -def metric() -> Dict[str, str]: - return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1} - - -@pytest.fixture -def metrics() -> List[Dict[str, str]]: - return [ - {"name": "metric_one", "unit": MetricUnit.Count, "value": 1}, - {"name": "metric_two", "unit": MetricUnit.Count, "value": 1}, - ] - - -@pytest.fixture -def metrics_same_name() -> List[Dict[str, str]]: - return [ - {"name": "metric_one", "unit": MetricUnit.Count, "value": 1}, - {"name": "metric_one", "unit": MetricUnit.Count, "value": 5}, - ] - - -@pytest.fixture -def dimension() -> Dict[str, str]: - return {"name": "test_dimension", "value": "test"} - - -@pytest.fixture -def dimensions() -> List[Dict[str, str]]: - return [ - {"name": "test_dimension", "value": "test"}, - {"name": "test_dimension_2", "value": "test"}, - ] - - -@pytest.fixture -def non_str_dimensions() -> List[Dict[str, Any]]: - return [ - {"name": "test_dimension", "value": True}, - {"name": "test_dimension_2", "value": 3}, - ] - - -@pytest.fixture -def namespace() -> str: - return "test_namespace" - - -@pytest.fixture -def service() -> str: - return "test_service" - - -@pytest.fixture -def metadata() -> Dict[str, str]: - return {"key": "username", "value": "test"} - - -@pytest.fixture -def a_hundred_metrics() -> List[Dict[str, str]]: - return [{"name": f"metric_{i}", "unit": "Count", "value": 1} for i in range(100)] - - -@pytest.fixture -def a_hundred_metric_values() -> List[Dict[str, str]]: - return [{"name": "metric", "unit": "Count", "value": i} for i in range(100)] +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.types import ( + CloudWatchEMFOutput, +) def serialize_metrics( metrics: List[Dict], dimensions: List[Dict], namespace: str, - metadatas: List[Dict] = None, -) -> Dict: + metadatas: List[Dict] | None = None, +) -> CloudWatchEMFOutput: """Helper function to build EMF object from a list of metrics, dimensions""" - my_metrics = MetricManager(namespace=namespace) + my_metrics = AmazonCloudWatchEMFProvider(namespace=namespace) for dimension in dimensions: my_metrics.add_dimension(**dimension) @@ -125,9 +51,14 @@ def serialize_metrics( return my_metrics.serialize_metric_set() -def serialize_single_metric(metric: Dict, dimension: Dict, namespace: str, metadata: Dict = None) -> Dict: +def serialize_single_metric( + metric: Dict, + dimension: Dict, + namespace: str, + metadata: Dict | None = None, +) -> CloudWatchEMFOutput: """Helper function to build EMF object from a given metric, dimension and namespace""" - my_metrics = MetricManager(namespace=namespace) + my_metrics = AmazonCloudWatchEMFProvider(namespace=namespace) my_metrics.add_metric(**metric) my_metrics.add_dimension(**dimension) @@ -147,7 +78,7 @@ def capture_metrics_output(capsys): return 
json.loads(capsys.readouterr().out.strip()) -def capture_metrics_output_multiple_emf_objects(capsys): +def capture_metrics_output_multiple_emf_objects(capsys) -> List[CloudWatchEMFOutput]: return [json.loads(line.strip()) for line in capsys.readouterr().out.split("\n") if line] @@ -228,6 +159,27 @@ def test_single_metric_default_dimensions_inherit(capsys, metric, dimension, nam assert expected == output +def test_log_metrics_preconfigured_provider(capsys, metrics, dimensions, namespace): + # GIVEN Metrics is initialized + provider = AmazonCloudWatchEMFProvider(namespace=namespace) + my_metrics = Metrics(provider=provider) + for metric in metrics: + my_metrics.add_metric(**metric) + for dimension in dimensions: + my_metrics.add_dimension(**dimension) + + # WHEN we manually flush the metrics + my_metrics.flush_metrics() + + output = capture_metrics_output(capsys) + expected = serialize_metrics(metrics=metrics, dimensions=dimensions, namespace=namespace) + + # THEN we should have no exceptions + # and a valid EMF object should be flushed correctly + remove_timestamp(metrics=[output, expected]) + assert expected == output + + def test_log_metrics(capsys, metrics, dimensions, namespace): # GIVEN Metrics is initialized my_metrics = Metrics(namespace=namespace) @@ -1010,7 +962,7 @@ def test_metric_manage_metadata_set(): expected_dict = {"setting": "On"} try: - metric = MetricManager(metadata_set=expected_dict) + metric = AmazonCloudWatchEMFProvider(metadata_set=expected_dict) assert metric.metadata_set == expected_dict except AttributeError: pytest.fail("AttributeError should not be raised") @@ -1052,6 +1004,39 @@ def test_clear_default_dimensions(namespace): assert not my_metrics.default_dimensions +def test_get_and_set_namespace_and_service_properties(namespace, service, metrics, capsys): + # GIVEN Metrics instance is initialized without namespace and service + my_metrics = Metrics() + + # WHEN we set service and namespace before flushing the metric + @my_metrics.log_metrics + def lambda_handler(evt, ctx): + my_metrics.namespace = namespace + my_metrics.service = service + for metric in metrics: + my_metrics.add_metric(**metric) + + lambda_handler({}, {}) + invocation = capture_metrics_output(capsys) + + assert service in json.dumps(invocation) + assert namespace in json.dumps(invocation) + + +def test_clear_default_dimensions_with_provider(namespace): + # GIVEN Metrics is initialized with provider and we persist a set of default dimensions + my_provider = AmazonCloudWatchEMFProvider(namespace=namespace) + my_metrics = Metrics(provider=my_provider) + my_metrics.set_default_dimensions(environment="test", log_group="/lambda/test") + + # WHEN they are removed via clear_default_dimensions method + my_metrics.clear_default_dimensions() + + # THEN there should be no default dimensions in provider and metrics + assert not my_metrics.default_dimensions + assert not my_provider.default_dimensions + + def test_default_dimensions_across_instances(namespace): # GIVEN Metrics is initialized and we persist a set of default dimensions my_metrics = Metrics(namespace=namespace) @@ -1143,6 +1128,7 @@ def test_ephemeral_metrics_isolated_data_set_with_default_dimension(metric, dime # GIVEN two EphemeralMetrics instances are initialized # One with default dimension and another without my_metrics = EphemeralMetrics(namespace=namespace) + my_metrics.set_default_dimensions(dev="powertools") isolated_metrics = EphemeralMetrics(namespace=namespace) diff --git a/tests/functional/metrics/test_metrics_datadog.py
b/tests/functional/metrics/test_metrics_datadog.py new file mode 100644 index 00000000000..c81c825f656 --- /dev/null +++ b/tests/functional/metrics/test_metrics_datadog.py @@ -0,0 +1,281 @@ +import json +import warnings +from collections import namedtuple + +import pytest +from test_metrics_provider import capture_metrics_output + +from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError +from aws_lambda_powertools.metrics.provider.cold_start import reset_cold_start_flag +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics, DatadogProvider + + +def test_datadog_coldstart(capsys): + reset_cold_start_flag() + + # GIVEN DatadogMetrics is initialized + dd_provider = DatadogProvider(flush_to_log=True) + metrics = DatadogMetrics(provider=dd_provider) + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN log_metrics is used with capture_cold_start_metric + @metrics.log_metrics(capture_cold_start_metric=True) + def lambda_handler(event, context): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + + lambda_handler({}, LambdaContext("example_fn2")) + logs = capsys.readouterr().out.strip() + + # THEN ColdStart metric and function_name and service dimension should be logged + assert "ColdStart" in logs + assert "example_fn2" in logs + + +def test_datadog_write_to_log_with_env_variable(capsys, monkeypatch): + # GIVEN DD_FLUSH_TO_LOG env is configured + monkeypatch.setenv("DD_FLUSH_TO_LOG", "True") + metrics = DatadogMetrics() + + # WHEN we add a metric + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + metrics.flush_metrics() + logs = capture_metrics_output(capsys) + + # THEN metrics are flushed to the log + logs["e"] = "" + assert logs == json.loads('{"m":"item_sold","v":1,"e":"","t":["product:latte","order:online"]}') + + +def test_datadog_with_invalid_metric_value(): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics() + + # WHEN we pass an incorrect metric value (non-numeric) + # and attempt to serialize the metric + # THEN it should fail validation and raise MetricValueError + with pytest.raises(MetricValueError, match=".*is not a valid number"): + metrics.add_metric(name="item_sold", value="a", product="latte", order="online") + + +def test_datadog_with_invalid_metric_name(): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics() + + # WHEN we add a metric name starting with a number + # and attempt to serialize the metric + # THEN it should fail validation and raise SchemaValidationError + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name="1_item_sold", value="a", product="latte", order="online") + + +def test_datadog_raise_on_empty(): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics() + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN we set raise_on_empty_metrics to True + @metrics.log_metrics(raise_on_empty_metrics=True) + def lambda_handler(event, context): + pass + + # THEN it should fail because no metrics were serialized + with pytest.raises(SchemaValidationError, match="Must contain at least one metric."): + lambda_handler({}, LambdaContext("example_fn")) + + +def test_datadog_tags_using_kwargs(capsys): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics(flush_to_log=True) + + # WHEN we add tags using kwargs + metrics.add_metric("order_valve", 12.45, sales="sam") + metrics.flush_metrics() + logs =
capsys.readouterr().out.strip() + log_dict = json.loads(logs) + tag_list = log_dict.get("t") + + # THEN tags must be present + assert "sales:sam" in tag_list + + +def test_metrics_clear_metrics_after_invocation(metric_datadog): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.add_metric(**metric_datadog) + + # WHEN log_metrics is used to flush metrics from memory + @my_metrics.log_metrics + def lambda_handler(evt, context): + pass + + lambda_handler({}, {}) + + # THEN metric set should be empty after function has been run + assert my_metrics.metric_set == [] + + +def test_metrics_decorator_with_metrics_warning(): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + + # WHEN using the log_metrics decorator and no metrics have been added + @my_metrics.log_metrics + def lambda_handler(evt, context): + pass + + # THEN it should raise a warning instead of throwing an exception + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("default") + lambda_handler({}, {}) + assert len(w) == 1 + assert str(w[-1].message) == ( + "No application metrics to publish. The cold-start metric may be published if enabled. " + "If application metrics should never be empty, consider using 'raise_on_empty_metrics'" + ) + + +def test_metrics_with_default_namespace(capsys, namespace): + # GIVEN DatadogMetrics is initialized with default namespace + metrics = DatadogMetrics(flush_to_log=True) + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN we add metrics + @metrics.log_metrics + def lambda_handler(event, context): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + + lambda_handler({}, LambdaContext("example_fn2")) + logs = capsys.readouterr().out.strip() + + # THEN default namespace must be assumed + assert namespace not in logs + + +def test_datadog_with_non_default_namespace(capsys, namespace): + # GIVEN DatadogMetrics is initialized with a non-default namespace + metrics = DatadogMetrics(namespace=namespace, flush_to_log=True) + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN log_metrics is used + @metrics.log_metrics + def lambda_handler(event, context): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + + lambda_handler({}, LambdaContext("example_fn")) + logs = capsys.readouterr().out.strip() + + # THEN namespace must be present in logs + assert namespace in logs + + +def test_serialize_metrics(metric_datadog): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.add_metric(**metric_datadog) + + # WHEN we serialize metrics + my_metrics.serialize_metric_set() + + # THEN the metric should be serialized into the metric set + assert my_metrics.metric_set[0]["m"] == "single_metric" + + +def test_clear_metrics(metric): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.add_metric(**metric) + my_metrics.clear_metrics() + + # THEN metric set should be empty after clear_metrics is called + assert my_metrics.metric_set == [] + + +def test_persist_default_tags(capsys): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.set_default_tags(environment="test", log_group="/lambda/test") + + # WHEN we utilize log_metrics to serialize + # and flush metrics and clear all metrics and tags from memory + # at the end of a
function execution + @my_metrics.log_metrics + def lambda_handler(evt, ctx): + my_metrics.add_metric(name="item_sold", value=1) + + lambda_handler({}, {}) + first_invocation = capsys.readouterr().out.strip() + + lambda_handler({}, {}) + second_invocation = capsys.readouterr().out.strip() + + # THEN we should have default tags in both outputs + assert "environment" in first_invocation + assert "environment" in second_invocation + + +def test_log_metrics_with_default_tags(capsys): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics(flush_to_log=True) + default_tags = {"environment": "test", "log_group": "/lambda/test"} + + # WHEN we utilize log_metrics with default tags to serialize + # and flush metrics and clear all metrics and tags from memory + # at the end of a function execution + @my_metrics.log_metrics(default_tags=default_tags) + def lambda_handler(evt, ctx): + my_metrics.add_metric(name="item_sold", value=1) + + lambda_handler({}, {}) + first_invocation = capsys.readouterr().out.strip() + + lambda_handler({}, {}) + second_invocation = capsys.readouterr().out.strip() + + # THEN we should have default tags in both outputs + assert "environment" in first_invocation + assert "environment" in second_invocation + + +def test_clear_default_tags(): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics() + my_metrics.set_default_tags(environment="test", log_group="/lambda/test") + + # WHEN they are removed via clear_default_tags method + my_metrics.clear_default_tags() + + # THEN there should be no default tags + assert not my_metrics.default_tags + + +def test_namespace_var_precedence(monkeypatch, namespace): + # GIVEN we use POWERTOOLS_METRICS_NAMESPACE + monkeypatch.setenv("POWERTOOLS_METRICS_NAMESPACE", "a_namespace") + my_metrics = DatadogMetrics(namespace=namespace, flush_to_log=True) + + # WHEN creating a metric and explicitly setting a namespace + my_metrics.add_metric(name="item_sold", value=1) + + output = my_metrics.serialize_metric_set() + + # THEN namespace should match the explicitly passed variable and not the env var + assert output[0]["m"] == f"{namespace}.item_sold" + + +def test_namespace_env_var(monkeypatch): + # GIVEN POWERTOOLS_METRICS_NAMESPACE is set + env_namespace = "a_namespace" + monkeypatch.setenv("POWERTOOLS_METRICS_NAMESPACE", env_namespace) + my_metrics = DatadogMetrics(flush_to_log=True) + + # WHEN creating a metric without an explicit namespace + my_metrics.add_metric(name="item_sold", value=1) + + output = my_metrics.serialize_metric_set() + + # THEN namespace should fall back to the env var + assert output[0]["m"] == f"{env_namespace}.item_sold" diff --git a/tests/functional/metrics/test_metrics_provider.py b/tests/functional/metrics/test_metrics_provider.py new file mode 100644 index 00000000000..2ed84a23a21 --- /dev/null +++ b/tests/functional/metrics/test_metrics_provider.py @@ -0,0 +1,62 @@ +from __future__ import annotations + +import json +from typing import Any, List + +from aws_lambda_powertools.metrics import ( + SchemaValidationError, +) +from aws_lambda_powertools.metrics.metrics import Metrics +from aws_lambda_powertools.metrics.provider import BaseProvider +from aws_lambda_powertools.utilities.typing import LambdaContext + + +def capture_metrics_output(capsys): + return json.loads(capsys.readouterr().out.strip()) + + +class FakeMetricsProvider(BaseProvider): + def __init__(self): + self.metric_store:
List = [] + + def add_metric(self, name: str, value: float, tag: List = None, *args, **kwargs): + self.metric_store.append({"name": name, "value": value}) + + def serialize_metric_set(self, raise_on_empty_metrics: bool = False, *args, **kwargs): + if raise_on_empty_metrics and len(self.metric_store) == 0: + raise SchemaValidationError("Must contain at least one metric.") + + self.result = json.dumps(self.metric_store) + + def flush_metrics(self, *args, **kwargs): + print(json.dumps(self.metric_store)) + + def clear_metrics(self): + self.metric_store.clear() + + def add_cold_start_metric(self, context: LambdaContext) -> Any: + self.metric_store.append({"name": "ColdStart", "value": 1, "function_name": context.function_name}) + + +def test_metrics_class_with_custom_provider(capsys, metric): + provider = FakeMetricsProvider() + metrics = Metrics(provider=provider) + metrics.add_metric(**metric) + metrics.flush_metrics() + output = capture_metrics_output(capsys) + assert output[0]["name"] == metric["name"] + assert output[0]["value"] == metric["value"] + + +def test_metrics_provider_class_decorate(): + # GIVEN Metrics is initialized + my_metrics = Metrics() + + # WHEN log_metrics is used to serialize metrics + @my_metrics.log_metrics + def lambda_handler(evt, context): + return True + + # THEN log_metrics should invoke the function it decorates + # and return no error if we have a namespace and dimension + assert lambda_handler({}, {}) is True diff --git a/tests/functional/test_logger_powertools_formatter.py b/tests/functional/test_logger_powertools_formatter.py index 8b874894e27..61c3f76efd4 100644 --- a/tests/functional/test_logger_powertools_formatter.py +++ b/tests/functional/test_logger_powertools_formatter.py @@ -251,6 +251,27 @@ def test_log_dict_xray_is_updated_when_tracing_id_changes(stdout, monkeypatch, s monkeypatch.delenv(name="_X_AMZN_TRACE_ID") +def test_log_dict_xray_is_not_present_when_explicitly_disabled( + stdout: io.StringIO, + monkeypatch: pytest.MonkeyPatch, + service_name: str, +): + # GIVEN a logger is initialized within a Lambda function with X-Ray enabled + # and X-Ray Trace ID key is explicitly disabled + trace_id = "1-5759e988-bd862e3fe1be46a994272793" + trace_header = f"Root={trace_id};Parent=53995c3f42cd8ad8;Sampled=1" + monkeypatch.setenv(name="_X_AMZN_TRACE_ID", value=trace_header) + logger = Logger(service=service_name, stream=stdout, xray_trace_id=None) + + # WHEN logging a message + logger.info("foo") + + log_dict: dict = json.loads(stdout.getvalue()) + + # THEN `xray_trace_id` key should not be present + assert "xray_trace_id" not in log_dict + + def test_log_custom_std_log_attribute(stdout, service_name): # GIVEN a logger where we have a standard log attr process # https://docs.python.org/3/library/logging.html#logrecord-attributes diff --git a/tests/unit/metrics/conftest.py b/tests/unit/metrics/conftest.py new file mode 100644 index 00000000000..8d601e4d13b --- /dev/null +++ b/tests/unit/metrics/conftest.py @@ -0,0 +1,6 @@ +import pytest + + +@pytest.fixture +def namespace() -> str: + return "test_namespace" diff --git a/tests/unit/metrics/test_functions.py b/tests/unit/metrics/test_functions.py new file mode 100644 index 00000000000..f3414720bba --- /dev/null +++ b/tests/unit/metrics/test_functions.py @@ -0,0 +1,63 @@ +import pytest + +from aws_lambda_powertools.metrics.functions import ( + extract_cloudwatch_metric_resolution_value, + extract_cloudwatch_metric_unit_value, +) +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.exceptions import (
+ MetricResolutionError, + MetricUnitError, +) +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import MetricResolution, MetricUnit + + +def test_extract_invalid_cloudwatch_metric_resolution_value(): + metric_resolutions = [resolution.value for resolution in MetricResolution] + + # GIVEN an invalid EMF resolution value + resolution = 2 + + # WHEN we try to extract this value + # THEN it must fail with MetricResolutionError + with pytest.raises(MetricResolutionError, match="Invalid metric resolution.*"): + extract_cloudwatch_metric_resolution_value(metric_resolutions, resolution=resolution) + + +def test_extract_valid_cloudwatch_metric_resolution_value(): + metric_resolutions = [resolution.value for resolution in MetricResolution] + + # GIVEN a valid EMF resolution value + resolution = 1 + + # WHEN we try to extract this value + extracted_resolution_value = extract_cloudwatch_metric_resolution_value(metric_resolutions, resolution=resolution) + + # THEN value must be extracted + assert extracted_resolution_value == resolution + + +def test_extract_invalid_cloudwatch_metric_unit_value(): + metric_units = [unit.value for unit in MetricUnit] + metric_unit_valid_options = list(MetricUnit.__members__) + + # GIVEN an invalid EMF unit value + unit = "Fake" + + # WHEN we try to extract this value + # THEN it must fail with MetricUnitError + with pytest.raises(MetricUnitError, match="Invalid metric unit.*"): + extract_cloudwatch_metric_unit_value(metric_units, metric_unit_valid_options, unit=unit) + + +def test_extract_valid_cloudwatch_metric_unit_value(): + metric_units = [unit.value for unit in MetricUnit] + metric_unit_valid_options = list(MetricUnit.__members__) + + # GIVEN a valid EMF unit value + unit = "Count" + + # WHEN we try to extract this value + extracted_unit_value = extract_cloudwatch_metric_unit_value(metric_units, metric_unit_valid_options, unit=unit) + + # THEN value must be extracted + assert extracted_unit_value == unit diff --git a/tests/unit/metrics/test_unit_datadog.py b/tests/unit/metrics/test_unit_datadog.py new file mode 100644 index 00000000000..ab54e9730fe --- /dev/null +++ b/tests/unit/metrics/test_unit_datadog.py @@ -0,0 +1,69 @@ +import pytest + +from aws_lambda_powertools.metrics.exceptions import SchemaValidationError +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.metrics.provider.datadog.warnings import DatadogDataValidationWarning + + +def test_get_namespace_property(namespace): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(namespace=namespace) + + # WHEN we try to access the namespace property + # THEN namespace property must be present + assert my_metrics.namespace == namespace + + +def test_set_namespace_property(namespace): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics() + + # WHEN we set the namespace property after the initialization + my_metrics.namespace = namespace + + # THEN namespace property must be present + assert my_metrics.namespace == namespace + + +def test_default_tags_across_instances(): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics() + my_metrics.set_default_tags(environment="test", log_group="/lambda/test") + + # WHEN a new DatadogMetrics instance is created + same_metrics = DatadogMetrics() + + # THEN default tags should also be present in the new instance + assert "environment" in same_metrics.default_tags + + +def test_invalid_datadog_metric_name(): + metrics =
DatadogMetrics() + + # GIVEN three metric names, each invalid for a different reason + metric_name_1 = "1_metric" # Metric name must not start with number + metric_name_2 = "metric_ç" # Metric name must not contain unicode characters + metric_name_3 = "".join(["x" for _ in range(201)]) # Metric name must have less than 200 characters + + # WHEN we add metrics with those names + # THEN it must fail validation and raise SchemaValidationError + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name=metric_name_1, value=1) + + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name=metric_name_2, value=1) + + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name=metric_name_3, value=1) + + +def test_invalid_datadog_metric_tag(): + metrics = DatadogMetrics() + + # GIVEN a metric tag that exceeds the length limit + metric_tag_1 = "".join(["x" for _ in range(201)]) # Metric tags must have less than 200 characters + + # WHEN we add a metric with that tag + # THEN it must emit a DatadogDataValidationWarning + with pytest.warns(DatadogDataValidationWarning): + metrics.add_metric(name="metric_2", value=1, tag1=metric_tag_1) diff --git a/tests/unit/parser/test_apigwv2.py b/tests/unit/parser/test_apigwv2.py index 9ffc7f525bc..b52bad28b40 100644 --- a/tests/unit/parser/test_apigwv2.py +++ b/tests/unit/parser/test_apigwv2.py @@ -63,6 +63,13 @@ def test_apigw_v2_event_jwt_authorizer(): assert parsed_event.stageVariables == raw_event["stageVariables"] +def test_apigw_v2_event_empty_jwt_scopes(): + raw_event = load_event("apiGatewayProxyV2Event.json") + raw_event["requestContext"]["authorizer"]["jwt"]["scopes"] = None + + APIGatewayProxyEventV2Model(**raw_event) + + def test_api_gateway_proxy_v2_event_lambda_authorizer(): raw_event = load_event("apiGatewayProxyV2LambdaAuthorizerEvent.json") parsed_event: APIGatewayProxyEventV2Model = APIGatewayProxyEventV2Model(**raw_event)
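Closing aside on the parser change just above: making JWT `scopes` optional means authorizer events that null out scopes no longer fail model validation. A sketch of the behaviour under stated assumptions: the `load_event` import path and the final assertion are assumed from the test suite's conventions, not shown in this diff:

from aws_lambda_powertools.utilities.parser.models import APIGatewayProxyEventV2Model
from tests.functional.utils import load_event  # helper path assumed from this test suite

raw_event = load_event("apiGatewayProxyV2Event.json")
raw_event["requestContext"]["authorizer"]["jwt"]["scopes"] = None

# Previously a null "scopes" raised a pydantic ValidationError; with the change
# above the event parses and the field is simply empty
parsed = APIGatewayProxyEventV2Model(**raw_event)
assert parsed.requestContext.authorizer.jwt.scopes is None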