From d9cf9fa9059401ebc46f0619e2d991e8e9271540 Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Tue, 4 Mar 2025 15:56:32 -0500 Subject: [PATCH 01/44] chore: remove the hardcoded 2.21.3 version in dockerfile (#568) * remove the hardcoded 2.21.3 version in dockerfile * Apply suggestions from code review Co-authored-by: datadog-datadog-prod-us1[bot] <88084959+datadog-datadog-prod-us1[bot]@users.noreply.github.com> --------- Co-authored-by: Munir Abdinur Co-authored-by: datadog-datadog-prod-us1[bot] <88084959+datadog-datadog-prod-us1[bot]@users.noreply.github.com> --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 757d671a..7f522e5a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,7 @@ ENV PATH=/root/.cargo/bin:$PATH # Install datadog_lambda and dependencies from local COPY . . -RUN pip install . ddtrace==2.21.3 -t ./python/lib/$runtime/site-packages +RUN pip install --no-cache-dir . -t ./python/lib/$runtime/site-packages # Remove botocore (40MB) to reduce package size. aws-xray-sdk # installs it, while it's already provided by the Lambda Runtime. From aec49103a8a68362bea6bf1509b7bc51df7a3088 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?jordan=20gonz=C3=A1lez?= <30836115+duncanista@users.noreply.github.com> Date: Tue, 4 Mar 2025 15:58:02 -0500 Subject: [PATCH 02/44] add `CODEOWNERS` (#569) --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..e53b2646 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @DataDog/serverless-aws From 53306e38df8514525649789495baa7cad91eecd6 Mon Sep 17 00:00:00 2001 From: Aleksandr Pasechnik Date: Tue, 4 Mar 2025 16:45:00 -0500 Subject: [PATCH 03/44] chore: new govcloud release script (#567) also refactoring the environments template datasource and adding layer bundling jobs. 
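For reference, the intended flow (per the USAGE notes in the new script) is to download the bundle artifact from the GitLab `layer bundle` or `signed layer bundle` job and feed it to the script; us1-fed only accepts the signed bundle. A sketch, with an illustrative job id and region:

    ENVIRONMENT=us1-staging-fed REGIONS=us-gov-west-1 \
        ./scripts/publish_govcloud.sh datadog_lambda_py-bundle-12345.zip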
--- ci/datasources/environments.yaml | 4 +- ci/input_files/build.yaml.tpl | 66 +++++++++++--- ci/publish_layers.sh | 36 ++++++-- scripts/publish_govcloud.sh | 105 ++++++++++++++++++++++ scripts/publish_prod.sh | 144 +++++++++++++------------------ 5 files changed, 255 insertions(+), 100 deletions(-) create mode 100755 scripts/publish_govcloud.sh diff --git a/ci/datasources/environments.yaml b/ci/datasources/environments.yaml index 90056ab0..1ae2b4d7 100644 --- a/ci/datasources/environments.yaml +++ b/ci/datasources/environments.yaml @@ -1,9 +1,9 @@ environments: - - name: sandbox + sandbox: external_id: sandbox-publish-externalid role_to_assume: sandbox-layer-deployer account: 425362996713 - - name: prod + prod: external_id: prod-publish-externalid role_to_assume: dd-serverless-layer-deployer-role account: 464622532012 diff --git a/ci/input_files/build.yaml.tpl b/ci/input_files/build.yaml.tpl index eae6b0d1..769f87f5 100644 --- a/ci/input_files/build.yaml.tpl +++ b/ci/input_files/build.yaml.tpl @@ -103,9 +103,6 @@ integration-test ({{ $runtime.name }}-{{ $runtime.arch }}): script: - RUNTIME_PARAM={{ $runtime.python_version }} ARCH={{ $runtime.arch }} ./scripts/run_integration_tests.sh -{{ range $environment := (ds "environments").environments }} - -{{ if or (eq $environment.name "prod") }} sign-layer ({{ $runtime.name }}-{{ $runtime.arch }}): stage: sign tags: ["arch:amd64"] @@ -128,22 +125,25 @@ sign-layer ({{ $runtime.name }}-{{ $runtime.arch }}): before_script: - apt-get update - apt-get install -y uuid-runtime + {{ with $environment := (ds "environments").environments.prod }} - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source ./ci/get_secrets.sh + {{ end }} script: - - LAYER_FILE=datadog_lambda_py-{{ $runtime.arch}}-{{ $runtime.python_version }}.zip ./scripts/sign_layers.sh {{ $environment.name }} -{{ end }} + - LAYER_FILE=datadog_lambda_py-{{ $runtime.arch}}-{{ $runtime.python_version }}.zip ./scripts/sign_layers.sh prod + +{{ range $environment_name, $environment := (ds "environments").environments }} -publish-layer-{{ $environment.name }} ({{ $runtime.name }}-{{ $runtime.arch }}): +publish-layer-{{ $environment_name }} ({{ $runtime.name }}-{{ $runtime.arch }}): stage: publish tags: ["arch:amd64"] image: registry.ddbuild.io/images/docker:20.10-py3 rules: - - if: '"{{ $environment.name }}" =~ /^(sandbox|staging)/' + - if: '"{{ $environment_name }}" == "sandbox"' when: manual allow_failure: true - if: '$CI_COMMIT_TAG =~ /^v.*/' needs: -{{ if or (eq $environment.name "prod") }} +{{ if or (eq $environment_name "prod") }} - sign-layer ({{ $runtime.name }}-{{ $runtime.arch}}) {{ else }} - build-layer ({{ $runtime.name }}-{{ $runtime.arch }}) @@ -153,7 +153,7 @@ publish-layer-{{ $environment.name }} ({{ $runtime.name }}-{{ $runtime.arch }}): - integration-test ({{ $runtime.name }}-{{ $runtime.arch }}) {{ end }} dependencies: -{{ if or (eq $environment.name "prod") }} +{{ if or (eq $environment_name "prod") }} - sign-layer ({{ $runtime.name }}-{{ $runtime.arch}}) {{ else }} - build-layer ({{ $runtime.name }}-{{ $runtime.arch }}) @@ -166,7 +166,7 @@ publish-layer-{{ $environment.name }} ({{ $runtime.name }}-{{ $runtime.arch }}): before_script: - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source ./ci/get_secrets.sh script: - - STAGE={{ $environment.name }} PYTHON_VERSION={{ $runtime.python_version }} ARCH={{ 
$runtime.arch }} ./ci/publish_layers.sh + - STAGE={{ $environment_name }} PYTHON_VERSION={{ $runtime.python_version }} ARCH={{ $runtime.arch }} ./ci/publish_layers.sh {{- end }} @@ -186,3 +186,49 @@ publish-pypi-package: {{- end }} script: - ./ci/publish_pypi.sh + +layer bundle: + stage: build + tags: ["arch:amd64"] + image: registry.ddbuild.io/images/docker:20.10 + needs: + {{ range (ds "runtimes").runtimes }} + - build-layer ({{ .name }}-{{ .arch }}) + {{ end }} + dependencies: + {{ range (ds "runtimes").runtimes }} + - build-layer ({{ .name }}-{{ .arch }}) + {{ end }} + artifacts: + expire_in: 1 hr + paths: + - datadog_lambda_py-bundle-${CI_JOB_ID}/ + name: datadog_lambda_py-bundle-${CI_JOB_ID} + script: + - rm -rf datadog_lambda_py-bundle-${CI_JOB_ID} + - mkdir -p datadog_lambda_py-bundle-${CI_JOB_ID} + - cp .layers/datadog_lambda_py-*.zip datadog_lambda_py-bundle-${CI_JOB_ID} + +signed layer bundle: + stage: sign + image: registry.ddbuild.io/images/docker:20.10-py3 + tags: ["arch:amd64"] + rules: + - if: '$CI_COMMIT_TAG =~ /^v.*/' + needs: + {{ range (ds "runtimes").runtimes }} + - sign-layer ({{ .name }}-{{ .arch }}) + {{ end }} + dependencies: + {{ range (ds "runtimes").runtimes }} + - sign-layer ({{ .name }}-{{ .arch }}) + {{ end }} + artifacts: + expire_in: 1 day + paths: + - datadog_lambda_py-signed-bundle-${CI_JOB_ID}/ + name: datadog_lambda_py-signed-bundle-${CI_JOB_ID} + script: + - rm -rf datadog_lambda_py-signed-bundle-${CI_JOB_ID} + - mkdir -p datadog_lambda_py-signed-bundle-${CI_JOB_ID} + - cp .layers/datadog_lambda_py-*.zip datadog_lambda_py-signed-bundle-${CI_JOB_ID} diff --git a/ci/publish_layers.sh b/ci/publish_layers.sh index 85317ddd..58257bf1 100755 --- a/ci/publish_layers.sh +++ b/ci/publish_layers.sh @@ -24,7 +24,20 @@ AWS_CLI_PYTHON_VERSIONS=( "python3.13" "python3.13" ) -PYTHON_VERSIONS=("3.8-amd64" "3.8-arm64" "3.9-amd64" "3.9-arm64" "3.10-amd64" "3.10-arm64" "3.11-amd64" "3.11-arm64" "3.12-amd64" "3.12-arm64" "3.13-amd64" "3.13-arm64") +PYTHON_VERSIONS=( + "3.8-amd64" + "3.8-arm64" + "3.9-amd64" + "3.9-arm64" + "3.10-amd64" + "3.10-arm64" + "3.11-amd64" + "3.11-arm64" + "3.12-amd64" + "3.12-arm64" + "3.13-amd64" + "3.13-arm64" +) LAYER_PATHS=( ".layers/datadog_lambda_py-amd64-3.8.zip" ".layers/datadog_lambda_py-arm64-3.8.zip" @@ -53,11 +66,16 @@ LAYERS=( "Datadog-Python313" "Datadog-Python313-ARM" ) -STAGES=('prod', 'sandbox', 'staging') +STAGES=('prod', 'sandbox', 'staging', 'gov-staging', 'gov-prod') printf "Starting script...\n\n" -printf "Installing dependencies\n" -pip install awscli + +if [ -z "$SKIP_PIP_INSTALL" ]; then + echo "Installing dependencies" + pip install awscli +else + echo "Skipping pip install" +fi publish_layer() { region=$1 @@ -89,7 +107,7 @@ fi printf "Python version specified: $PYTHON_VERSION\n" if [[ ! ${PYTHON_VERSIONS[@]} =~ $PYTHON_VERSION ]]; then - printf "[Error] Unsupported PYTHON_VERSION found.\n" + printf "[Error] Unsupported PYTHON_VERSION found: $PYTHON_VERSION.\n" exit 1 fi @@ -133,8 +151,14 @@ if [[ ! 
${STAGES[@]} =~ $STAGE ]]; then fi layer="${LAYERS[$index]}" +if [ -z "$LAYER_NAME_SUFFIX" ]; then + echo "No layer name suffix" +else + layer="${layer}-${LAYER_NAME_SUFFIX}" +fi +echo "layer name: $layer" -if [[ "$STAGE" =~ ^(staging|sandbox)$ ]]; then +if [[ "$STAGE" =~ ^(staging|sandbox|gov-staging)$ ]]; then # Deploy latest version latest_version=$(aws lambda list-layer-versions --region $REGION --layer-name $layer --query 'LayerVersions[0].Version || `0`') VERSION=$(($latest_version + 1)) diff --git a/scripts/publish_govcloud.sh b/scripts/publish_govcloud.sh new file mode 100755 index 00000000..5fd107b0 --- /dev/null +++ b/scripts/publish_govcloud.sh @@ -0,0 +1,105 @@ +#! /usr/bin/env bash + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2025 Datadog, Inc. +# +# USAGE: download the layer bundle from the build pipeline in gitlab. Use the +# Download button on the `layer bundle` job. This will be a zip file containing +# all of the required layers. Run this script as follows: +# +# ENVIRONMENT=[us1-staging-fed or us1-fed] [LAYER_NAME_SUFFIX=optional-layer-suffix] [REGIONS=us-gov-west-1] ./scripts/publish_govcloud.sh +# +# protip: you can drag the zip file from finder into your terminal to insert +# its path. + +set -e + +LAYER_PACKAGE=$1 + +if [ -z "$LAYER_PACKAGE" ]; then + printf "[ERROR]: layer package not provided\n" + exit 1 +fi + +PACKAGE_NAME=$(basename "$LAYER_PACKAGE" .zip) + +if [ -z "$ENVIRONMENT" ]; then + printf "[ERROR]: ENVIRONMENT not specified\n" + exit 1 +fi + +if [ "$ENVIRONMENT" = "us1-staging-fed" ]; then + AWS_VAULT_ROLE=sso-govcloud-us1-staging-fed-power-user + + export STAGE=gov-staging + + if [[ ! "$PACKAGE_NAME" =~ ^datadog_lambda_py-(signed-)?bundle-[0-9]+$ ]]; then + echo "[ERROR]: Unexpected package name: $PACKAGE_NAME" + exit 1 + fi + +elif [ $ENVIRONMENT = "us1-fed" ]; then + AWS_VAULT_ROLE=sso-govcloud-us1-fed-engineering + + export STAGE=gov-prod + + if [[ ! "$PACKAGE_NAME" =~ ^datadog_lambda_py-signed-bundle-[0-9]+$ ]]; then + echo "[ERROR]: Unexpected package name: $PACKAGE_NAME" + exit 1 + fi + +else + printf "[ERROR]: ENVIRONMENT not supported, must be us1-staging-fed or us1-fed.\n" + exit 1 +fi + +TEMP_DIR=$(mktemp -d) +unzip $LAYER_PACKAGE -d $TEMP_DIR +cp -v $TEMP_DIR/$PACKAGE_NAME/*.zip .layers/ + + +AWS_VAULT_PREFIX="aws-vault exec $AWS_VAULT_ROLE --" + +echo "Checking that you have access to the GovCloud AWS account" +$AWS_VAULT_PREFIX aws sts get-caller-identity + + +AVAILABLE_REGIONS=$($AWS_VAULT_PREFIX aws ec2 describe-regions | jq -r '.[] | .[] | .RegionName') + +# Determine the target regions +if [ -z "$REGIONS" ]; then + echo "Region not specified, running for all available regions." + REGIONS=$AVAILABLE_REGIONS +else + echo "Region specified: $REGIONS" + if [[ ! "$AVAILABLE_REGIONS" == *"$REGIONS"* ]]; then + echo "Could not find $REGIONS in available regions: $AVAILABLE_REGIONS" + echo "" + echo "EXITING SCRIPT." + exit 1 + fi +fi + +for region in $REGIONS +do + echo "Starting publishing layers for region $region..." 
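+    # Each region publishes every runtime/arch variant. ci/publish_layers.sh
+    # takes STAGE, REGION, PYTHON_VERSION, ARCH, and SKIP_PIP_INSTALL from the
+    # environment rather than as arguments, hence the exports below.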
+ + export REGION=$region + + for python_version in "3.8" "3.9" "3.10" "3.11" "3.12" "3.13"; do + for arch in "amd64" "arm64"; do + export PYTHON_VERSION=$python_version + export ARCH=$arch + + export SKIP_PIP_INSTALL=true + + echo "Publishing layer for $PYTHON_VERSION and $ARCH" + + $AWS_VAULT_PREFIX ./ci/publish_layers.sh + done + done +done + +echo "Done!" diff --git a/scripts/publish_prod.sh b/scripts/publish_prod.sh index f3d13653..d2918c54 100755 --- a/scripts/publish_prod.sh +++ b/scripts/publish_prod.sh @@ -6,6 +6,11 @@ set -e read -p "Are we only doing the simplified GovCloud release? ONLY IF THE NORMAL RELEASE IS DONE AND YOU HAVE DOWNLOADED THE LAYERS (y/n)? " GOVCLOUD_ONLY +if [ $GOVCLOUD_ONLY != "n" ]; then + echo "GovCloud publishing is now supported only in publish_govcloud.sh" + exit 1 +fi + # Ensure on main, and pull the latest BRANCH=$(git rev-parse --abbrev-ref HEAD) if [ $BRANCH != "main" ]; then @@ -34,107 +39,82 @@ else fi # Ensure pypi registry access -if [ $GOVCLOUD_ONLY == "y" ]; then - echo "Skipping PyPI check since this is a GovCloud-only release" - -else - read -p "Do you have access to PyPI (y/n)?" CONT - if [ "$CONT" != "y" ]; then - echo "Exiting" - exit 1 - fi +read -p "Do you have access to PyPI (y/n)?" CONT +if [ "$CONT" != "y" ]; then + echo "Exiting" + exit 1 fi CURRENT_VERSION=$(poetry version --short) LAYER_VERSION=$(echo $NEW_VERSION | cut -d '.' -f 2) -if [ $GOVCLOUD_ONLY == "y" ]; then - echo "Skipping Library Updates, code changes, layer builds and signing for GovCloud-only release" +read -p "Ready to update the library version from $CURRENT_VERSION to $NEW_VERSION and publish layer version $LAYER_VERSION (y/n)?" CONT +if [ "$CONT" != "y" ]; then + echo "Exiting" + exit 1 +fi +echo "Answer 'n' if already done in a PR" +read -p "Update pyproject.toml version? (y/n)?" CONT +if [ "$CONT" != "y" ]; then + echo "Skipping updating pyproject.toml version" else - read -p "Ready to update the library version from $CURRENT_VERSION to $NEW_VERSION and publish layer version $LAYER_VERSION (y/n)?" CONT - if [ "$CONT" != "y" ]; then - echo "Exiting" - exit 1 - fi - - echo "Answer 'n' if already done in a PR" - read -p "Update pyproject.toml version? (y/n)?" CONT - if [ "$CONT" != "y" ]; then - echo "Skipping updating package.json version" - else - echo - echo "Replacing version in pyproject.toml and datadog_lambda/version.py" - echo - - poetry version ${NEW_VERSION} - echo "__version__ = \"${NEW_VERSION}\"" > datadog_lambda/version.py - fi echo - echo "Building layers..." - ./scripts/build_layers.sh - + echo "Replacing version in pyproject.toml and datadog_lambda/version.py" echo - echo "Signing layers for commercial AWS regions" - aws-vault exec sso-prod-engineering -- ./scripts/sign_layers.sh prod - - echo "Answer 'n' if GitLab already did this" - read -p "Deploy layers to commercial AWS (y/n)?" CONT - if [ "$CONT" != "y" ]; then - echo "Skipping deployment to commercial AWS" - else - echo "Ensuring you have access to the production AWS account" - aws-vault exec sso-prod-engineering -- aws sts get-caller-identity - - echo - echo "Publishing layers to commercial AWS regions" - VERSION=$LAYER_VERSION aws-vault exec sso-prod-engineering --no-session -- ./scripts/publish_layers.sh - fi + + poetry version ${NEW_VERSION} + echo "__version__ = \"${NEW_VERSION}\"" > datadog_lambda/version.py fi -read -p "Deploy layers to GovCloud AWS (y/n)?" CONT +echo +echo "Building layers..." 
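+# build_layers.sh writes one zip per supported Python runtime and architecture
+# into .layers/, which the signing and publishing steps below pick up.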
+./scripts/build_layers.sh + +echo +echo "Signing layers for commercial AWS regions" +aws-vault exec sso-prod-engineering -- ./scripts/sign_layers.sh prod + +echo "Answer 'n' if GitLab already did this" +read -p "Deploy layers to commercial AWS (y/n)?" CONT if [ "$CONT" != "y" ]; then - echo "Skipping deployment to GovCloud AWS" + echo "Skipping deployment to commercial AWS" else - echo "Ensuring you have access to the AWS GovCloud account" - aws-vault exec sso-govcloud-us1-fed-engineering -- aws sts get-caller-identity + echo "Ensuring you have access to the production AWS account" + aws-vault exec sso-prod-engineering -- aws sts get-caller-identity - echo "Publishing layers to GovCloud AWS regions" - VERSION=$LAYER_VERSION aws-vault exec sso-govcloud-us1-fed-engineering -- ./scripts/publish_layers.sh + echo + echo "Publishing layers to commercial AWS regions" + VERSION=$LAYER_VERSION aws-vault exec sso-prod-engineering --no-session -- ./scripts/publish_layers.sh fi -if [ $GOVCLOUD_ONLY == "y" ]; then - echo "Skipping PyPI check and Github Release since this is a GovCloud-only release" - +echo "Answer 'n' if GitLab already did this" +read -p "Ready to publish $NEW_VERSION to PyPI (y/n)?" CONT +if [ "$CONT" != "y" ]; then + echo "Skipping publishing to PyPI" else - echo "Answer 'n' if GitLab already did this" - read -p "Ready to publish $NEW_VERSION to PyPI (y/n)?" CONT - if [ "$CONT" != "y" ]; then - echo "Skipping publishing to PyPI" - else - echo - echo "Publishing to https://pypi.org/project/datadog-lambda/" - ./scripts/pypi.sh - fi - - - echo "Answer 'n' if you already released in GitHub" - read -p "Do you want to bump the version in GitHub? (y/n)" CONT - if [ "$CONT" != "y" ]; then - echo "Skipping publishing updates to GitHub" - else - echo - echo 'Publishing updates to github' - git commit pyproject.toml datadog_lambda/version.py -m "Bump version to ${NEW_VERSION}" - git push origin main - git tag "v$LAYER_VERSION" - git push origin "refs/tags/v$LAYER_VERSION" - fi + echo + echo "Publishing to https://pypi.org/project/datadog-lambda/" + ./scripts/pypi.sh +fi + +echo "Answer 'n' if you already released in GitHub" +read -p "Do you want to bump the version in GitHub? 
(y/n)" CONT +if [ "$CONT" != "y" ]; then + echo "Skipping publishing updates to GitHub" +else echo - echo "Now create a new release with the tag v${LAYER_VERSION} created unless you have done this already" - echo "https://github.com/DataDog/datadog-lambda-python/releases/new?tag=v$LAYER_VERSION&title=v$LAYER_VERSION" + echo 'Publishing updates to github' + git commit pyproject.toml datadog_lambda/version.py -m "Bump version to ${NEW_VERSION}" + git push origin main + git tag "v$LAYER_VERSION" + git push origin "refs/tags/v$LAYER_VERSION" fi + +echo +echo "Now create a new release with the tag v${LAYER_VERSION} created unless you have done this already" +echo "https://github.com/DataDog/datadog-lambda-python/releases/new?tag=v$LAYER_VERSION&title=v$LAYER_VERSION" + # Open a PR to the documentation repo to automatically bump layer version VERSION=$LAYER_VERSION LAYER=datadog-lambda-python ./scripts/create_documentation_pr.sh From dc1ccba4b81d80097341057bf786c25ec0efc52a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?jordan=20gonz=C3=A1lez?= <30836115+duncanista@users.noreply.github.com> Date: Tue, 4 Mar 2025 17:06:41 -0500 Subject: [PATCH 04/44] update `CODEOWNERS` (#570) --- .github/CODEOWNERS | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e53b2646..26b4b78e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1,6 @@ -* @DataDog/serverless-aws +* @DataDog/serverless-aws +datadog_lambda/tracing.py @DataDog/apm-serverless +datadog_lambda/patch.py @DataDog/apm-serverless +datadog_lambda/span_points.py @DataDog/apm-serverless +datadog_lambda/cold_start.py @DataDog/apm-serverless +datadog_lambda/wrapper.py @DataDog/apm-serverless From fc0beaae643d59a4745ae6d8da163ad39cdb5970 Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Fri, 7 Mar 2025 10:24:16 -0800 Subject: [PATCH 05/44] Pin ddtrace to Date: Mon, 10 Mar 2025 19:53:38 -0400 Subject: [PATCH 06/44] Enable LLM Observability with `agentless_enabled=True` by default with a parsed API key (#572) * enable llmobs agentless with parsed api_key * extract getting api key to its own function * lint --- datadog_lambda/api.py | 58 ++++++++++++++++++++++++--------------- datadog_lambda/wrapper.py | 9 +++++- 2 files changed, 44 insertions(+), 23 deletions(-) diff --git a/datadog_lambda/api.py b/datadog_lambda/api.py index b5414fd9..a114fe8f 100644 --- a/datadog_lambda/api.py +++ b/datadog_lambda/api.py @@ -4,6 +4,7 @@ logger = logging.getLogger(__name__) KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName" +api_key = None def decrypt_kms_api_key(kms_client, ciphertext): @@ -46,6 +47,40 @@ def decrypt_kms_api_key(kms_client, ciphertext): return plaintext +def get_api_key() -> str: + """ + Gets the Datadog API key from the environment variables or secrets manager. + Extracts the result to a global value to avoid repeated calls to the + secrets manager from different products. 
+ """ + global api_key + if api_key: + return api_key + + import boto3 + + DD_API_KEY_SECRET_ARN = os.environ.get("DD_API_KEY_SECRET_ARN", "") + DD_API_KEY_SSM_NAME = os.environ.get("DD_API_KEY_SSM_NAME", "") + DD_KMS_API_KEY = os.environ.get("DD_KMS_API_KEY", "") + DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", "")) + + if DD_API_KEY_SECRET_ARN: + api_key = boto3.client("secretsmanager").get_secret_value( + SecretId=DD_API_KEY_SECRET_ARN + )["SecretString"] + elif DD_API_KEY_SSM_NAME: + api_key = boto3.client("ssm").get_parameter( + Name=DD_API_KEY_SSM_NAME, WithDecryption=True + )["Parameter"]["Value"] + elif DD_KMS_API_KEY: + kms_client = boto3.client("kms") + api_key = decrypt_kms_api_key(kms_client, DD_KMS_API_KEY) + else: + api_key = DD_API_KEY + + return api_key + + def init_api(): if not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true": # Make sure that this package would always be lazy-loaded/outside from the critical path @@ -54,28 +89,7 @@ def init_api(): from datadog import api if not api._api_key: - import boto3 - - DD_API_KEY_SECRET_ARN = os.environ.get("DD_API_KEY_SECRET_ARN", "") - DD_API_KEY_SSM_NAME = os.environ.get("DD_API_KEY_SSM_NAME", "") - DD_KMS_API_KEY = os.environ.get("DD_KMS_API_KEY", "") - DD_API_KEY = os.environ.get( - "DD_API_KEY", os.environ.get("DATADOG_API_KEY", "") - ) - - if DD_API_KEY_SECRET_ARN: - api._api_key = boto3.client("secretsmanager").get_secret_value( - SecretId=DD_API_KEY_SECRET_ARN - )["SecretString"] - elif DD_API_KEY_SSM_NAME: - api._api_key = boto3.client("ssm").get_parameter( - Name=DD_API_KEY_SSM_NAME, WithDecryption=True - )["Parameter"]["Value"] - elif DD_KMS_API_KEY: - kms_client = boto3.client("kms") - api._api_key = decrypt_kms_api_key(kms_client, DD_KMS_API_KEY) - else: - api._api_key = DD_API_KEY + api._api_key = get_api_key() logger.debug("Setting DATADOG_API_KEY of length %d", len(api._api_key)) diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 2632d22e..6afa9a07 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -56,10 +56,14 @@ if profiling_env_var: from ddtrace.profiling import profiler +llmobs_api_key = None llmobs_env_var = os.environ.get("DD_LLMOBS_ENABLED", "false").lower() in ("true", "1") if llmobs_env_var: + from datadog_lambda.api import get_api_key from ddtrace.llmobs import LLMObs + llmobs_api_key = get_api_key() + logger = logging.getLogger(__name__) DD_FLUSH_TO_LOG = "DD_FLUSH_TO_LOG" @@ -229,7 +233,10 @@ def __init__(self, func): # Enable LLM Observability if llmobs_env_var: - LLMObs.enable() + LLMObs.enable( + agentless_enabled=True, + api_key=llmobs_api_key, + ) logger.debug("datadog_lambda_wrapper initialized") except Exception as e: From 71b64fa1de4c0e56bb49cc2201a87da0d10409c5 Mon Sep 17 00:00:00 2001 From: Nicholas Hulston Date: Wed, 12 Mar 2025 14:16:42 -0400 Subject: [PATCH 07/44] Use FIPs endpoints in Govcloud regions (#575) --- datadog_lambda/api.py | 31 +++++++++++++-- tests/test_api.py | 89 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 117 insertions(+), 3 deletions(-) create mode 100644 tests/test_api.py diff --git a/datadog_lambda/api.py b/datadog_lambda/api.py index a114fe8f..03135912 100644 --- a/datadog_lambda/api.py +++ b/datadog_lambda/api.py @@ -64,16 +64,41 @@ def get_api_key() -> str: DD_KMS_API_KEY = os.environ.get("DD_KMS_API_KEY", "") DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", "")) + REGION = os.environ.get("AWS_REGION", "") + is_gov_region = 
REGION.startswith("us-gov-") + if is_gov_region: + logger.debug( + "Govcloud region detected. Using FIPs endpoints for secrets management." + ) + if DD_API_KEY_SECRET_ARN: - api_key = boto3.client("secretsmanager").get_secret_value( + # Secrets manager endpoints: https://docs.aws.amazon.com/general/latest/gr/asm.html + fips_endpoint = ( + f"https://secretsmanager-fips.{REGION}.amazonaws.com" + if is_gov_region + else None + ) + secrets_manager_client = boto3.client( + "secretsmanager", endpoint_url=fips_endpoint + ) + api_key = secrets_manager_client.get_secret_value( SecretId=DD_API_KEY_SECRET_ARN )["SecretString"] elif DD_API_KEY_SSM_NAME: - api_key = boto3.client("ssm").get_parameter( + # SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html + fips_endpoint = ( + f"https://ssm-fips.{REGION}.amazonaws.com" if is_gov_region else None + ) + ssm_client = boto3.client("ssm", endpoint_url=fips_endpoint) + api_key = ssm_client.get_parameter( Name=DD_API_KEY_SSM_NAME, WithDecryption=True )["Parameter"]["Value"] elif DD_KMS_API_KEY: - kms_client = boto3.client("kms") + # KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html + fips_endpoint = ( + f"https://kms-fips.{REGION}.amazonaws.com" if is_gov_region else None + ) + kms_client = boto3.client("kms", endpoint_url=fips_endpoint) api_key = decrypt_kms_api_key(kms_client, DD_KMS_API_KEY) else: api_key = DD_API_KEY diff --git a/tests/test_api.py b/tests/test_api.py new file mode 100644 index 00000000..a69f4382 --- /dev/null +++ b/tests/test_api.py @@ -0,0 +1,89 @@ +import os +import unittest +from unittest.mock import patch, MagicMock + +import datadog_lambda.api as api + + +class TestDatadogLambdaAPI(unittest.TestCase): + def setUp(self): + api.api_key = None + self.env_patcher = patch.dict( + os.environ, + { + "DD_API_KEY_SECRET_ARN": "", + "DD_API_KEY_SSM_NAME": "", + "DD_KMS_API_KEY": "", + "DD_API_KEY": "", + "DATADOG_API_KEY": "", + "AWS_REGION": "", + }, + clear=True, + ) + self.env_patcher.start() + + @patch("boto3.client") + def test_secrets_manager_fips_endpoint(self, mock_boto3_client): + mock_client = MagicMock() + mock_client.get_secret_value.return_value = {"SecretString": "test-api-key"} + mock_boto3_client.return_value = mock_client + + os.environ["AWS_REGION"] = "us-gov-east-1" + os.environ["DD_API_KEY_SECRET_ARN"] = "test-secrets-arn" + + api_key = api.get_api_key() + + mock_boto3_client.assert_called_with( + "secretsmanager", + endpoint_url="https://secretsmanager-fips.us-gov-east-1.amazonaws.com", + ) + self.assertEqual(api_key, "test-api-key") + + @patch("boto3.client") + def test_ssm_fips_endpoint(self, mock_boto3_client): + mock_client = MagicMock() + mock_client.get_parameter.return_value = { + "Parameter": {"Value": "test-api-key"} + } + mock_boto3_client.return_value = mock_client + + os.environ["AWS_REGION"] = "us-gov-west-1" + os.environ["DD_API_KEY_SSM_NAME"] = "test-ssm-param" + + api_key = api.get_api_key() + + mock_boto3_client.assert_called_with( + "ssm", endpoint_url="https://ssm-fips.us-gov-west-1.amazonaws.com" + ) + self.assertEqual(api_key, "test-api-key") + + @patch("boto3.client") + @patch("datadog_lambda.api.decrypt_kms_api_key") + def test_kms_fips_endpoint(self, mock_decrypt_kms, mock_boto3_client): + mock_client = MagicMock() + mock_boto3_client.return_value = mock_client + mock_decrypt_kms.return_value = "test-api-key" + + os.environ["AWS_REGION"] = "us-gov-west-1" + os.environ["DD_KMS_API_KEY"] = "encrypted-api-key" + + api_key = api.get_api_key() + + 
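+        # In a us-gov-* region the KMS client must be pinned to the region's
+        # FIPS endpoint (https://docs.aws.amazon.com/general/latest/gr/kms.html).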
mock_boto3_client.assert_called_with( + "kms", endpoint_url="https://kms-fips.us-gov-west-1.amazonaws.com" + ) + self.assertEqual(api_key, "test-api-key") + + @patch("boto3.client") + def test_no_fips_for_standard_regions(self, mock_boto3_client): + mock_client = MagicMock() + mock_client.get_secret_value.return_value = {"SecretString": "test-api-key"} + mock_boto3_client.return_value = mock_client + + os.environ.clear() + os.environ["AWS_REGION"] = "us-west-2" + os.environ["DD_API_KEY_SECRET_ARN"] = "test-arn" + + api.get_api_key() + + mock_boto3_client.assert_called_with("secretsmanager", endpoint_url=None) From 7d7c15346f871d9a5d3333a8f95334b7fe3d2bf3 Mon Sep 17 00:00:00 2001 From: Nicholas Hulston Date: Fri, 14 Mar 2025 14:14:07 -0700 Subject: [PATCH 08/44] Use correct SecretsManager region (#576) --- datadog_lambda/api.py | 21 ++++++++++++++------- tests/test_api.py | 33 ++++++++++++++++++++++++++++++--- 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/datadog_lambda/api.py b/datadog_lambda/api.py index 03135912..ad860873 100644 --- a/datadog_lambda/api.py +++ b/datadog_lambda/api.py @@ -64,8 +64,8 @@ def get_api_key() -> str: DD_KMS_API_KEY = os.environ.get("DD_KMS_API_KEY", "") DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", "")) - REGION = os.environ.get("AWS_REGION", "") - is_gov_region = REGION.startswith("us-gov-") + LAMBDA_REGION = os.environ.get("AWS_REGION", "") + is_gov_region = LAMBDA_REGION.startswith("us-gov-") if is_gov_region: logger.debug( "Govcloud region detected. Using FIPs endpoints for secrets management." @@ -73,13 +73,20 @@ def get_api_key() -> str: if DD_API_KEY_SECRET_ARN: # Secrets manager endpoints: https://docs.aws.amazon.com/general/latest/gr/asm.html - fips_endpoint = ( - f"https://secretsmanager-fips.{REGION}.amazonaws.com" + try: + secrets_region = DD_API_KEY_SECRET_ARN.split(":")[3] + except Exception: + logger.debug( + "Invalid secret arn in DD_API_KEY_SECRET_ARN. Unable to get API key." 
+ ) + return "" + endpoint_url = ( + f"https://secretsmanager-fips.{secrets_region}.amazonaws.com" if is_gov_region else None ) secrets_manager_client = boto3.client( - "secretsmanager", endpoint_url=fips_endpoint + "secretsmanager", endpoint_url=endpoint_url, region_name=secrets_region ) api_key = secrets_manager_client.get_secret_value( SecretId=DD_API_KEY_SECRET_ARN @@ -87,7 +94,7 @@ def get_api_key() -> str: elif DD_API_KEY_SSM_NAME: # SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html fips_endpoint = ( - f"https://ssm-fips.{REGION}.amazonaws.com" if is_gov_region else None + f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com" if is_gov_region else None ) ssm_client = boto3.client("ssm", endpoint_url=fips_endpoint) api_key = ssm_client.get_parameter( @@ -96,7 +103,7 @@ def get_api_key() -> str: elif DD_KMS_API_KEY: # KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html fips_endpoint = ( - f"https://kms-fips.{REGION}.amazonaws.com" if is_gov_region else None + f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com" if is_gov_region else None ) kms_client = boto3.client("kms", endpoint_url=fips_endpoint) api_key = decrypt_kms_api_key(kms_client, DD_KMS_API_KEY) diff --git a/tests/test_api.py b/tests/test_api.py index a69f4382..c7facb43 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -29,13 +29,36 @@ def test_secrets_manager_fips_endpoint(self, mock_boto3_client): mock_boto3_client.return_value = mock_client os.environ["AWS_REGION"] = "us-gov-east-1" - os.environ["DD_API_KEY_SECRET_ARN"] = "test-secrets-arn" + os.environ[ + "DD_API_KEY_SECRET_ARN" + ] = "arn:aws:secretsmanager:us-gov-east-1:1234567890:secret:key-name-123ABC" api_key = api.get_api_key() mock_boto3_client.assert_called_with( "secretsmanager", endpoint_url="https://secretsmanager-fips.us-gov-east-1.amazonaws.com", + region_name="us-gov-east-1", + ) + self.assertEqual(api_key, "test-api-key") + + @patch("boto3.client") + def test_secrets_manager_different_region(self, mock_boto3_client): + mock_client = MagicMock() + mock_client.get_secret_value.return_value = {"SecretString": "test-api-key"} + mock_boto3_client.return_value = mock_client + + os.environ["AWS_REGION"] = "us-east-1" + os.environ[ + "DD_API_KEY_SECRET_ARN" + ] = "arn:aws:secretsmanager:us-west-1:1234567890:secret:key-name-123ABC" + + api_key = api.get_api_key() + + mock_boto3_client.assert_called_with( + "secretsmanager", + endpoint_url=None, + region_name="us-west-1", ) self.assertEqual(api_key, "test-api-key") @@ -82,8 +105,12 @@ def test_no_fips_for_standard_regions(self, mock_boto3_client): os.environ.clear() os.environ["AWS_REGION"] = "us-west-2" - os.environ["DD_API_KEY_SECRET_ARN"] = "test-arn" + os.environ[ + "DD_API_KEY_SECRET_ARN" + ] = "arn:aws:secretsmanager:us-west-2:1234567890:secret:key-name-123ABC" api.get_api_key() - mock_boto3_client.assert_called_with("secretsmanager", endpoint_url=None) + mock_boto3_client.assert_called_with( + "secretsmanager", endpoint_url=None, region_name="us-west-2" + ) From 5a55fe45a49b8d7e6c8db0734f203bdff648f383 Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Mon, 17 Mar 2025 13:16:39 -0700 Subject: [PATCH 09/44] Use sam/build-python images for building layers. 
(#577) --- scripts/build_layers.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_layers.sh b/scripts/build_layers.sh index a0d6ee39..23941b7a 100755 --- a/scripts/build_layers.sh +++ b/scripts/build_layers.sh @@ -61,7 +61,7 @@ function docker_build_zip { # between different python runtimes. temp_dir=$(mktemp -d) docker buildx build -t datadog-lambda-python-${arch}:$1 . --no-cache \ - --build-arg image=public.ecr.aws/docker/library/python:$1 \ + --build-arg image=public.ecr.aws/sam/build-python$1:1 \ --build-arg runtime=python$1 \ --platform linux/${arch} \ --progress=plain \ From 8398da08d0c6a6de4459b9619969bbb97b8895ee Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Tue, 18 Mar 2025 13:11:15 -0700 Subject: [PATCH 10/44] Lazy load boto client when using datadogpy for metrics. (#558) * Lazy load boto client when using datadogpy for metrics. * Update test mocking. --- datadog_lambda/api.py | 14 +++++++++----- pyproject.toml | 4 ++-- tests/test_api.py | 10 +++++----- 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/datadog_lambda/api.py b/datadog_lambda/api.py index ad860873..c539ea05 100644 --- a/datadog_lambda/api.py +++ b/datadog_lambda/api.py @@ -57,8 +57,6 @@ def get_api_key() -> str: if api_key: return api_key - import boto3 - DD_API_KEY_SECRET_ARN = os.environ.get("DD_API_KEY_SECRET_ARN", "") DD_API_KEY_SSM_NAME = os.environ.get("DD_API_KEY_SSM_NAME", "") DD_KMS_API_KEY = os.environ.get("DD_KMS_API_KEY", "") @@ -85,7 +83,7 @@ def get_api_key() -> str: if is_gov_region else None ) - secrets_manager_client = boto3.client( + secrets_manager_client = _boto3_client( "secretsmanager", endpoint_url=endpoint_url, region_name=secrets_region ) api_key = secrets_manager_client.get_secret_value( @@ -96,7 +94,7 @@ def get_api_key() -> str: fips_endpoint = ( f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com" if is_gov_region else None ) - ssm_client = boto3.client("ssm", endpoint_url=fips_endpoint) + ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint) api_key = ssm_client.get_parameter( Name=DD_API_KEY_SSM_NAME, WithDecryption=True )["Parameter"]["Value"] @@ -105,7 +103,7 @@ def get_api_key() -> str: fips_endpoint = ( f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com" if is_gov_region else None ) - kms_client = boto3.client("kms", endpoint_url=fips_endpoint) + kms_client = _boto3_client("kms", endpoint_url=fips_endpoint) api_key = decrypt_kms_api_key(kms_client, DD_KMS_API_KEY) else: api_key = DD_API_KEY @@ -133,3 +131,9 @@ def init_api(): # Unmute exceptions from datadog api client, so we can catch and handle them api._mute = False + + +def _boto3_client(*args, **kwargs): + import botocore.session + + return botocore.session.get_session().create_client(*args, **kwargs) diff --git a/pyproject.toml b/pyproject.toml index a3860567..8db5e352 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ datadog = ">=0.51.0,<1.0.0" wrapt = "^1.11.2" ddtrace = ">=2.20.0,<4" ujson = ">=5.9.0" -boto3 = { version = "^1.34.0", optional = true } +botocore = { version = "^1.34.0", optional = true } requests = { version ="^2.22.0", optional = true } pytest = { version= "^8.0.0", optional = true } pytest-benchmark = { version = "^4.0", optional = true } @@ -38,7 +38,7 @@ flake8 = { version = "^5.0.4", optional = true } [tool.poetry.extras] dev = [ - "boto3", + "botocore", "flake8", "pytest", "pytest-benchmark", diff --git a/tests/test_api.py b/tests/test_api.py index c7facb43..c98d91eb 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -22,7 
+22,7 @@ def setUp(self): ) self.env_patcher.start() - @patch("boto3.client") + @patch("botocore.session.Session.create_client") def test_secrets_manager_fips_endpoint(self, mock_boto3_client): mock_client = MagicMock() mock_client.get_secret_value.return_value = {"SecretString": "test-api-key"} @@ -42,7 +42,7 @@ def test_secrets_manager_fips_endpoint(self, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") - @patch("boto3.client") + @patch("botocore.session.Session.create_client") def test_secrets_manager_different_region(self, mock_boto3_client): mock_client = MagicMock() mock_client.get_secret_value.return_value = {"SecretString": "test-api-key"} @@ -62,7 +62,7 @@ def test_secrets_manager_different_region(self, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") - @patch("boto3.client") + @patch("botocore.session.Session.create_client") def test_ssm_fips_endpoint(self, mock_boto3_client): mock_client = MagicMock() mock_client.get_parameter.return_value = { @@ -80,7 +80,7 @@ def test_ssm_fips_endpoint(self, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") - @patch("boto3.client") + @patch("botocore.session.Session.create_client") @patch("datadog_lambda.api.decrypt_kms_api_key") def test_kms_fips_endpoint(self, mock_decrypt_kms, mock_boto3_client): mock_client = MagicMock() @@ -97,7 +97,7 @@ def test_kms_fips_endpoint(self, mock_decrypt_kms, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") - @patch("boto3.client") + @patch("botocore.session.Session.create_client") def test_no_fips_for_standard_regions(self, mock_boto3_client): mock_client = MagicMock() mock_client.get_secret_value.return_value = {"SecretString": "test-api-key"} From 58a55bb4e4ef6fabe85580a3914c1f4d060f9378 Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Tue, 18 Mar 2025 13:11:44 -0700 Subject: [PATCH 11/44] Re-remove iast .so file. (#560) * Re-remove iast .so file. Commit https://github.com/DataDog/dd-trace-py/commit/fa18def52e13f863bd8de48cb8ef88feba0caf92 was merged to address this. * Re-remove taint tracking. --- Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 7f522e5a..0e79d884 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,10 +22,10 @@ RUN rm -rf ./python/lib/$runtime/site-packages/botocore* RUN rm -rf ./python/lib/$runtime/site-packages/setuptools RUN rm -rf ./python/lib/$runtime/site-packages/jsonschema/tests RUN find . -name 'libddwaf.so' -delete -# Comment this line out for now since ddtrace now tries to import it -# RUN rm ./python/lib/$runtime/site-packages/ddtrace/appsec/_iast/_stacktrace*.so -RUN rm ./python/lib/$runtime/site-packages/ddtrace/internal/datadog/profiling/libdd_wrapper*.so -RUN rm ./python/lib/$runtime/site-packages/ddtrace/internal/datadog/profiling/ddup/_ddup.*.so +RUN rm -f ./python/lib/$runtime/site-packages/ddtrace/appsec/_iast/_taint_tracking/*.so +RUN rm -f ./python/lib/$runtime/site-packages/ddtrace/appsec/_iast/_stacktrace*.so +RUN rm -f ./python/lib/$runtime/site-packages/ddtrace/internal/datadog/profiling/libdd_wrapper*.so +RUN rm -f ./python/lib/$runtime/site-packages/ddtrace/internal/datadog/profiling/ddup/_ddup.*.so # _stack_v2 may not exist for some versions of ddtrace (e.g. 
under python 3.13) RUN rm -f ./python/lib/$runtime/site-packages/ddtrace/internal/datadog/profiling/stack_v2/_stack_v2.*.so # remove *.dist-info directories except any entry_points.txt files From 96a6abd227a328d94d233d7baf951f2975f1b267 Mon Sep 17 00:00:00 2001 From: Abhinav Vedmala Date: Wed, 19 Mar 2025 09:50:53 -0400 Subject: [PATCH 12/44] Propagate Step Function Trace Context through Managed Services (#573) Allows us to extract Step Function trace context in the following cases 1. SFN -> EventBridge -> Lambda 2. SFN -> EventBridge -> SQS -> Lambda 3. SFN -> SQS -> Lambda 4. SFN -> SNS -> Lambda 5. SFN -> SNS -> SQS -> Lambda --- datadog_lambda/tracing.py | 81 ++++++--- datadog_lambda/trigger.py | 30 ++- datadog_lambda/wrapper.py | 3 - tests/test_tracing.py | 374 ++++++++++++++++++++++++-------------- tests/test_trigger.py | 66 +++++++ 5 files changed, 387 insertions(+), 167 deletions(-) diff --git a/datadog_lambda/tracing.py b/datadog_lambda/tracing.py index a73423e1..0fae76dd 100644 --- a/datadog_lambda/tracing.py +++ b/datadog_lambda/tracing.py @@ -39,6 +39,7 @@ _EventSource, parse_event_source, get_first_record, + is_step_function_event, EventTypes, EventSubtypes, ) @@ -271,6 +272,15 @@ def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context): if dd_json_data: dd_data = json.loads(dd_json_data) + + if is_step_function_event(dd_data): + try: + return extract_context_from_step_functions(dd_data, None) + except Exception: + logger.debug( + "Failed to extract Step Functions context from SQS/SNS event." + ) + return propagator.extract(dd_data) else: # Handle case where trace context is injected into attributes.AWSTraceHeader @@ -313,6 +323,15 @@ def _extract_context_from_eventbridge_sqs_event(event): body = json.loads(body_str) detail = body.get("detail") dd_context = detail.get("_datadog") + + if is_step_function_event(dd_context): + try: + return extract_context_from_step_functions(dd_context, None) + except Exception: + logger.debug( + "Failed to extract Step Functions context from EventBridge to SQS event." + ) + return propagator.extract(dd_context) @@ -320,12 +339,23 @@ def extract_context_from_eventbridge_event(event, lambda_context): """ Extract datadog trace context from an EventBridge message's Details. This is only possible if Details is a JSON string. + + If we find a Step Function context, try to extract the trace context from + that header. """ try: detail = event.get("detail") dd_context = detail.get("_datadog") if not dd_context: return extract_context_from_lambda_context(lambda_context) + + try: + return extract_context_from_step_functions(dd_context, None) + except Exception: + logger.debug( + "Failed to extract Step Functions context from EventBridge event." + ) + return propagator.extract(dd_context) except Exception as e: logger.debug("The trace extractor returned with error %s", e) @@ -424,7 +454,7 @@ def _generate_sfn_trace_id(execution_id: str, part: str): def extract_context_from_step_functions(event, lambda_context): """ Only extract datadog trace context when Step Functions Context Object is injected - into lambda's event dict. + into lambda's event dict. Unwrap "Payload" if it exists to handle Legacy Lambda cases. If '_datadog' header is present, we have two cases: 1. Root is a Lambda and we use its traceID @@ -435,25 +465,25 @@ def extract_context_from_step_functions(event, lambda_context): object. 
""" try: + event = event.get("Payload", event) + event = event.get("_datadog", event) + meta = {} - dd_data = event.get("_datadog") - if dd_data and dd_data.get("serverless-version") == "v1": - if "x-datadog-trace-id" in dd_data: # lambda root - trace_id = int(dd_data.get("x-datadog-trace-id")) - high_64_bit_trace_id = _parse_high_64_bits( - dd_data.get("x-datadog-tags") - ) + if event.get("serverless-version") == "v1": + if "x-datadog-trace-id" in event: # lambda root + trace_id = int(event.get("x-datadog-trace-id")) + high_64_bit_trace_id = _parse_high_64_bits(event.get("x-datadog-tags")) if high_64_bit_trace_id: meta["_dd.p.tid"] = high_64_bit_trace_id else: # sfn root - root_execution_id = dd_data.get("RootExecutionId") + root_execution_id = event.get("RootExecutionId") trace_id = _generate_sfn_trace_id(root_execution_id, LOWER_64_BITS) meta["_dd.p.tid"] = _generate_sfn_trace_id( root_execution_id, HIGHER_64_BITS ) - parent_id = _generate_sfn_parent_id(dd_data) + parent_id = _generate_sfn_parent_id(event) else: execution_id = event.get("Execution").get("Id") trace_id = _generate_sfn_trace_id(execution_id, LOWER_64_BITS) @@ -472,20 +502,6 @@ def extract_context_from_step_functions(event, lambda_context): return extract_context_from_lambda_context(lambda_context) -def is_legacy_lambda_step_function(event): - """ - Check if the event is a step function that called a legacy lambda - """ - if not isinstance(event, dict) or "Payload" not in event: - return False - - event = event.get("Payload") - return isinstance(event, dict) and ( - "_datadog" in event - or ("Execution" in event and "StateMachine" in event and "State" in event) - ) - - def extract_context_custom_extractor(extractor, event, lambda_context): """ Extract Datadog trace context using a custom trace extractor function @@ -1309,8 +1325,18 @@ def create_inferred_span_from_eventbridge_event(event, context): synchronicity="async", tag_source="self", ) - dt_format = "%Y-%m-%dT%H:%M:%SZ" + timestamp = event.get("time") + dt_format = "%Y-%m-%dT%H:%M:%SZ" + + # Use more granular timestamp from upstream Step Function if possible + try: + if is_step_function_event(event.get("detail")): + timestamp = event["detail"]["_datadog"]["State"]["EnteredTime"] + dt_format = "%Y-%m-%dT%H:%M:%S.%fZ" + except (TypeError, KeyError, AttributeError): + logger.debug("Error parsing timestamp from Step Functions event") + dt = datetime.strptime(timestamp, dt_format) tracer.set_tags(_dd_origin) @@ -1320,6 +1346,11 @@ def create_inferred_span_from_eventbridge_event(event, context): if span: span.set_tags(tags) span.start = dt.replace(tzinfo=timezone.utc).timestamp() + + # Since inferred span will later parent Lambda, preserve Lambda's current parent + if dd_trace_context.span_id: + span.parent_id = dd_trace_context.span_id + return span diff --git a/datadog_lambda/trigger.py b/datadog_lambda/trigger.py index 11759a0a..708138bf 100644 --- a/datadog_lambda/trigger.py +++ b/datadog_lambda/trigger.py @@ -146,9 +146,7 @@ def parse_event_source(event: dict) -> _EventSource: if event.get("source") == "aws.events" or has_event_categories: event_source = _EventSource(EventTypes.CLOUDWATCH_EVENTS) - if ( - "_datadog" in event and event.get("_datadog").get("serverless-version") == "v1" - ) or ("Execution" in event and "StateMachine" in event and "State" in event): + if is_step_function_event(event): event_source = _EventSource(EventTypes.STEPFUNCTIONS) event_record = get_first_record(event) @@ -369,3 +367,29 @@ def extract_http_status_code_tag(trigger_tags, response): 
status_code = response.status_code return str(status_code) + + +def is_step_function_event(event): + """ + Check if the event is a step function that invoked the current lambda. + + The whole event can be wrapped in "Payload" in Legacy Lambda cases. There may also be a + "_datadog" for JSONata style context propagation. + + The actual event must contain "Execution", "StateMachine", and "State" fields. + """ + event = event.get("Payload", event) + + # JSONPath style + if "Execution" in event and "StateMachine" in event and "State" in event: + return True + + # JSONata style + dd_context = event.get("_datadog") + return ( + dd_context + and "Execution" in dd_context + and "StateMachine" in dd_context + and "State" in dd_context + and "serverless-version" in dd_context + ) diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 6afa9a07..5641bd15 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -45,7 +45,6 @@ is_authorizer_response, tracer, propagator, - is_legacy_lambda_step_function, ) from datadog_lambda.trigger import ( extract_trigger_tags, @@ -286,8 +285,6 @@ def _before(self, event, context): self.response = None set_cold_start(init_timestamp_ns) submit_invocations_metric(context) - if is_legacy_lambda_step_function(event): - event = event["Payload"] self.trigger_tags = extract_trigger_tags(event, context) # Extract Datadog trace context and source from incoming requests dd_context, trace_context_source, event_source = extract_dd_trace_context( diff --git a/tests/test_tracing.py b/tests/test_tracing.py index 5480a92c..0a961a62 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -36,15 +36,12 @@ _convert_xray_trace_id, _convert_xray_entity_id, _convert_xray_sampling, - InferredSpanInfo, create_service_mapping, determine_service_name, service_mapping as global_service_mapping, propagator, emit_telemetry_on_exception_outside_of_handler, - is_legacy_lambda_step_function, ) -from datadog_lambda.trigger import EventTypes from tests.utils import get_mock_context @@ -613,9 +610,39 @@ def test_with_complete_datadog_trace_headers_with_trigger_tags(self): ] ) + def _test_step_function_trace_data_common( + self, event, expected_trace_id, expected_span_id, expected_tid + ): + """Common test logic for step function trace data tests""" + lambda_ctx = get_mock_context() + expected_context = Context( + trace_id=expected_trace_id, + span_id=expected_span_id, + sampling_priority=1, + meta={"_dd.p.tid": expected_tid}, + ) + expected_headers = { + TraceHeader.TRACE_ID: str(expected_trace_id), + TraceHeader.PARENT_ID: "10713633173203262661", + TraceHeader.SAMPLING_PRIORITY: "1", + TraceHeader.TAGS: f"_dd.p.tid={expected_tid}", + } + + ctx, source, _ = extract_dd_trace_context(event, lambda_ctx) + + self.assertEqual(source, "event") + self.assertEqual(ctx, expected_context) + self.assertEqual(get_dd_trace_context(), expected_headers) + + create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) + self.mock_send_segment.assert_called_with( + XraySubsegment.TRACE_KEY, + expected_context, + ) + @with_trace_propagation_style("datadog") def test_step_function_trace_data(self): - lambda_ctx = get_mock_context() + """Test basic step function trace data extraction""" sfn_event = { "Execution": { "Id": "arn:aws:states:sa-east-1:425362996713:execution:abhinav-activity-state-machine:72a7ca3e-901c-41bb-b5a3-5f279b92a316", @@ -634,79 +661,39 @@ def test_step_function_trace_data(self): "Name": "abhinav-activity-state-machine", }, } - ctx, source, 
event_source = extract_dd_trace_context(sfn_event, lambda_ctx) - self.assertEqual(source, "event") - expected_context = Context( - trace_id=435175499815315247, - span_id=3929055471293792800, - sampling_priority=1, - meta={"_dd.p.tid": "3e7a89d1b7310603"}, + self._test_step_function_trace_data_common( + sfn_event, 435175499815315247, 3929055471293792800, "3e7a89d1b7310603" ) - self.assertEqual(ctx, expected_context) - self.assertEqual( - get_dd_trace_context(), - { - TraceHeader.TRACE_ID: "435175499815315247", - TraceHeader.PARENT_ID: "10713633173203262661", - TraceHeader.SAMPLING_PRIORITY: "1", - TraceHeader.TAGS: "_dd.p.tid=3e7a89d1b7310603", + + @with_trace_propagation_style("datadog") + def test_step_function_trace_data_retry(self): + """Test step function trace data extraction with non-zero retry count""" + sfn_event = { + "Execution": { + "Id": "arn:aws:states:sa-east-1:425362996713:execution:abhinav-activity-state-machine:72a7ca3e-901c-41bb-b5a3-5f279b92a316", + "Name": "72a7ca3e-901c-41bb-b5a3-5f279b92a316", + "RoleArn": "arn:aws:iam::425362996713:role/service-role/StepFunctions-abhinav-activity-state-machine-role-22jpbgl6j", + "StartTime": "2024-12-04T19:38:04.069Z", + "RedriveCount": 0, }, + "State": { + "Name": "Lambda Invoke", + "EnteredTime": "2024-12-04T19:38:04.118Z", + "RetryCount": 1, + }, + "StateMachine": { + "Id": "arn:aws:states:sa-east-1:425362996713:stateMachine:abhinav-activity-state-machine", + "Name": "abhinav-activity-state-machine", + }, + } + self._test_step_function_trace_data_common( + sfn_event, 435175499815315247, 5063839446130725204, "3e7a89d1b7310603" ) - create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) - self.mock_send_segment.assert_called_with( - XraySubsegment.TRACE_KEY, - expected_context, - ) - - @with_trace_propagation_style("datadog") - def test_step_function_trace_data_retry(self): - lambda_ctx = get_mock_context() - sfn_event = { - "Execution": { - "Id": "arn:aws:states:sa-east-1:425362996713:execution:abhinav-activity-state-machine:72a7ca3e-901c-41bb-b5a3-5f279b92a316", - "Name": "72a7ca3e-901c-41bb-b5a3-5f279b92a316", - "RoleArn": "arn:aws:iam::425362996713:role/service-role/StepFunctions-abhinav-activity-state-machine-role-22jpbgl6j", - "StartTime": "2024-12-04T19:38:04.069Z", - "RedriveCount": 0, - }, - "State": { - "Name": "Lambda Invoke", - "EnteredTime": "2024-12-04T19:38:04.118Z", - "RetryCount": 1, - }, - "StateMachine": { - "Id": "arn:aws:states:sa-east-1:425362996713:stateMachine:abhinav-activity-state-machine", - "Name": "abhinav-activity-state-machine", - }, - } - ctx, source, event_source = extract_dd_trace_context(sfn_event, lambda_ctx) - self.assertEqual(source, "event") - expected_context = Context( - trace_id=435175499815315247, - span_id=5063839446130725204, - sampling_priority=1, - meta={"_dd.p.tid": "3e7a89d1b7310603"}, - ) - self.assertEqual(ctx, expected_context) - self.assertEqual( - get_dd_trace_context(), - { - TraceHeader.TRACE_ID: "435175499815315247", - TraceHeader.PARENT_ID: "10713633173203262661", - TraceHeader.SAMPLING_PRIORITY: "1", - TraceHeader.TAGS: "_dd.p.tid=3e7a89d1b7310603", - }, - ) - create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) - self.mock_send_segment.assert_called_with( - XraySubsegment.TRACE_KEY, - expected_context, - ) - # 
https://github.com/DataDog/logs-backend/blob/c17618cb552fc369ca40282bae0a65803f82f694/domains/serverless/apps/logs-to-traces-reducer/src/test/resources/test-json-files/stepfunctions/RedriveTest/snapshots/RedriveLambdaSuccessTraceMerging.json#L46 + # https://github.com/DataDog/logs-backend/blob/65ea567150f24e5498008f3cf8cabef9ea995f5d/domains/serverless/apps/logs-to-traces-reducer/src/test/resources/test-json-files/stepfunctions/RedriveTest/snapshots/RedriveLambdaSuccessTraceMerging.json#L45-L46 @with_trace_propagation_style("datadog") def test_step_function_trace_data_redrive(self): - lambda_ctx = get_mock_context() + """Test step function trace data extraction with non-zero redrive count""" sfn_event = { "Execution": { "Id": "arn:aws:states:sa-east-1:425362996713:execution:abhinav-activity-state-machine:72a7ca3e-901c-41bb-b5a3-5f279b92a316", @@ -725,33 +712,13 @@ def test_step_function_trace_data_redrive(self): "Name": "abhinav-activity-state-machine", }, } - ctx, source, event_source = extract_dd_trace_context(sfn_event, lambda_ctx) - self.assertEqual(source, "event") - expected_context = Context( - trace_id=435175499815315247, - span_id=8782364156266188026, - sampling_priority=1, - meta={"_dd.p.tid": "3e7a89d1b7310603"}, - ) - self.assertEqual(ctx, expected_context) - self.assertEqual( - get_dd_trace_context(), - { - TraceHeader.TRACE_ID: "435175499815315247", - TraceHeader.PARENT_ID: "10713633173203262661", - TraceHeader.SAMPLING_PRIORITY: "1", - TraceHeader.TAGS: "_dd.p.tid=3e7a89d1b7310603", - }, - ) - create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) - self.mock_send_segment.assert_called_with( - XraySubsegment.TRACE_KEY, - expected_context, + self._test_step_function_trace_data_common( + sfn_event, 435175499815315247, 8782364156266188026, "3e7a89d1b7310603" ) @with_trace_propagation_style("datadog") def test_step_function_trace_data_lambda_root(self): - lambda_ctx = get_mock_context() + """Test JSONata style step function trace data extraction where there's an upstream Lambda""" sfn_event = { "_datadog": { "Execution": { @@ -769,33 +736,13 @@ def test_step_function_trace_data_lambda_root(self): "serverless-version": "v1", } } - ctx, source, event_source = extract_dd_trace_context(sfn_event, lambda_ctx) - self.assertEqual(source, "event") - expected_context = Context( - trace_id=5821803790426892636, - span_id=6880978411788117524, - sampling_priority=1, - meta={"_dd.p.tid": "672a7cb100000000"}, - ) - self.assertEqual(ctx, expected_context) - self.assertEqual( - get_dd_trace_context(), - { - TraceHeader.TRACE_ID: "5821803790426892636", - TraceHeader.PARENT_ID: "10713633173203262661", - TraceHeader.SAMPLING_PRIORITY: "1", - TraceHeader.TAGS: "_dd.p.tid=672a7cb100000000", - }, - ) - create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) - self.mock_send_segment.assert_called_with( - XraySubsegment.TRACE_KEY, - expected_context, + self._test_step_function_trace_data_common( + sfn_event, 5821803790426892636, 6880978411788117524, "672a7cb100000000" ) @with_trace_propagation_style("datadog") def test_step_function_trace_data_sfn_root(self): - lambda_ctx = get_mock_context() + """Test JSONata style step function trace data extraction where there's an upstream step function""" sfn_event = { "_datadog": { "Execution": { @@ -812,28 +759,183 @@ def test_step_function_trace_data_sfn_root(self): "serverless-version": "v1", } } - ctx, source, event_source = extract_dd_trace_context(sfn_event, lambda_ctx) - self.assertEqual(source, "event") - expected_context = Context( 
- trace_id=4521899030418994483, - span_id=6880978411788117524, - sampling_priority=1, - meta={"_dd.p.tid": "12d1270d99cc5e03"}, + self._test_step_function_trace_data_common( + sfn_event, 4521899030418994483, 6880978411788117524, "12d1270d99cc5e03" ) - self.assertEqual(ctx, expected_context) - self.assertEqual( - get_dd_trace_context(), - { - TraceHeader.TRACE_ID: "4521899030418994483", - TraceHeader.PARENT_ID: "10713633173203262661", - TraceHeader.SAMPLING_PRIORITY: "1", - TraceHeader.TAGS: "_dd.p.tid=12d1270d99cc5e03", + + @with_trace_propagation_style("datadog") + def test_step_function_trace_data_eventbridge(self): + """Test step function trace data extraction through EventBridge""" + eventbridge_event = { + "version": "0", + "id": "eaacd8db-02de-ab13-ed5a-8ffb84048294", + "detail-type": "StepFunctionTask", + "source": "my.eventbridge", + "account": "425362996713", + "time": "2025-03-13T15:17:34Z", + "region": "sa-east-1", + "resources": [ + "arn:aws:states:sa-east-1:425362996713:stateMachine:abhinav-inner-state-machine", + "arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:912eaa4c-291a-488a-bda3-d06bcc21203d", + ], + "detail": { + "Message": "Hello from Step Functions!", + "TaskToken": "AQCEAAAAKgAAAAMAAAAAAAAAAeMHr6sb8Ll5IKntjIiLGaBkaNeweo84kKYKDTvDaSAP1vjuYRJEGqFdHsKMyZL8ZcgAdanKpkbhPEN5hpoCe+BH9KblWeDsJxkDCk/meN5SaPlC1qS7Q/7/KqBq+tmAOCSy+MjdqFsnihy5Yo6g6C9uuPn7ccSB/609d8pznFm9nigEos/82emwi18lm67/+/bn4RTX4S7qV4RoGWUWUPeHfr34xWOipCt4SVDkoQPZdRVpq3wyRJP2zcK0zup24/opJqKKSCI5Q9orALNB2jEjDyQ9LE4mSrafoe0tcm/bOAGfrcpR3AwtArUiF6JPYd7Nw0XWWyPXFBjiQTJDhZFlGfllJ1N91eiN8wlzUX1+I0vw/t2PoEmuQ2VCJYCbl1ybjX/tQ97GZ9ogjY9N7VYy5uD5xfZ6VAyetUR06HUtbUIXTVxULm7wmsHb979W/fIQXsrxbFzc0+ypKaqGXJBoq7xX//irjpuNhWg1Wgfn0hxuXl5oN/LkqI83T8f9SdnJMxRDpaHDpttqbjVESB/Pf9o7gakjJj12+r2uiJNc81k50uhuHdFOGsImFHKV8hb1LGcq0ZzUKT5SbEDV2k+ezOP+O9Sk4c0unbpNLM3PKLKxVLhu2gtiIIVCHUHGmumW", + "_datadog": { + "Execution": { + "Id": "arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:912eaa4c-291a-488a-bda3-d06bcc21203d", + "StartTime": "2025-03-13T15:17:33.972Z", + "Name": "912eaa4c-291a-488a-bda3-d06bcc21203d", + "RoleArn": "arn:aws:iam::425362996713:role/service-role/StepFunctions-abhinav-activity-state-machine-role-22jpbgl6j", + "RedriveCount": 0, + }, + "StateMachine": { + "Id": "arn:aws:states:sa-east-1:425362996713:stateMachine:abhinav-inner-state-machine", + "Name": "abhinav-inner-state-machine", + }, + "State": { + "Name": "EventBridge PutEvents", + "EnteredTime": "2025-03-13T15:17:34.008Z", + "RetryCount": 0, + }, + "Task": { + "Token": "AQCEAAAAKgAAAAMAAAAAAAAAAeMHr6sb8Ll5IKntjIiLGaBkaNeweo84kKYKDTvDaSAP1vjuYRJEGqFdHsKMyZL8ZcgAdanKpkbhPEN5hpoCe+BH9KblWeDsJxkDCk/meN5SaPlC1qS7Q/7/KqBq+tmAOCSy+MjdqFsnihy5Yo6g6C9uuPn7ccSB/609d8pznFm9nigEos/82emwi18lm67/+/bn4RTX4S7qV4RoGWUWUPeHfr34xWOipCt4SVDkoQPZdRVpq3wyRJP2zcK0zup24/opJqKKSCI5Q9orALNB2jEjDyQ9LE4mSrafoe0tcm/bOAGfrcpR3AwtArUiF6JPYd7Nw0XWWyPXFBjiQTJDhZFlGfllJ1N91eiN8wlzUX1+I0vw/t2PoEmuQ2VCJYCbl1ybjX/tQ97GZ9ogjY9N7VYy5uD5xfZ6VAyetUR06HUtbUIXTVxULm7wmsHb979W/fIQXsrxbFzc0+ypKaqGXJBoq7xX//irjpuNhWg1Wgfn0hxuXl5oN/LkqI83T8f9SdnJMxRDpaHDpttqbjVESB/Pf9o7gakjJj12+r2uiJNc81k50uhuHdFOGsImFHKV8hb1LGcq0ZzUKT5SbEDV2k+ezOP+O9Sk4c0unbpNLM3PKLKxVLhu2gtiIIVCHUHGmumW" + }, + "RootExecutionId": "arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:912eaa4c-291a-488a-bda3-d06bcc21203d", + "serverless-version": "v1", + }, }, + } + self._test_step_function_trace_data_common( + eventbridge_event, + 3401561763239692811, + 
10430178702434539423, + "a49ff3b7fb47b0b", ) - create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) - self.mock_send_segment.assert_called_with( - XraySubsegment.TRACE_KEY, - expected_context, + + @with_trace_propagation_style("datadog") + def test_step_function_trace_data_sqs(self): + """Test step function trace data extraction through SQS""" + sqs_event = { + "Records": [ + { + "EventSource": "aws:sns", + "EventVersion": "1.0", + "EventSubscriptionArn": "arn:aws:sns:sa-east-1:425362996713:logs-to-traces-dev-topic:f1653ba3-2ff7-4c8e-9381-45a7a62f9708", + "Sns": { + "Type": "Notification", + "MessageId": "e39184ea-bfd8-5efa-96fe-e4a64a457ff7", + "TopicArn": "arn:aws:sns:sa-east-1:425362996713:logs-to-traces-dev-topic", + "Subject": None, + "Message": "{}", + "Timestamp": "2025-03-13T15:01:49.942Z", + "SignatureVersion": "1", + "Signature": "WJHKq+pNOLgxa7+dB1dud02RM/30Jvz+KiMZzjRl38/Pphz90H24eGyIbnq3BJXYEyawFCHC6sq/5HcwXouGc5gbah6he+JpqXahMEs6cyMs2tg9SXxooRHEGv5iiZXKhnDcJYOrQ+iFExO9w+WFWfJjO2m/EDVVSYvuDjDV7mmTwAgEOD0zUvWpT7wOeKGG5Uk916Ppy3iMV7sCoHV/RwVikdhCWDDmxbdqteGduAXPdGESE/aj6kUx9ibEOKXyhC+7H1/j0tlhUchl6LZsTf1Gaiq2yEqKXKvsupcG3hRZ6FtIWP0jGlFhpW5EHc2oiHIVOsQceCYPqXYMCZvFuA==", + "SigningCertUrl": "https://sns.sa-east-1.amazonaws.com/SimpleNotificationService-9c6465fa7f48f5cacd23014631ec1136.pem", + "UnsubscribeUrl": "https://sns.sa-east-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:sa-east-1:425362996713:logs-to-traces-dev-topic:f1653ba3-2ff7-4c8e-9381-45a7a62f9708", + "MessageAttributes": { + "_datadog": { + "Type": "String", + "Value": '{"Execution":{"Id":"arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:79478846-0cff-44de-91f5-02c96ff65762","StartTime":"2025-03-13T15:01:49.738Z","Name":"79478846-0cff-44de-91f5-02c96ff65762","RoleArn":"arn:aws:iam::425362996713:role/service-role/StepFunctions-abhinav-activity-state-machine-role-22jpbgl6j","RedriveCount":0},"StateMachine":{"Id":"arn:aws:states:sa-east-1:425362996713:stateMachine:abhinav-inner-state-machine","Name":"abhinav-inner-state-machine"},"State":{"Name":"SNS Publish","EnteredTime":"2025-03-13T15:01:49.768Z","RetryCount":0},"RootExecutionId":"arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:79478846-0cff-44de-91f5-02c96ff65762","serverless-version":"v1"}', + } + }, + }, + } + ] + } + self._test_step_function_trace_data_common( + sqs_event, 3818106616964044169, 15912108710769293902, "3a4fd1a254eb514a" + ) + + @with_trace_propagation_style("datadog") + def test_step_function_trace_data_eventbridge_sqs(self): + """Test step function trace data extraction through EventBridge and SQS""" + eventbridge_sqs_event = { + "Records": [ + { + "messageId": "9ed082ad-2f4d-4309-ab99-9553d2be5613", + "receiptHandle": "AQEB6z7FatNIXbWOTC4Bx+udD0flrnT7XMehruTohl8O2KI2t9hvo5oxGIOhwcb+QtS5aRXsFE35TgGE8kZHlHK7Sa8jQUen6XmsPG7qB6BPdXjr0eunM2SDAtLj0mDSKx907VIKRYQG+qpI9ZyNK7Bi786oQIz2UkZGZru9zlXxJtAQiXBqfJ+OfTzhIwkPu04czU6lYfAbxdyNaBNdBEsTNJKPjquvcq1ZBVCHkn9L6wo8jha6XreoeS2WJ5N26ZLKtAl3wlSUByB92OKZU2mEuNboyY7bgK+nkx4N8fVVrafVXnY9YHuq60eQcZ/nusWFeJlVyN7NFypYP2IOn25xylltEACKbgUdEsFU2h5k7yI2DVk5eAt9vB6qmAJlgfkGsXG0SZrCADoIKXl9jpwajw==", + "body": 
'{"version":"0","id":"ff6d828b-b35e-abdf-64b6-6ea2cf698c0b","detail-type":"StepFunctionTask","source":"my.eventbridge","account":"425362996713","time":"2025-03-13T15:14:21Z","region":"sa-east-1","resources":["arn:aws:states:sa-east-1:425362996713:stateMachine:abhinav-inner-state-machine","arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:fe087266-fe48-4a31-a21b-691f4e7ea985"],"detail":{"Message":"Hello from Step Functions!","TaskToken":"AQCEAAAAKgAAAAMAAAAAAAAAAfi3HMLTw3u9h0vSmkjyHlK1tv5bQUyA7i+6LIvrBWu+3S+DMuQ79JpMtAuCaMN/AGSuGPO7OPeTNA/9v7/kzAsLoPzwPhbrDPXP4SVF1YIO663PvtX/tEWxnAfwLqwDyx8G8VEsVLcmiiOafFCKJwn0OP/DoAWc0sjhWwRxIoQ0ipBGhOqU8rO8SFZVvxUbkosNejnhT7B6314pC89JZLpXU7SxFe+XrgN+uRAvFxsH/+RwDf94xk5hhtukH7HzhJKWN2WCtUISd84pM/1V7ppDuJ3FHgJT22xQIbEGA9Q4o+pLLehzE2SHCdo7eWYQqN+7BanxBNMI6kBMaf5nuh9izAp38lsrmHJyO8NvXgWg+F9hoTZX4RpV9CCwvRFrCRcCeDq4/uJzbvB4AwwA2q2Llm0X8yH0pKvPZ2v7pl4nCWdnEgj920I8AmBCuozbKP7gJRnAqfx3MnOSkpZTeGnHkp0ly8EevwCT2zX/1GQnCAx02kBaDJgUMputFeruMBzwVtlEVBFUUgaWbJwHzz2htuAw282pdATrKfv4VV1N962uLBJ32wd9a92rX7VXXToitvZGIvf/Z7cu4xfAzxQH1rIQ3M4ojkR9r48qoYtnYDlEf+BkIL8L4+xpbRFSBk3p","_datadog":{"Execution":{"Id":"arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:fe087266-fe48-4a31-a21b-691f4e7ea985","StartTime":"2025-03-13T15:14:21.730Z","Name":"fe087266-fe48-4a31-a21b-691f4e7ea985","RoleArn":"arn:aws:iam::425362996713:role/service-role/StepFunctions-abhinav-activity-state-machine-role-22jpbgl6j","RedriveCount":0},"StateMachine":{"Id":"arn:aws:states:sa-east-1:425362996713:stateMachine:abhinav-inner-state-machine","Name":"abhinav-inner-state-machine"},"State":{"Name":"EventBridge PutEvents","EnteredTime":"2025-03-13T15:14:21.765Z","RetryCount":0},"Task":{"Token":"AQCEAAAAKgAAAAMAAAAAAAAAAfi3HMLTw3u9h0vSmkjyHlK1tv5bQUyA7i+6LIvrBWu+3S+DMuQ79JpMtAuCaMN/AGSuGPO7OPeTNA/9v7/kzAsLoPzwPhbrDPXP4SVF1YIO663PvtX/tEWxnAfwLqwDyx8G8VEsVLcmiiOafFCKJwn0OP/DoAWc0sjhWwRxIoQ0ipBGhOqU8rO8SFZVvxUbkosNejnhT7B6314pC89JZLpXU7SxFe+XrgN+uRAvFxsH/+RwDf94xk5hhtukH7HzhJKWN2WCtUISd84pM/1V7ppDuJ3FHgJT22xQIbEGA9Q4o+pLLehzE2SHCdo7eWYQqN+7BanxBNMI6kBMaf5nuh9izAp38lsrmHJyO8NvXgWg+F9hoTZX4RpV9CCwvRFrCRcCeDq4/uJzbvB4AwwA2q2Llm0X8yH0pKvPZ2v7pl4nCWdnEgj920I8AmBCuozbKP7gJRnAqfx3MnOSkpZTeGnHkp0ly8EevwCT2zX/1GQnCAx02kBaDJgUMputFeruMBzwVtlEVBFUUgaWbJwHzz2htuAw282pdATrKfv4VV1N962uLBJ32wd9a92rX7VXXToitvZGIvf/Z7cu4xfAzxQH1rIQ3M4ojkR9r48qoYtnYDlEf+BkIL8L4+xpbRFSBk3p"},"RootExecutionId":"arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:fe087266-fe48-4a31-a21b-691f4e7ea985","serverless-version":"v1"}}}', + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1741878862068", + "SenderId": "AROAWGCM4HXUUNHLDXVER:6145b5ba998f311c8ac27f5cade2b915", + "ApproximateFirstReceiveTimestamp": "1741878862075", + }, + "messageAttributes": {}, + "md5OfBody": "e5cf8197b304a4dd4fd5db8e4842484b", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:sa-east-1:425362996713:abhinav-q", + "awsRegion": "sa-east-1", + } + ] + } + self._test_step_function_trace_data_common( + eventbridge_sqs_event, + 6527209323865742984, + 14276854885394865473, + "2ee7d9862d048173", + ) + + @with_trace_propagation_style("datadog") + def test_step_function_trace_data_sns(self): + """Test step function trace data extraction through SNS""" + sns_event = { + "Records": [ + { + "EventSource": "aws:sns", + "EventVersion": "1.0", + "EventSubscriptionArn": "arn:aws:sns:sa-east-1:425362996713:logs-to-traces-dev-topic:f1653ba3-2ff7-4c8e-9381-45a7a62f9708", + "Sns": { + 
"Type": "Notification", + "MessageId": "7bc0c17d-bf88-5ff4-af7f-a131463a0d90", + "TopicArn": "arn:aws:sns:sa-east-1:425362996713:logs-to-traces-dev-topic", + "Subject": None, + "Message": "{}", + "Timestamp": "2025-03-13T15:19:14.245Z", + "SignatureVersion": "1", + "Signature": "r8RoYzq4uNcq0yj7sxcp8sTbFiDk8zqtocG7mJuE2MPVuR8O5eNg2ohofokUnC84xADlCq5k6ElP55lbbY36tQO+qDGdV6+TGN4bAL9FiQrDE6tQYYJdlv/sYE7iOOgnRBC9ljEdCIDNtQNGCfND/8JzatPg8KAy7xMRcLrGWu4xIMEysqNTz7rETfhdZjLQPssAht44KcoUJCH4/VuB+B9W1RhwA+M8Q3tqxzahIXzcgDM8OlmfkBlXo4FDVF3WUzjXLf9AMOg+66GupjQFtUpmRMkA8KXSV1HCso7e6nIIWtOnUoWeDDUfQPFFq4TNSlb6h2NuebaHdnW5nhxnJQ==", + "SigningCertUrl": "https://sns.sa-east-1.amazonaws.com/SimpleNotificationService-9c6465fa7f48f5cacd23014631ec1136.pem", + "UnsubscribeUrl": "https://sns.sa-east-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:sa-east-1:425362996713:logs-to-traces-dev-topic:f1653ba3-2ff7-4c8e-9381-45a7a62f9708", + "MessageAttributes": { + "_datadog": { + "Type": "String", + "Value": '{"Execution":{"Id":"arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:11623e4f-70ee-4330-8fbe-955152dea54c","StartTime":"2025-03-13T15:19:14.019Z","Name":"11623e4f-70ee-4330-8fbe-955152dea54c","RoleArn":"arn:aws:iam::425362996713:role/service-role/StepFunctions-abhinav-activity-state-machine-role-22jpbgl6j","RedriveCount":0},"StateMachine":{"Id":"arn:aws:states:sa-east-1:425362996713:stateMachine:abhinav-inner-state-machine","Name":"abhinav-inner-state-machine"},"State":{"Name":"SNS Publish","EnteredTime":"2025-03-13T15:19:14.061Z","RetryCount":0},"RootExecutionId":"arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:11623e4f-70ee-4330-8fbe-955152dea54c","serverless-version":"v1"}', + } + }, + }, + } + ] + } + self._test_step_function_trace_data_common( + sns_event, 1459500239678510857, 13193042003602978730, "fafc98885fd4647" + ) + + @with_trace_propagation_style("datadog") + def test_step_function_trace_data_sns_sqs(self): + """Test step function trace data extraction through SNS and SQS""" + sns_sqs_event = { + "Records": [ + { + "messageId": "9ec3339f-cd1a-43ba-9681-3e9113b430d3", + "receiptHandle": "AQEBJ5gIvqEWQt39NHPMAoK57cGgKtrgTtckWeWdDRi2FeucYr6pBhNjzXuUrmoHZMozX1WaoABtfQ5+kX5ucDBpA2Ci3Q07Z4MYvA6X0Sw13HCkiBnLrHPmH/F3rUBjvdRkIIKqA2ACX58MdkaYGNpqsHTJHB613wa8z4zurK0u7eUIXrr+e+gtsuPD39hiWlJo7cpBVv7y178rzMX8gPQTnRJv1cjhCHENtjWTSmfFC5N+BIQNIcjFsTTDRSovZlNIfAEuS+uowgzk0DUyoTJD5nFTL8lQHeXGRCUQe58/UY9OwRXEFVPGZOQR4OI9Wa4Kf/keFypTk9YwC9DhSeKvzZ0wBvejyl1n0ztT45+XYoWfi0mxGWM5b7r9wT36RDmjnM6vszH/d3fhZSRPASxWBQ==", + "body": '{\n "Type" : "Notification",\n "MessageId" : "1f3078d0-c792-5cf3-a130-189c3b846a3f",\n "TopicArn" : "arn:aws:sns:sa-east-1:425362996713:logs-to-traces-dev-topic",\n "Message" : "{}",\n "Timestamp" : "2025-03-13T15:29:26.348Z",\n "SignatureVersion" : "1",\n "Signature" : "mxOqAQ5o/isJrMS0PezHKRaA3g8Z/8YDbkToqhJub6I66LGtl+NYhyfTyllbgxvRP2XD2meKPRSgPI3nLyq8UHsWgyYwe3Tsv8QpRunCVE9Pebh+V1LGPWfjOiL0e+bnaj956QJD99560LJ6bzWP9QO584/zfOdcw6E5XQZfAI+pvEsf28Dy0WJO/lWTATRZDf8wGhmc7uKI1ZMsrOaNoUD8PXVqsI4yrJHxhzMb3SrC7YjI/PnNIbcn6ezwprbUdbZvyNAfJiE0k5IlppA089tMXC/ItgC7AgQhG9huPdKi5KdWGACK7gEwqmFwL+5T33sUXDaH2g58WhCs76pKEw==",\n "SigningCertURL" : "https://sns.sa-east-1.amazonaws.com/SimpleNotificationService-9c6465fa7f48f5cacd23014631ec1136.pem",\n "UnsubscribeURL" : "https://sns.sa-east-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:sa-east-1:425362996713:logs-to-traces-dev-topic:5f64545d-ae9a-4a5f-a7ee-798a0bd8519e",\n 
"MessageAttributes" : {\n "_datadog" : {"Type":"String","Value":"{\\"Execution\\":{\\"Id\\":\\"arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:37ff72b8-0ee0-49e2-93c0-8a1764206a03\\",\\"StartTime\\":\\"2025-03-13T15:29:26.144Z\\",\\"Name\\":\\"37ff72b8-0ee0-49e2-93c0-8a1764206a03\\",\\"RoleArn\\":\\"arn:aws:iam::425362996713:role/service-role/StepFunctions-abhinav-activity-state-machine-role-22jpbgl6j\\",\\"RedriveCount\\":0},\\"StateMachine\\":{\\"Id\\":\\"arn:aws:states:sa-east-1:425362996713:stateMachine:abhinav-inner-state-machine\\",\\"Name\\":\\"abhinav-inner-state-machine\\"},\\"State\\":{\\"Name\\":\\"SNS Publish\\",\\"EnteredTime\\":\\"2025-03-13T15:29:26.182Z\\",\\"RetryCount\\":0},\\"RootExecutionId\\":\\"arn:aws:states:sa-east-1:425362996713:execution:abhinav-inner-state-machine:37ff72b8-0ee0-49e2-93c0-8a1764206a03\\",\\"serverless-version\\":\\"v1\\"}"}\n }\n}', + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1741879766424", + "SenderId": "AIDAIOA2GYWSHW4E2VXIO", + "ApproximateFirstReceiveTimestamp": "1741879766432", + }, + "messageAttributes": {}, + "md5OfBody": "52af59de28507d7e67324b46c95337d8", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:sa-east-1:425362996713:abhinav-q", + "awsRegion": "sa-east-1", + } + ] + } + self._test_step_function_trace_data_common( + sns_sqs_event, 5708348677301000120, 18223515719478572006, "45457f5f3fde3fa1" ) diff --git a/tests/test_trigger.py b/tests/test_trigger.py index be028a23..9cb088f1 100644 --- a/tests/test_trigger.py +++ b/tests/test_trigger.py @@ -9,6 +9,7 @@ get_event_source_arn, extract_trigger_tags, extract_http_status_code_tag, + is_step_function_event, ) from tests.utils import get_mock_context @@ -543,3 +544,68 @@ def test_extract_http_status_code_tag_from_response_object(self): response.status_code = 403 status_code = extract_http_status_code_tag(trigger_tags, response) self.assertEqual(status_code, "403") + + +class IsStepFunctionEvent(unittest.TestCase): + def test_is_step_function_event_jsonata(self): + event = { + "_datadog": { + "Execution": { + "Id": "665c417c-1237-4742-aaca-8b3becbb9e75", + "RedriveCount": 0, + }, + "StateMachine": {}, + "State": { + "Name": "my-awesome-state", + "EnteredTime": "Mon Nov 13 12:43:33 PST 2023", + "RetryCount": 0, + }, + "x-datadog-trace-id": "5821803790426892636", + "x-datadog-tags": "_dd.p.dm=-0,_dd.p.tid=672a7cb100000000", + "serverless-version": "v1", + } + } + self.assertTrue(is_step_function_event(event)) + + def test_is_step_function_event_jsonpath(self): + event = { + "Execution": { + "Id": "665c417c-1237-4742-aaca-8b3becbb9e75", + "RedriveCount": 0, + }, + "StateMachine": {}, + "State": { + "Name": "my-awesome-state", + "EnteredTime": "Mon Nov 13 12:43:33 PST 2023", + "RetryCount": 0, + }, + } + self.assertTrue(is_step_function_event(event)) + + def test_is_step_function_event_legacy_lambda(self): + event = { + "Payload": { + "Execution": { + "Id": "665c417c-1237-4742-aaca-8b3becbb9e75", + "RedriveCount": 0, + }, + "StateMachine": {}, + "State": { + "Name": "my-awesome-state", + "EnteredTime": "Mon Nov 13 12:43:33 PST 2023", + "RetryCount": 0, + }, + } + } + self.assertTrue(is_step_function_event(event)) + + def test_is_step_function_event_dd_header(self): + event = { + "_datadog": { + "x-datadog-trace-id": "5821803790426892636", + "x-datadog-parent-id": "5821803790426892636", + "x-datadog-tags": "_dd.p.dm=-0,_dd.p.tid=672a7cb100000000", + "x-datadog-sampling-priority": "1", + } + } + 
self.assertFalse(is_step_function_event(event)) From 92ec3b8e36b688ae972b1fe23c9737a6488668fa Mon Sep 17 00:00:00 2001 From: Nicholas Hulston Date: Wed, 19 Mar 2025 07:43:20 -0700 Subject: [PATCH 13/44] Submit metric for DynamoDB Stream Type (#578) --- datadog_lambda/metric.py | 14 ++++++++++++++ datadog_lambda/span_pointers.py | 4 ++++ .../snapshots/logs/async-metrics_python310.log | 9 +++++++++ .../snapshots/logs/async-metrics_python311.log | 9 +++++++++ .../snapshots/logs/async-metrics_python312.log | 9 +++++++++ .../snapshots/logs/async-metrics_python313.log | 9 +++++++++ .../snapshots/logs/async-metrics_python38.log | 9 +++++++++ .../snapshots/logs/async-metrics_python39.log | 9 +++++++++ .../snapshots/logs/sync-metrics_python310.log | 9 +++++++++ .../snapshots/logs/sync-metrics_python311.log | 9 +++++++++ .../snapshots/logs/sync-metrics_python312.log | 11 ++++++++++- .../snapshots/logs/sync-metrics_python313.log | 11 ++++++++++- .../snapshots/logs/sync-metrics_python38.log | 9 +++++++++ .../snapshots/logs/sync-metrics_python39.log | 11 ++++++++++- 14 files changed, 129 insertions(+), 3 deletions(-) diff --git a/datadog_lambda/metric.py b/datadog_lambda/metric.py index 6389c268..f9c67a26 100644 --- a/datadog_lambda/metric.py +++ b/datadog_lambda/metric.py @@ -188,3 +188,17 @@ def submit_errors_metric(lambda_context): lambda_context (object): Lambda context dict passed to the function by AWS """ submit_enhanced_metric("errors", lambda_context) + + +def submit_dynamodb_stream_type_metric(event): + stream_view_type = ( + event.get("Records", [{}])[0].get("dynamodb", {}).get("StreamViewType") + ) + if stream_view_type: + lambda_metric( + "datadog.serverless.dynamodb.stream.type", + 1, + timestamp=None, + tags=[f"streamtype:{stream_view_type}"], + force_async=True, + ) diff --git a/datadog_lambda/span_pointers.py b/datadog_lambda/span_pointers.py index e111469e..40d959e6 100644 --- a/datadog_lambda/span_pointers.py +++ b/datadog_lambda/span_pointers.py @@ -6,6 +6,8 @@ from ddtrace._trace._span_pointer import _SpanPointerDirection from ddtrace._trace._span_pointer import _SpanPointerDescription + +from datadog_lambda.metric import submit_dynamodb_stream_type_metric from datadog_lambda.trigger import EventTypes @@ -28,6 +30,8 @@ def calculate_span_pointers( return _calculate_s3_span_pointers_for_event(event) elif event_source.equals(EventTypes.DYNAMODB): + # Temporary metric. 
TODO eventually remove(@nhulston) + submit_dynamodb_stream_type_metric(event) return _calculate_dynamodb_span_pointers_for_event(event) except Exception as e: diff --git a/tests/integration/snapshots/logs/async-metrics_python310.log b/tests/integration/snapshots/logs/async-metrics_python310.log index ed0d3b43..24d3fb5b 100644 --- a/tests/integration/snapshots/logs/async-metrics_python310.log +++ b/tests/integration/snapshots/logs/async-metrics_python310.log @@ -188,6 +188,15 @@ START "dd_lambda_layer:datadog-python310_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python310_X.X.X" + ] +} { "m": "hello.dog", "v": 1, diff --git a/tests/integration/snapshots/logs/async-metrics_python311.log b/tests/integration/snapshots/logs/async-metrics_python311.log index b57a1b5a..e4fa66bc 100644 --- a/tests/integration/snapshots/logs/async-metrics_python311.log +++ b/tests/integration/snapshots/logs/async-metrics_python311.log @@ -188,6 +188,15 @@ START "dd_lambda_layer:datadog-python311_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python311_X.X.X" + ] +} { "m": "hello.dog", "v": 1, diff --git a/tests/integration/snapshots/logs/async-metrics_python312.log b/tests/integration/snapshots/logs/async-metrics_python312.log index 1b7e4b08..0d632c6c 100644 --- a/tests/integration/snapshots/logs/async-metrics_python312.log +++ b/tests/integration/snapshots/logs/async-metrics_python312.log @@ -188,6 +188,15 @@ START "dd_lambda_layer:datadog-python312_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python312_X.X.X" + ] +} { "m": "hello.dog", "v": 1, diff --git a/tests/integration/snapshots/logs/async-metrics_python313.log b/tests/integration/snapshots/logs/async-metrics_python313.log index 32342559..09070709 100644 --- a/tests/integration/snapshots/logs/async-metrics_python313.log +++ b/tests/integration/snapshots/logs/async-metrics_python313.log @@ -188,6 +188,15 @@ START "dd_lambda_layer:datadog-python313_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python313_X.X.X" + ] +} { "m": "hello.dog", "v": 1, diff --git a/tests/integration/snapshots/logs/async-metrics_python38.log b/tests/integration/snapshots/logs/async-metrics_python38.log index 9dc9edf6..4a506930 100644 --- a/tests/integration/snapshots/logs/async-metrics_python38.log +++ b/tests/integration/snapshots/logs/async-metrics_python38.log @@ -188,6 +188,15 @@ START "dd_lambda_layer:datadog-python38_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python38_X.X.X" + ] +} { "m": "hello.dog", "v": 1, diff --git a/tests/integration/snapshots/logs/async-metrics_python39.log b/tests/integration/snapshots/logs/async-metrics_python39.log index 89e5d227..54081402 100644 --- a/tests/integration/snapshots/logs/async-metrics_python39.log +++ b/tests/integration/snapshots/logs/async-metrics_python39.log @@ -188,6 +188,15 @@ START "dd_lambda_layer:datadog-python39_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + 
"dd_lambda_layer:datadog-python39_X.X.X" + ] +} { "m": "hello.dog", "v": 1, diff --git a/tests/integration/snapshots/logs/sync-metrics_python310.log b/tests/integration/snapshots/logs/sync-metrics_python310.log index 6eab44c2..e2569775 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python310.log +++ b/tests/integration/snapshots/logs/sync-metrics_python310.log @@ -207,6 +207,15 @@ START "dd_lambda_layer:datadog-python310_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python310_X.X.X" + ] +} HTTP GET https://datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX { diff --git a/tests/integration/snapshots/logs/sync-metrics_python311.log b/tests/integration/snapshots/logs/sync-metrics_python311.log index 87cd6a6a..69d4a695 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python311.log +++ b/tests/integration/snapshots/logs/sync-metrics_python311.log @@ -207,6 +207,15 @@ START "dd_lambda_layer:datadog-python311_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python311_X.X.X" + ] +} HTTP GET https://datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX { diff --git a/tests/integration/snapshots/logs/sync-metrics_python312.log b/tests/integration/snapshots/logs/sync-metrics_python312.log index 41b5a71c..49bae0a2 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python312.log +++ b/tests/integration/snapshots/logs/sync-metrics_python312.log @@ -207,6 +207,15 @@ START "dd_lambda_layer:datadog-python312_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python312_X.X.X" + ] +} HTTP GET https://datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX { @@ -546,7 +555,6 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A ] } HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","Content-Encoding:deflate","Content-Length:XXXX","Content-Type:application/json","DD-API-KEY:XXXX","User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)","traceparent:XXX","tracestate:XXX -END Duration: XXXX ms Memory Used: XXXX MB { "traces": [ [ @@ -585,6 +593,7 @@ END Duration: XXXX ms Memory Used: XXXX MB ] ] } +END Duration: XXXX ms Memory Used: XXXX MB START { "m": "aws.lambda.enhanced.invocations", diff --git a/tests/integration/snapshots/logs/sync-metrics_python313.log 
b/tests/integration/snapshots/logs/sync-metrics_python313.log index 439e44d6..2f461f6f 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python313.log +++ b/tests/integration/snapshots/logs/sync-metrics_python313.log @@ -207,6 +207,15 @@ START "dd_lambda_layer:datadog-python313_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python313_X.X.X" + ] +} HTTP GET https://datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX { @@ -1302,7 +1311,6 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A ] } HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","Content-Encoding:deflate","Content-Length:XXXX","Content-Type:application/json","DD-API-KEY:XXXX","User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)","traceparent:XXX","tracestate:XXX -END Duration: XXXX ms Memory Used: XXXX MB { "traces": [ [ @@ -1341,6 +1349,7 @@ END Duration: XXXX ms Memory Used: XXXX MB ] ] } +END Duration: XXXX ms Memory Used: XXXX MB START { "m": "aws.lambda.enhanced.invocations", diff --git a/tests/integration/snapshots/logs/sync-metrics_python38.log b/tests/integration/snapshots/logs/sync-metrics_python38.log index b30289ca..83e33d33 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python38.log +++ b/tests/integration/snapshots/logs/sync-metrics_python38.log @@ -207,6 +207,15 @@ START "dd_lambda_layer:datadog-python38_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python38_X.X.X" + ] +} HTTP GET https://datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX { diff --git a/tests/integration/snapshots/logs/sync-metrics_python39.log b/tests/integration/snapshots/logs/sync-metrics_python39.log index 772ea5d1..0a433c34 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python39.log +++ b/tests/integration/snapshots/logs/sync-metrics_python39.log @@ -207,6 +207,15 @@ START "dd_lambda_layer:datadog-python39_X.X.X" ] } +{ + "m": "datadog.serverless.dynamodb.stream.type", + "v": 1, + "e": XXXX, + "t": [ + "streamtype:NEW_AND_OLD_IMAGES", + "dd_lambda_layer:datadog-python39_X.X.X" + ] +} HTTP GET https://datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","User-Agent:python-requests/X.X.X","traceparent:XXX","tracestate:XXX { @@ -368,6 +377,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A ] } HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, 
deflate","Accept:*/*","Connection:keep-alive","Content-Encoding:deflate","Content-Length:XXXX","Content-Type:application/json","DD-API-KEY:XXXX","User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)","traceparent:XXX","tracestate:XXX +END Duration: XXXX ms Memory Used: XXXX MB { "traces": [ [ @@ -406,7 +416,6 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept ] ] } -END Duration: XXXX ms Memory Used: XXXX MB START { "m": "aws.lambda.enhanced.invocations", From 71c14e370e9f2a086ff676705a2d19b045b15f20 Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Mon, 24 Mar 2025 08:38:10 -0700 Subject: [PATCH 14/44] Remove __future__ import. (#579) --- datadog_lambda/handler.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datadog_lambda/handler.py b/datadog_lambda/handler.py index 433d9b92..4f12b1ad 100644 --- a/datadog_lambda/handler.py +++ b/datadog_lambda/handler.py @@ -3,7 +3,6 @@ # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2020 Datadog, Inc. -from __future__ import absolute_import from importlib import import_module import os From 95f9aed2b884f6205eb3e346ef6498bc0ae6aa4e Mon Sep 17 00:00:00 2001 From: Brett Langdon Date: Thu, 27 Mar 2025 15:28:52 -0400 Subject: [PATCH 15/44] Revert "Use sam/build-python images for building layers. (#577)" (#580) This reverts commit 5a55fe45a49b8d7e6c8db0734f203bdff648f383. --- scripts/build_layers.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_layers.sh b/scripts/build_layers.sh index 23941b7a..a0d6ee39 100755 --- a/scripts/build_layers.sh +++ b/scripts/build_layers.sh @@ -61,7 +61,7 @@ function docker_build_zip { # between different python runtimes. temp_dir=$(mktemp -d) docker buildx build -t datadog-lambda-python-${arch}:$1 . --no-cache \ - --build-arg image=public.ecr.aws/sam/build-python$1:1 \ + --build-arg image=public.ecr.aws/docker/library/python:$1 \ --build-arg runtime=python$1 \ --platform linux/${arch} \ --progress=plain \ From 2320c0a909413c8b66ba501fc969c043fbd27ec9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?jordan=20gonz=C3=A1lez?= <30836115+duncanista@users.noreply.github.com> Date: Fri, 28 Mar 2025 16:28:25 -0400 Subject: [PATCH 16/44] chore: lazy load some imports (#581) * lazy load metrics also hashlib right away * black * update invocation metric to be error metric * lazyload `base64` * patch right call --- datadog_lambda/api.py | 2 +- datadog_lambda/tracing.py | 10 ++++++++-- datadog_lambda/trigger.py | 3 ++- datadog_lambda/wrapper.py | 24 ++++++++++++++++-------- tests/test_wrapper.py | 2 +- 5 files changed, 28 insertions(+), 13 deletions(-) diff --git a/datadog_lambda/api.py b/datadog_lambda/api.py index c539ea05..fd3e2c17 100644 --- a/datadog_lambda/api.py +++ b/datadog_lambda/api.py @@ -1,6 +1,5 @@ import os import logging -import base64 logger = logging.getLogger(__name__) KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName" @@ -9,6 +8,7 @@ def decrypt_kms_api_key(kms_client, ciphertext): from botocore.exceptions import ClientError + import base64 """ Decodes and deciphers the base64-encoded ciphertext given as a parameter using KMS. diff --git a/datadog_lambda/tracing.py b/datadog_lambda/tracing.py index 0fae76dd..9a27673c 100644 --- a/datadog_lambda/tracing.py +++ b/datadog_lambda/tracing.py @@ -2,10 +2,8 @@ # under the Apache License Version 2.0. # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2019 Datadog, Inc. 
-import hashlib import logging import os -import base64 import traceback import ujson as json from datetime import datetime, timezone @@ -259,6 +257,8 @@ def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context): dd_json_data = None dd_json_data_type = dd_payload.get("Type") or dd_payload.get("dataType") if dd_json_data_type == "Binary": + import base64 + dd_json_data = dd_payload.get("binaryValue") or dd_payload.get("Value") if dd_json_data: dd_json_data = base64.b64decode(dd_json_data) @@ -373,6 +373,8 @@ def extract_context_from_kinesis_event(event, lambda_context): return extract_context_from_lambda_context(lambda_context) data = kinesis.get("data") if data: + import base64 + b64_bytes = data.encode("ascii") str_bytes = base64.b64decode(b64_bytes) data_str = str_bytes.decode("ascii") @@ -387,6 +389,8 @@ def extract_context_from_kinesis_event(event, lambda_context): def _deterministic_sha256_hash(s: str, part: str) -> int: + import hashlib + sha256_hash = hashlib.sha256(s.encode()).hexdigest() # First two chars is '0b'. zfill to ensure 256 bits, but we only care about the first 128 bits binary_hash = bin(int(sha256_hash, 16))[2:].zfill(256) @@ -551,6 +555,8 @@ def get_injected_authorizer_data(event, is_http_api) -> dict: if not dd_data_raw: return None + import base64 + injected_data = json.loads(base64.b64decode(dd_data_raw)) # Lambda authorizer's results can be cached. But the payload will still have the injected diff --git a/datadog_lambda/trigger.py b/datadog_lambda/trigger.py index 708138bf..8090e36e 100644 --- a/datadog_lambda/trigger.py +++ b/datadog_lambda/trigger.py @@ -3,7 +3,6 @@ # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2019 Datadog, Inc. -import base64 import gzip import ujson as json from io import BytesIO, BufferedReader @@ -242,6 +241,8 @@ def parse_event_source_arn(source: _EventSource, event: dict, context: Any) -> s # e.g. arn:aws:logs:us-west-1:123456789012:log-group:/my-log-group-xyz if source.event_type == EventTypes.CLOUDWATCH_LOGS: + import base64 + with gzip.GzipFile( fileobj=BytesIO(base64.b64decode(event.get("awslogs", {}).get("data"))) ) as decompress_stream: diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 5641bd15..8c1914e3 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -2,7 +2,6 @@ # under the Apache License Version 2.0. # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2019 Datadog, Inc. 
-import base64 import os import logging import traceback @@ -23,11 +22,6 @@ XraySubsegment, Headers, ) -from datadog_lambda.metric import ( - flush_stats, - submit_invocations_metric, - submit_errors_metric, -) from datadog_lambda.module_name import modify_module_name from datadog_lambda.patch import patch_all from datadog_lambda.span_pointers import calculate_span_pointers @@ -248,7 +242,11 @@ def __call__(self, event, context, **kwargs): self.response = self.func(event, context, **kwargs) return self.response except Exception: - submit_errors_metric(context) + if not should_use_extension: + from datadog_lambda.metric import submit_errors_metric + + submit_errors_metric(context) + if self.span: self.span.set_traceback() raise @@ -274,6 +272,9 @@ def _inject_authorizer_span_headers(self, request_id): injected_headers[Headers.Parent_Span_Finish_Time] = finish_time_ns if request_id is not None: injected_headers[Headers.Authorizing_Request_Id] = request_id + + import base64 + datadog_data = base64.b64encode( json.dumps(injected_headers, escape_forward_slashes=False).encode() ).decode() @@ -284,7 +285,12 @@ def _before(self, event, context): try: self.response = None set_cold_start(init_timestamp_ns) - submit_invocations_metric(context) + + if not should_use_extension: + from datadog_lambda.metric import submit_invocations_metric + + submit_invocations_metric(context) + self.trigger_tags = extract_trigger_tags(event, context) # Extract Datadog trace context and source from incoming requests dd_context, trace_context_source, event_source = extract_dd_trace_context( @@ -383,6 +389,8 @@ def _after(self, event, context): logger.debug("Failed to create cold start spans. %s", e) if not self.flush_to_log or should_use_extension: + from datadog_lambda.metric import flush_stats + flush_stats(context) if should_use_extension and self.local_testing_mode: # when testing locally, the extension does not know when an diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py index f47285e6..4b243036 100644 --- a/tests/test_wrapper.py +++ b/tests/test_wrapper.py @@ -470,7 +470,7 @@ def lambda_handler(event, context): self.mock_write_metric_point_to_stdout.assert_not_called() def test_only_one_wrapper_in_use(self): - patcher = patch("datadog_lambda.wrapper.submit_invocations_metric") + patcher = patch("datadog_lambda.metric.submit_invocations_metric") self.mock_submit_invocations_metric = patcher.start() self.addCleanup(patcher.stop) From 7cd2baf6361d0a02b8b48e084fbc828a7db66b00 Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Fri, 4 Apr 2025 10:49:52 -0400 Subject: [PATCH 17/44] v6.107.0 (#583) --- datadog_lambda/version.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_lambda/version.py b/datadog_lambda/version.py index 0c8d879b..702691d8 100644 --- a/datadog_lambda/version.py +++ b/datadog_lambda/version.py @@ -1 +1 @@ -__version__ = "6.106.0" +__version__ = "6.107.0" diff --git a/pyproject.toml b/pyproject.toml index 8db5e352..165a8cbe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "6.106.0" +version = "6.107.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. 
"] license = "Apache-2.0" From 8207aa224692c1f3eb72cc5cb8dc10251e5368d2 Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Wed, 16 Apr 2025 13:59:37 -0400 Subject: [PATCH 18/44] revert llmobs api key and forced agentless (#585) --- datadog_lambda/wrapper.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 8c1914e3..e81b1baa 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -49,14 +49,10 @@ if profiling_env_var: from ddtrace.profiling import profiler -llmobs_api_key = None llmobs_env_var = os.environ.get("DD_LLMOBS_ENABLED", "false").lower() in ("true", "1") if llmobs_env_var: - from datadog_lambda.api import get_api_key from ddtrace.llmobs import LLMObs - llmobs_api_key = get_api_key() - logger = logging.getLogger(__name__) DD_FLUSH_TO_LOG = "DD_FLUSH_TO_LOG" @@ -226,10 +222,7 @@ def __init__(self, func): # Enable LLM Observability if llmobs_env_var: - LLMObs.enable( - agentless_enabled=True, - api_key=llmobs_api_key, - ) + LLMObs.enable() logger.debug("datadog_lambda_wrapper initialized") except Exception as e: From 33cd5bf9e05bd3e3a35450c42b522ce1280b9add Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Thu, 24 Apr 2025 11:30:38 -0700 Subject: [PATCH 19/44] Add new region ap-southeast-7. (#586) --- ci/datasources/regions.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/ci/datasources/regions.yaml b/ci/datasources/regions.yaml index 93816ce9..f74c62b8 100644 --- a/ci/datasources/regions.yaml +++ b/ci/datasources/regions.yaml @@ -12,6 +12,7 @@ regions: - code: "ap-southeast-3" - code: "ap-southeast-4" - code: "ap-southeast-5" + - code: "ap-southeast-7" - code: "ap-northeast-1" - code: "ap-northeast-2" - code: "ap-northeast-3" From 72caf0ae5d29f82649b57fb6075dcf9a4f7a436b Mon Sep 17 00:00:00 2001 From: Nicholas Hulston Date: Tue, 29 Apr 2025 14:10:13 -0400 Subject: [PATCH 20/44] add mx-central-1 region (#587) --- ci/datasources/regions.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/ci/datasources/regions.yaml b/ci/datasources/regions.yaml index f74c62b8..d41bade3 100644 --- a/ci/datasources/regions.yaml +++ b/ci/datasources/regions.yaml @@ -29,4 +29,5 @@ regions: - code: "il-central-1" - code: "me-south-1" - code: "me-central-1" + - code: "mx-central-1" - code: "sa-east-1" From 1f8d3fd96a7dce844a3ecaea4ec2f41be1e73f56 Mon Sep 17 00:00:00 2001 From: Aleksandr Pasechnik Date: Tue, 6 May 2025 11:45:18 -0400 Subject: [PATCH 21/44] feat: Correct FIPS-mode metrics (#588) - Our `dogstatsd` client now supports timestamps for the metrics that it will send. - This unblocks us to always send metrics to the extension, even if they have a timestamp. Confirmed that this actually works now with both bottlecap and the go agent. - Refactored the metrics workflow to have an explicit choice of metrics handlers (Extension, Forwarder, Datadog API, or, for some FIPS usecases, No Handler). - Added a `DD_LAMBDA_FIPS_MODE` flag which allows FIPS-mode logic to be enabled in commercial regions or disabled in govcloud regions. - The new FIPS mode is used for Datadog API Key secret lookup and for metrics handling decisions. ### Breaking Change Since the `DD_LAMBDA_FIPS_MODE` defaults to `true` in govcloud, direct metrics submission there (without an Extension or a Forwarder) will now be disabled. 
--- datadog_lambda/api.py | 22 ++-- datadog_lambda/dogstatsd.py | 27 +++-- datadog_lambda/fips.py | 19 ++++ datadog_lambda/metric.py | 149 +++++++++++++++----------- datadog_lambda/stats_writer.py | 2 +- datadog_lambda/statsd_writer.py | 6 +- datadog_lambda/thread_stats_writer.py | 3 +- tests/test_api.py | 26 ++++- tests/test_dogstatsd.py | 20 ++-- tests/test_metric.py | 123 +++++++++++++-------- 10 files changed, 254 insertions(+), 143 deletions(-) create mode 100644 datadog_lambda/fips.py diff --git a/datadog_lambda/api.py b/datadog_lambda/api.py index fd3e2c17..d1cee4e4 100644 --- a/datadog_lambda/api.py +++ b/datadog_lambda/api.py @@ -1,5 +1,7 @@ -import os import logging +import os + +from datadog_lambda.fips import fips_mode_enabled logger = logging.getLogger(__name__) KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName" @@ -7,9 +9,10 @@ def decrypt_kms_api_key(kms_client, ciphertext): - from botocore.exceptions import ClientError import base64 + from botocore.exceptions import ClientError + """ Decodes and deciphers the base64-encoded ciphertext given as a parameter using KMS. For this to work properly, the Lambda function must have the appropriate IAM permissions. @@ -63,10 +66,9 @@ def get_api_key() -> str: DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", "")) LAMBDA_REGION = os.environ.get("AWS_REGION", "") - is_gov_region = LAMBDA_REGION.startswith("us-gov-") - if is_gov_region: + if fips_mode_enabled: logger.debug( - "Govcloud region detected. Using FIPs endpoints for secrets management." + "FIPS mode is enabled, using FIPS endpoints for secrets management." ) if DD_API_KEY_SECRET_ARN: @@ -80,7 +82,7 @@ def get_api_key() -> str: return "" endpoint_url = ( f"https://secretsmanager-fips.{secrets_region}.amazonaws.com" - if is_gov_region + if fips_mode_enabled else None ) secrets_manager_client = _boto3_client( @@ -92,7 +94,9 @@ def get_api_key() -> str: elif DD_API_KEY_SSM_NAME: # SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html fips_endpoint = ( - f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com" if is_gov_region else None + f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com" + if fips_mode_enabled + else None ) ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint) api_key = ssm_client.get_parameter( @@ -101,7 +105,9 @@ def get_api_key() -> str: elif DD_KMS_API_KEY: # KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html fips_endpoint = ( - f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com" if is_gov_region else None + f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com" + if fips_mode_enabled + else None ) kms_client = _boto3_client("kms", endpoint_url=fips_endpoint) api_key = decrypt_kms_api_key(kms_client, DD_KMS_API_KEY) diff --git a/datadog_lambda/dogstatsd.py b/datadog_lambda/dogstatsd.py index a627492d..f30a2039 100644 --- a/datadog_lambda/dogstatsd.py +++ b/datadog_lambda/dogstatsd.py @@ -1,11 +1,10 @@ +import errno import logging import os -import socket -import errno import re +import socket from threading import Lock - MIN_SEND_BUFFER_SIZE = 32 * 1024 log = logging.getLogger("datadog_lambda.dogstatsd") @@ -55,14 +54,21 @@ def _get_udp_socket(cls, host, port): return sock - def distribution(self, metric, value, tags=None): + def distribution(self, metric, value, tags=None, timestamp=None): """ - Send a global distribution value, optionally setting tags. + Send a global distribution value, optionally setting tags. 
The optional + timestamp should be an integer representing seconds since the epoch + (January 1, 1970, 00:00:00 UTC). >>> statsd.distribution("uploaded.file.size", 1445) >>> statsd.distribution("album.photo.count", 26, tags=["gender:female"]) + >>> statsd.distribution( + >>> "historic.file.count", + >>> 5, + >>> timestamp=int(datetime(2020, 2, 14, 12, 0, 0).timestamp()), + >>> ) """ - self._report(metric, "d", value, tags) + self._report(metric, "d", value, tags, timestamp) def close_socket(self): """ @@ -84,20 +90,21 @@ def normalize_tags(self, tag_list): for tag in tag_list ] - def _serialize_metric(self, metric, metric_type, value, tags): + def _serialize_metric(self, metric, metric_type, value, tags, timestamp): # Create/format the metric packet - return "%s:%s|%s%s" % ( + return "%s:%s|%s%s%s" % ( metric, value, metric_type, ("|#" + ",".join(self.normalize_tags(tags))) if tags else "", + ("|T" + str(timestamp)) if timestamp is not None else "", ) - def _report(self, metric, metric_type, value, tags): + def _report(self, metric, metric_type, value, tags, timestamp): if value is None: return - payload = self._serialize_metric(metric, metric_type, value, tags) + payload = self._serialize_metric(metric, metric_type, value, tags, timestamp) # Send it self._send_to_server(payload) diff --git a/datadog_lambda/fips.py b/datadog_lambda/fips.py new file mode 100644 index 00000000..8442ddd9 --- /dev/null +++ b/datadog_lambda/fips.py @@ -0,0 +1,19 @@ +import logging +import os + +is_gov_region = os.environ.get("AWS_REGION", "").startswith("us-gov-") + +fips_mode_enabled = ( + os.environ.get( + "DD_LAMBDA_FIPS_MODE", + "true" if is_gov_region else "false", + ).lower() + == "true" +) + +if is_gov_region or fips_mode_enabled: + logger = logging.getLogger(__name__) + logger.debug( + "Python Lambda Layer FIPS mode is %s.", + "enabled" if fips_mode_enabled else "not enabled", + ) diff --git a/datadog_lambda/metric.py b/datadog_lambda/metric.py index f9c67a26..0c18b517 100644 --- a/datadog_lambda/metric.py +++ b/datadog_lambda/metric.py @@ -3,37 +3,66 @@ # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2019 Datadog, Inc. +import enum +import logging import os import time -import logging -import ujson as json from datetime import datetime, timedelta +import ujson as json + from datadog_lambda.extension import should_use_extension -from datadog_lambda.tags import get_enhanced_metrics_tags, dd_lambda_layer_tag +from datadog_lambda.fips import fips_mode_enabled +from datadog_lambda.tags import dd_lambda_layer_tag, get_enhanced_metrics_tags logger = logging.getLogger(__name__) -lambda_stats = None -extension_thread_stats = None -flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true" +class MetricsHandler(enum.Enum): + EXTENSION = "extension" + FORWARDER = "forwarder" + DATADOG_API = "datadog_api" + NO_METRICS = "no_metrics" + -if should_use_extension: +def _select_metrics_handler(): + if should_use_extension: + return MetricsHandler.EXTENSION + if os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true": + return MetricsHandler.FORWARDER + + if fips_mode_enabled: + logger.debug( + "With FIPS mode enabled, the Datadog API metrics handler is unavailable." 
+ ) + return MetricsHandler.NO_METRICS + + return MetricsHandler.DATADOG_API + + +metrics_handler = _select_metrics_handler() +logger.debug("identified primary metrics handler as %s", metrics_handler) + + +lambda_stats = None +if metrics_handler == MetricsHandler.EXTENSION: from datadog_lambda.statsd_writer import StatsDWriter lambda_stats = StatsDWriter() -else: + +elif metrics_handler == MetricsHandler.DATADOG_API: # Periodical flushing in a background thread is NOT guaranteed to succeed # and leads to data loss. When disabled, metrics are only flushed at the # end of invocation. To make metrics submitted from a long-running Lambda # function available sooner, consider using the Datadog Lambda extension. - from datadog_lambda.thread_stats_writer import ThreadStatsWriter from datadog_lambda.api import init_api + from datadog_lambda.thread_stats_writer import ThreadStatsWriter + flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true" init_api() lambda_stats = ThreadStatsWriter(flush_in_thread) + enhanced_metrics_enabled = ( os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true" ) @@ -44,16 +73,19 @@ def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=Fal Submit a data point to Datadog distribution metrics. https://docs.datadoghq.com/graphing/metrics/distributions/ - When DD_FLUSH_TO_LOG is True, write metric to log, and - wait for the Datadog Log Forwarder Lambda function to submit - the metrics asynchronously. + If the Datadog Lambda Extension is present, metrics are submitted to its + dogstatsd endpoint. + + When DD_FLUSH_TO_LOG is True or force_async is True, write metric to log, + and wait for the Datadog Log Forwarder Lambda function to submit the + metrics asynchronously. Otherwise, the metrics will be submitted to the Datadog API periodically and at the end of the function execution in a background thread. 
-    Note that if the extension is present, it will override the DD_FLUSH_TO_LOG value
-    and always use the layer to send metrics to the extension
+    Note that if the extension is present, it will override the DD_FLUSH_TO_LOG
+    value and always use the layer to send metrics to the extension
     """
     if not metric_name or not isinstance(metric_name, str):
         logger.warning(
@@ -71,56 +103,54 @@ def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=Fal
         )
         return

-    flush_to_logs = os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true"
     tags = [] if tags is None else list(tags)
     tags.append(dd_lambda_layer_tag)

-    if should_use_extension and timestamp is not None:
-        # The extension does not support timestamps for distributions so we create a
-        # a thread stats writer to submit metrics with timestamps to the API
-        timestamp_ceiling = int(
-            (datetime.now() - timedelta(hours=4)).timestamp()
-        )  # 4 hours ago
-        if isinstance(timestamp, datetime):
-            timestamp = int(timestamp.timestamp())
-        if timestamp_ceiling > timestamp:
-            logger.warning(
-                "Timestamp %s is older than 4 hours, not submitting metric %s",
-                timestamp,
-                metric_name,
-            )
-            return
-        global extension_thread_stats
-        if extension_thread_stats is None:
-            from datadog_lambda.thread_stats_writer import ThreadStatsWriter
-            from datadog_lambda.api import init_api
-
-            init_api()
-            extension_thread_stats = ThreadStatsWriter(flush_in_thread)
-
-        extension_thread_stats.distribution(
-            metric_name, value, tags=tags, timestamp=timestamp
-        )
-        return
+    if metrics_handler == MetricsHandler.EXTENSION:
+        if timestamp is not None:
+            if isinstance(timestamp, datetime):
+                timestamp = int(timestamp.timestamp())
+
+            timestamp_floor = int((datetime.now() - timedelta(hours=4)).timestamp())
+            if timestamp < timestamp_floor:
+                logger.warning(
+                    "Timestamp %s is older than 4 hours, not submitting metric %s",
+                    timestamp,
+                    metric_name,
+                )
+                return

-    if should_use_extension:
         logger.debug(
             "Sending metric %s value %s to Datadog via extension", metric_name, value
         )
         lambda_stats.distribution(metric_name, value, tags=tags, timestamp=timestamp)
+
+    elif force_async or (metrics_handler == MetricsHandler.FORWARDER):
+        write_metric_point_to_stdout(metric_name, value, timestamp=timestamp, tags=tags)
+
+    elif metrics_handler == MetricsHandler.DATADOG_API:
+        lambda_stats.distribution(metric_name, value, tags=tags, timestamp=timestamp)
+
+    elif metrics_handler == MetricsHandler.NO_METRICS:
+        logger.debug(
+            "Metric %s cannot be submitted because the metrics handler is disabled",
+            metric_name,
+        )
+
     else:
-        if flush_to_logs or force_async:
-            write_metric_point_to_stdout(
-                metric_name, value, timestamp=timestamp, tags=tags
-            )
-        else:
-            lambda_stats.distribution(
-                metric_name, value, tags=tags, timestamp=timestamp
-            )
+        # This should be quite impossible, but let's at least log a message if
+ logger.debug( + "Metric %s cannot be submitted because the metrics handler is not configured: %s", + metric_name, + metrics_handler, + ) -def write_metric_point_to_stdout(metric_name, value, timestamp=None, tags=[]): +def write_metric_point_to_stdout(metric_name, value, timestamp=None, tags=None): """Writes the specified metric point to standard output""" + tags = tags or [] + logger.debug( "Sending metric %s value %s to Datadog via log forwarder", metric_name, value ) @@ -138,19 +168,8 @@ def write_metric_point_to_stdout(metric_name, value, timestamp=None, tags=[]): def flush_stats(lambda_context=None): - lambda_stats.flush() - - if extension_thread_stats is not None: - tags = None - if lambda_context is not None: - tags = get_enhanced_metrics_tags(lambda_context) - split_arn = lambda_context.invoked_function_arn.split(":") - if len(split_arn) > 7: - # Get rid of the alias - split_arn.pop() - arn = ":".join(split_arn) - tags.append("function_arn:" + arn) - extension_thread_stats.flush(tags) + if lambda_stats is not None: + lambda_stats.flush() def submit_enhanced_metric(metric_name, lambda_context): diff --git a/datadog_lambda/stats_writer.py b/datadog_lambda/stats_writer.py index d3919c30..563b1ae9 100644 --- a/datadog_lambda/stats_writer.py +++ b/datadog_lambda/stats_writer.py @@ -1,5 +1,5 @@ class StatsWriter: - def distribution(self, metric_name, value, tags=[], timestamp=None): + def distribution(self, metric_name, value, tags=None, timestamp=None): raise NotImplementedError() def flush(self): diff --git a/datadog_lambda/statsd_writer.py b/datadog_lambda/statsd_writer.py index 33843dc6..4aaab8d5 100644 --- a/datadog_lambda/statsd_writer.py +++ b/datadog_lambda/statsd_writer.py @@ -1,5 +1,5 @@ -from datadog_lambda.stats_writer import StatsWriter from datadog_lambda.dogstatsd import statsd +from datadog_lambda.stats_writer import StatsWriter class StatsDWriter(StatsWriter): @@ -7,8 +7,8 @@ class StatsDWriter(StatsWriter): Writes distribution metrics using StatsD protocol """ - def distribution(self, metric_name, value, tags=[], timestamp=None): - statsd.distribution(metric_name, value, tags=tags) + def distribution(self, metric_name, value, tags=None, timestamp=None): + statsd.distribution(metric_name, value, tags=tags, timestamp=timestamp) def flush(self): pass diff --git a/datadog_lambda/thread_stats_writer.py b/datadog_lambda/thread_stats_writer.py index 422a9a0a..f21ee31f 100644 --- a/datadog_lambda/thread_stats_writer.py +++ b/datadog_lambda/thread_stats_writer.py @@ -3,6 +3,7 @@ # Make sure that this package would always be lazy-loaded/outside from the critical path # since underlying packages are quite heavy to load and useless when the extension is present from datadog.threadstats import ThreadStats + from datadog_lambda.stats_writer import StatsWriter logger = logging.getLogger(__name__) @@ -17,7 +18,7 @@ def __init__(self, flush_in_thread): self.thread_stats = ThreadStats(compress_payload=True) self.thread_stats.start(flush_in_thread=flush_in_thread) - def distribution(self, metric_name, value, tags=[], timestamp=None): + def distribution(self, metric_name, value, tags=None, timestamp=None): self.thread_stats.distribution( metric_name, value, tags=tags, timestamp=timestamp ) diff --git a/tests/test_api.py b/tests/test_api.py index c98d91eb..59ee4ee8 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,6 +1,6 @@ import os import unittest -from unittest.mock import patch, MagicMock +from unittest.mock import MagicMock, patch import datadog_lambda.api as api @@ -22,6 
+22,7 @@ def setUp(self): ) self.env_patcher.start() + @patch("datadog_lambda.api.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") def test_secrets_manager_fips_endpoint(self, mock_boto3_client): mock_client = MagicMock() @@ -62,6 +63,28 @@ def test_secrets_manager_different_region(self, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") + @patch("datadog_lambda.api.fips_mode_enabled", True) + @patch("botocore.session.Session.create_client") + def test_secrets_manager_different_region_but_still_fips(self, mock_boto3_client): + mock_client = MagicMock() + mock_client.get_secret_value.return_value = {"SecretString": "test-api-key"} + mock_boto3_client.return_value = mock_client + + os.environ["AWS_REGION"] = "us-east-1" + os.environ[ + "DD_API_KEY_SECRET_ARN" + ] = "arn:aws:secretsmanager:us-west-1:1234567890:secret:key-name-123ABC" + + api_key = api.get_api_key() + + mock_boto3_client.assert_called_with( + "secretsmanager", + endpoint_url="https://secretsmanager-fips.us-west-1.amazonaws.com", + region_name="us-west-1", + ) + self.assertEqual(api_key, "test-api-key") + + @patch("datadog_lambda.api.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") def test_ssm_fips_endpoint(self, mock_boto3_client): mock_client = MagicMock() @@ -80,6 +103,7 @@ def test_ssm_fips_endpoint(self, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") + @patch("datadog_lambda.api.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") @patch("datadog_lambda.api.decrypt_kms_api_key") def test_kms_fips_endpoint(self, mock_decrypt_kms, mock_boto3_client): diff --git a/tests/test_dogstatsd.py b/tests/test_dogstatsd.py index 149e1a70..ea6afd48 100644 --- a/tests/test_dogstatsd.py +++ b/tests/test_dogstatsd.py @@ -1,5 +1,5 @@ -from collections import deque import unittest +from collections import deque from datadog_lambda.dogstatsd import statsd @@ -36,16 +36,20 @@ def test_init(self): self.assertEqual(statsd.port, 8125) self.assertEqual(statsd.encoding, "utf-8") - def test_distribution_no_tags(self): - statsd.distribution("my.test.metric", 3) + def _checkOnlyOneMetric(self, value): payload = self.recv() metrics = payload.split("\n") self.assertEqual(len(metrics), 1) - self.assertEqual("my.test.metric:3|d", metrics[0]) + self.assertEqual(value, metrics[0]) + + def test_distribution_no_tags(self): + statsd.distribution("my.test.metric", 3) + self._checkOnlyOneMetric("my.test.metric:3|d") def test_distribution_with_tags(self): statsd.distribution("my.test.tags.metric", 3, tags=["taga:valuea,tagb:valueb"]) - payload = self.recv() - metrics = payload.split("\n") - self.assertEqual(len(metrics), 1) - self.assertEqual("my.test.tags.metric:3|d|#taga:valuea_tagb:valueb", metrics[0]) + self._checkOnlyOneMetric("my.test.tags.metric:3|d|#taga:valuea_tagb:valueb") + + def test_distribution_with_timestamp(self): + statsd.distribution("my.test.timestamp.metric", 9, timestamp=123456789) + self._checkOnlyOneMetric("my.test.timestamp.metric:9|d|T123456789") diff --git a/tests/test_metric.py b/tests/test_metric.py index d10a0f0d..a4b0be2c 100644 --- a/tests/test_metric.py +++ b/tests/test_metric.py @@ -1,23 +1,33 @@ import os import unittest - -from unittest.mock import patch, call +from datetime import datetime, timedelta +from unittest.mock import call, patch from botocore.exceptions import ClientError as BotocoreClientError from datadog.api.exceptions import ClientError -from datetime import datetime, timedelta -from datadog_lambda.metric 
import lambda_metric, flush_stats -from datadog_lambda.api import decrypt_kms_api_key, KMS_ENCRYPTION_CONTEXT_KEY -from datadog_lambda.thread_stats_writer import ThreadStatsWriter +from datadog_lambda.api import KMS_ENCRYPTION_CONTEXT_KEY, decrypt_kms_api_key +from datadog_lambda.metric import ( + MetricsHandler, + _select_metrics_handler, + flush_stats, + lambda_metric, +) from datadog_lambda.tags import dd_lambda_layer_tag +from datadog_lambda.thread_stats_writer import ThreadStatsWriter class TestLambdaMetric(unittest.TestCase): def setUp(self): - patcher = patch("datadog_lambda.metric.lambda_stats") - self.mock_metric_lambda_stats = patcher.start() - self.addCleanup(patcher.stop) + lambda_stats_patcher = patch("datadog_lambda.metric.lambda_stats") + self.mock_metric_lambda_stats = lambda_stats_patcher.start() + self.addCleanup(lambda_stats_patcher.stop) + + stdout_metric_patcher = patch( + "datadog_lambda.metric.write_metric_point_to_stdout" + ) + self.mock_write_metric_point_to_stdout = stdout_metric_patcher.start() + self.addCleanup(stdout_metric_patcher.stop) def test_lambda_metric_tagged_with_dd_lambda_layer(self): lambda_metric("test", 1) @@ -35,67 +45,94 @@ def test_lambda_metric_tagged_with_dd_lambda_layer(self): # let's fake that the extension is present, this should override DD_FLUSH_TO_LOG @patch("datadog_lambda.metric.should_use_extension", True) - def test_lambda_metric_flush_to_log_with_extension(self): + def test_select_metrics_handler_extension_despite_flush_to_logs(self): + os.environ["DD_FLUSH_TO_LOG"] = "True" + self.assertEqual(MetricsHandler.EXTENSION, _select_metrics_handler()) + del os.environ["DD_FLUSH_TO_LOG"] + + @patch("datadog_lambda.metric.should_use_extension", False) + def test_select_metrics_handler_forwarder_when_flush_to_logs(self): os.environ["DD_FLUSH_TO_LOG"] = "True" + self.assertEqual(MetricsHandler.FORWARDER, _select_metrics_handler()) + del os.environ["DD_FLUSH_TO_LOG"] + + @patch("datadog_lambda.metric.should_use_extension", False) + def test_select_metrics_handler_dd_api_fallback(self): + os.environ["DD_FLUSH_TO_LOG"] = "False" + self.assertEqual(MetricsHandler.DATADOG_API, _select_metrics_handler()) + del os.environ["DD_FLUSH_TO_LOG"] + + @patch("datadog_lambda.metric.fips_mode_enabled", True) + @patch("datadog_lambda.metric.should_use_extension", False) + def test_select_metrics_handler_has_no_fallback_in_fips_mode(self): + os.environ["DD_FLUSH_TO_LOG"] = "False" + self.assertEqual(MetricsHandler.NO_METRICS, _select_metrics_handler()) + del os.environ["DD_FLUSH_TO_LOG"] + + @patch("datadog_lambda.metric.metrics_handler", MetricsHandler.EXTENSION) + def test_lambda_metric_goes_to_extension_with_extension_handler(self): lambda_metric("test", 1) self.mock_metric_lambda_stats.distribution.assert_has_calls( [call("test", 1, timestamp=None, tags=[dd_lambda_layer_tag])] ) - del os.environ["DD_FLUSH_TO_LOG"] - @patch("datadog_lambda.metric.should_use_extension", True) - def test_lambda_metric_timestamp_with_extension(self): - patcher = patch("datadog_lambda.metric.extension_thread_stats") - self.mock_metric_extension_thread_stats = patcher.start() - self.addCleanup(patcher.stop) + @patch("datadog_lambda.metric.metrics_handler", MetricsHandler.NO_METRICS) + def test_lambda_metric_has_nowhere_to_go_with_no_metrics_handler(self): + lambda_metric("test", 1) + self.mock_metric_lambda_stats.distribution.assert_not_called() + self.mock_write_metric_point_to_stdout.assert_not_called() + @patch("datadog_lambda.metric.metrics_handler", 
MetricsHandler.EXTENSION) + def test_lambda_metric_timestamp_with_extension(self): delta = timedelta(minutes=1) timestamp = int((datetime.now() - delta).timestamp()) lambda_metric("test_timestamp", 1, timestamp) - self.mock_metric_lambda_stats.distribution.assert_not_called() - self.mock_metric_extension_thread_stats.distribution.assert_called_with( - "test_timestamp", 1, timestamp=timestamp, tags=[dd_lambda_layer_tag] + self.mock_metric_lambda_stats.distribution.assert_has_calls( + [call("test_timestamp", 1, timestamp=timestamp, tags=[dd_lambda_layer_tag])] ) + self.mock_write_metric_point_to_stdout.assert_not_called() - @patch("datadog_lambda.metric.should_use_extension", True) + @patch("datadog_lambda.metric.metrics_handler", MetricsHandler.EXTENSION) def test_lambda_metric_datetime_with_extension(self): - patcher = patch("datadog_lambda.metric.extension_thread_stats") - self.mock_metric_extension_thread_stats = patcher.start() - self.addCleanup(patcher.stop) - - delta = timedelta(hours=5) + delta = timedelta(minutes=1) timestamp = datetime.now() - delta - lambda_metric("test_timestamp", 1, timestamp) - self.mock_metric_lambda_stats.distribution.assert_not_called() - self.mock_metric_extension_thread_stats.distribution.assert_not_called() + lambda_metric("test_datetime_timestamp", 0, timestamp) + self.mock_metric_lambda_stats.distribution.assert_has_calls( + [ + call( + "test_datetime_timestamp", + 0, + timestamp=int(timestamp.timestamp()), + tags=[dd_lambda_layer_tag], + ) + ] + ) + self.mock_write_metric_point_to_stdout.assert_not_called() - @patch("datadog_lambda.metric.should_use_extension", True) + @patch("datadog_lambda.metric.metrics_handler", MetricsHandler.EXTENSION) def test_lambda_metric_invalid_timestamp_with_extension(self): - patcher = patch("datadog_lambda.metric.extension_thread_stats") - self.mock_metric_extension_thread_stats = patcher.start() - self.addCleanup(patcher.stop) - delta = timedelta(hours=5) timestamp = int((datetime.now() - delta).timestamp()) lambda_metric("test_timestamp", 1, timestamp) self.mock_metric_lambda_stats.distribution.assert_not_called() - self.mock_metric_extension_thread_stats.distribution.assert_not_called() + self.mock_write_metric_point_to_stdout.assert_not_called() + @patch("datadog_lambda.metric.metrics_handler", MetricsHandler.FORWARDER) def test_lambda_metric_flush_to_log(self): - os.environ["DD_FLUSH_TO_LOG"] = "True" - lambda_metric("test", 1) self.mock_metric_lambda_stats.distribution.assert_not_called() - - del os.environ["DD_FLUSH_TO_LOG"] + self.mock_write_metric_point_to_stdout.assert_has_calls( + [call("test", 1, timestamp=None, tags=[dd_lambda_layer_tag])] + ) @patch("datadog_lambda.metric.logger.warning") def test_lambda_metric_invalid_metric_name_none(self, mock_logger_warning): lambda_metric(None, 1) self.mock_metric_lambda_stats.distribution.assert_not_called() + self.mock_write_metric_point_to_stdout.assert_not_called() mock_logger_warning.assert_called_once_with( "Ignoring metric submission. Invalid metric name: %s", None ) @@ -104,6 +141,7 @@ def test_lambda_metric_invalid_metric_name_none(self, mock_logger_warning): def test_lambda_metric_invalid_metric_name_not_string(self, mock_logger_warning): lambda_metric(123, 1) self.mock_metric_lambda_stats.distribution.assert_not_called() + self.mock_write_metric_point_to_stdout.assert_not_called() mock_logger_warning.assert_called_once_with( "Ignoring metric submission. 
Invalid metric name: %s", 123 ) @@ -112,6 +150,7 @@ def test_lambda_metric_invalid_metric_name_not_string(self, mock_logger_warning) def test_lambda_metric_non_numeric_value(self, mock_logger_warning): lambda_metric("test.non_numeric", "oops") self.mock_metric_lambda_stats.distribution.assert_not_called() + self.mock_write_metric_point_to_stdout.assert_not_called() mock_logger_warning.assert_called_once_with( "Ignoring metric submission for metric '%s' because the value is not numeric: %r", "test.non_numeric", @@ -127,10 +166,6 @@ def setUp(self): self.mock_threadstats_flush_distributions = patcher.start() self.addCleanup(patcher.stop) - patcher = patch("datadog_lambda.metric.extension_thread_stats") - self.mock_extension_thread_stats = patcher.start() - self.addCleanup(patcher.stop) - def test_retry_on_remote_disconnected(self): # Raise the RemoteDisconnected error lambda_stats = ThreadStatsWriter(True) @@ -209,10 +244,6 @@ def test_flush_temp_constant_tags(self): lambda_stats.thread_stats.constant_tags, original_constant_tags ) - def test_flush_stats_without_context(self): - flush_stats(lambda_context=None) - self.mock_extension_thread_stats.flush.assert_called_with(None) - MOCK_FUNCTION_NAME = "myFunction" From 8a01794b02244efab5814f52e442aacf71682aac Mon Sep 17 00:00:00 2001 From: Aleksandr Pasechnik Date: Wed, 7 May 2025 16:45:13 -0400 Subject: [PATCH 22/44] 6.108.0 Release Candidate (#589) --- datadog_lambda/version.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_lambda/version.py b/datadog_lambda/version.py index 702691d8..bcd37def 100644 --- a/datadog_lambda/version.py +++ b/datadog_lambda/version.py @@ -1 +1 @@ -__version__ = "6.107.0" +__version__ = "6.108.0" diff --git a/pyproject.toml b/pyproject.toml index 165a8cbe..8f16b438 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "6.107.0" +version = "6.108.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. "] license = "Apache-2.0" From 8ca58b40dcfa8ad4c2169a27ddca27b19c7fb348 Mon Sep 17 00:00:00 2001 From: Aleksandr Pasechnik Date: Thu, 8 May 2025 11:19:03 -0400 Subject: [PATCH 23/44] fix: timestamps we send to the extension should be integers (#590) Also added some defense in depth for our lower level statsd call. 
--- datadog_lambda/dogstatsd.py | 2 +- datadog_lambda/metric.py | 12 ++++++++++++ tests/test_dogstatsd.py | 4 ++++ tests/test_metric.py | 28 ++++++++++++++++++++++++++++ 4 files changed, 45 insertions(+), 1 deletion(-) diff --git a/datadog_lambda/dogstatsd.py b/datadog_lambda/dogstatsd.py index f30a2039..a08e2592 100644 --- a/datadog_lambda/dogstatsd.py +++ b/datadog_lambda/dogstatsd.py @@ -97,7 +97,7 @@ def _serialize_metric(self, metric, metric_type, value, tags, timestamp): value, metric_type, ("|#" + ",".join(self.normalize_tags(tags))) if tags else "", - ("|T" + str(timestamp)) if timestamp is not None else "", + ("|T" + str(int(timestamp))) if timestamp is not None else "", ) def _report(self, metric, metric_type, value, tags, timestamp): diff --git a/datadog_lambda/metric.py b/datadog_lambda/metric.py index 0c18b517..c9b978d6 100644 --- a/datadog_lambda/metric.py +++ b/datadog_lambda/metric.py @@ -111,6 +111,18 @@ def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=Fal if isinstance(timestamp, datetime): timestamp = int(timestamp.timestamp()) + else: + try: + timestamp = int(timestamp) + except Exception: + logger.debug( + "Ignoring metric submission for metric '%s' because the timestamp cannot " + "be turned into an integer: %r", + metric_name, + timestamp, + ) + return + timestamp_floor = int((datetime.now() - timedelta(hours=4)).timestamp()) if timestamp < timestamp_floor: logger.warning( diff --git a/tests/test_dogstatsd.py b/tests/test_dogstatsd.py index ea6afd48..6fe79372 100644 --- a/tests/test_dogstatsd.py +++ b/tests/test_dogstatsd.py @@ -53,3 +53,7 @@ def test_distribution_with_tags(self): def test_distribution_with_timestamp(self): statsd.distribution("my.test.timestamp.metric", 9, timestamp=123456789) self._checkOnlyOneMetric("my.test.timestamp.metric:9|d|T123456789") + + def test_distribution_with_float_timestamp(self): + statsd.distribution("my.test.timestamp.metric", 9, timestamp=123456789.123) + self._checkOnlyOneMetric("my.test.timestamp.metric:9|d|T123456789") diff --git a/tests/test_metric.py b/tests/test_metric.py index a4b0be2c..e7dab2c3 100644 --- a/tests/test_metric.py +++ b/tests/test_metric.py @@ -111,6 +111,34 @@ def test_lambda_metric_datetime_with_extension(self): ) self.mock_write_metric_point_to_stdout.assert_not_called() + @patch("datadog_lambda.metric.metrics_handler", MetricsHandler.EXTENSION) + def test_lambda_metric_float_with_extension(self): + delta = timedelta(minutes=1) + timestamp_float = (datetime.now() - delta).timestamp() + timestamp_int = int(timestamp_float) + + lambda_metric("test_timestamp", 1, timestamp_float) + self.mock_metric_lambda_stats.distribution.assert_has_calls( + [ + call( + "test_timestamp", + 1, + timestamp=timestamp_int, + tags=[dd_lambda_layer_tag], + ) + ] + ) + self.mock_write_metric_point_to_stdout.assert_not_called() + + @patch("datadog_lambda.metric.metrics_handler", MetricsHandler.EXTENSION) + def test_lambda_metric_timestamp_junk_with_extension(self): + delta = timedelta(minutes=1) + timestamp = (datetime.now() - delta).isoformat() + + lambda_metric("test_timestamp", 1, timestamp) + self.mock_metric_lambda_stats.distribution.assert_not_called() + self.mock_write_metric_point_to_stdout.assert_not_called() + @patch("datadog_lambda.metric.metrics_handler", MetricsHandler.EXTENSION) def test_lambda_metric_invalid_timestamp_with_extension(self): delta = timedelta(hours=5) From 1226b2de560afc6a8fe3e631ad84960b5dd03eef Mon Sep 17 00:00:00 2001 From: Aleksandr Pasechnik Date: Fri, 9 May 2025 
12:46:04 -0400 Subject: [PATCH 24/44] v6.109.0 (#591) --- datadog_lambda/version.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_lambda/version.py b/datadog_lambda/version.py index bcd37def..c3aaa6b7 100644 --- a/datadog_lambda/version.py +++ b/datadog_lambda/version.py @@ -1 +1 @@ -__version__ = "6.108.0" +__version__ = "6.109.0" diff --git a/pyproject.toml b/pyproject.toml index 8f16b438..cccef63e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "6.108.0" +version = "6.109.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. "] license = "Apache-2.0" From 16774731b8050e3355d24ffe6d917c656cf0c09f Mon Sep 17 00:00:00 2001 From: Nicholas Hulston Date: Fri, 9 May 2025 13:21:33 -0400 Subject: [PATCH 25/44] (feat): Enable Exception Replay in Lambda (#592) * import exception replay from tracer and enable if `DD_EXCEPTION_REPLAY_ENABLED=true` * lint --- datadog_lambda/wrapper.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index e81b1baa..e5460118 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -53,6 +53,12 @@ if llmobs_env_var: from ddtrace.llmobs import LLMObs +exception_replay_env_var = os.environ.get( + "DD_EXCEPTION_REPLAY_ENABLED", "false" +).lower() in ("true", "1") +if exception_replay_env_var: + from ddtrace.debugging._exception.replay import SpanExceptionHandler + logger = logging.getLogger(__name__) DD_FLUSH_TO_LOG = "DD_FLUSH_TO_LOG" @@ -224,6 +230,11 @@ def __init__(self, func): if llmobs_env_var: LLMObs.enable() + # Enable Exception Replay + if exception_replay_env_var: + logger.debug("Enabling exception replay") + SpanExceptionHandler.enable() + logger.debug("datadog_lambda_wrapper initialized") except Exception as e: logger.error(format_err_with_traceback(e)) From 9b694f7e25c58e9cbffb97392145c6962747fa87 Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Tue, 13 May 2025 12:47:49 -0400 Subject: [PATCH 26/44] fix: safely getting all the values for trigger tags (#593) * solution2: safely getting all the values * lint * add comment back --- datadog_lambda/trigger.py | 45 +++++++++++++++++++++------ tests/test_trigger.py | 65 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+), 10 deletions(-) diff --git a/datadog_lambda/trigger.py b/datadog_lambda/trigger.py index 8090e36e..52978d4b 100644 --- a/datadog_lambda/trigger.py +++ b/datadog_lambda/trigger.py @@ -114,10 +114,14 @@ def parse_event_source(event: dict) -> _EventSource: event_source = None - request_context = event.get("requestContext") + # Get requestContext safely and ensure it's a dictionary + request_context = event.get("requestContext", {}) + if not isinstance(request_context, dict): + request_context = {} + if request_context and request_context.get("stage"): if "domainName" in request_context and detect_lambda_function_url_domain( - request_context.get("domainName") + request_context.get("domainName", "") ): return _EventSource(EventTypes.LAMBDA_FUNCTION_URL) event_source = _EventSource(EventTypes.API_GATEWAY) @@ -171,6 +175,8 @@ def parse_event_source(event: dict) -> _EventSource: def detect_lambda_function_url_domain(domain: str) -> bool: # e.g. 
"etsn5fibjr.lambda-url.eu-south-1.amazonaws.com" + if not isinstance(domain, str): + return False domain_parts = domain.split(".") if len(domain_parts) < 2: return False @@ -283,17 +289,28 @@ def extract_http_tags(event): Extracts HTTP facet tags from the triggering event """ http_tags = {} - request_context = event.get("requestContext") + + # Safely get request_context and ensure it's a dictionary + request_context = event.get("requestContext", {}) + if not isinstance(request_context, dict): + request_context = {} + path = event.get("path") method = event.get("httpMethod") + if request_context and request_context.get("stage"): - if request_context.get("domainName"): - http_tags["http.url"] = request_context.get("domainName") + domain_name = request_context.get("domainName") + if domain_name: + http_tags["http.url"] = domain_name path = request_context.get("path") method = request_context.get("httpMethod") + # Version 2.0 HTTP API Gateway - apigateway_v2_http = request_context.get("http") + apigateway_v2_http = request_context.get("http", {}) + if not isinstance(apigateway_v2_http, dict): + apigateway_v2_http = {} + if event.get("version") == "2.0" and apigateway_v2_http: path = apigateway_v2_http.get("path") method = apigateway_v2_http.get("method") @@ -303,15 +320,23 @@ def extract_http_tags(event): if method: http_tags["http.method"] = method - headers = event.get("headers") + # Safely get headers + headers = event.get("headers", {}) + if not isinstance(headers, dict): + headers = {} + if headers and headers.get("Referer"): http_tags["http.referer"] = headers.get("Referer") # Try to get `routeKey` from API GW v2; otherwise try to get `resource` from API GW v1 route = event.get("routeKey") or event.get("resource") - if route: - # "GET /my/endpoint" = > "/my/endpoint" - http_tags["http.route"] = route.split(" ")[-1] + if route and isinstance(route, str): + try: + # "GET /my/endpoint" = > "/my/endpoint" + http_tags["http.route"] = route.split(" ")[-1] + except Exception: + # If splitting fails, use the route as is + http_tags["http.route"] = route return http_tags diff --git a/tests/test_trigger.py b/tests/test_trigger.py index 9cb088f1..b4da7ff0 100644 --- a/tests/test_trigger.py +++ b/tests/test_trigger.py @@ -256,6 +256,30 @@ def test_event_source_unsupported(self): self.assertEqual(event_source.to_string(), "unknown") self.assertEqual(event_source_arn, None) + def test_event_source_with_non_dict_request_context(self): + # Test with requestContext as a string instead of a dict + event = {"requestContext": "not_a_dict"} + event_source = parse_event_source(event) + # Should still return a valid event source (unknown in this case) + self.assertEqual(event_source.to_string(), "unknown") + + def test_event_source_with_invalid_domain_name(self): + # Test with domainName that isn't a string + event = {"requestContext": {"stage": "prod", "domainName": 12345}} + event_source = parse_event_source(event) + # Should detect as API Gateway since stage is present + self.assertEqual(event_source.to_string(), "api-gateway") + + def test_detect_lambda_function_url_domain_with_invalid_input(self): + from datadog_lambda.trigger import detect_lambda_function_url_domain + + # Test with non-string input + self.assertFalse(detect_lambda_function_url_domain(None)) + self.assertFalse(detect_lambda_function_url_domain(12345)) + self.assertFalse(detect_lambda_function_url_domain({"not": "a-string"})) + # Test with string that would normally cause an exception when split + 
self.assertFalse(detect_lambda_function_url_domain("")) + class GetTriggerTags(unittest.TestCase): def test_extract_trigger_tags_api_gateway(self): @@ -530,6 +554,47 @@ def test_extract_trigger_tags_list_type_event(self): tags = extract_trigger_tags(event, ctx) self.assertEqual(tags, {}) + def test_extract_http_tags_with_invalid_request_context(self): + from datadog_lambda.trigger import extract_http_tags + + # Test with requestContext as a string instead of a dict + event = {"requestContext": "not_a_dict", "path": "/test", "httpMethod": "GET"} + http_tags = extract_http_tags(event) + # Should still extract valid tags from the event + self.assertEqual( + http_tags, {"http.url_details.path": "/test", "http.method": "GET"} + ) + + def test_extract_http_tags_with_invalid_apigateway_http(self): + from datadog_lambda.trigger import extract_http_tags + + # Test with http in requestContext that's not a dict + event = { + "requestContext": {"stage": "prod", "http": "not_a_dict"}, + "version": "2.0", + } + http_tags = extract_http_tags(event) + # Should not raise an exception + self.assertEqual(http_tags, {}) + + def test_extract_http_tags_with_invalid_headers(self): + from datadog_lambda.trigger import extract_http_tags + + # Test with headers that's not a dict + event = {"headers": "not_a_dict"} + http_tags = extract_http_tags(event) + # Should not raise an exception + self.assertEqual(http_tags, {}) + + def test_extract_http_tags_with_invalid_route(self): + from datadog_lambda.trigger import extract_http_tags + + # Test with routeKey that would cause a split error + event = {"routeKey": 12345} # Not a string + http_tags = extract_http_tags(event) + # Should not raise an exception + self.assertEqual(http_tags, {}) + class ExtractHTTPStatusCodeTag(unittest.TestCase): def test_extract_http_status_code_tag_from_response_dict(self): From b74068bbdb07be1e21b6ed34c73dd23c5a853f14 Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Wed, 14 May 2025 22:27:42 -0400 Subject: [PATCH 27/44] perf: fewer memory allocation (#597) * fewer memory allocation * Update datadog_lambda/trigger.py * Update datadog_lambda/trigger.py --- datadog_lambda/trigger.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/datadog_lambda/trigger.py b/datadog_lambda/trigger.py index 52978d4b..a2708a59 100644 --- a/datadog_lambda/trigger.py +++ b/datadog_lambda/trigger.py @@ -115,9 +115,9 @@ def parse_event_source(event: dict) -> _EventSource: event_source = None # Get requestContext safely and ensure it's a dictionary - request_context = event.get("requestContext", {}) + request_context = event.get("requestContext") if not isinstance(request_context, dict): - request_context = {} + request_context = None if request_context and request_context.get("stage"): if "domainName" in request_context and detect_lambda_function_url_domain( @@ -291,9 +291,9 @@ def extract_http_tags(event): http_tags = {} # Safely get request_context and ensure it's a dictionary - request_context = event.get("requestContext", {}) + request_context = event.get("requestContext") if not isinstance(request_context, dict): - request_context = {} + request_context = None path = event.get("path") method = event.get("httpMethod") From 1d6d28f318b403358689afa5de79d689a655eb4a Mon Sep 17 00:00:00 2001 From: Nicholas Hulston Date: Thu, 15 May 2025 08:57:26 -0400 Subject: [PATCH 28/44] Add Exception Replay env var to README (#599) * update README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff 
--git a/README.md b/README.md index 03cd846a..658babc2 100644 --- a/README.md +++ b/README.md @@ -29,6 +29,7 @@ Besides the environment variables supported by dd-trace-py, the datadog-lambda-p | DD_COLD_START_TRACE_SKIP_LIB | optionally skip creating Cold Start Spans for a comma-separated list of libraries. Useful to limit depth or skip known libraries. | `ddtrace.internal.compat,ddtrace.filters` | | DD_CAPTURE_LAMBDA_PAYLOAD | [Captures incoming and outgoing AWS Lambda payloads][1] in the Datadog APM spans for Lambda invocations. | `false` | | DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH | Determines the level of detail captured from AWS Lambda payloads, which are then assigned as tags for the `aws.lambda` span. It specifies the nesting depth of the JSON payload structure to process. Once the specified maximum depth is reached, the tag's value is set to the stringified value of any nested elements beyond this level.
For example, given the input payload: <pre>{<br>  "lv1" : {<br>    "lv2": {<br>      "lv3": "val"<br>    }<br>  }<br>}</pre>
If the depth is set to `2`, the resulting tag's key is set to `function.request.lv1.lv2` and the value is `{\"lv3\": \"val\"}`.
If the depth is set to `0`, the resulting tag's key is set to `function.request` and value is `{\"lv1\":{\"lv2\":{\"lv3\": \"val\"}}}` | `10` | +| DD_EXCEPTION_REPLAY_ENABLED | When set to `true`, the Lambda will run with Error Tracking Exception Replay enabled, capturing local variables. | `false` | ## Opening Issues From 497aadc4dfb08f662559c18f8971da249be0dc62 Mon Sep 17 00:00:00 2001 From: Nicholas Hulston Date: Fri, 16 May 2025 14:22:28 -0400 Subject: [PATCH 29/44] fix flushing Exception Replay (#601) --- datadog_lambda/wrapper.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index e5460118..86bbf04d 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -58,6 +58,7 @@ ).lower() in ("true", "1") if exception_replay_env_var: from ddtrace.debugging._exception.replay import SpanExceptionHandler + from ddtrace.debugging._uploader import LogsIntakeUploaderV1 logger = logging.getLogger(__name__) @@ -405,6 +406,10 @@ def _after(self, event, context): if llmobs_env_var: LLMObs.flush() + # Flush exception replay + if exception_replay_env_var: + LogsIntakeUploaderV1._instance.periodic() + if self.encode_authorizer_context and is_authorizer_response(self.response): self._inject_authorizer_span_headers( event.get("requestContext", {}).get("requestId") From 676446cf4998caeefcbac011ab23bde4af954b1e Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Fri, 23 May 2025 11:16:17 +0200 Subject: [PATCH 30/44] v6.110.0 (#602) --- datadog_lambda/version.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_lambda/version.py b/datadog_lambda/version.py index c3aaa6b7..9534f0c7 100644 --- a/datadog_lambda/version.py +++ b/datadog_lambda/version.py @@ -1 +1 @@ -__version__ = "6.109.0" +__version__ = "6.110.0" diff --git a/pyproject.toml b/pyproject.toml index cccef63e..ba5bcb17 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "6.109.0" +version = "6.110.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. 
"] license = "Apache-2.0" From 64f81daa97d589f1047d00000c31d85e24e4314d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?jordan=20gonz=C3=A1lez?= <30836115+duncanista@users.noreply.github.com> Date: Fri, 23 May 2025 16:15:23 +0200 Subject: [PATCH 31/44] fix: `http.url` tag to include protocol (#603) * update `http.url` in inferred spans * update `http.url` in trigger tags * fix `resource_names` update * update integration test --- datadog_lambda/tracing.py | 14 +++++--- datadog_lambda/trigger.py | 2 +- .../logs/async-metrics_python310.log | 12 +++---- .../logs/async-metrics_python311.log | 12 +++---- .../logs/async-metrics_python312.log | 12 +++---- .../logs/async-metrics_python313.log | 12 +++---- .../snapshots/logs/async-metrics_python38.log | 12 +++---- .../snapshots/logs/async-metrics_python39.log | 12 +++---- .../snapshots/logs/sync-metrics_python310.log | 12 +++---- .../snapshots/logs/sync-metrics_python311.log | 12 +++---- .../snapshots/logs/sync-metrics_python312.log | 12 +++---- .../snapshots/logs/sync-metrics_python313.log | 12 +++---- .../snapshots/logs/sync-metrics_python38.log | 12 +++---- .../snapshots/logs/sync-metrics_python39.log | 14 ++++---- tests/test_tracing.py | 36 +++++++++---------- tests/test_trigger.py | 12 +++---- tests/test_wrapper.py | 2 +- 17 files changed, 108 insertions(+), 104 deletions(-) diff --git a/datadog_lambda/tracing.py b/datadog_lambda/tracing.py index 9a27673c..4b6f300a 100644 --- a/datadog_lambda/tracing.py +++ b/datadog_lambda/tracing.py @@ -850,13 +850,14 @@ def create_inferred_span_from_lambda_function_url_event(event, context): http = request_context.get("http") method = http.get("method") if http else None path = http.get("path") if http else None + http_url = f"https://{domain}{path}" resource = f"{method} {path}" tags = { "operation_name": "aws.lambda.url", - "http.url": domain + path, + "http.url": http_url, "endpoint": path, "http.method": method, - "resource_names": domain + path, + "resource_names": resource, "request_id": context.aws_request_id, } request_time_epoch = request_context.get("timeEpoch") @@ -948,6 +949,7 @@ def create_inferred_span_from_api_gateway_websocket_event( request_context = event.get("requestContext") domain = request_context.get("domainName") endpoint = request_context.get("routeKey") + http_url = f"https://{domain}{endpoint}" api_id = request_context.get("apiId") service_name = determine_service_name( @@ -955,7 +957,7 @@ def create_inferred_span_from_api_gateway_websocket_event( ) tags = { "operation_name": "aws.apigateway.websocket", - "http.url": domain + endpoint, + "http.url": http_url, "endpoint": endpoint, "resource_names": endpoint, "apiid": api_id, @@ -1007,11 +1009,12 @@ def create_inferred_span_from_api_gateway_event( ) method = event.get("httpMethod") path = event.get("path") + http_url = f"https://{domain}{path}" resource_path = _get_resource_path(event, request_context) resource = f"{method} {resource_path}" tags = { "operation_name": "aws.apigateway.rest", - "http.url": domain + path, + "http.url": http_url, "endpoint": path, "http.method": method, "resource_names": resource, @@ -1073,12 +1076,13 @@ def create_inferred_span_from_http_api_event( http = request_context.get("http") or {} method = http.get("method") path = event.get("rawPath") + http_url = f"https://{domain}{path}" resource_path = _get_resource_path(event, request_context) resource = f"{method} {resource_path}" tags = { "operation_name": "aws.httpapi", "endpoint": path, - "http.url": domain + path, + "http.url": http_url, "http.method": 
http.get("method"), "http.protocol": http.get("protocol"), "http.source_ip": http.get("sourceIp"), diff --git a/datadog_lambda/trigger.py b/datadog_lambda/trigger.py index a2708a59..14cb06ac 100644 --- a/datadog_lambda/trigger.py +++ b/datadog_lambda/trigger.py @@ -301,7 +301,7 @@ def extract_http_tags(event): if request_context and request_context.get("stage"): domain_name = request_context.get("domainName") if domain_name: - http_tags["http.url"] = domain_name + http_tags["http.url"] = f"https://{domain_name}" path = request_context.get("path") method = request_context.get("httpMethod") diff --git a/tests/integration/snapshots/logs/async-metrics_python310.log b/tests/integration/snapshots/logs/async-metrics_python310.log index 24d3fb5b..0bd7237c 100644 --- a/tests/integration/snapshots/logs/async-metrics_python310.log +++ b/tests/integration/snapshots/logs/async-metrics_python310.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python311.log b/tests/integration/snapshots/logs/async-metrics_python311.log index e4fa66bc..8550a062 100644 
--- a/tests/integration/snapshots/logs/async-metrics_python311.log +++ b/tests/integration/snapshots/logs/async-metrics_python311.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python312.log b/tests/integration/snapshots/logs/async-metrics_python312.log index 0d632c6c..57c318ab 100644 --- a/tests/integration/snapshots/logs/async-metrics_python312.log +++ b/tests/integration/snapshots/logs/async-metrics_python312.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": 
"XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python313.log b/tests/integration/snapshots/logs/async-metrics_python313.log index 09070709..9204499b 100644 --- a/tests/integration/snapshots/logs/async-metrics_python313.log +++ b/tests/integration/snapshots/logs/async-metrics_python313.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": 
"api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python38.log b/tests/integration/snapshots/logs/async-metrics_python38.log index 4a506930..e6df054c 100644 --- a/tests/integration/snapshots/logs/async-metrics_python38.log +++ b/tests/integration/snapshots/logs/async-metrics_python38.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: 
["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python39.log b/tests/integration/snapshots/logs/async-metrics_python39.log index 54081402..9bcb7a85 100644 --- a/tests/integration/snapshots/logs/async-metrics_python39.log +++ b/tests/integration/snapshots/logs/async-metrics_python39.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python310.log b/tests/integration/snapshots/logs/sync-metrics_python310.log index e2569775..40562a6d 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python310.log +++ b/tests/integration/snapshots/logs/sync-metrics_python310.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": 
"XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python311.log b/tests/integration/snapshots/logs/sync-metrics_python311.log index 69d4a695..52ec4c85 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python311.log +++ b/tests/integration/snapshots/logs/sync-metrics_python311.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: 
["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python312.log b/tests/integration/snapshots/logs/sync-metrics_python312.log index 49bae0a2..3ec0f01f 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python312.log +++ b/tests/integration/snapshots/logs/sync-metrics_python312.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", 
"http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python313.log b/tests/integration/snapshots/logs/sync-metrics_python313.log index 2f461f6f..d2c20dc0 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python313.log +++ b/tests/integration/snapshots/logs/sync-metrics_python313.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": 
"https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python38.log b/tests/integration/snapshots/logs/sync-metrics_python38.log index 83e33d33..57a354a6 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python38.log +++ b/tests/integration/snapshots/logs/sync-metrics_python38.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python39.log b/tests/integration/snapshots/logs/sync-metrics_python39.log index 0a433c34..8b7bb31b 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python39.log +++ b/tests/integration/snapshots/logs/sync-metrics_python39.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", 
"resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -377,7 +377,6 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A ] } HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","Content-Encoding:deflate","Content-Length:XXXX","Content-Type:application/json","DD-API-KEY:XXXX","User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)","traceparent:XXX","tracestate:XXX -END Duration: XXXX ms Memory Used: XXXX MB { "traces": [ [ @@ -416,6 +415,7 @@ END Duration: XXXX ms Memory Used: XXXX MB ] ] } +END Duration: XXXX ms Memory Used: XXXX MB START { "m": "aws.lambda.enhanced.invocations", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/test_tracing.py b/tests/test_tracing.py index 0a961a62..e38e4ecd 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -1730,7 +1730,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "1234567890", "endpoint": "/path/to/resource", "http.method": "POST", - "http.url": "70ixmpl4fl.execute-api.us-east-2.amazonaws.com/path/to/resource", + "http.url": "https://70ixmpl4fl.execute-api.us-east-2.amazonaws.com/path/to/resource", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "POST /{proxy+}", @@ -1752,7 +1752,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "lgxbo6a518", "endpoint": "/http/get", "http.method": 
"GET", - "http.url": "lgxbo6a518.execute-api.eu-west-1.amazonaws.com/http/get", + "http.url": "https://lgxbo6a518.execute-api.eu-west-1.amazonaws.com/http/get", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /http/get", @@ -1774,7 +1774,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "lgxbo6a518", "endpoint": "/http/get", "http.method": "GET", - "http.url": "lgxbo6a518.execute-api.eu-west-1.amazonaws.com/http/get", + "http.url": "https://lgxbo6a518.execute-api.eu-west-1.amazonaws.com/http/get", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /http/get", @@ -1798,7 +1798,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "38.122.226.210", - "http.url": "x02yirxc7a.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://x02yirxc7a.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.user_agent": "curl/7.64.1", "operation_name": "aws.httpapi", "request_id": "123", @@ -1821,7 +1821,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "mcwkra0ya4", "endpoint": "/user/42", "http.method": "GET", - "http.url": "mcwkra0ya4.execute-api.sa-east-1.amazonaws.com/user/42", + "http.url": "https://mcwkra0ya4.execute-api.sa-east-1.amazonaws.com/user/42", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /user/{id}", @@ -1843,7 +1843,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "9vj54we5ih", "endpoint": "/user/42", "http.method": "GET", - "http.url": "9vj54we5ih.execute-api.sa-east-1.amazonaws.com/user/42", + "http.url": "https://9vj54we5ih.execute-api.sa-east-1.amazonaws.com/user/42", "operation_name": "aws.httpapi", "request_id": "123", "resource_names": "GET /user/{id}", @@ -1866,7 +1866,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "Fc5SzcoYGjQCJlg=", "endpoint": "$default", "event_type": "MESSAGE", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com$default", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", @@ -1890,7 +1890,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "Fc2tgfl3mjQCJfA=", "endpoint": "$connect", "event_type": "CONNECT", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com$connect", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com$connect", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", @@ -1914,7 +1914,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "Fc2tgfl3mjQCJfA=", "endpoint": "$disconnect", "event_type": "DISCONNECT", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com$disconnect", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com$disconnect", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", @@ -2112,7 +2112,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "None", "endpoint": "/path/to/resource", "http.method": "POST", - "http.url": "70ixmpl4fl.execute-api.us-east-2.amazonaws.com/path/to/resource", + "http.url": 
"https://70ixmpl4fl.execute-api.us-east-2.amazonaws.com/path/to/resource", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "POST /{proxy+}", @@ -2135,7 +2135,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /hello", @@ -2157,7 +2157,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /hello", @@ -2180,7 +2180,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /hello", @@ -2202,7 +2202,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /hello", @@ -2224,7 +2224,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.httpapi", "request_id": "123", "resource_names": "GET /hello", @@ -2246,7 +2246,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.httpapi", "request_id": "123", "resource_names": "GET /hello", @@ -2270,7 +2270,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "ZLr9QeNLmjQCIZA=", "endpoint": "$connect", "event_type": "CONNECT", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com$connect", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com$connect", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", @@ -2294,7 +2294,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "ZLwtceO1mjQCI8Q=", "endpoint": "main", "event_type": "MESSAGE", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.commain", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.commain", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", diff --git a/tests/test_trigger.py b/tests/test_trigger.py index b4da7ff0..c12e8f5c 100644 --- 
a/tests/test_trigger.py +++ b/tests/test_trigger.py @@ -294,7 +294,7 @@ def test_extract_trigger_tags_api_gateway(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/1234567890/stages/prod", - "http.url": "70ixmpl4fl.execute-api.us-east-2.amazonaws.com", + "http.url": "https://70ixmpl4fl.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/prod/path/to/resource", "http.method": "POST", "http.route": "/{proxy+}", @@ -313,7 +313,7 @@ def test_extract_trigger_tags_api_gateway_non_proxy(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/lgxbo6a518/stages/dev", - "http.url": "lgxbo6a518.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://lgxbo6a518.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/dev/http/get", "http.method": "GET", "http.route": "/http/get", @@ -332,7 +332,7 @@ def test_extract_trigger_tags_api_gateway_websocket_connect(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/p62c47itsb/stages/dev", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com", }, ) @@ -348,7 +348,7 @@ def test_extract_trigger_tags_api_gateway_websocket_default(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/p62c47itsb/stages/dev", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com", }, ) @@ -364,7 +364,7 @@ def test_extract_trigger_tags_api_gateway_websocket_disconnect(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/p62c47itsb/stages/dev", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com", }, ) @@ -380,7 +380,7 @@ def test_extract_trigger_tags_api_gateway_http_api(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/x02yirxc7a/stages/$default", - "http.url": "x02yirxc7a.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://x02yirxc7a.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py index 4b243036..f46b365e 100644 --- a/tests/test_wrapper.py +++ b/tests/test_wrapper.py @@ -283,7 +283,7 @@ def test_5xx_sends_errors_metric_and_set_tags(self, mock_extract_trigger_tags): mock_extract_trigger_tags.return_value = { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/1234567890/stages/prod", - "http.url": "70ixmpl4fl.execute-api.us-east-2.amazonaws.com", + "http.url": "https://70ixmpl4fl.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/prod/path/to/resource", "http.method": "GET", } From 87f2314928fb839886aefe09cf6a1b7c7a0a7988 Mon Sep 17 00:00:00 2001 From: happynancee <120061598+happynancee@users.noreply.github.com> Date: Mon, 2 Jun 2025 07:50:36 -0700 Subject: [PATCH 32/44] update codeowners file and make apm serverless co-owners of repo (#596) --- .github/CODEOWNERS | 7 +------ CODEOWNERS | 1 - 2 
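The hunks above are one mechanical change: every expected `http.url` value in the integration snapshots and unit tests gains an explicit `https://` scheme (the python39 snapshot also moves an `END Duration` log line to after the flushed trace payload). The library-side change that produces these URLs is not part of this excerpt; the following is only a sketch of what the tag construction presumably looks like afterwards, where `extract_http_url` is a hypothetical helper name and the fields are the standard API Gateway payload shape:

    # Sketch only, assuming a standard API Gateway v2 event; the real change
    # lives in library code not shown in this excerpt.
    def extract_http_url(event: dict) -> str:
        request_context = event.get("requestContext", {})
        # e.g. "XXXX.execute-api.eu-west-1.amazonaws.com"
        domain = request_context.get("domainName", "")
        path = event.get("rawPath", "")
        # the scheme is now included in the http.url tag
        return f"https://{domain}{path}"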
From 87f2314928fb839886aefe09cf6a1b7c7a0a7988 Mon Sep 17 00:00:00 2001
From: happynancee <120061598+happynancee@users.noreply.github.com>
Date: Mon, 2 Jun 2025 07:50:36 -0700
Subject: [PATCH 32/44] update codeowners file and make apm serverless
 co-owners of repo (#596)

---
 .github/CODEOWNERS | 7 +------
 CODEOWNERS         | 1 -
 2 files changed, 1 insertion(+), 7 deletions(-)
 delete mode 100644 CODEOWNERS

diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 26b4b78e..a7f48dfe 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1,6 +1 @@
-* @DataDog/serverless-aws
-datadog_lambda/tracing.py @DataDog/apm-serverless
-datadog_lambda/patch.py @DataDog/apm-serverless
-datadog_lambda/span_points.py @DataDog/apm-serverless
-datadog_lambda/cold_start.py @DataDog/apm-serverless
-datadog_lambda/wrapper.py @DataDog/apm-serverless
+* @DataDog/serverless-aws @DataDog/apm-serverless
diff --git a/CODEOWNERS b/CODEOWNERS
deleted file mode 100644
index e340f1ed..00000000
--- a/CODEOWNERS
+++ /dev/null
@@ -1 +0,0 @@
-* @DataDog/serverless
\ No newline at end of file

From 254466cb2c6d749211e74201572ce5a937ec82da Mon Sep 17 00:00:00 2001
From: Yiming Luo
Date: Wed, 4 Jun 2025 12:34:10 -0400
Subject: [PATCH 33/44] chore: Use GitHub App for update-deps workflow (#605)

---
 .github/workflows/update_deps.yml | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/update_deps.yml b/.github/workflows/update_deps.yml
index 31025402..33a524b2 100644
--- a/.github/workflows/update_deps.yml
+++ b/.github/workflows/update_deps.yml
@@ -3,14 +3,24 @@ name: update-deps
 on:
   schedule:
     - cron: "0 10 * * *" # Run at 10 am every day
+  workflow_dispatch:
 
 jobs:
   check:
     runs-on: ubuntu-latest
+    environment:
+      name: protected-main-env
     steps:
+      - name: Generate token
+        id: generate_token
+        uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
+        with:
+          app-id: ${{ secrets.GH_APP_ID }}
+          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+
       - uses: actions/checkout@v3
         with:
-          ssh-key: ${{ secrets.SSH_PRIVATE_KEY }}
+          token: ${{ steps.generate_token.outputs.token }}
 
       - name: Set up Python
         uses: actions/setup-python@v4

From 6beb65d63c063ff6fc5e480e7bf076d29f2abf0c Mon Sep 17 00:00:00 2001
From: michael-zhao459
Date: Thu, 5 Jun 2025 15:24:42 -0400
Subject: [PATCH 34/44] feat: Enable sqs -> lambda support for DSM (#604)

---------

Co-authored-by: Rey Abolofia
---
 datadog_lambda/dsm.py     |  38 +++++++++++
 datadog_lambda/wrapper.py |   7 +++
 tests/test_dsm.py         | 112 ++++++++++++++++++++++++++++++++
 tests/test_wrapper.py     |  60 ++++++++++++++++
 4 files changed, 217 insertions(+)
 create mode 100644 datadog_lambda/dsm.py
 create mode 100644 tests/test_dsm.py

diff --git a/datadog_lambda/dsm.py b/datadog_lambda/dsm.py
new file mode 100644
index 00000000..427f5e47
--- /dev/null
+++ b/datadog_lambda/dsm.py
@@ -0,0 +1,38 @@
+from datadog_lambda import logger
+from datadog_lambda.trigger import EventTypes
+
+
+def set_dsm_context(event, event_source):
+
+    if event_source.equals(EventTypes.SQS):
+        _dsm_set_sqs_context(event)
+
+
+def _dsm_set_sqs_context(event):
+    from datadog_lambda.wrapper import format_err_with_traceback
+    from ddtrace.internal.datastreams import data_streams_processor
+    from ddtrace.internal.datastreams.processor import DsmPathwayCodec
+    from ddtrace.internal.datastreams.botocore import (
+        get_datastreams_context,
+        calculate_sqs_payload_size,
+    )
+
+    records = event.get("Records")
+    if records is None:
+        return
+    processor = data_streams_processor()
+
+    for record in records:
+        try:
+            queue_arn = record.get("eventSourceARN", "")
+
+            contextjson = get_datastreams_context(record)
+            payload_size = calculate_sqs_payload_size(record)
+
+            ctx = DsmPathwayCodec.decode(contextjson, processor)
+            ctx.set_checkpoint(
+                ["direction:in", f"topic:{queue_arn}", "type:sqs"],
+                payload_size=payload_size,
+            )
+        except Exception as e:
+            logger.error(format_err_with_traceback(e))
diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py
index 86bbf04d..0e23b721 100644
--- a/datadog_lambda/wrapper.py
+++ b/datadog_lambda/wrapper.py
@@ -9,6 +9,7 @@
 from importlib import import_module
 from time import time_ns
 
+from datadog_lambda.dsm import set_dsm_context
 from datadog_lambda.extension import should_use_extension, flush_extension
 from datadog_lambda.cold_start import (
     set_cold_start,
@@ -79,6 +80,7 @@
 DD_REQUESTS_SERVICE_NAME = "DD_REQUESTS_SERVICE_NAME"
 DD_SERVICE = "DD_SERVICE"
 DD_ENV = "DD_ENV"
+DD_DATA_STREAMS_ENABLED = "DD_DATA_STREAMS_ENABLED"
 
 
 def get_env_as_int(env_key, default_value: int) -> int:
@@ -190,6 +192,9 @@ def __init__(self, func):
             self.min_cold_start_trace_duration = get_env_as_int(
                 DD_MIN_COLD_START_DURATION, 3
             )
+            self.data_streams_enabled = (
+                os.environ.get(DD_DATA_STREAMS_ENABLED, "false").lower() == "true"
+            )
             self.local_testing_mode = os.environ.get(
                 DD_LOCAL_TEST, "false"
             ).lower() in ("true", "1")
@@ -322,6 +327,8 @@ def _before(self, event, context):
                 self.inferred_span = create_inferred_span(
                     event, context, event_source, self.decode_authorizer_context
                 )
+            if self.data_streams_enabled:
+                set_dsm_context(event, event_source)
             self.span = create_function_execution_span(
                 context=context,
                 function_name=self.function_name,
diff --git a/tests/test_dsm.py b/tests/test_dsm.py
new file mode 100644
index 00000000..544212d8
--- /dev/null
+++ b/tests/test_dsm.py
@@ -0,0 +1,112 @@
+import unittest
+from unittest.mock import patch, MagicMock
+
+from datadog_lambda.dsm import set_dsm_context, _dsm_set_sqs_context
+from datadog_lambda.trigger import EventTypes, _EventSource
+
+
+class TestDsmSQSContext(unittest.TestCase):
+    def setUp(self):
+        patcher = patch("datadog_lambda.dsm._dsm_set_sqs_context")
+        self.mock_dsm_set_sqs_context = patcher.start()
+        self.addCleanup(patcher.stop)
+
+        patcher = patch("ddtrace.internal.datastreams.data_streams_processor")
+        self.mock_data_streams_processor = patcher.start()
+        self.addCleanup(patcher.stop)
+
+        patcher = patch("ddtrace.internal.datastreams.botocore.get_datastreams_context")
+        self.mock_get_datastreams_context = patcher.start()
+        self.mock_get_datastreams_context.return_value = {}
+        self.addCleanup(patcher.stop)
+
+        patcher = patch(
+            "ddtrace.internal.datastreams.botocore.calculate_sqs_payload_size"
+        )
+        self.mock_calculate_sqs_payload_size = patcher.start()
+        self.mock_calculate_sqs_payload_size.return_value = 100
+        self.addCleanup(patcher.stop)
+
+        patcher = patch("ddtrace.internal.datastreams.processor.DsmPathwayCodec.decode")
+        self.mock_dsm_pathway_codec_decode = patcher.start()
+        self.addCleanup(patcher.stop)
+
+    def test_non_sqs_event_source_does_nothing(self):
+        """Test that non-SQS event sources don't trigger DSM context setting"""
+        event = {}
+        # Use Unknown Event Source
+        event_source = _EventSource(EventTypes.UNKNOWN)
+        set_dsm_context(event, event_source)
+
+        # DSM context should not be set for non-SQS events
+        self.mock_dsm_set_sqs_context.assert_not_called()
+
+    def test_sqs_event_with_no_records_does_nothing(self):
+        """Test that events where Records is None don't trigger DSM processing"""
+        events_with_no_records = [
+            {},
+            {"Records": None},
+            {"someOtherField": "value"},
+        ]
+
+        for event in events_with_no_records:
+            _dsm_set_sqs_context(event)
+            self.mock_data_streams_processor.assert_not_called()
+
+    def test_sqs_event_triggers_dsm_sqs_context(self):
+        """Test that SQS event sources trigger the SQS-specific DSM context function"""
+        sqs_event = {
+            "Records": [
+                {
+                    "eventSource": "aws:sqs",
+                    "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:my-queue",
+                    "body": "Hello from SQS!",
+                }
+            ]
+        }
+
+        event_source = _EventSource(EventTypes.SQS)
+        set_dsm_context(sqs_event, event_source)
+
+        self.mock_dsm_set_sqs_context.assert_called_once_with(sqs_event)
+
+    def test_sqs_multiple_records_process_each_record(self):
+        """Test that each record in an SQS event gets processed individually"""
+        multi_record_event = {
+            "Records": [
+                {
+                    "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:queue1",
+                    "body": "Message 1",
+                },
+                {
+                    "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:queue2",
+                    "body": "Message 2",
+                },
+                {
+                    "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:queue3",
+                    "body": "Message 3",
+                },
+            ]
+        }
+
+        mock_context = MagicMock()
+        self.mock_dsm_pathway_codec_decode.return_value = mock_context
+
+        _dsm_set_sqs_context(multi_record_event)
+
+        self.assertEqual(mock_context.set_checkpoint.call_count, 3)
+
+        calls = mock_context.set_checkpoint.call_args_list
+        expected_arns = [
+            "arn:aws:sqs:us-east-1:123456789012:queue1",
+            "arn:aws:sqs:us-east-1:123456789012:queue2",
+            "arn:aws:sqs:us-east-1:123456789012:queue3",
+        ]
+
+        for i, call in enumerate(calls):
+            args, kwargs = call
+            tags = args[0]
+            self.assertIn("direction:in", tags)
+            self.assertIn(f"topic:{expected_arns[i]}", tags)
+            self.assertIn("type:sqs", tags)
+            self.assertEqual(kwargs["payload_size"], 100)
diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py
index f46b365e..f482fa3d 100644
--- a/tests/test_wrapper.py
+++ b/tests/test_wrapper.py
@@ -76,6 +76,10 @@ def setUp(self):
         self.mock_dd_lambda_layer_tag = patcher.start()
         self.addCleanup(patcher.stop)
 
+        patcher = patch("datadog_lambda.wrapper.set_dsm_context")
+        self.mock_set_dsm_context = patcher.start()
+        self.addCleanup(patcher.stop)
+
     def test_datadog_lambda_wrapper(self):
         wrapper.dd_tracing_enabled = False
 
@@ -563,6 +567,62 @@ def return_type_test(event, context):
             self.assertEqual(result, test_result)
             self.assertFalse(MockPrintExc.called)
 
+    def test_set_dsm_context_called_when_DSM_and_tracing_enabled(self):
+        os.environ["DD_DATA_STREAMS_ENABLED"] = "true"
+        wrapper.dd_tracing_enabled = True
+
+        @wrapper.datadog_lambda_wrapper
+        def lambda_handler(event, context):
+            return "ok"
+
+        result = lambda_handler({}, get_mock_context())
+        self.assertEqual(result, "ok")
+        self.mock_set_dsm_context.assert_called_once()
+
+        del os.environ["DD_DATA_STREAMS_ENABLED"]
+
+    def test_set_dsm_context_not_called_when_only_DSM_enabled(self):
+        os.environ["DD_DATA_STREAMS_ENABLED"] = "true"
+        wrapper.dd_tracing_enabled = False
+
+        @wrapper.datadog_lambda_wrapper
+        def lambda_handler(event, context):
+            return "ok"
+
+        result = lambda_handler({}, get_mock_context())
+        self.assertEqual(result, "ok")
+        self.mock_set_dsm_context.assert_not_called()
+
+        del os.environ["DD_DATA_STREAMS_ENABLED"]
+
+    def test_set_dsm_context_not_called_when_only_tracing_enabled(self):
+        os.environ["DD_DATA_STREAMS_ENABLED"] = "false"
+        wrapper.dd_tracing_enabled = True
+
+        @wrapper.datadog_lambda_wrapper
+        def lambda_handler(event, context):
+            return "ok"
+
+        result = lambda_handler({}, get_mock_context())
+        self.assertEqual(result, "ok")
+        self.mock_set_dsm_context.assert_not_called()
+
+        del os.environ["DD_DATA_STREAMS_ENABLED"]
+
+    def test_set_dsm_context_not_called_when_tracing_and_DSM_disabled(self):
+        os.environ["DD_DATA_STREAMS_ENABLED"] = "false"
+        wrapper.dd_tracing_enabled = False
+
+        @wrapper.datadog_lambda_wrapper
+        def lambda_handler(event, context):
+            return "ok"
+
+        result = lambda_handler({}, get_mock_context())
+        self.assertEqual(result, "ok")
+        self.mock_set_dsm_context.assert_not_called()
+
+        del os.environ["DD_DATA_STREAMS_ENABLED"]
+
 
 class TestLambdaDecoratorSettings(unittest.TestCase):
     def test_some_envs_should_depend_on_dd_tracing_enabled(self):
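As the tests above pin down, the new SQS checkpointing is opt-in through `DD_DATA_STREAMS_ENABLED`, and the wrapper only reaches `set_dsm_context()` on the tracing path, so `DD_TRACE_ENABLED` must also be true. A minimal usage sketch under those assumptions, where `process_record` is a hypothetical stand-in for real handler logic:

    # Sketch: assumes DD_TRACE_ENABLED=true and DD_DATA_STREAMS_ENABLED=true
    # are set on the Lambda function's environment.
    from datadog_lambda.wrapper import datadog_lambda_wrapper

    def process_record(record):
        # hypothetical business logic
        print(record.get("body"))

    @datadog_lambda_wrapper
    def handler(event, context):
        # Before this body runs, the wrapper's _before() calls
        # set_dsm_context(event, event_source), which sets one DSM checkpoint
        # per SQS record, tagged direction:in, topic:<queue ARN>, type:sqs.
        for record in event.get("Records", []):
            process_record(record)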
From b3cf1c2ee54db0c01470e8cdd858db85a1183c24 Mon Sep 17 00:00:00 2001
From: Rey Abolofia
Date: Wed, 11 Jun 2025 08:37:35 -0700
Subject: [PATCH 35/44] Consolidate env reading to single config object. (#600)

---
 datadog_lambda/api.py           |  15 +-
 datadog_lambda/cold_start.py    |  12 +-
 datadog_lambda/config.py        | 145 +++++++++++++++++++
 datadog_lambda/fips.py          |  19 ---
 datadog_lambda/metric.py        |  17 +--
 datadog_lambda/patch.py         |  12 +-
 datadog_lambda/span_pointers.py |   9 +-
 datadog_lambda/tag_object.py    |   7 +-
 datadog_lambda/tracing.py       |  32 ++---
 datadog_lambda/wrapper.py       | 163 ++++++----------------
 tests/conftest.py               |   8 ++
 tests/test_api.py               |  11 +-
 tests/test_cold_start.py        |   9 +-
 tests/test_config.py            | 240 ++++++++++++++++++++++++++++++++
 tests/test_metric.py            |   2 +-
 tests/test_patch.py             |   8 ++
 tests/test_tag_object.py        |   8 +-
 tests/test_tracing.py           |  22 ++-
 tests/test_wrapper.py           |  86 +++++------
 tests/utils.py                  |   1 +
 20 files changed, 534 insertions(+), 292 deletions(-)
 create mode 100644 datadog_lambda/config.py
 delete mode 100644 datadog_lambda/fips.py
 create mode 100644 tests/conftest.py
 create mode 100644 tests/test_config.py

diff --git a/datadog_lambda/api.py b/datadog_lambda/api.py
index d1cee4e4..4921dae9 100644
--- a/datadog_lambda/api.py
+++ b/datadog_lambda/api.py
@@ -1,7 +1,7 @@
 import logging
 import os
 
-from datadog_lambda.fips import fips_mode_enabled
+from datadog_lambda.config import config
 
 logger = logging.getLogger(__name__)
 KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName"
@@ -29,7 +29,6 @@ def decrypt_kms_api_key(kms_client, ciphertext):
     is added. We need to try decrypting the API key both with and without the
     encryption context.
     """
     # Try without encryption context, in case API key was encrypted using the AWS CLI
-    function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
     try:
         plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[
             "Plaintext"
@@ -43,7 +42,7 @@ def decrypt_kms_api_key(kms_client, ciphertext):
         plaintext = kms_client.decrypt(
             CiphertextBlob=decoded_bytes,
             EncryptionContext={
-                KMS_ENCRYPTION_CONTEXT_KEY: function_name,
+                KMS_ENCRYPTION_CONTEXT_KEY: config.function_name,
             },
         )["Plaintext"].decode("utf-8")
 
@@ -66,7 +65,7 @@ def get_api_key() -> str:
     DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", ""))
     LAMBDA_REGION = os.environ.get("AWS_REGION", "")
 
-    if fips_mode_enabled:
+    if config.fips_mode_enabled:
         logger.debug(
             "FIPS mode is enabled, using FIPS endpoints for secrets management."
         )
@@ -82,7 +81,7 @@ def get_api_key() -> str:
             return ""
         endpoint_url = (
             f"https://secretsmanager-fips.{secrets_region}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         secrets_manager_client = _boto3_client(
@@ -95,7 +94,7 @@ def get_api_key() -> str:
         # SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html
         fips_endpoint = (
             f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint)
@@ -106,7 +105,7 @@ def get_api_key() -> str:
         # KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html
         fips_endpoint = (
             f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         kms_client = _boto3_client("kms", endpoint_url=fips_endpoint)
@@ -118,7 +117,7 @@ def get_api_key() -> str:
 
 
 def init_api():
-    if not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
+    if not config.flush_to_log:
         # Make sure that this package would always be lazy-loaded/outside from the critical path
         # since underlying packages are quite heavy to load
         # and useless with the extension unless sending metrics with timestamps
diff --git a/datadog_lambda/cold_start.py b/datadog_lambda/cold_start.py
index ea10ea20..a40e2fcb 100644
--- a/datadog_lambda/cold_start.py
+++ b/datadog_lambda/cold_start.py
@@ -1,8 +1,9 @@
 import time
-import os
 from typing import List, Hashable
 import logging
 
+from datadog_lambda.config import config
+
 logger = logging.getLogger(__name__)
 
 _cold_start = True
@@ -86,14 +87,12 @@ def reset_node_stacks():
 
 def push_node(module_name, file_path):
     node = ImportNode(module_name, file_path, time.time_ns())
-    global import_stack
     if import_stack:
         import_stack[-1].children.append(node)
     import_stack.append(node)
 
 
 def pop_node(module_name):
-    global import_stack
     if not import_stack:
         return
     node = import_stack.pop()
@@ -102,7 +101,6 @@ def pop_node(module_name):
     end_time_ns = time.time_ns()
     node.end_time_ns = end_time_ns
     if not import_stack:  # import_stack empty, a root node has been found
-        global root_nodes
         root_nodes.append(node)
@@ -147,11 +145,7 @@ def wrapped_find_spec(*args, **kwargs):
 
 
 def initialize_cold_start_tracing():
-    if (
-        is_new_sandbox()
-        and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"
-        and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true"
-    ):
+    if is_new_sandbox() and config.cold_start_tracing:
         from sys import meta_path
 
         for importer in meta_path:
diff --git a/datadog_lambda/config.py b/datadog_lambda/config.py
new file mode 100644
index 00000000..7a08d8a7
--- /dev/null
+++ b/datadog_lambda/config.py
@@ -0,0 +1,145 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+
+import logging
+import os
+
+logger = logging.getLogger(__name__)
+
+
+def _get_env(key, default=None, cast=None, depends_on_tracing=False):
+    @property
+    def _getter(self):
+        if not hasattr(self, prop_key):
+            val = self._resolve_env(key, default, cast, depends_on_tracing)
+            setattr(self, prop_key, val)
+        return getattr(self, prop_key)
+
+    prop_key = f"_config_{key}"
+    return _getter
+
+
+def as_bool(val):
+    return val.lower() == "true" or val == "1"
+
+
+def as_list(val):
+    return [val.strip() for val in val.split(",") if val.strip()]
+
+
+class Config:
+    def _resolve_env(self, key, default=None, cast=None, depends_on_tracing=False):
+        if depends_on_tracing and not self.trace_enabled:
+            return False
+        val = os.environ.get(key, default)
+        if cast is not None:
+            try:
+                val = cast(val)
+            except (ValueError, TypeError):
+                msg = (
+                    "Failed to cast environment variable '%s' with "
+                    "value '%s' to type %s. Using default value '%s'."
+                )
+                logger.warning(msg, key, val, cast.__name__, default)
+                val = default
+        return val
+
+    service = _get_env("DD_SERVICE")
+    env = _get_env("DD_ENV")
+
+    cold_start_tracing = _get_env(
+        "DD_COLD_START_TRACING", "true", as_bool, depends_on_tracing=True
+    )
+    min_cold_start_trace_duration = _get_env("DD_MIN_COLD_START_DURATION", 3, int)
+    cold_start_trace_skip_lib = _get_env(
+        "DD_COLD_START_TRACE_SKIP_LIB",
+        "ddtrace.internal.compat,ddtrace.filters",
+        as_list,
+    )
+
+    capture_payload_max_depth = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH", 10, int)
+    capture_payload_enabled = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD", "false", as_bool)
+
+    trace_enabled = _get_env("DD_TRACE_ENABLED", "true", as_bool)
+    make_inferred_span = _get_env(
+        "DD_TRACE_MANAGED_SERVICES", "true", as_bool, depends_on_tracing=True
+    )
+    encode_authorizer_context = _get_env(
+        "DD_ENCODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
+    )
+    decode_authorizer_context = _get_env(
+        "DD_DECODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
+    )
+    add_span_pointers = _get_env("DD_BOTOCORE_ADD_SPAN_POINTERS", "true", as_bool)
+    trace_extractor = _get_env("DD_TRACE_EXTRACTOR")
+
+    enhanced_metrics_enabled = _get_env("DD_ENHANCED_METRICS", "true", as_bool)
+
+    flush_in_thread = _get_env("DD_FLUSH_IN_THREAD", "false", as_bool)
+    flush_to_log = _get_env("DD_FLUSH_TO_LOG", "false", as_bool)
+    logs_injection = _get_env("DD_LOGS_INJECTION", "true", as_bool)
+    merge_xray_traces = _get_env("DD_MERGE_XRAY_TRACES", "false", as_bool)
+
+    telemetry_enabled = _get_env(
+        "DD_INSTRUMENTATION_TELEMETRY_ENABLED",
+        "false",
+        as_bool,
+        depends_on_tracing=True,
+    )
+    otel_enabled = _get_env("DD_TRACE_OTEL_ENABLED", "false", as_bool)
+    profiling_enabled = _get_env("DD_PROFILING_ENABLED", "false", as_bool)
+    llmobs_enabled = _get_env("DD_LLMOBS_ENABLED", "false", as_bool)
+    exception_replay_enabled = _get_env("DD_EXCEPTION_REPLAY_ENABLED", "false", as_bool)
+    data_streams_enabled = _get_env(
+        "DD_DATA_STREAMS_ENABLED", "false", as_bool, depends_on_tracing=True
+    )
+
+    is_gov_region = _get_env("AWS_REGION", "", lambda x: x.startswith("us-gov-"))
+
+    local_test = _get_env("DD_LOCAL_TEST", "false", as_bool)
+    integration_test = _get_env("DD_INTEGRATION_TEST", "false", as_bool)
+
+    aws_lambda_function_name = _get_env("AWS_LAMBDA_FUNCTION_NAME")
+
+    @property
+    def function_name(self):
+        if not hasattr(self, "_config_function_name"):
+            if self.aws_lambda_function_name is None:
+                self._config_function_name = "function"
+            else:
+                self._config_function_name = self.aws_lambda_function_name
+        return self._config_function_name
+
+    @property
+    def is_lambda_context(self):
+        if not hasattr(self, "_config_is_lambda_context"):
+            self._config_is_lambda_context = bool(self.aws_lambda_function_name)
+        return self._config_is_lambda_context
+
+    @property
+    def fips_mode_enabled(self):
+        if not hasattr(self, "_config_fips_mode_enabled"):
+            self._config_fips_mode_enabled = (
+                os.environ.get(
+                    "DD_LAMBDA_FIPS_MODE",
+                    "true" if self.is_gov_region else "false",
+                ).lower()
+                == "true"
+            )
+        return self._config_fips_mode_enabled
+
+    def _reset(self):
+        for attr in dir(self):
+            if attr.startswith("_config_"):
+                delattr(self, attr)
+
+
+config = Config()
+
+if config.is_gov_region or config.fips_mode_enabled:
+    logger.debug(
+        "Python Lambda Layer FIPS mode is %s.",
+        "enabled" if config.fips_mode_enabled else "not enabled",
+    )
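Two properties of the new `Config` object drive the test setup later in this patch: each value is resolved from the environment once and then cached on the instance (which is why the new `tests/conftest.py` resets the cache around every test), and any flag declared with `depends_on_tracing=True` resolves to `False` whenever tracing is disabled, regardless of its own environment variable. A small sketch of that gating, assuming the variables are set before the first attribute access:

    # Sketch: demonstrates the tracing gate and the per-instance caching.
    import os

    os.environ["DD_TRACE_ENABLED"] = "false"
    os.environ["DD_COLD_START_TRACING"] = "true"

    from datadog_lambda.config import config

    # cold_start_tracing depends on tracing, so it is forced to False here
    assert config.cold_start_tracing is False

    # resolved values are cached; flipping the env var later has no effect
    # until config._reset() is called
    os.environ["DD_TRACE_ENABLED"] = "true"
    assert config.cold_start_tracing is False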
"invocations" or "errors" lambda_context (object): Lambda context dict passed to the function by AWS """ - if not enhanced_metrics_enabled: + if not config.enhanced_metrics_enabled: logger.debug( "Not submitting enhanced metric %s because enhanced metrics are disabled", metric_name, diff --git a/datadog_lambda/patch.py b/datadog_lambda/patch.py index 5b8a92c5..6d2af0dc 100644 --- a/datadog_lambda/patch.py +++ b/datadog_lambda/patch.py @@ -3,7 +3,6 @@ # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2019 Datadog, Inc. -import os import sys import logging import zlib @@ -13,10 +12,8 @@ from wrapt.importer import when_imported from ddtrace import patch_all as patch_all_dd -from datadog_lambda.tracing import ( - get_dd_trace_context, - dd_tracing_enabled, -) +from datadog_lambda.config import config +from datadog_lambda.tracing import get_dd_trace_context from collections.abc import MutableMapping logger = logging.getLogger(__name__) @@ -32,7 +29,7 @@ def patch_all(): """ _patch_for_integration_tests() - if dd_tracing_enabled: + if config.trace_enabled: patch_all_dd() else: _patch_http() @@ -44,8 +41,7 @@ def _patch_for_integration_tests(): Patch `requests` to log the outgoing requests for integration tests. """ global _integration_tests_patched - is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true" - if not _integration_tests_patched and is_in_tests: + if not _integration_tests_patched and config.integration_test: wrap("requests", "Session.send", _log_request) _integration_tests_patched = True diff --git a/datadog_lambda/span_pointers.py b/datadog_lambda/span_pointers.py index 40d959e6..45925d92 100644 --- a/datadog_lambda/span_pointers.py +++ b/datadog_lambda/span_pointers.py @@ -1,12 +1,12 @@ from itertools import chain import logging -import os from typing import List from typing import Optional from ddtrace._trace._span_pointer import _SpanPointerDirection from ddtrace._trace._span_pointer import _SpanPointerDescription +from datadog_lambda.config import config from datadog_lambda.metric import submit_dynamodb_stream_type_metric from datadog_lambda.trigger import EventTypes @@ -14,15 +14,10 @@ logger = logging.getLogger(__name__) -dd_botocore_add_span_pointers = os.environ.get( - "DD_BOTOCORE_ADD_SPAN_POINTERS", "true" -).lower() in ("true", "1") - - def calculate_span_pointers( event_source, event, - botocore_add_span_pointers=dd_botocore_add_span_pointers, + botocore_add_span_pointers=config.add_span_pointers, ) -> List[_SpanPointerDescription]: try: if botocore_add_span_pointers: diff --git a/datadog_lambda/tag_object.py b/datadog_lambda/tag_object.py index 6d82f83b..744e4893 100644 --- a/datadog_lambda/tag_object.py +++ b/datadog_lambda/tag_object.py @@ -4,18 +4,17 @@ # Copyright 2021 Datadog, Inc. 
from decimal import Decimal -import logging import ujson as json +from datadog_lambda.config import config + redactable_keys = ["authorization", "x-authorization", "password", "token"] -max_depth = 10 -logger = logging.getLogger(__name__) def tag_object(span, key, obj, depth=0): if obj is None: return span.set_tag(key, obj) - if depth >= max_depth: + if depth >= config.capture_payload_max_depth: return span.set_tag(key, _redact_val(key, str(obj)[0:5000])) depth += 1 if _should_try_string(obj): diff --git a/datadog_lambda/tracing.py b/datadog_lambda/tracing.py index 4b6f300a..3d5f671e 100644 --- a/datadog_lambda/tracing.py +++ b/datadog_lambda/tracing.py @@ -32,6 +32,8 @@ from ddtrace import __version__ as ddtrace_version from ddtrace.propagation.http import HTTPPropagator from ddtrace.trace import Context, Span, tracer + +from datadog_lambda.config import config from datadog_lambda import __version__ as datadog_lambda_version from datadog_lambda.trigger import ( _EventSource, @@ -42,10 +44,7 @@ EventSubtypes, ) -dd_trace_otel_enabled = ( - os.environ.get("DD_TRACE_OTEL_ENABLED", "false").lower() == "true" -) -if dd_trace_otel_enabled: +if config.otel_enabled: from opentelemetry.trace import set_tracer_provider from ddtrace.opentelemetry import TracerProvider @@ -55,18 +54,11 @@ logger = logging.getLogger(__name__) dd_trace_context = None -dd_tracing_enabled = os.environ.get("DD_TRACE_ENABLED", "false").lower() == "true" -if dd_tracing_enabled: +if config.telemetry_enabled: # Enable the telemetry client if the user has opted in - if ( - os.environ.get("DD_INSTRUMENTATION_TELEMETRY_ENABLED", "false").lower() - == "true" - ): - from ddtrace.internal.telemetry import telemetry_writer + from ddtrace.internal.telemetry import telemetry_writer - telemetry_writer.enable() - -is_lambda_context = os.environ.get(XrayDaemon.FUNCTION_NAME_HEADER_NAME) != "" + telemetry_writer.enable() propagator = HTTPPropagator() @@ -97,7 +89,7 @@ def _convert_xray_sampling(xray_sampled): def _get_xray_trace_context(): - if not is_lambda_context: + if not config.is_lambda_context: return None xray_trace_entity = parse_xray_header( @@ -639,13 +631,11 @@ def get_dd_trace_context_obj(): automatically, but this function can be used to manually inject the trace context to an outgoing request. """ - if dd_tracing_enabled: + if config.trace_enabled: dd_trace_py_context = _get_dd_trace_py_context() if _is_context_complete(dd_trace_py_context): return dd_trace_py_context - global dd_trace_context - try: xray_context = _get_xray_trace_context() # xray (sub)segment except Exception as e: @@ -690,10 +680,10 @@ def set_correlation_ids(): TODO: Remove me when Datadog tracer is natively supported in Lambda. 
""" - if not is_lambda_context: + if not config.is_lambda_context: logger.debug("set_correlation_ids is only supported in LambdaContext") return - if dd_tracing_enabled: + if config.trace_enabled: logger.debug("using ddtrace implementation for spans") return @@ -1480,7 +1470,7 @@ def emit_telemetry_on_exception_outside_of_handler( Emit an enhanced error metric and create a span for exceptions occurring outside the handler """ submit_errors_metric(None) - if dd_tracing_enabled: + if config.trace_enabled: span = tracer.trace( "aws.lambda", service="aws.lambda", diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 0e23b721..87063411 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -18,6 +18,7 @@ is_new_sandbox, ColdStartTracer, ) +from datadog_lambda.config import config from datadog_lambda.constants import ( TraceContextSource, XraySubsegment, @@ -26,11 +27,11 @@ from datadog_lambda.module_name import modify_module_name from datadog_lambda.patch import patch_all from datadog_lambda.span_pointers import calculate_span_pointers +from datadog_lambda.tag_object import tag_object from datadog_lambda.tracing import ( extract_dd_trace_context, create_dd_dummy_metadata_subsegment, inject_correlation_ids, - dd_tracing_enabled, mark_trace_as_error_for_5xx_responses, set_correlation_ids, set_dd_trace_py_root, @@ -46,65 +47,20 @@ extract_http_status_code_tag, ) -profiling_env_var = os.environ.get("DD_PROFILING_ENABLED", "false").lower() == "true" -if profiling_env_var: +if config.profiling_enabled: from ddtrace.profiling import profiler -llmobs_env_var = os.environ.get("DD_LLMOBS_ENABLED", "false").lower() in ("true", "1") -if llmobs_env_var: +if config.llmobs_enabled: from ddtrace.llmobs import LLMObs -exception_replay_env_var = os.environ.get( - "DD_EXCEPTION_REPLAY_ENABLED", "false" -).lower() in ("true", "1") -if exception_replay_env_var: +if config.exception_replay_enabled: from ddtrace.debugging._exception.replay import SpanExceptionHandler from ddtrace.debugging._uploader import LogsIntakeUploaderV1 logger = logging.getLogger(__name__) -DD_FLUSH_TO_LOG = "DD_FLUSH_TO_LOG" -DD_LOGS_INJECTION = "DD_LOGS_INJECTION" -DD_MERGE_XRAY_TRACES = "DD_MERGE_XRAY_TRACES" -AWS_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME" -DD_LOCAL_TEST = "DD_LOCAL_TEST" -DD_TRACE_EXTRACTOR = "DD_TRACE_EXTRACTOR" -DD_TRACE_MANAGED_SERVICES = "DD_TRACE_MANAGED_SERVICES" -DD_ENCODE_AUTHORIZER_CONTEXT = "DD_ENCODE_AUTHORIZER_CONTEXT" -DD_DECODE_AUTHORIZER_CONTEXT = "DD_DECODE_AUTHORIZER_CONTEXT" -DD_COLD_START_TRACING = "DD_COLD_START_TRACING" -DD_MIN_COLD_START_DURATION = "DD_MIN_COLD_START_DURATION" -DD_COLD_START_TRACE_SKIP_LIB = "DD_COLD_START_TRACE_SKIP_LIB" -DD_CAPTURE_LAMBDA_PAYLOAD = "DD_CAPTURE_LAMBDA_PAYLOAD" -DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH = "DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH" DD_REQUESTS_SERVICE_NAME = "DD_REQUESTS_SERVICE_NAME" DD_SERVICE = "DD_SERVICE" -DD_ENV = "DD_ENV" -DD_DATA_STREAMS_ENABLED = "DD_DATA_STREAMS_ENABLED" - - -def get_env_as_int(env_key, default_value: int) -> int: - try: - return int(os.environ.get(env_key, default_value)) - except Exception as e: - logger.warn( - f"Failed to parse {env_key} as int. Using default value: {default_value}. 
Error: {e}" - ) - return default_value - - -dd_capture_lambda_payload_enabled = ( - os.environ.get(DD_CAPTURE_LAMBDA_PAYLOAD, "false").lower() == "true" -) - -if dd_capture_lambda_payload_enabled: - import datadog_lambda.tag_object as tag_object - - tag_object.max_depth = get_env_as_int( - DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH, tag_object.max_depth - ) - -env_env_var = os.environ.get(DD_ENV, None) init_timestamp_ns = time_ns() @@ -161,59 +117,16 @@ def __init__(self, func): """Executes when the wrapped function gets wrapped""" try: self.func = func - self.flush_to_log = os.environ.get(DD_FLUSH_TO_LOG, "").lower() == "true" - self.logs_injection = ( - os.environ.get(DD_LOGS_INJECTION, "true").lower() == "true" - ) - self.merge_xray_traces = ( - os.environ.get(DD_MERGE_XRAY_TRACES, "false").lower() == "true" - ) - self.function_name = os.environ.get(AWS_LAMBDA_FUNCTION_NAME, "function") - self.service = os.environ.get(DD_SERVICE, None) - self.extractor_env = os.environ.get(DD_TRACE_EXTRACTOR, None) self.trace_extractor = None self.span = None self.inferred_span = None - depends_on_dd_tracing_enabled = ( - lambda original_boolean: dd_tracing_enabled and original_boolean - ) - self.make_inferred_span = depends_on_dd_tracing_enabled( - os.environ.get(DD_TRACE_MANAGED_SERVICES, "true").lower() == "true" - ) - self.encode_authorizer_context = depends_on_dd_tracing_enabled( - os.environ.get(DD_ENCODE_AUTHORIZER_CONTEXT, "true").lower() == "true" - ) - self.decode_authorizer_context = depends_on_dd_tracing_enabled( - os.environ.get(DD_DECODE_AUTHORIZER_CONTEXT, "true").lower() == "true" - ) - self.cold_start_tracing = depends_on_dd_tracing_enabled( - os.environ.get(DD_COLD_START_TRACING, "true").lower() == "true" - ) - self.min_cold_start_trace_duration = get_env_as_int( - DD_MIN_COLD_START_DURATION, 3 - ) - self.data_streams_enabled = ( - os.environ.get(DD_DATA_STREAMS_ENABLED, "false").lower() == "true" - ) - self.local_testing_mode = os.environ.get( - DD_LOCAL_TEST, "false" - ).lower() in ("true", "1") - self.cold_start_trace_skip_lib = [ - "ddtrace.internal.compat", - "ddtrace.filters", - ] - if DD_COLD_START_TRACE_SKIP_LIB in os.environ: - try: - self.cold_start_trace_skip_lib = os.environ[ - DD_COLD_START_TRACE_SKIP_LIB - ].split(",") - except Exception: - logger.debug(f"Malformatted for env {DD_COLD_START_TRACE_SKIP_LIB}") self.response = None - if profiling_env_var: - self.prof = profiler.Profiler(env=env_env_var, service=self.service) - if self.extractor_env: - extractor_parts = self.extractor_env.rsplit(".", 1) + + if config.profiling_enabled: + self.prof = profiler.Profiler(env=config.env, service=config.service) + + if config.trace_extractor: + extractor_parts = config.trace_extractor.rsplit(".", 1) if len(extractor_parts) == 2: (mod_name, extractor_name) = extractor_parts modified_extractor_name = modify_module_name(mod_name) @@ -221,7 +134,7 @@ def __init__(self, func): self.trace_extractor = getattr(extractor_module, extractor_name) # Inject trace correlation ids to logs - if self.logs_injection: + if config.logs_injection: inject_correlation_ids() # This prevents a breaking change in ddtrace v0.49 regarding the service name @@ -233,11 +146,11 @@ def __init__(self, func): patch_all() # Enable LLM Observability - if llmobs_env_var: + if config.llmobs_enabled: LLMObs.enable() # Enable Exception Replay - if exception_replay_env_var: + if config.exception_replay_enabled: logger.debug("Enabling exception replay") SpanExceptionHandler.enable() @@ -307,7 +220,7 @@ def _before(self, event, 
context): event, context, extractor=self.trace_extractor, - decode_authorizer_context=self.decode_authorizer_context, + decode_authorizer_context=config.decode_authorizer_context, ) self.event_source = event_source # Create a Datadog X-Ray subsegment with the trace context @@ -321,28 +234,28 @@ def _before(self, event, context): XraySubsegment.TRACE_KEY, ) - if dd_tracing_enabled: - set_dd_trace_py_root(trace_context_source, self.merge_xray_traces) - if self.make_inferred_span: + if config.trace_enabled: + set_dd_trace_py_root(trace_context_source, config.merge_xray_traces) + if config.make_inferred_span: self.inferred_span = create_inferred_span( - event, context, event_source, self.decode_authorizer_context + event, context, event_source, config.decode_authorizer_context ) - if self.data_streams_enabled: + if config.data_streams_enabled: set_dsm_context(event, event_source) self.span = create_function_execution_span( context=context, - function_name=self.function_name, + function_name=config.function_name, is_cold_start=is_cold_start(), is_proactive_init=is_proactive_init(), trace_context_source=trace_context_source, - merge_xray_traces=self.merge_xray_traces, + merge_xray_traces=config.merge_xray_traces, trigger_tags=self.trigger_tags, parent_span=self.inferred_span, span_pointers=calculate_span_pointers(event_source, event), ) else: set_correlation_ids() - if profiling_env_var and is_new_sandbox(): + if config.profiling_enabled and is_new_sandbox(): self.prof.start(stop_on_exit=False, profile_children=True) logger.debug("datadog_lambda_wrapper _before() done") except Exception as e: @@ -361,14 +274,14 @@ def _after(self, event, context): create_dd_dummy_metadata_subsegment( self.trigger_tags, XraySubsegment.LAMBDA_FUNCTION_TAGS_KEY ) - should_trace_cold_start = self.cold_start_tracing and is_new_sandbox() + should_trace_cold_start = config.cold_start_tracing and is_new_sandbox() if should_trace_cold_start: trace_ctx = tracer.current_trace_context() if self.span: - if dd_capture_lambda_payload_enabled: - tag_object.tag_object(self.span, "function.request", event) - tag_object.tag_object(self.span, "function.response", self.response) + if config.capture_payload_enabled: + tag_object(self.span, "function.request", event) + tag_object(self.span, "function.response", self.response) if status_code: self.span.set_tag("http.status_code", status_code) @@ -378,8 +291,8 @@ def _after(self, event, context): if status_code: self.inferred_span.set_tag("http.status_code", status_code) - if self.service: - self.inferred_span.set_tag("peer.service", self.service) + if config.service: + self.inferred_span.set_tag("peer.service", config.service) if InferredSpanInfo.is_async(self.inferred_span) and self.span: self.inferred_span.finish(finish_time=self.span.start) @@ -391,33 +304,35 @@ def _after(self, event, context): following_span = self.span or self.inferred_span ColdStartTracer( tracer, - self.function_name, + config.function_name, following_span.start_ns, trace_ctx, - self.min_cold_start_trace_duration, - self.cold_start_trace_skip_lib, + config.min_cold_start_trace_duration, + config.cold_start_trace_skip_lib, ).trace() except Exception as e: logger.debug("Failed to create cold start spans. 
%s", e) - if not self.flush_to_log or should_use_extension: + if not config.flush_to_log or should_use_extension: from datadog_lambda.metric import flush_stats flush_stats(context) - if should_use_extension and self.local_testing_mode: + if should_use_extension and config.local_test: # when testing locally, the extension does not know when an # invocation completes because it does not have access to the # logs api flush_extension() - if llmobs_env_var: + if config.llmobs_enabled: LLMObs.flush() # Flush exception replay - if exception_replay_env_var: + if config.exception_replay_enabled: LogsIntakeUploaderV1._instance.periodic() - if self.encode_authorizer_context and is_authorizer_response(self.response): + if config.encode_authorizer_context and is_authorizer_response( + self.response + ): self._inject_authorizer_span_headers( event.get("requestContext", {}).get("requestId") ) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..33869802 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,8 @@ +import pytest + +from datadog_lambda.config import config + + +@pytest.fixture(autouse=True) +def reset_config(): + config._reset() diff --git a/tests/test_api.py b/tests/test_api.py index 59ee4ee8..7fcc3c22 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -22,7 +22,10 @@ def setUp(self): ) self.env_patcher.start() - @patch("datadog_lambda.api.fips_mode_enabled", True) + def tearDown(self): + del os.environ["AWS_REGION"] + + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") def test_secrets_manager_fips_endpoint(self, mock_boto3_client): mock_client = MagicMock() @@ -63,7 +66,7 @@ def test_secrets_manager_different_region(self, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") - @patch("datadog_lambda.api.fips_mode_enabled", True) + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") def test_secrets_manager_different_region_but_still_fips(self, mock_boto3_client): mock_client = MagicMock() @@ -84,7 +87,7 @@ def test_secrets_manager_different_region_but_still_fips(self, mock_boto3_client ) self.assertEqual(api_key, "test-api-key") - @patch("datadog_lambda.api.fips_mode_enabled", True) + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") def test_ssm_fips_endpoint(self, mock_boto3_client): mock_client = MagicMock() @@ -103,7 +106,7 @@ def test_ssm_fips_endpoint(self, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") - @patch("datadog_lambda.api.fips_mode_enabled", True) + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") @patch("datadog_lambda.api.decrypt_kms_api_key") def test_kms_fips_endpoint(self, mock_decrypt_kms, mock_boto3_client): diff --git a/tests/test_cold_start.py b/tests/test_cold_start.py index c7444c49..d75b5f43 100644 --- a/tests/test_cold_start.py +++ b/tests/test_cold_start.py @@ -8,6 +8,8 @@ import datadog_lambda.cold_start as cold_start import datadog_lambda.wrapper as wrapper +from tests.utils import get_mock_context + class TestColdStartTracingSetup(unittest.TestCase): def test_proactive_init(self): @@ -247,7 +249,7 @@ def finish(span): monkeypatch.setattr(wrapper.tracer, "_on_span_finish", finish) monkeypatch.setattr(wrapper, "is_new_sandbox", lambda: True) - monkeypatch.setattr("datadog_lambda.wrapper.dd_tracing_enabled", True) + 
monkeypatch.setattr("datadog_lambda.config.Config.trace_enabled", True) monkeypatch.setenv( "DD_COLD_START_TRACE_SKIP_LIB", "ddtrace.contrib.logging,datadog_lambda.wrapper" ) @@ -257,10 +259,7 @@ def finish(span): def handler(event, context): import tabnanny - lambda_context = MagicMock() - lambda_context.invoked_function_arn = ( - "arn:aws:lambda:us-west-1:123457598159:function:python-layer-test:1" - ) + lambda_context = get_mock_context() handler.cold_start_tracing = True handler({}, lambda_context) diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 00000000..92002439 --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,240 @@ +import pytest + +from datadog_lambda.config import config, _get_env, Config + + +@pytest.fixture +def setenv(monkeypatch): + def set_env(key, value): + if value is None: + monkeypatch.delenv(key, raising=False) + else: + monkeypatch.setenv(key, value) + + return set_env + + +def _test_as_bool(env_key, conf_key, default): + return ( + (env_key, conf_key, None, default), + (env_key, conf_key, "", False), + (env_key, conf_key, "true", True), + (env_key, conf_key, "TRUE", True), + (env_key, conf_key, "false", False), + (env_key, conf_key, "FALSE", False), + (env_key, conf_key, "1", True), + (env_key, conf_key, "0", False), + (env_key, conf_key, "purple", False), + ) + + +def _test_int(env_key, conf_key, default): + return ( + (env_key, conf_key, None, default), + (env_key, conf_key, "", default), + (env_key, conf_key, "5", 5), + (env_key, conf_key, "0", 0), + (env_key, conf_key, "2.5", default), + (env_key, conf_key, "-1", -1), + (env_key, conf_key, "purple", default), + ) + + +def _test_as_list(env_key, conf_key, default): + return ( + (env_key, conf_key, None, default.split(",")), + (env_key, conf_key, "", []), + (env_key, conf_key, " ", []), + (env_key, conf_key, ",", []), + (env_key, conf_key, " , ", []), + (env_key, conf_key, "a", ["a"]), + (env_key, conf_key, "a,", ["a"]), + (env_key, conf_key, "a, ", ["a"]), + (env_key, conf_key, "a,b", ["a", "b"]), + (env_key, conf_key, "a, b", ["a", "b"]), + ) + + +_test_config_from_environ = ( + *_test_as_bool("DD_FLUSH_TO_LOG", "flush_to_log", default=False), + *_test_as_bool("DD_LOGS_INJECTION", "logs_injection", default=True), + *_test_as_bool("DD_TRACE_ENABLED", "trace_enabled", default=True), + *_test_as_bool("DD_COLD_START_TRACING", "cold_start_tracing", default=True), + *_test_as_bool("DD_TRACE_MANAGED_SERVICES", "make_inferred_span", default=True), + *_test_as_bool( + "DD_ENCODE_AUTHORIZER_CONTEXT", "encode_authorizer_context", default=True + ), + *_test_as_bool( + "DD_DECODE_AUTHORIZER_CONTEXT", "decode_authorizer_context", default=True + ), + *_test_as_bool("DD_FLUSH_IN_THREAD", "flush_in_thread", default=False), + *_test_as_bool("DD_ENHANCED_METRICS", "enhanced_metrics_enabled", default=True), + *_test_as_bool("DD_INTEGRATION_TEST", "integration_test", default=False), + *_test_as_bool("DD_BOTOCORE_ADD_SPAN_POINTERS", "add_span_pointers", default=True), + *_test_as_bool("DD_TRACE_OTEL_ENABLED", "otel_enabled", default=False), + *_test_as_bool( + "DD_INSTRUMENTATION_TELEMETRY_ENABLED", "telemetry_enabled", default=False + ), + *_test_as_bool("DD_MERGE_XRAY_TRACES", "merge_xray_traces", default=False), + *_test_as_bool("DD_PROFILING_ENABLED", "profiling_enabled", default=False), + *_test_as_bool("DD_LLMOBS_ENABLED", "llmobs_enabled", default=False), + *_test_as_bool( + "DD_EXCEPTION_REPLAY_ENABLED", "exception_replay_enabled", default=False + ), + *_test_as_bool( + 
"DD_CAPTURE_LAMBDA_PAYLOAD", "capture_payload_enabled", default=False + ), + *_test_as_bool("DD_LOCAL_TEST", "local_test", default=False), + *_test_as_bool("DD_DATA_STREAMS_ENABLED", "data_streams_enabled", default=False), + *_test_int( + "DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH", "capture_payload_max_depth", default=10 + ), + *_test_int( + "DD_MIN_COLD_START_DURATION", "min_cold_start_trace_duration", default=3 + ), + *_test_as_list( + "DD_COLD_START_TRACE_SKIP_LIB", + "cold_start_trace_skip_lib", + default="ddtrace.internal.compat,ddtrace.filters", + ), + ("DD_SERVICE", "service", None, None), + ("DD_SERVICE", "service", "", ""), + ("DD_SERVICE", "service", "my_service", "my_service"), + ("AWS_LAMBDA_FUNCTION_NAME", "aws_lambda_function_name", None, None), + ("AWS_LAMBDA_FUNCTION_NAME", "aws_lambda_function_name", "", ""), + ( + "AWS_LAMBDA_FUNCTION_NAME", + "aws_lambda_function_name", + "my_function", + "my_function", + ), + ("AWS_LAMBDA_FUNCTION_NAME", "function_name", None, "function"), + ("AWS_LAMBDA_FUNCTION_NAME", "function_name", "", ""), + ("AWS_LAMBDA_FUNCTION_NAME", "function_name", "my_function", "my_function"), + ("AWS_LAMBDA_FUNCTION_NAME", "is_lambda_context", None, False), + ("AWS_LAMBDA_FUNCTION_NAME", "is_lambda_context", "", False), + ("AWS_LAMBDA_FUNCTION_NAME", "is_lambda_context", "my_function", True), + ("AWS_REGION", "is_gov_region", None, False), + ("AWS_REGION", "is_gov_region", "", False), + ("AWS_REGION", "is_gov_region", "us-gov-1", True), + ("AWS_REGION", "is_gov_region", "us-est-1", False), + ("DD_TRACE_EXTRACTOR", "trace_extractor", None, None), + ("DD_TRACE_EXTRACTOR", "trace_extractor", "", ""), + ("DD_TRACE_EXTRACTOR", "trace_extractor", "my_extractor", "my_extractor"), + ("DD_ENV", "env", None, None), + ("DD_ENV", "env", "", ""), + ("DD_ENV", "env", "my_env", "my_env"), +) + + +@pytest.mark.parametrize("env_key,conf_key,env_val,conf_val", _test_config_from_environ) +def test_config_from_environ(env_key, conf_key, env_val, conf_val, setenv): + setenv(env_key, env_val) + assert getattr(config, conf_key) == conf_val + + +_test_config_from_environ_depends_on_tracing = ( + *_test_as_bool("DD_COLD_START_TRACING", "cold_start_tracing", default=True), + *_test_as_bool("DD_TRACE_MANAGED_SERVICES", "make_inferred_span", default=True), + *_test_as_bool( + "DD_ENCODE_AUTHORIZER_CONTEXT", "encode_authorizer_context", default=True + ), + *_test_as_bool( + "DD_DECODE_AUTHORIZER_CONTEXT", "decode_authorizer_context", default=True + ), + *_test_as_bool("DD_DATA_STREAMS_ENABLED", "data_streams_enabled", default=False), + *_test_as_bool( + "DD_INSTRUMENTATION_TELEMETRY_ENABLED", "telemetry_enabled", default=False + ), +) + + +@pytest.mark.parametrize( + "env_key,conf_key,env_val,conf_val", _test_config_from_environ_depends_on_tracing +) +def test_config_from_environ_depends_on_tracing( + env_key, conf_key, env_val, conf_val, setenv +): + setenv(env_key, env_val) + setenv("DD_TRACE_ENABLED", "false") + assert getattr(config, conf_key) is False + + +def test_config_aws_lambda_function_name(setenv): + # these config values all access the same environment variable, test to + # ensure the wrong value is not cached + setenv("AWS_LAMBDA_FUNCTION_NAME", "my_function") + assert config.aws_lambda_function_name == "my_function" + assert config.function_name == "my_function" + assert config.is_lambda_context is True + + +_test_fips_mode_from_environ = ( + (None, None, False), + (None, "", False), + (None, "us-gov-1", True), + (None, "us-east-1", False), + ("", None, False), + ("", "", 
False), + ("", "us-gov-1", False), + ("", "us-east-1", False), + ("true", None, True), + ("true", "", True), + ("true", "us-gov-1", True), + ("true", "us-east-1", True), + ("TRUE", None, True), + ("TRUE", "", True), + ("TRUE", "us-gov-1", True), + ("TRUE", "us-east-1", True), + ("false", None, False), + ("false", "", False), + ("false", "us-gov-1", False), + ("false", "us-east-1", False), + ("FALSE", None, False), + ("FALSE", "", False), + ("FALSE", "us-gov-1", False), + ("FALSE", "us-east-1", False), + ("1", None, False), + ("1", "", False), + ("1", "us-gov-1", False), + ("1", "us-east-1", False), + ("0", None, False), + ("0", "", False), + ("0", "us-gov-1", False), + ("0", "us-east-1", False), +) + + +@pytest.mark.parametrize("fips_mode,region,conf_val", _test_fips_mode_from_environ) +def test_fips_mode_from_environ(fips_mode, region, conf_val, setenv): + setenv("DD_LAMBDA_FIPS_MODE", fips_mode) + setenv("AWS_REGION", region) + assert config.fips_mode_enabled == conf_val + + +def test__get_env_does_not_log_when_env_not_set(setenv, monkeypatch): + setenv("TEST_1", None) + setenv("TEST_2", None) + setenv("TEST_3", None) + setenv("TEST_4", None) + + class Testing(Config): + test_1 = _get_env("TEST_1") + test_2 = _get_env("TEST_2", "purple") + test_3 = _get_env("TEST_3", "true", bool) + test_4 = _get_env("TEST_4", "true", bool, depends_on_tracing=True) + + logs = [] + + def cap_warn(*args, **kwargs): + logs.append(args) + + monkeypatch.setattr("datadog_lambda.config.logger.warning", cap_warn) + + testing = Testing() + testing.test_1 + testing.test_2 + testing.test_3 + testing.test_4 + + assert not logs diff --git a/tests/test_metric.py b/tests/test_metric.py index e7dab2c3..aa537d34 100644 --- a/tests/test_metric.py +++ b/tests/test_metric.py @@ -62,7 +62,7 @@ def test_select_metrics_handler_dd_api_fallback(self): self.assertEqual(MetricsHandler.DATADOG_API, _select_metrics_handler()) del os.environ["DD_FLUSH_TO_LOG"] - @patch("datadog_lambda.metric.fips_mode_enabled", True) + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("datadog_lambda.metric.should_use_extension", False) def test_select_metrics_handler_has_no_fallback_in_fips_mode(self): os.environ["DD_FLUSH_TO_LOG"] = "False" diff --git a/tests/test_patch.py b/tests/test_patch.py index bf924875..b03d2e23 100644 --- a/tests/test_patch.py +++ b/tests/test_patch.py @@ -1,3 +1,4 @@ +import pytest import unittest from unittest.mock import patch, MagicMock @@ -5,6 +6,13 @@ from datadog_lambda.patch import _patch_http, _ensure_patch_requests from datadog_lambda.constants import TraceHeader +from ddtrace.contrib.internal.requests.patch import unpatch as unpatch_requests + + +@pytest.fixture(scope="module", autouse=True) +def reset_patches(): + unpatch_requests() + class TestPatchHTTPClients(unittest.TestCase): def setUp(self): diff --git a/tests/test_tag_object.py b/tests/test_tag_object.py index 77512164..574bb331 100644 --- a/tests/test_tag_object.py +++ b/tests/test_tag_object.py @@ -29,6 +29,7 @@ def test_tag_object(self): True, ) + @patch("datadog_lambda.config.Config.capture_payload_max_depth", 2) def test_tag_object_max_depth(self): payload = { "hello": "world", @@ -41,11 +42,8 @@ def test_tag_object_max_depth(self): "vals": [{"thingOne": 1}, {"thingTwo": 2}], } spanMock = MagicMock() - import datadog_lambda.tag_object as lib_ref - lib_ref.max_depth = 2 # setting up the test tag_object(spanMock, "function.request", payload) - lib_ref.max_depth = 10 # revert the setup spanMock.set_tag.assert_has_calls( [ 
call("function.request.vals.0", "{'thingOne': 1}"), @@ -62,6 +60,7 @@ def test_tag_object_max_depth(self): True, ) + @patch("datadog_lambda.config.Config.capture_payload_max_depth", 0) def test_tag_object_max_depth_0(self): payload = { "hello": "world", @@ -74,11 +73,8 @@ def test_tag_object_max_depth_0(self): "vals": [{"thingOne": 1}, {"thingTwo": 2}], } spanMock = MagicMock() - import datadog_lambda.tag_object as lib_ref - lib_ref.max_depth = 0 # setting up the test tag_object(spanMock, "function.request", payload) - lib_ref.max_depth = 10 # revert the setup spanMock.set_tag.assert_has_calls( [ call( diff --git a/tests/test_tracing.py b/tests/test_tracing.py index e38e4ecd..a629343e 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -251,20 +251,16 @@ def test_extract_dd_trace_context(event, expect): class TestExtractAndGetDDTraceContext(unittest.TestCase): def setUp(self): - global dd_tracing_enabled - dd_tracing_enabled = False os.environ["_X_AMZN_TRACE_ID"] = fake_xray_header_value patcher = patch("datadog_lambda.tracing.send_segment") self.mock_send_segment = patcher.start() self.addCleanup(patcher.stop) - patcher = patch("datadog_lambda.tracing.is_lambda_context") + patcher = patch("datadog_lambda.config.Config.is_lambda_context") self.mock_is_lambda_context = patcher.start() self.mock_is_lambda_context.return_value = True self.addCleanup(patcher.stop) def tearDown(self): - global dd_tracing_enabled - dd_tracing_enabled = False del os.environ["_X_AMZN_TRACE_ID"] @with_trace_propagation_style("datadog") @@ -984,11 +980,12 @@ def setUp(self): ) self.addCleanup(patcher.stop) - patcher = patch("datadog_lambda.tracing.is_lambda_context") + patcher = patch("datadog_lambda.config.Config.is_lambda_context") self.mock_is_lambda_context = patcher.start() self.mock_is_lambda_context.return_value = True self.addCleanup(patcher.stop) + @patch("datadog_lambda.config.Config.trace_enabled", False) def test_set_correlation_ids(self): set_correlation_ids() span = tracer.current_span() @@ -1124,13 +1121,11 @@ def test_function_with_span_pointers(self): class TestSetTraceRootSpan(unittest.TestCase): def setUp(self): - global dd_tracing_enabled - dd_tracing_enabled = False os.environ["_X_AMZN_TRACE_ID"] = fake_xray_header_value patcher = patch("datadog_lambda.tracing.send_segment") self.mock_send_segment = patcher.start() self.addCleanup(patcher.stop) - patcher = patch("datadog_lambda.tracing.is_lambda_context") + patcher = patch("datadog_lambda.config.Config.is_lambda_context") self.mock_is_lambda_context = patcher.start() self.mock_is_lambda_context.return_value = True self.addCleanup(patcher.stop) @@ -1143,8 +1138,6 @@ def setUp(self): self.addCleanup(patcher.stop) def tearDown(self): - global dd_tracing_enabled - dd_tracing_enabled = False del os.environ["_X_AMZN_TRACE_ID"] def test_mixed_parent_context_when_merging(self): @@ -1245,6 +1238,7 @@ def test_get_service_mapping(self): create_service_mapping(os.environ["DD_SERVICE_MAPPING"]) ) self.assertEqual(self.get_service_mapping(), expected_output) + del os.environ["DD_SERVICE_MAPPING"] def test_set_service_mapping(self): new_service_mapping = {"api3": "service3", "api4": "service4"} @@ -1285,6 +1279,8 @@ def test_determine_service_name(self): "default", ) + del os.environ["DD_SERVICE_MAPPING"] + def test_remaps_all_inferred_span_service_names_from_api_gateway_event(self): new_service_mapping = {"lambda_api_gateway": "new-name"} self.set_service_mapping(new_service_mapping) @@ -2386,7 +2382,7 @@ def 
test_deterministic_m5_hash__always_leading_with_zero(self): class TestExceptionOutsideHandler(unittest.TestCase): - @patch("datadog_lambda.tracing.dd_tracing_enabled", True) + @patch("datadog_lambda.config.Config.trace_enabled", True) @patch("datadog_lambda.tracing.submit_errors_metric") @patch("time.time_ns", return_value=42) def test_exception_outside_handler_tracing_enabled( @@ -2427,7 +2423,7 @@ def test_exception_outside_handler_tracing_enabled( assert mock_span.error == 1 assert mock_span.start_ns == 42 - @patch("datadog_lambda.tracing.dd_tracing_enabled", False) + @patch("datadog_lambda.config.Config.trace_enabled", False) @patch("datadog_lambda.tracing.submit_errors_metric") @patch("time.time_ns", return_value=42) def test_exception_outside_handler_tracing_disabled( diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py index f482fa3d..f0240905 100644 --- a/tests/test_wrapper.py +++ b/tests/test_wrapper.py @@ -8,6 +8,8 @@ import datadog_lambda.wrapper as wrapper import datadog_lambda.xray as xray + +from datadog_lambda.config import config from datadog_lambda.metric import lambda_metric from datadog_lambda.thread_stats_writer import ThreadStatsWriter from ddtrace.trace import Span, tracer @@ -24,7 +26,6 @@ def setUp(self): patch("ddtrace.internal.writer.AgentWriter.flush_queue").start() wrapper.datadog_lambda_wrapper._force_wrap = True - wrapper.dd_tracing_enabled = True patcher = patch( "datadog.threadstats.reporters.HttpReporter.flush_distributions" ) @@ -80,9 +81,8 @@ def setUp(self): self.mock_set_dsm_context = patcher.start() self.addCleanup(patcher.stop) + @patch("datadog_lambda.config.Config.trace_enabled", False) def test_datadog_lambda_wrapper(self): - wrapper.dd_tracing_enabled = False - @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): lambda_metric("test.metric", 100) @@ -92,7 +92,6 @@ def lambda_handler(event, context): lambda_context = get_mock_context() lambda_handler(lambda_event, lambda_context) - wrapper.dd_tracing_enabled = True self.mock_threadstats_flush_distributions.assert_has_calls( [ call( @@ -189,9 +188,9 @@ def lambda_handler(event, context): metric_module.lambda_stats.stop() metric_module.lambda_stats = ThreadStatsWriter(False) + @patch("datadog_lambda.config.Config.trace_enabled", False) def test_datadog_lambda_wrapper_inject_correlation_ids(self): os.environ["DD_LOGS_INJECTION"] = "True" - wrapper.dd_tracing_enabled = False @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -199,7 +198,6 @@ def lambda_handler(event, context): lambda_event = {} lambda_handler(lambda_event, get_mock_context()) - wrapper.dd_tracing_enabled = True self.mock_set_correlation_ids.assert_called() self.mock_inject_correlation_ids.assert_called() @@ -457,11 +455,8 @@ def lambda_handler(event, context): ] ) + @patch("datadog_lambda.config.Config.enhanced_metrics_enabled", False) def test_no_enhanced_metrics_without_env_var(self): - patcher = patch("datadog_lambda.metric.enhanced_metrics_enabled", False) - patcher.start() - self.addCleanup(patcher.stop) - @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): raise RuntimeError() @@ -515,6 +510,7 @@ def lambda_handler(event, context): self.assertEqual(os.environ.get("DD_REQUESTS_SERVICE_NAME"), "myAwesomeService") del os.environ["DD_SERVICE"] + @patch("datadog_lambda.config.Config.make_inferred_span", False) def test_encode_authorizer_span(self): @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -541,7 +537,6 @@ def lambda_handler(event, context): 
trace_ctx.sampling_priority = 1 test_span.finish() lambda_handler.inferred_span = test_span - lambda_handler.make_inferred_span = False result = lambda_handler(lambda_event, lambda_context) raw_inject_data = result["context"]["_datadog"] self.assertIsInstance(raw_inject_data, str) @@ -569,7 +564,7 @@ def return_type_test(event, context): def test_set_dsm_context_called_when_DSM_and_tracing_enabled(self): os.environ["DD_DATA_STREAMS_ENABLED"] = "true" - wrapper.dd_tracing_enabled = True + os.environ["DD_TRACE_ENABLED"] = "true" @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -583,7 +578,7 @@ def lambda_handler(event, context): def test_set_dsm_context_not_called_when_only_DSM_enabled(self): os.environ["DD_DATA_STREAMS_ENABLED"] = "true" - wrapper.dd_tracing_enabled = False + os.environ["DD_TRACE_ENABLED"] = "false" @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -597,7 +592,7 @@ def lambda_handler(event, context): def test_set_dsm_context_not_called_when_only_tracing_enabled(self): os.environ["DD_DATA_STREAMS_ENABLED"] = "false" - wrapper.dd_tracing_enabled = True + os.environ["DD_TRACE_ENABLED"] = "true" @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -611,7 +606,7 @@ def lambda_handler(event, context): def test_set_dsm_context_not_called_when_tracing_and_DSM_disabled(self): os.environ["DD_DATA_STREAMS_ENABLED"] = "false" - wrapper.dd_tracing_enabled = False + os.environ["DD_TRACE_ENABLED"] = "false" @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -624,18 +619,6 @@ def lambda_handler(event, context): del os.environ["DD_DATA_STREAMS_ENABLED"] -class TestLambdaDecoratorSettings(unittest.TestCase): - def test_some_envs_should_depend_on_dd_tracing_enabled(self): - wrapper.dd_tracing_enabled = False - os.environ[wrapper.DD_TRACE_MANAGED_SERVICES] = "true" - os.environ[wrapper.DD_ENCODE_AUTHORIZER_CONTEXT] = "true" - os.environ[wrapper.DD_DECODE_AUTHORIZER_CONTEXT] = "true" - decorator = wrapper._LambdaDecorator(func=None) - self.assertFalse(decorator.make_inferred_span) - self.assertFalse(decorator.encode_authorizer_context) - self.assertFalse(decorator.decode_authorizer_context) - - class TestLambdaWrapperWithTraceContext(unittest.TestCase): xray_root = "1-5e272390-8c398be037738dc042009320" xray_parent = "94ae789b969f1cc5" @@ -706,14 +689,28 @@ def handler(event, context): class TestLambdaWrapperFlushExtension(unittest.TestCase): - def setUp(self): - self.orig_environ = os.environ + @patch("datadog_lambda.config.Config.local_test", True) + @patch("datadog_lambda.wrapper.should_use_extension", True) + def test_local_test_true_flushing(self): + flushes = [] + lambda_event = {} + lambda_context = get_mock_context() + + def flush(): + flushes.append(1) - def tearDown(self): - os.environ = self.orig_environ + @patch("datadog_lambda.wrapper.flush_extension", flush) + @wrapper.datadog_lambda_wrapper + def lambda_handler(event, context): + pass + lambda_handler(lambda_event, lambda_context) + + self.assertEqual(len(flushes), 1) + + @patch("datadog_lambda.config.Config.local_test", False) @patch("datadog_lambda.wrapper.should_use_extension", True) - def test_local_test_envvar_flushing(self): + def test_local_test_false_flushing(self): flushes = [] lambda_event = {} lambda_context = get_mock_context() @@ -721,24 +718,11 @@ def test_local_test_envvar_flushing(self): def flush(): flushes.append(1) - for environ, flush_called in ( - ({"DD_LOCAL_TEST": "True"}, True), - ({"DD_LOCAL_TEST": "true"}, True), - 
({"DD_LOCAL_TEST": "1"}, True), - ({"DD_LOCAL_TEST": "False"}, False), - ({"DD_LOCAL_TEST": "false"}, False), - ({"DD_LOCAL_TEST": "0"}, False), - ({"DD_LOCAL_TEST": ""}, False), - ({}, False), - ): - os.environ = environ - flushes.clear() - - @patch("datadog_lambda.wrapper.flush_extension", flush) - @wrapper.datadog_lambda_wrapper - def lambda_handler(event, context): - pass + @patch("datadog_lambda.wrapper.flush_extension", flush) + @wrapper.datadog_lambda_wrapper + def lambda_handler(event, context): + pass - lambda_handler(lambda_event, lambda_context) + lambda_handler(lambda_event, lambda_context) - self.assertEqual(flush_called, len(flushes) == 1) + self.assertEqual(len(flushes), 0) diff --git a/tests/utils.py b/tests/utils.py index 0f246e68..2d56ca0c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -22,6 +22,7 @@ def get_mock_context( lambda_context.invoked_function_arn = invoked_function_arn lambda_context.function_version = function_version lambda_context.function_name = function_name + lambda_context.get_remaining_time_in_millis = lambda: 100 lambda_context.client_context = ClientContext(custom) return lambda_context From e230d94554991136bc185bef2c46261f50073abc Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Wed, 11 Jun 2025 14:54:46 -0400 Subject: [PATCH 36/44] handle a case where the record is some customized item (#616) --- datadog_lambda/trigger.py | 2 +- tests/test_trigger.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datadog_lambda/trigger.py b/datadog_lambda/trigger.py index 14cb06ac..bbd0d027 100644 --- a/datadog_lambda/trigger.py +++ b/datadog_lambda/trigger.py @@ -153,7 +153,7 @@ def parse_event_source(event: dict) -> _EventSource: event_source = _EventSource(EventTypes.STEPFUNCTIONS) event_record = get_first_record(event) - if event_record: + if event_record and isinstance(event_record, dict): aws_event_source = event_record.get("eventSource") or event_record.get( "EventSource" ) diff --git a/tests/test_trigger.py b/tests/test_trigger.py index c12e8f5c..15103937 100644 --- a/tests/test_trigger.py +++ b/tests/test_trigger.py @@ -280,6 +280,13 @@ def test_detect_lambda_function_url_domain_with_invalid_input(self): # Test with string that would normally cause an exception when split self.assertFalse(detect_lambda_function_url_domain("")) + def test_event_source_with_non_dict_event_record(self): + # Test with event_record that's not a dictionary + event = {"Records": "not_a_dict"} + event_source = parse_event_source(event) + # Should handle the first non-dict record gracefully and return unknown + self.assertEqual(event_source.to_string(), "unknown") + class GetTriggerTags(unittest.TestCase): def test_extract_trigger_tags_api_gateway(self): From 8f2a45af6d90845efaf69246349f572e0de0b445 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Jun 2025 16:01:59 -0400 Subject: [PATCH 37/44] chore(deps): bump protobuf from 5.29.3 to 5.29.5 (#619) Bumps [protobuf](https://github.com/protocolbuffers/protobuf) from 5.29.3 to 5.29.5. - [Release notes](https://github.com/protocolbuffers/protobuf/releases) - [Changelog](https://github.com/protocolbuffers/protobuf/blob/main/protobuf_release.bzl) - [Commits](https://github.com/protocolbuffers/protobuf/compare/v5.29.3...v5.29.5) --- updated-dependencies: - dependency-name: protobuf dependency-version: 5.29.5 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 145 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 83 insertions(+), 62 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3953f953..434f887c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,23 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. - -[[package]] -name = "boto3" -version = "1.36.8" -description = "The AWS SDK for Python" -optional = true -python-versions = ">=3.8" -files = [ - {file = "boto3-1.36.8-py3-none-any.whl", hash = "sha256:7f61c9d0ea64f484a17c1e3115fdf90fd7b17ab6771e07cb4549f42b9fd28fb9"}, - {file = "boto3-1.36.8.tar.gz", hash = "sha256:ac47215d320b0c2534340db58d6d5284cb1860b7bff172b4dd6eee2dee1d5779"}, -] - -[package.dependencies] -botocore = ">=1.36.8,<1.37.0" -jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.11.0,<0.12.0" - -[package.extras] -crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "botocore" @@ -25,6 +6,8 @@ version = "1.36.8" description = "Low-level, data-driven core of boto 3." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "botocore-1.36.8-py3-none-any.whl", hash = "sha256:59d3fdfbae6d916b046e973bebcbeb70a102f9e570ca86d5ba512f1854b78fc2"}, {file = "botocore-1.36.8.tar.gz", hash = "sha256:81c88e5566cf018e1411a68304dc1fb9e4156ca2b50a3a0f0befc274299e67fa"}, @@ -34,8 +17,8 @@ files = [ jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = [ - {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, ] [package.extras] @@ -47,6 +30,7 @@ version = "0.16.1" description = "Python module to generate and modify bytecode" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "bytecode-0.16.1-py3-none-any.whl", hash = "sha256:1d4b61ed6bade4bff44127c8283bef8131a664ce4dbe09d64a88caf329939f35"}, {file = "bytecode-0.16.1.tar.gz", hash = "sha256:8fbbb637c880f339e564858bc6c7984ede67ae97bc71343379a535a9a4baf398"}, @@ -61,6 +45,7 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -72,6 +57,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -173,6 +159,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "extra == \"dev\" and sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -184,6 +172,7 @@ version = "0.51.0" description = "The Datadog Python library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] files = [ {file = "datadog-0.51.0-py2.py3-none-any.whl", hash = "sha256:a9764f091c96af4e0996d4400b168fc5fba380f911d6d672c9dcd4773e29ea3f"}, {file = "datadog-0.51.0.tar.gz", hash = "sha256:3279534f831ae0b4ae2d8ce42ef038b4ab38e667d7ed6ff7437982d7a0cf5250"}, @@ -198,6 +187,7 @@ version = "2.20.0" description = "Datadog APM client library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "ddtrace-2.20.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e1dee099099b95acf7d0e552179925cfec58a52315cc914d153506367b195bc4"}, {file = "ddtrace-2.20.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:9d209bef14caafcd53be8c14e04741d86c08f76496c1bf755e2eaa38605ce3e0"}, @@ -271,10 +261,10 @@ files = [ [package.dependencies] bytecode = [ + {version = ">=0.13.0", markers = "python_version < \"3.11\""}, {version = ">=0.16.0", markers = "python_version >= \"3.13.0\""}, {version = ">=0.15.0", markers = "python_version ~= \"3.12.0\""}, {version = ">=0.14.0", markers = "python_version ~= \"3.11.0\""}, - {version = ">=0.13.0", markers = "python_version < \"3.11.0\""}, ] envier = ">=0.5,<1.0" legacy-cgi = {version = ">=2.0.0", markers = "python_version >= \"3.13.0\""} @@ -294,6 +284,7 @@ version = "1.2.18" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["main"] files = [ {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, @@ -303,7 +294,7 @@ files = [ wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] [[package]] name = "envier" @@ -311,6 +302,7 @@ version = "0.6.1" description = "Python application configuration via the environment" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "envier-0.6.1-py3-none-any.whl", hash = "sha256:73609040a76be48bbcb97074d9969666484aa0de706183a6e9ef773156a8a6a9"}, {file = "envier-0.6.1.tar.gz", hash = "sha256:3309a01bb3d8850c9e7a31a5166d5a836846db2faecb79b9cb32654dd50ca9f9"}, @@ -325,6 +317,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"dev\" and python_version <= \"3.10\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -339,6 +333,8 @@ version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" optional = true python-versions = ">=3.6.1" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, @@ -355,6 +351,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -369,6 +366,7 @@ version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, @@ -378,12 +376,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", 
"pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -392,6 +390,8 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -403,6 +403,8 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -414,6 +416,8 @@ version = "2.6.2" description = "Fork of the standard library cgi and cgitb modules, being deprecated in PEP-594" optional = false python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.13.0\"" files = [ {file = "legacy_cgi-2.6.2-py3-none-any.whl", hash = "sha256:a7b83afb1baf6ebeb56522537c5943ef9813cf933f6715e88a803f7edbce0bff"}, {file = "legacy_cgi-2.6.2.tar.gz", hash = "sha256:9952471ceb304043b104c22d00b4f333cac27a6abe446d8a528fc437cf13c85f"}, @@ -425,6 +429,8 @@ version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -436,6 +442,7 @@ version = "1.29.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "opentelemetry_api-1.29.0-py3-none-any.whl", hash = "sha256:5fcd94c4141cc49c736271f3e1efb777bebe9cc535759c54c936cca4f1b312b8"}, {file = "opentelemetry_api-1.29.0.tar.gz", hash = "sha256:d04a6cf78aad09614f52964ecb38021e248f5714dc32c2e0d8fd99517b4d69cf"}, @@ -451,6 +458,8 @@ version = "24.2" description = "Core utilities for Python packages" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -462,6 +471,8 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -473,22 +484,23 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "5.29.3" +version = "5.29.5" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"}, - {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = 
"sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"}, - {file = "protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e"}, - {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84"}, - {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f"}, - {file = "protobuf-5.29.3-cp38-cp38-win32.whl", hash = "sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252"}, - {file = "protobuf-5.29.3-cp38-cp38-win_amd64.whl", hash = "sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107"}, - {file = "protobuf-5.29.3-cp39-cp39-win32.whl", hash = "sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7"}, - {file = "protobuf-5.29.3-cp39-cp39-win_amd64.whl", hash = "sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da"}, - {file = "protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f"}, - {file = "protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620"}, + {file = "protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079"}, + {file = "protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc"}, + {file = "protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671"}, + {file = "protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015"}, + {file = "protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61"}, + {file = "protobuf-5.29.5-cp38-cp38-win32.whl", hash = "sha256:ef91363ad4faba7b25d844ef1ada59ff1604184c0bcd8b39b8a6bef15e1af238"}, + {file = "protobuf-5.29.5-cp38-cp38-win_amd64.whl", hash = "sha256:7318608d56b6402d2ea7704ff1e1e4597bee46d760e7e4dd42a3d45e24b87f2e"}, + {file = "protobuf-5.29.5-cp39-cp39-win32.whl", hash = "sha256:6f642dc9a61782fa72b90878af134c5afe1917c89a568cd3476d758d3c3a0736"}, + {file = "protobuf-5.29.5-cp39-cp39-win_amd64.whl", hash = "sha256:470f3af547ef17847a28e1f47200a1cbf0ba3ff57b7de50d22776607cd2ea353"}, + {file = "protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5"}, + {file = "protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84"}, ] [[package]] @@ -497,6 +509,8 @@ version = "9.0.0" description = "Get CPU info with pure Python" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, @@ -508,6 +522,8 @@ version = "2.9.1" description = "Python style guide checker" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = 
"sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, @@ -519,6 +535,8 @@ version = "2.5.0" description = "passive checker of Python programs" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, @@ -530,6 +548,8 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -552,6 +572,8 @@ version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"}, {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"}, @@ -572,6 +594,8 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -586,6 +610,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -601,29 +626,14 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "s3transfer" -version = "0.11.2" -description = "An Amazon S3 Transfer Manager" -optional = true -python-versions = ">=3.8" -files = [ - {file = "s3transfer-0.11.2-py3-none-any.whl", hash = "sha256:be6ecb39fadd986ef1701097771f87e4d2f821f27f6071c872143884d2950fbc"}, - {file = "s3transfer-0.11.2.tar.gz", hash = "sha256:3b39185cb72f5acc77db1a58b6e25b977f28d20496b6e58d6813d75f464d632f"}, -] - -[package.dependencies] -botocore = ">=1.36.0,<2.0a.0" - -[package.extras] -crt = ["botocore[crt] (>=1.36.0,<2.0a.0)"] - [[package]] name = "six" version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -635,6 +645,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\" and python_version <= \"3.10\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -676,6 +688,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -687,6 +700,7 @@ version = "5.10.0" description = "Ultra fast JSON encoder and decoder for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, @@ -774,14 +788,16 @@ version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -790,13 +806,15 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -807,6 +825,7 @@ version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -895,6 +914,7 @@ version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, @@ -906,23 +926,24 @@ version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [extras] -dev = ["boto3", "flake8", "pytest", "pytest-benchmark", "requests"] +dev = ["botocore", "flake8", "pytest", "pytest-benchmark", "requests"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.8.0,<4" -content-hash = "9128af5437fd535ec458c64280d8390574c632e704cace5ea783de3c5d453c8c" +content-hash = "f6a2f7355200da107aa5b027d6fe4fb6bdb5a898ce8298a56e6ac39fe8d8e34d" From f93bc0a88dff1cddb184938efddbb5289add21cc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Jun 2025 16:02:16 -0400 Subject: [PATCH 38/44] chore(deps): bump brace-expansion in /tests/integration (#617) Bumps [brace-expansion](https://github.com/juliangruber/brace-expansion) from 1.1.11 to 1.1.12. - [Release notes](https://github.com/juliangruber/brace-expansion/releases) - [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12) --- updated-dependencies: - dependency-name: brace-expansion dependency-version: 1.1.12 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- tests/integration/yarn.lock | 339 ++---------------------------------- 1 file changed, 16 insertions(+), 323 deletions(-) diff --git a/tests/integration/yarn.lock b/tests/integration/yarn.lock index f96feb52..37cb357d 100644 --- a/tests/integration/yarn.lock +++ b/tests/integration/yarn.lock @@ -2,28 +2,6 @@ # yarn lockfile v1 -"@iarna/toml@^2.2.5": - version "2.2.5" - resolved "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz" - integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== - -"@isaacs/cliui@^8.0.2": - version "8.0.2" - resolved "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz" - integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== - dependencies: - string-width "^5.1.2" - string-width-cjs "npm:string-width@^4.2.0" - strip-ansi "^7.0.1" - strip-ansi-cjs "npm:strip-ansi@^6.0.1" - wrap-ansi "^8.1.0" - wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" - -"@pkgjs/parseargs@^0.11.0": - version "0.11.0" - resolved "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz" - integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== - "2-thenable@^1.0.0": version "1.0.0" resolved "https://registry.npmjs.org/2-thenable/-/2-thenable-1.0.0.tgz" @@ -32,16 +10,16 @@ d "1" es5-ext "^0.10.47" +"@iarna/toml@^2.2.5": + version "2.2.5" + resolved "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz" + integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== + ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-regex@^6.0.1: - version "6.1.0" - resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz" - integrity sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA== - ansi-styles@^4.0.0: version "4.3.0" resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" @@ -49,37 +27,11 @@ ansi-styles@^4.0.0: dependencies: color-convert "^2.0.1" -ansi-styles@^6.1.0: - version "6.2.1" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz" - integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== - appdirectory@^0.1.0: version "0.1.0" resolved "https://registry.npmjs.org/appdirectory/-/appdirectory-0.1.0.tgz" integrity sha512-DJ5DV8vZXBbusyiyPlH28xppwS8eAMRuuyMo88xeEcf4bV64lbLtbxRxqixZuJBXsZzLtXFmA13GwVjJc7vdQw== -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" - integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== - -axios-proxy-builder@^0.1.2: - version "0.1.2" - resolved "https://registry.npmjs.org/axios-proxy-builder/-/axios-proxy-builder-0.1.2.tgz" - integrity sha512-6uBVsBZzkB3tCC8iyx59mCjQckhB8+GQrI9Cop8eC7ybIsvs/KtnNgEBfRMSEa7GqK2VBGUzgjNYMdPIfotyPA== - dependencies: - tunnel "^0.0.6" - -axios@^1.7.4: - version "1.7.9" - resolved "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz" - integrity sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw== - dependencies: - follow-redirects "^1.15.6" - form-data "^4.0.0" - 
proxy-from-env "^1.1.0" - balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" @@ -91,20 +43,13 @@ bluebird@^3.7.2: integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + version "1.1.12" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.12.tgz#ab9b454466e5a8cc3a187beaad580412a9c5b843" + integrity sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" -brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - camelcase@^5.0.0: version "5.3.1" resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" @@ -142,13 +87,6 @@ color-name@~1.1.4: resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -combined-stream@^1.0.8: - version "1.0.8" - resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - concat-map@0.0.1: version "0.0.1" resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" @@ -170,16 +108,7 @@ cross-spawn@^6.0.5: shebang-command "^1.2.0" which "^1.2.9" -cross-spawn@^7.0.0: - version "7.0.6" - resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz" - integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -d@^1.0.1, d@^1.0.2, d@1: +d@1, d@^1.0.1, d@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/d/-/d-1.0.2.tgz" integrity sha512-MOqHvMWF9/9MX6nza0KgvFH4HpMU0EF5uUDXqX/BtxtU8NfB0QzRtJ8Oe/6SuS4kbhyzVJwjd97EA4PKrzJ8bw== @@ -192,11 +121,6 @@ decamelize@^1.2.0: resolved "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz" integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - duration@^0.2.2: version "0.2.2" resolved "https://registry.npmjs.org/duration/-/duration-0.2.2.tgz" @@ -205,21 +129,11 @@ duration@^0.2.2: d "1" es5-ext "~0.10.46" -eastasianwidth@^0.2.0: - version "0.2.0" - resolved "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz" - integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== - emoji-regex@^8.0.0: version "8.0.0" resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== -emoji-regex@^9.2.2: - version "9.2.2" - resolved 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz" - integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== - es5-ext@^0.10.35, es5-ext@^0.10.47, es5-ext@^0.10.49, es5-ext@^0.10.53, es5-ext@^0.10.62, es5-ext@^0.10.64, es5-ext@~0.10.14, es5-ext@~0.10.46: version "0.10.64" resolved "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.64.tgz" @@ -280,28 +194,6 @@ find-up@^4.1.0: locate-path "^5.0.0" path-exists "^4.0.0" -follow-redirects@^1.15.6: - version "1.15.9" - resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz" - integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== - -foreground-child@^3.1.0: - version "3.3.0" - resolved "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz" - integrity sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg== - dependencies: - cross-spawn "^7.0.0" - signal-exit "^4.0.1" - -form-data@^4.0.0: - version "4.0.1" - resolved "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz" - integrity sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - fs-extra@^10.1.0: version "10.1.0" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" @@ -329,31 +221,7 @@ glob-all@^3.3.1: glob "^7.2.3" yargs "^15.3.1" -glob@^10.3.7: - version "10.4.5" - resolved "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz" - integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== - dependencies: - foreground-child "^3.1.0" - jackspeak "^3.1.2" - minimatch "^9.0.4" - minipass "^7.1.2" - package-json-from-dist "^1.0.0" - path-scurry "^1.11.1" - -glob@^7.1.3: - version "7.2.3" - resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@^7.2.3: +glob@^7.1.3, glob@^7.2.3: version "7.2.3" resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -383,7 +251,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@^2.0.3, inherits@~2.0.3, inherits@2: +inherits@2, inherits@^2.0.3, inherits@~2.0.3: version "2.0.4" resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -437,15 +305,6 @@ isobject@^3.0.1: resolved "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== -jackspeak@^3.1.2: - version "3.4.3" - resolved "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz" - integrity sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw== - dependencies: - "@isaacs/cliui" "^8.0.2" - optionalDependencies: - "@pkgjs/parseargs" "^0.11.0" - jsonfile@^6.0.1: version "6.1.0" resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz" @@ -507,23 +366,6 @@ log@^6.0.0: type "^2.7.3" uni-global "^1.0.0" -lru-cache@^10.2.0: - version "10.4.3" - resolved 
"https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz" - integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== - -mime-db@1.52.0: - version "1.52.0" - resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12: - version "2.1.35" - resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - minimatch@^3.1.1: version "3.1.2" resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" @@ -531,18 +373,6 @@ minimatch@^3.1.1: dependencies: brace-expansion "^1.1.7" -minimatch@^9.0.4: - version "9.0.5" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz" - integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== - dependencies: - brace-expansion "^2.0.1" - -"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: - version "7.1.2" - resolved "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz" - integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== - next-tick@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz" @@ -579,11 +409,6 @@ p-try@^2.0.0: resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== -package-json-from-dist@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz" - integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw== - pako@~1.0.2: version "1.0.11" resolved "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz" @@ -604,29 +429,11 @@ path-key@^2.0.1: resolved "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz" integrity sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw== -path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" - integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-scurry@^1.11.1: - version "1.11.1" - resolved "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz" - integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== - dependencies: - lru-cache "^10.2.0" - minipass "^5.0.0 || ^6.0.2 || ^7.0.0" - process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== -proxy-from-env@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz" - integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== - readable-stream@^3.0.0: version "3.6.2" resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz" @@ -666,23 +473,11 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" -rimraf@^5.0.5: - version "5.0.10" - resolved "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz" - integrity 
sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ== - dependencies: - glob "^10.3.7" - safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -sax@>=0.6.0: - version "1.4.1" - resolved "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz" - integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== - semver@^5.5.0: version "5.7.2" resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" @@ -720,16 +515,6 @@ serverless-python-requirements@^6.1.1: sha256-file "1.0.0" shell-quote "^1.8.1" -serverless@>=2.32: - version "4.5.0" - resolved "https://registry.npmjs.org/serverless/-/serverless-4.5.0.tgz" - integrity sha512-msbs5I/fuPiW0ZfBuFA7lpKazCTFtF0AhOCA0HsrJBGVaOrNbwzlC/krZKXn1YgDR2+cw/izKRNupZJXtpyxJQ== - dependencies: - axios "^1.7.4" - axios-proxy-builder "^0.1.2" - rimraf "^5.0.5" - xml2js "0.6.2" - set-blocking@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz" @@ -760,33 +545,16 @@ shebang-command@^1.2.0: dependencies: shebang-regex "^1.0.0" -shebang-command@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz" integrity sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ== -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - shell-quote@^1.8.1: version "1.8.2" resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.2.tgz" integrity sha512-AzqKpGKjrj7EM6rKVQEPpB288oCfnrEIuyoT9cyF4nmGa7V8Zk6f7RRqYisX8X9m+Q7bd632aZW4ky7EhbQztA== -signal-exit@^4.0.1: - version "4.1.0" - resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz" - integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== - split2@^3.1.1: version "3.2.2" resolved "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz" @@ -810,22 +578,6 @@ stream-promise@^3.2.0: es5-ext "^0.10.49" is-stream "^1.1.0" -string_decoder@^1.1.1, string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -"string-width-cjs@npm:string-width@^4.2.0": - version "4.2.3" - resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - string-width@^4.1.0, string-width@^4.2.0: version "4.2.3" resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" @@ -835,21 +587,12 @@ string-width@^4.1.0, string-width@^4.2.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string-width@^5.0.1, 
string-width@^5.1.2: - version "5.1.2" - resolved "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz" - integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== - dependencies: - eastasianwidth "^0.2.0" - emoji-regex "^9.2.2" - strip-ansi "^7.0.1" - -"strip-ansi-cjs@npm:strip-ansi@^6.0.1": - version "6.0.1" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== +string_decoder@^1.1.1, string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: - ansi-regex "^5.0.1" + safe-buffer "~5.1.0" strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" @@ -858,18 +601,6 @@ strip-ansi@^6.0.0, strip-ansi@^6.0.1: dependencies: ansi-regex "^5.0.1" -strip-ansi@^7.0.1: - version "7.1.0" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz" - integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== - dependencies: - ansi-regex "^6.0.1" - -tunnel@^0.0.6: - version "0.0.6" - resolved "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz" - integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg== - type@^2.5.0, type@^2.7.2, type@^2.7.3: version "2.7.3" resolved "https://registry.npmjs.org/type/-/type-2.7.3.tgz" @@ -904,22 +635,6 @@ which@^1.2.9: dependencies: isexe "^2.0.0" -which@^2.0.1: - version "2.0.2" - resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz" - integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": - version "7.0.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - wrap-ansi@^6.2.0: version "6.2.0" resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz" @@ -929,33 +644,11 @@ wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" -wrap-ansi@^8.1.0: - version "8.1.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz" - integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== - dependencies: - ansi-styles "^6.1.0" - string-width "^5.0.1" - strip-ansi "^7.0.1" - wrappy@1: version "1.0.2" resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== -xml2js@0.6.2: - version "0.6.2" - resolved "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz" - integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA== - dependencies: - sax ">=0.6.0" - xmlbuilder "~11.0.0" - -xmlbuilder@~11.0.0: - version "11.0.1" - resolved "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz" - integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== - y18n@^4.0.0: version "4.0.3" resolved "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz" From 267a5429a99e85746537a1585608497a9d355fef Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Jun 2025 23:51:26 -0400 Subject: [PATCH 39/44] chore(deps): bump requests from 2.32.3 to 2.32.4 (#615) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 434f887c..4c37bf87 100644 --- a/poetry.lock +++ b/poetry.lock @@ -606,19 +606,19 @@ six = ">=1.5" [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" From b08b7635675826278170dbb570e2d9307ae02cf9 Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Mon, 23 Jun 2025 12:46:01 -0400 Subject: [PATCH 40/44] release v6.111.0 (#625) --- datadog_lambda/version.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_lambda/version.py b/datadog_lambda/version.py index 9534f0c7..2bb1df7f 100644 --- a/datadog_lambda/version.py +++ b/datadog_lambda/version.py @@ -1 +1 @@ -__version__ = "6.110.0" +__version__ = "6.111.0" diff --git a/pyproject.toml b/pyproject.toml index ba5bcb17..1d5feb7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "6.110.0" +version = "6.111.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. "] license = "Apache-2.0" From 555ed5bd0635afe57c8bdfe4c09767effe24f855 Mon Sep 17 00:00:00 2001 From: Florentin Labelle Date: Thu, 26 Jun 2025 09:54:10 +0200 Subject: [PATCH 41/44] feat: initial support for ASM inside the tracer (#621) * build: add back libddwaf in the layer * fix: ensure the start_ns of a function url inferred span is an int * feat(asm): enable Threat Detection inside AWS Lambda for HTTP events * test(asm): test parsing events for lambda * build: bump layer size check * fix(asm): work with non dictionary responses * fix(asm): add extra check + comment on listeners --- Dockerfile | 1 - datadog_lambda/asm.py | 174 +++++++++ datadog_lambda/config.py | 1 + datadog_lambda/tracing.py | 2 +- datadog_lambda/wrapper.py | 12 + scripts/check_layer_size.sh | 4 +- ...ation-load-balancer-mutivalue-headers.json | 31 ++ tests/test_asm.py | 329 ++++++++++++++++++ 8 files changed, 550 insertions(+), 4 deletions(-) create mode 100644 datadog_lambda/asm.py create mode 100644 tests/event_samples/application-load-balancer-mutivalue-headers.json create mode 100644 tests/test_asm.py diff --git a/Dockerfile b/Dockerfile index 0e79d884..c5824528 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,7 +21,6 @@ RUN pip install --no-cache-dir . 
-t ./python/lib/$runtime/site-packages RUN rm -rf ./python/lib/$runtime/site-packages/botocore* RUN rm -rf ./python/lib/$runtime/site-packages/setuptools RUN rm -rf ./python/lib/$runtime/site-packages/jsonschema/tests -RUN find . -name 'libddwaf.so' -delete RUN rm -f ./python/lib/$runtime/site-packages/ddtrace/appsec/_iast/_taint_tracking/*.so RUN rm -f ./python/lib/$runtime/site-packages/ddtrace/appsec/_iast/_stacktrace*.so RUN rm -f ./python/lib/$runtime/site-packages/ddtrace/internal/datadog/profiling/libdd_wrapper*.so diff --git a/datadog_lambda/asm.py b/datadog_lambda/asm.py new file mode 100644 index 00000000..aab0f1e9 --- /dev/null +++ b/datadog_lambda/asm.py @@ -0,0 +1,174 @@ +from copy import deepcopy +import logging +from typing import Any, Dict, List, Optional, Union + +from ddtrace.contrib.internal.trace_utils import _get_request_header_client_ip +from ddtrace.internal import core +from ddtrace.trace import Span + +from datadog_lambda.trigger import ( + EventSubtypes, + EventTypes, + _EventSource, + _http_event_types, +) + +logger = logging.getLogger(__name__) + + +def _to_single_value_headers(headers: Dict[str, List[str]]) -> Dict[str, str]: + """ + Convert multi-value headers to single-value headers. + If a header has multiple values, join them with commas. + """ + single_value_headers = {} + for key, values in headers.items(): + single_value_headers[key] = ", ".join(values) + return single_value_headers + + +def _merge_single_and_multi_value_headers( + single_value_headers: Dict[str, str], + multi_value_headers: Dict[str, List[str]], +): + """ + Merge single-value headers with multi-value headers. + If a header exists in both, we merge them removing duplicates + """ + merged_headers = deepcopy(multi_value_headers) + for key, value in single_value_headers.items(): + if key not in merged_headers: + merged_headers[key] = [value] + elif value not in merged_headers[key]: + merged_headers[key].append(value) + return _to_single_value_headers(merged_headers) + + +def asm_start_request( + span: Span, + event: Dict[str, Any], + event_source: _EventSource, + trigger_tags: Dict[str, str], +): + if event_source.event_type not in _http_event_types: + return + + request_headers: Dict[str, str] = {} + peer_ip: Optional[str] = None + request_path_parameters: Optional[Dict[str, Any]] = None + route: Optional[str] = None + + if event_source.event_type == EventTypes.ALB: + headers = event.get("headers") + multi_value_request_headers = event.get("multiValueHeaders") + if multi_value_request_headers: + request_headers = _to_single_value_headers(multi_value_request_headers) + else: + request_headers = headers or {} + + raw_uri = event.get("path") + parsed_query = event.get("multiValueQueryStringParameters") or event.get( + "queryStringParameters" + ) + + elif event_source.event_type == EventTypes.LAMBDA_FUNCTION_URL: + request_headers = event.get("headers", {}) + peer_ip = event.get("requestContext", {}).get("http", {}).get("sourceIp") + raw_uri = event.get("rawPath") + parsed_query = event.get("queryStringParameters") + + elif event_source.event_type == EventTypes.API_GATEWAY: + request_context = event.get("requestContext", {}) + request_path_parameters = event.get("pathParameters") + route = trigger_tags.get("http.route") + + if event_source.subtype == EventSubtypes.API_GATEWAY: + request_headers = event.get("headers", {}) + peer_ip = request_context.get("identity", {}).get("sourceIp") + raw_uri = event.get("path") + parsed_query = event.get("multiValueQueryStringParameters") + + elif 
event_source.subtype == EventSubtypes.HTTP_API: + request_headers = event.get("headers", {}) + peer_ip = request_context.get("http", {}).get("sourceIp") + raw_uri = event.get("rawPath") + parsed_query = event.get("queryStringParameters") + + elif event_source.subtype == EventSubtypes.WEBSOCKET: + request_headers = _to_single_value_headers( + event.get("multiValueHeaders", {}) + ) + peer_ip = request_context.get("identity", {}).get("sourceIp") + raw_uri = event.get("path") + parsed_query = event.get("multiValueQueryStringParameters") + + else: + return + + else: + return + + body = event.get("body") + is_base64_encoded = event.get("isBase64Encoded", False) + + request_ip = _get_request_header_client_ip(request_headers, peer_ip, True) + if request_ip is not None: + span.set_tag_str("http.client_ip", request_ip) + span.set_tag_str("network.client.ip", request_ip) + + core.dispatch( + # The matching listener is registered in ddtrace.appsec._handlers + "aws_lambda.start_request", + ( + span, + request_headers, + request_ip, + body, + is_base64_encoded, + raw_uri, + route, + trigger_tags.get("http.method"), + parsed_query, + request_path_parameters, + ), + ) + + +def asm_start_response( + span: Span, + status_code: str, + event_source: _EventSource, + response: Union[Dict[str, Any], str, None], +): + if event_source.event_type not in _http_event_types: + return + + if isinstance(response, dict) and ( + "headers" in response or "multiValueHeaders" in response + ): + headers = response.get("headers", {}) + multi_value_request_headers = response.get("multiValueHeaders") + if isinstance(multi_value_request_headers, dict) and isinstance(headers, dict): + response_headers = _merge_single_and_multi_value_headers( + headers, multi_value_request_headers + ) + elif isinstance(headers, dict): + response_headers = headers + else: + response_headers = { + "content-type": "application/json", + } + else: + response_headers = { + "content-type": "application/json", + } + + core.dispatch( + # The matching listener is registered in ddtrace.appsec._handlers + "aws_lambda.start_response", + ( + span, + status_code, + response_headers, + ), + ) diff --git a/datadog_lambda/config.py b/datadog_lambda/config.py index 7a08d8a7..aaa1af5e 100644 --- a/datadog_lambda/config.py +++ b/datadog_lambda/config.py @@ -95,6 +95,7 @@ def _resolve_env(self, key, default=None, cast=None, depends_on_tracing=False): data_streams_enabled = _get_env( "DD_DATA_STREAMS_ENABLED", "false", as_bool, depends_on_tracing=True ) + appsec_enabled = _get_env("DD_APPSEC_ENABLED", "false", as_bool) is_gov_region = _get_env("AWS_REGION", "", lambda x: x.startswith("us-gov-")) diff --git a/datadog_lambda/tracing.py b/datadog_lambda/tracing.py index 3d5f671e..89a4126b 100644 --- a/datadog_lambda/tracing.py +++ b/datadog_lambda/tracing.py @@ -859,7 +859,7 @@ def create_inferred_span_from_lambda_function_url_event(event, context): InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync") if span: span.set_tags(tags) - span.start_ns = int(request_time_epoch) * 1e6 + span.start_ns = int(request_time_epoch * 1e6) return span diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 87063411..29972bf4 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -9,6 +9,7 @@ from importlib import import_module from time import time_ns +from datadog_lambda.asm import asm_start_response, asm_start_request from datadog_lambda.dsm import set_dsm_context from datadog_lambda.extension import should_use_extension, 
flush_extension from datadog_lambda.cold_start import ( @@ -253,6 +254,8 @@ def _before(self, event, context): parent_span=self.inferred_span, span_pointers=calculate_span_pointers(event_source, event), ) + if config.appsec_enabled: + asm_start_request(self.span, event, event_source, self.trigger_tags) else: set_correlation_ids() if config.profiling_enabled and is_new_sandbox(): @@ -285,6 +288,15 @@ def _after(self, event, context): if status_code: self.span.set_tag("http.status_code", status_code) + + if config.appsec_enabled: + asm_start_response( + self.span, + status_code, + self.event_source, + response=self.response, + ) + self.span.finish() if self.inferred_span: diff --git a/scripts/check_layer_size.sh b/scripts/check_layer_size.sh index 90d5861b..626f9d31 100755 --- a/scripts/check_layer_size.sh +++ b/scripts/check_layer_size.sh @@ -8,8 +8,8 @@ # Compares layer size to threshold, and fails if below that threshold set -e -MAX_LAYER_COMPRESSED_SIZE_KB=$(expr 5 \* 1024) -MAX_LAYER_UNCOMPRESSED_SIZE_KB=$(expr 13 \* 1024) +MAX_LAYER_COMPRESSED_SIZE_KB=$(expr 6 \* 1024) +MAX_LAYER_UNCOMPRESSED_SIZE_KB=$(expr 15 \* 1024) LAYER_FILES_PREFIX="datadog_lambda_py" diff --git a/tests/event_samples/application-load-balancer-mutivalue-headers.json b/tests/event_samples/application-load-balancer-mutivalue-headers.json new file mode 100644 index 00000000..6d446d15 --- /dev/null +++ b/tests/event_samples/application-load-balancer-mutivalue-headers.json @@ -0,0 +1,31 @@ +{ + "requestContext": { + "elb": { + "targetGroupArn": "arn:aws:elasticloadbalancing:us-east-2:123456789012:targetgroup/lambda-xyz/123abc" + } + }, + "httpMethod": "GET", + "path": "/lambda", + "queryStringParameters": { + "query": "1234ABCD" + }, + "multiValueHeaders": { + "accept": ["text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"], + "accept-encoding": ["gzip"], + "accept-language": ["en-US,en;q=0.9"], + "connection": ["keep-alive"], + "host": ["lambda-alb-123578498.us-east-2.elb.amazonaws.com"], + "upgrade-insecure-requests": ["1"], + "user-agent": ["Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"], + "x-amzn-trace-id": ["Root=1-5c536348-3d683b8b04734faae651f476"], + "x-forwarded-for": ["72.12.164.125"], + "x-forwarded-port": ["80"], + "x-forwarded-proto": ["http"], + "x-imforwards": ["20"], + "x-datadog-trace-id": ["12345"], + "x-datadog-parent-id": ["67890"], + "x-datadog-sampling-priority": ["2"] + }, + "body": "", + "isBase64Encoded": false +} diff --git a/tests/test_asm.py b/tests/test_asm.py new file mode 100644 index 00000000..e57c289f --- /dev/null +++ b/tests/test_asm.py @@ -0,0 +1,329 @@ +import json +import pytest +from unittest.mock import MagicMock, patch + +from datadog_lambda.asm import asm_start_request, asm_start_response +from datadog_lambda.trigger import parse_event_source, extract_trigger_tags +from tests.utils import get_mock_context + +event_samples = "tests/event_samples/" + + +# Test cases for ASM start request +ASM_START_REQUEST_TEST_CASES = [ + ( + "application_load_balancer", + "application-load-balancer.json", + "72.12.164.125", + "/lambda", + "GET", + "", + False, + {"query": "1234ABCD"}, + None, + None, + ), + ( + "application_load_balancer_multivalue_headers", + "application-load-balancer-mutivalue-headers.json", + "72.12.164.125", + "/lambda", + "GET", + "", + False, + {"query": "1234ABCD"}, + None, + None, + ), + ( + "lambda_function_url", + "lambda-url.json", + "71.195.30.42", + "/", + 
"GET", + None, + False, + None, + None, + None, + ), + ( + "api_gateway", + "api-gateway.json", + "127.0.0.1", + "/path/to/resource", + "POST", + "eyJ0ZXN0IjoiYm9keSJ9", + True, + {"foo": ["bar"]}, + {"proxy": "/path/to/resource"}, + "/{proxy+}", + ), + ( + "api_gateway_v2_parametrized", + "api-gateway-v2-parametrized.json", + "76.115.124.192", + "/user/42", + "GET", + None, + False, + None, + {"id": "42"}, + "/user/{id}", + ), + ( + "api_gateway_websocket", + "api-gateway-websocket-default.json", + "38.122.226.210", + None, + None, + '"What\'s good in the hood?"', + False, + None, + None, + None, + ), +] + + +# Test cases for ASM start response +ASM_START_RESPONSE_TEST_CASES = [ + ( + "application_load_balancer", + "application-load-balancer.json", + { + "statusCode": 200, + "headers": {"Content-Type": "text/html"}, + }, + "200", + {"Content-Type": "text/html"}, + True, + ), + ( + "application_load_balancer_multivalue_headers", + "application-load-balancer-mutivalue-headers.json", + { + "statusCode": 404, + "multiValueHeaders": { + "Content-Type": ["text/plain"], + "X-Error": ["Not Found"], + }, + }, + "404", + { + "Content-Type": "text/plain", + "X-Error": "Not Found", + }, + True, + ), + ( + "lambda_function_url", + "lambda-url.json", + { + "statusCode": 201, + "headers": { + "Location": "/user/123", + "Content-Type": "application/json", + }, + }, + "201", + { + "Location": "/user/123", + "Content-Type": "application/json", + }, + True, + ), + ( + "api_gateway", + "api-gateway.json", + { + "statusCode": 200, + "headers": { + "Content-Type": "application/json", + "X-Custom-Header": "test-value", + }, + "body": '{"message": "success"}', + }, + "200", + { + "Content-Type": "application/json", + "X-Custom-Header": "test-value", + }, + True, + ), + ( + "api_gateway_v2_parametrized", + "api-gateway-v2-parametrized.json", + { + "statusCode": 200, + "headers": {"Content-Type": "application/json"}, + }, + "200", + {"Content-Type": "application/json"}, + True, + ), + ( + "api_gateway_websocket", + "api-gateway-websocket-default.json", + { + "statusCode": 200, + "headers": {"Content-Type": "text/plain"}, + }, + "200", + {"Content-Type": "text/plain"}, + True, + ), + ( + "non_http_event_s3", + "s3.json", + {"statusCode": 200}, + "200", + {}, + False, # Should not dispatch for non-HTTP events + ), + ( + "api_gateway_v2_string_response", + "api-gateway-v2-parametrized.json", + "Hello, World!", + "200", + {"content-type": "application/json"}, + True, + ), + ( + "api_gateway_v2_dict_response", + "api-gateway-v2-parametrized.json", + {"message": "Hello, World!"}, + "200", + {"content-type": "application/json"}, + True, + ), +] + + +@pytest.mark.parametrize( + "name,file,expected_ip,expected_uri,expected_method,expected_body,expected_base64,expected_query,expected_path_params,expected_route", + ASM_START_REQUEST_TEST_CASES, +) +@patch("datadog_lambda.asm.core") +def test_asm_start_request_parametrized( + mock_core, + name, + file, + expected_ip, + expected_uri, + expected_method, + expected_body, + expected_base64, + expected_query, + expected_path_params, + expected_route, +): + """Test ASM start request for various HTTP event types using parametrization""" + mock_span = MagicMock() + ctx = get_mock_context() + + # Reset mock for each test + mock_core.reset_mock() + mock_span.reset_mock() + + test_file = event_samples + file + with open(test_file, "r") as f: + event = json.load(f) + + event_source = parse_event_source(event) + trigger_tags = extract_trigger_tags(event, ctx) + + 
asm_start_request(mock_span, event, event_source, trigger_tags) + + # Verify core.dispatch was called + mock_core.dispatch.assert_called_once() + call_args = mock_core.dispatch.call_args + dispatch_args = call_args[0][1] + ( + span, + request_headers, + request_ip, + body, + is_base64_encoded, + raw_uri, + http_route, + http_method, + parsed_query, + request_path_parameters, + ) = dispatch_args + + # Common assertions + assert span == mock_span + assert isinstance(request_headers, dict) + + # Specific assertions based on test case + assert request_ip == expected_ip + assert raw_uri == expected_uri + assert http_method == expected_method + assert body == expected_body + assert is_base64_encoded == expected_base64 + + if expected_query is not None: + assert parsed_query == expected_query + else: + assert parsed_query is None + + if expected_path_params is not None: + assert request_path_parameters == expected_path_params + else: + assert request_path_parameters is None + + # Check route is correctly extracted and passed + assert http_route == expected_route + + # Check IP tags were set if IP is present + if expected_ip: + mock_span.set_tag_str.assert_any_call("http.client_ip", expected_ip) + mock_span.set_tag_str.assert_any_call("network.client.ip", expected_ip) + + +@pytest.mark.parametrize( + "name,event_file,response,status_code,expected_headers,should_dispatch", + ASM_START_RESPONSE_TEST_CASES, +) +@patch("datadog_lambda.asm.core") +def test_asm_start_response_parametrized( + mock_core, + name, + event_file, + response, + status_code, + expected_headers, + should_dispatch, +): + """Test ASM start response for various HTTP event types using parametrization""" + mock_span = MagicMock() + + # Reset mock for each test + mock_core.reset_mock() + mock_span.reset_mock() + + test_file = event_samples + event_file + with open(test_file, "r") as f: + event = json.load(f) + + event_source = parse_event_source(event) + + asm_start_response(mock_span, status_code, event_source, response) + + if should_dispatch: + # Verify core.dispatch was called + mock_core.dispatch.assert_called_once() + call_args = mock_core.dispatch.call_args + assert call_args[0][0] == "aws_lambda.start_response" + + # Extract the dispatched arguments + dispatch_args = call_args[0][1] + span, response_status_code, response_headers = dispatch_args + + assert span == mock_span + assert response_status_code == status_code + assert response_headers == expected_headers + else: + # Verify core.dispatch was not called for non-HTTP events + mock_core.dispatch.assert_not_called() From 495c7704cbace47bc7b6ad87756f9c2857f1dc7c Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Thu, 26 Jun 2025 12:16:07 -0400 Subject: [PATCH 42/44] bugfix: error metrics should be sent during exception handling (#626) Signed-off-by: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> --- datadog_lambda/wrapper.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 29972bf4..98a1e87e 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -166,10 +166,9 @@ def __call__(self, event, context, **kwargs): self.response = self.func(event, context, **kwargs) return self.response except Exception: - if not should_use_extension: - from datadog_lambda.metric import submit_errors_metric + from datadog_lambda.metric import submit_errors_metric - submit_errors_metric(context) + submit_errors_metric(context) if self.span: 
self.span.set_traceback() From d72ebaaa960f48d06ba199dd69bef954bf535f92 Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Mon, 30 Jun 2025 14:33:24 -0700 Subject: [PATCH 43/44] fix: call `patch_all` before importing handler code. (#598) * Call `patch_all` before importing handler code. * Remove tests for patch_all. * Move patch_all to init. * Update integration tests. --- datadog_lambda/__init__.py | 7 ++ datadog_lambda/wrapper.py | 3 - .../logs/async-metrics_python310.log | 90 ++++++++------ .../logs/async-metrics_python311.log | 90 ++++++++------ .../logs/async-metrics_python312.log | 90 ++++++++------ .../logs/async-metrics_python313.log | 90 ++++++++------ .../snapshots/logs/async-metrics_python38.log | 90 ++++++++------ .../snapshots/logs/async-metrics_python39.log | 90 ++++++++------ .../snapshots/logs/sync-metrics_python310.log | 117 +++++++++++------- .../snapshots/logs/sync-metrics_python311.log | 117 +++++++++++------- .../snapshots/logs/sync-metrics_python312.log | 117 +++++++++++------- .../snapshots/logs/sync-metrics_python313.log | 117 +++++++++++------- .../snapshots/logs/sync-metrics_python38.log | 117 +++++++++++------- .../snapshots/logs/sync-metrics_python39.log | 117 +++++++++++------- tests/test_wrapper.py | 6 - 15 files changed, 763 insertions(+), 495 deletions(-) diff --git a/datadog_lambda/__init__.py b/datadog_lambda/__init__.py index 378fd15c..f319d2ed 100644 --- a/datadog_lambda/__init__.py +++ b/datadog_lambda/__init__.py @@ -17,3 +17,10 @@ initialize_logging(__name__) + + +from datadog_lambda.patch import patch_all # noqa: E402 + +# Patch third-party libraries for tracing, must be done before importing any +# handler code. +patch_all() diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 98a1e87e..c7474f65 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -26,7 +26,6 @@ Headers, ) from datadog_lambda.module_name import modify_module_name -from datadog_lambda.patch import patch_all from datadog_lambda.span_pointers import calculate_span_pointers from datadog_lambda.tag_object import tag_object from datadog_lambda.tracing import ( @@ -143,8 +142,6 @@ def __init__(self, func): os.environ[DD_REQUESTS_SERVICE_NAME] = os.environ.get( DD_SERVICE, "aws.lambda" ) - # Patch third-party libraries for tracing - patch_all() # Enable LLM Observability if config.llmobs_enabled: diff --git a/tests/integration/snapshots/logs/async-metrics_python310.log b/tests/integration/snapshots/logs/async-metrics_python310.log index 0bd7237c..bda234df 100644 --- a/tests/integration/snapshots/logs/async-metrics_python310.log +++ b/tests/integration/snapshots/logs/async-metrics_python310.log @@ -119,7 +119,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -133,7 +133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -145,7 +146,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": 
"requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -160,7 +161,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -326,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -340,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -352,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -367,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -485,7 +489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -499,7 +503,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -511,7 +516,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -526,7 +531,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -660,7 +666,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -674,7 +680,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", 
"http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -686,7 +693,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -701,7 +708,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -826,7 +834,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -840,7 +848,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -852,7 +861,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -867,7 +876,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1001,7 +1011,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1015,7 +1025,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1027,7 +1038,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1042,7 +1053,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1165,7 +1177,7 @@ HTTP GET 
https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1179,7 +1191,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1191,7 +1204,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1206,7 +1219,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1328,7 +1342,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1342,7 +1356,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1354,7 +1369,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1369,7 +1384,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1499,7 +1515,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1513,7 +1529,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1525,7 +1542,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": 
"requests.request", "error": 0, @@ -1540,7 +1557,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 diff --git a/tests/integration/snapshots/logs/async-metrics_python311.log b/tests/integration/snapshots/logs/async-metrics_python311.log index 8550a062..fd318de3 100644 --- a/tests/integration/snapshots/logs/async-metrics_python311.log +++ b/tests/integration/snapshots/logs/async-metrics_python311.log @@ -119,7 +119,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -133,7 +133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -145,7 +146,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -160,7 +161,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -326,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -340,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -352,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -367,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -485,7 +489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + 
"service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -499,7 +503,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -511,7 +516,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -526,7 +531,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -660,7 +666,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -674,7 +680,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -686,7 +693,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -701,7 +708,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -826,7 +834,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -840,7 +848,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -852,7 +861,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -867,7 +876,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", 
"http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1001,7 +1011,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1015,7 +1025,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1027,7 +1038,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1042,7 +1053,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1165,7 +1177,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1179,7 +1191,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1191,7 +1204,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1206,7 +1219,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1328,7 +1342,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1342,7 +1356,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1354,7 +1369,7 @@ HTTP GET 
https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "trace_id": "XXXX",
 "parent_id": "XXXX",
 "span_id": "XXXX",
- "service": "integration-tests-python",
+ "service": "requests",
 "resource": "GET /",
 "name": "requests.request",
 "error": 0,
@@ -1369,7 +1384,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "http.url": "https://www.datadoghq.com/",
 "out.host": "www.datadoghq.com",
 "http.status_code": "200",
- "http.useragent": "python-requests/X.X.X"
+ "http.useragent": "python-requests/X.X.X",
+ "_dd.base_service": "integration-tests-python"
 },
 "metrics": {
 "_dd.measured": 1
@@ -1499,7 +1515,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "trace_id": "XXXX",
 "parent_id": "XXXX",
 "span_id": "XXXX",
- "service": "integration-tests-python",
+ "service": "requests",
 "resource": "GET /",
 "name": "requests.request",
 "error": 0,
@@ -1513,7 +1529,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "http.url": "https://datadoghq.com/",
 "out.host": "datadoghq.com",
 "http.status_code": "200",
- "http.useragent": "python-requests/X.X.X"
+ "http.useragent": "python-requests/X.X.X",
+ "_dd.base_service": "integration-tests-python"
 },
 "metrics": {
 "_dd.measured": 1,
@@ -1525,7 +1542,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "trace_id": "XXXX",
 "parent_id": "XXXX",
 "span_id": "XXXX",
- "service": "integration-tests-python",
+ "service": "requests",
 "resource": "GET /",
 "name": "requests.request",
 "error": 0,
@@ -1540,7 +1557,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "http.url": "https://www.datadoghq.com/",
 "out.host": "www.datadoghq.com",
 "http.status_code": "200",
- "http.useragent": "python-requests/X.X.X"
+ "http.useragent": "python-requests/X.X.X",
+ "_dd.base_service": "integration-tests-python"
 },
 "metrics": {
 "_dd.measured": 1
diff --git a/tests/integration/snapshots/logs/async-metrics_python312.log b/tests/integration/snapshots/logs/async-metrics_python312.log
index 57c318ab..b51b6a2d 100644
--- a/tests/integration/snapshots/logs/async-metrics_python312.log
+++ b/tests/integration/snapshots/logs/async-metrics_python312.log
@@ -119,7 +119,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "trace_id": "XXXX",
 "parent_id": "XXXX",
 "span_id": "XXXX",
- "service": "integration-tests-python",
+ "service": "requests",
 "resource": "GET /",
 "name": "requests.request",
 "error": 0,
@@ -133,7 +133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "http.url": "https://datadoghq.com/",
 "out.host": "datadoghq.com",
 "http.status_code": "200",
- "http.useragent": "python-requests/X.X.X"
+ "http.useragent": "python-requests/X.X.X",
+ "_dd.base_service": "integration-tests-python"
 },
 "metrics": {
 "_dd.measured": 1,
@@ -145,7 +146,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "trace_id": "XXXX",
 "parent_id": "XXXX",
 "span_id": "XXXX",
- "service": "integration-tests-python",
+ "service": "requests",
 "resource": "GET /",
 "name": "requests.request",
 "error": 0,
@@ -160,7 +161,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A
 "http.url": "https://www.datadoghq.com/",
 "out.host": "www.datadoghq.com",
 "http.status_code": "200",
- "http.useragent": "python-requests/X.X.X"
+ "http.useragent": "python-requests/X.X.X",
+ "_dd.base_service": "integration-tests-python"
 },
"metrics": { "_dd.measured": 1 @@ -326,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -340,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -352,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -367,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -485,7 +489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -499,7 +503,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -511,7 +516,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -526,7 +531,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -660,7 +666,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -674,7 +680,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -686,7 +693,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", 
"resource": "GET /", "name": "requests.request", "error": 0, @@ -701,7 +708,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -826,7 +834,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -840,7 +848,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -852,7 +861,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -867,7 +876,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1001,7 +1011,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1015,7 +1025,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1027,7 +1038,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1042,7 +1053,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1165,7 +1177,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1179,7 +1191,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", 
"http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1191,7 +1204,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1206,7 +1219,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1328,7 +1342,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1342,7 +1356,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1354,7 +1369,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1369,7 +1384,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1499,7 +1515,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1513,7 +1529,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1525,7 +1542,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1540,7 +1557,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 diff --git 
a/tests/integration/snapshots/logs/async-metrics_python313.log b/tests/integration/snapshots/logs/async-metrics_python313.log index 9204499b..89b73e92 100644 --- a/tests/integration/snapshots/logs/async-metrics_python313.log +++ b/tests/integration/snapshots/logs/async-metrics_python313.log @@ -119,7 +119,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -133,7 +133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -145,7 +146,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -160,7 +161,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -326,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -340,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -352,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -367,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -485,7 +489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -499,7 +503,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, 
"metrics": { "_dd.measured": 1, @@ -511,7 +516,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -526,7 +531,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -660,7 +666,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -674,7 +680,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -686,7 +693,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -701,7 +708,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -826,7 +834,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -840,7 +848,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -852,7 +861,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -867,7 +876,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1001,7 +1011,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": 
"requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1015,7 +1025,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1027,7 +1038,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1042,7 +1053,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1165,7 +1177,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1179,7 +1191,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1191,7 +1204,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1206,7 +1219,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1328,7 +1342,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1342,7 +1356,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1354,7 +1369,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1369,7 +1384,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": 
"www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1499,7 +1515,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1513,7 +1529,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1525,7 +1542,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1540,7 +1557,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 diff --git a/tests/integration/snapshots/logs/async-metrics_python38.log b/tests/integration/snapshots/logs/async-metrics_python38.log index e6df054c..ff5e5a60 100644 --- a/tests/integration/snapshots/logs/async-metrics_python38.log +++ b/tests/integration/snapshots/logs/async-metrics_python38.log @@ -119,7 +119,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -133,7 +133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -145,7 +146,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -160,7 +161,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -326,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -340,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A 
"http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -352,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -367,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -485,7 +489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -499,7 +503,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -511,7 +516,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -526,7 +531,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -660,7 +666,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -674,7 +680,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -686,7 +693,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -701,7 +708,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { 
"_dd.measured": 1 @@ -826,7 +834,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -840,7 +848,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -852,7 +861,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -867,7 +876,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1001,7 +1011,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1015,7 +1025,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1027,7 +1038,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1042,7 +1053,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1165,7 +1177,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1179,7 +1191,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1191,7 +1204,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", 
"resource": "GET /", "name": "requests.request", "error": 0, @@ -1206,7 +1219,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1328,7 +1342,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1342,7 +1356,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1354,7 +1369,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1369,7 +1384,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1499,7 +1515,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1513,7 +1529,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1525,7 +1542,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1540,7 +1557,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 diff --git a/tests/integration/snapshots/logs/async-metrics_python39.log b/tests/integration/snapshots/logs/async-metrics_python39.log index 9bcb7a85..5e3d46b6 100644 --- a/tests/integration/snapshots/logs/async-metrics_python39.log +++ b/tests/integration/snapshots/logs/async-metrics_python39.log @@ -119,7 +119,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - 
"service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -133,7 +133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -145,7 +146,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -160,7 +161,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -326,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -340,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -352,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -367,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -485,7 +489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -499,7 +503,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -511,7 +516,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -526,7 +531,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": 
"https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -660,7 +666,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -674,7 +680,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -686,7 +693,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -701,7 +708,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -826,7 +834,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -840,7 +848,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -852,7 +861,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -867,7 +876,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1001,7 +1011,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1015,7 +1025,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { 
"_dd.measured": 1, @@ -1027,7 +1038,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1042,7 +1053,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1165,7 +1177,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1179,7 +1191,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1191,7 +1204,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1206,7 +1219,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1328,7 +1342,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1342,7 +1356,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1354,7 +1369,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1369,7 +1384,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1499,7 +1515,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + 
"service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1513,7 +1529,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1525,7 +1542,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1540,7 +1557,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 diff --git a/tests/integration/snapshots/logs/sync-metrics_python310.log b/tests/integration/snapshots/logs/sync-metrics_python310.log index 40562a6d..21569831 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python310.log +++ b/tests/integration/snapshots/logs/sync-metrics_python310.log @@ -99,7 +99,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -113,7 +113,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -125,7 +126,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -140,7 +141,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -158,7 +160,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -174,6 +176,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -325,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - 
"service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -339,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -351,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -366,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -384,7 +389,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -400,6 +405,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -503,7 +509,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -517,7 +523,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -529,7 +536,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -544,7 +551,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -562,7 +570,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -578,6 +586,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept 
"out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -697,7 +706,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -711,7 +720,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -723,7 +733,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -738,7 +748,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -756,7 +767,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -772,6 +783,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -882,7 +894,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -896,7 +908,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -908,7 +921,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -923,7 +936,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -941,7 +955,7 @@ HTTP 
POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -957,6 +971,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1076,7 +1091,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1090,7 +1105,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1102,7 +1118,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1117,7 +1133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1135,7 +1152,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1151,6 +1168,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1259,7 +1277,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1273,7 +1291,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1285,7 +1304,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", 
"error": 0, @@ -1300,7 +1319,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1318,7 +1338,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1334,6 +1354,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1441,7 +1462,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1455,7 +1476,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1467,7 +1489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1482,7 +1504,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1500,7 +1523,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1516,6 +1539,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1631,7 +1655,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1645,7 +1669,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": 
"python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1657,7 +1682,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1672,7 +1697,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1690,7 +1716,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1706,6 +1732,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/integration/snapshots/logs/sync-metrics_python311.log b/tests/integration/snapshots/logs/sync-metrics_python311.log index 52ec4c85..5fcd504d 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python311.log +++ b/tests/integration/snapshots/logs/sync-metrics_python311.log @@ -99,7 +99,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -113,7 +113,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -125,7 +126,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -140,7 +141,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -158,7 +160,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -174,6 +176,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", 
"http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -325,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -339,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -351,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -366,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -384,7 +389,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -400,6 +405,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -503,7 +509,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -517,7 +523,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -529,7 +536,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -544,7 +551,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -562,7 +570,7 @@ HTTP POST 
"XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1645,7 +1669,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1657,7 +1682,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1672,7 +1697,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1690,7 +1716,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1706,6 +1732,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/integration/snapshots/logs/sync-metrics_python312.log b/tests/integration/snapshots/logs/sync-metrics_python312.log index 3ec0f01f..9a05404c 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python312.log +++ b/tests/integration/snapshots/logs/sync-metrics_python312.log @@ -99,7 +99,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -113,7 +113,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -125,7 +126,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -140,7 +141,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -158,7 +160,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points 
Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -174,6 +176,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -325,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -339,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -351,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -366,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -384,7 +389,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -400,6 +405,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -503,7 +509,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -517,7 +523,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -529,7 +536,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -544,7 +551,8 @@ HTTP GET https://www.datadoghq.com/ Headers: 
["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -562,7 +570,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -578,6 +586,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -697,7 +706,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -711,7 +720,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -723,7 +733,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -738,7 +748,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -756,7 +767,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -772,6 +783,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -882,7 +894,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -896,7 +908,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": 
"integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -908,7 +921,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -923,7 +936,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -941,7 +955,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -957,6 +971,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1076,7 +1091,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1090,7 +1105,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1102,7 +1118,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1117,7 +1133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1135,7 +1152,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1151,6 +1168,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1259,7 +1277,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": 
"integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1273,7 +1291,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1285,7 +1304,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1300,7 +1319,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1318,7 +1338,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1334,6 +1354,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1441,7 +1462,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1455,7 +1476,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1467,7 +1489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1482,7 +1504,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1500,7 +1523,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1516,6 +1539,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept 
"out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1631,7 +1655,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1645,7 +1669,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1657,7 +1682,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1672,7 +1697,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1690,7 +1716,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1706,6 +1732,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/integration/snapshots/logs/sync-metrics_python313.log b/tests/integration/snapshots/logs/sync-metrics_python313.log index d2c20dc0..5d17bed5 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python313.log +++ b/tests/integration/snapshots/logs/sync-metrics_python313.log @@ -99,7 +99,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -113,7 +113,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -125,7 +126,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -140,7 +141,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, 
deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -158,7 +160,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -174,6 +176,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -325,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -339,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -351,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -366,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -384,7 +389,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -400,6 +405,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -503,7 +509,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -517,7 +523,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { 
"_dd.measured": 1, @@ -529,7 +536,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -544,7 +551,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -562,7 +570,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -578,6 +586,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -697,7 +706,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -711,7 +720,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -723,7 +733,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -738,7 +748,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -756,7 +767,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -772,6 +783,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -882,7 +894,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", 
"name": "requests.request", "error": 0, @@ -896,7 +908,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -908,7 +921,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -923,7 +936,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -941,7 +955,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -957,6 +971,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1076,7 +1091,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1090,7 +1105,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1102,7 +1118,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1117,7 +1133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1135,7 +1152,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1151,6 +1168,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": 
"datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1259,7 +1277,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1273,7 +1291,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1285,7 +1304,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1300,7 +1319,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1318,7 +1338,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1334,6 +1354,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1441,7 +1462,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1455,7 +1476,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1467,7 +1489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1482,7 +1504,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1500,7 +1523,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points 
Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1516,6 +1539,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1631,7 +1655,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1645,7 +1669,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1657,7 +1682,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1672,7 +1697,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1690,7 +1716,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1706,6 +1732,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/integration/snapshots/logs/sync-metrics_python38.log b/tests/integration/snapshots/logs/sync-metrics_python38.log index 57a354a6..37ed391e 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python38.log +++ b/tests/integration/snapshots/logs/sync-metrics_python38.log @@ -99,7 +99,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -113,7 +113,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -125,7 +126,7 @@ HTTP GET 
https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -140,7 +141,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -158,7 +160,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -174,6 +176,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -325,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -339,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -351,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -366,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -384,7 +389,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -400,6 +405,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -503,7 +509,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ 
-517,7 +523,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -529,7 +536,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -544,7 +551,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -562,7 +570,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -578,6 +586,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -697,7 +706,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -711,7 +720,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -723,7 +733,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -738,7 +748,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -756,7 +767,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -772,6 +783,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + 
"_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -882,7 +894,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -896,7 +908,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -908,7 +921,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -923,7 +936,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -941,7 +955,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -957,6 +971,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1076,7 +1091,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1090,7 +1105,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1102,7 +1118,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1117,7 +1133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1135,7 +1152,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", 
"span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1151,6 +1168,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1259,7 +1277,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1273,7 +1291,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1285,7 +1304,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1300,7 +1319,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1318,7 +1338,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1334,6 +1354,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1441,7 +1462,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1455,7 +1476,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1467,7 +1489,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1482,7 +1504,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A 
"http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1500,7 +1523,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1516,6 +1539,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1631,7 +1655,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1645,7 +1669,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1657,7 +1682,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1672,7 +1697,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1690,7 +1716,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1706,6 +1732,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/integration/snapshots/logs/sync-metrics_python39.log b/tests/integration/snapshots/logs/sync-metrics_python39.log index 8b7bb31b..f147744b 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python39.log +++ b/tests/integration/snapshots/logs/sync-metrics_python39.log @@ -99,7 +99,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -113,7 +113,8 @@ HTTP GET https://www.datadoghq.com/ 
Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -125,7 +126,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -140,7 +141,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -158,7 +160,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -174,6 +176,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -325,7 +328,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -339,7 +342,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -351,7 +355,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -366,7 +370,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -384,7 +389,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -400,6 +405,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": 
"-0", "_dd.p.tid": "XXXX", "language": "python" @@ -503,7 +509,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -517,7 +523,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -529,7 +536,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -544,7 +551,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -562,7 +570,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -578,6 +586,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -697,7 +706,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -711,7 +720,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -723,7 +733,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -738,7 +748,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -756,7 +767,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": 
"requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -772,6 +783,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -882,7 +894,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -896,7 +908,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -908,7 +921,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -923,7 +936,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -941,7 +955,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -957,6 +971,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1076,7 +1091,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1090,7 +1105,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1102,7 +1118,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1117,7 +1133,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", 
"http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1135,7 +1152,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1151,6 +1168,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1259,7 +1277,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1273,7 +1291,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1285,7 +1304,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1300,7 +1319,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1318,7 +1338,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1334,6 +1354,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1441,7 +1462,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1455,7 +1476,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1467,7 +1489,7 @@ HTTP GET 
https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1482,7 +1504,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1500,7 +1523,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1516,6 +1539,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1631,7 +1655,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1645,7 +1669,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://datadoghq.com/", "out.host": "datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1, @@ -1657,7 +1682,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "GET /", "name": "requests.request", "error": 0, @@ -1672,7 +1697,8 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "http.url": "https://www.datadoghq.com/", "out.host": "www.datadoghq.com", "http.status_code": "200", - "http.useragent": "python-requests/X.X.X" + "http.useragent": "python-requests/X.X.X", + "_dd.base_service": "integration-tests-python" }, "metrics": { "_dd.measured": 1 @@ -1690,7 +1716,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "trace_id": "XXXX", "parent_id": "XXXX", "span_id": "XXXX", - "service": "integration-tests-python", + "service": "requests", "resource": "POST /api/v1/distribution_points", "name": "requests.request", "error": 0, @@ -1706,6 +1732,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "out.host": "api.datadoghq.com", "http.status_code": "202", "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", + "_dd.base_service": "integration-tests-python", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py index f0240905..09f48c8a 100644 --- a/tests/test_wrapper.py +++ b/tests/test_wrapper.py @@ -45,10 +45,6 @@ def setUp(self): self.mock_inject_correlation_ids = patcher.start() self.addCleanup(patcher.stop) - patcher = 
patch("datadog_lambda.wrapper.patch_all") - self.mock_patch_all = patcher.start() - self.addCleanup(patcher.stop) - patcher = patch("datadog_lambda.tags.get_cold_start_tag") self.mock_get_cold_start_tag = patcher.start() self.mock_get_cold_start_tag.return_value = "cold_start:true" @@ -117,7 +113,6 @@ def lambda_handler(event, context): ) self.mock_set_correlation_ids.assert_called() self.mock_inject_correlation_ids.assert_called() - self.mock_patch_all.assert_called() def test_datadog_lambda_wrapper_flush_to_log(self): os.environ["DD_FLUSH_TO_LOG"] = "True" @@ -487,7 +482,6 @@ def lambda_handler(event, context): lambda_handler_double_wrapped(lambda_event, get_mock_context()) - self.mock_patch_all.assert_called_once() self.mock_submit_invocations_metric.assert_called_once() def test_dd_requests_service_name_default(self): From ae7df53a7c92a6e64c982f5bbe30ed890dc3d1ff Mon Sep 17 00:00:00 2001 From: Florentin Labelle Date: Fri, 4 Jul 2025 09:10:24 +0200 Subject: [PATCH 44/44] feat(appsec): skip processing spans for events that are not http requests (#627) --- datadog_lambda/asm.py | 10 ++++++++++ datadog_lambda/wrapper.py | 6 +++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/datadog_lambda/asm.py b/datadog_lambda/asm.py index aab0f1e9..9636760c 100644 --- a/datadog_lambda/asm.py +++ b/datadog_lambda/asm.py @@ -44,6 +44,16 @@ def _merge_single_and_multi_value_headers( return _to_single_value_headers(merged_headers) +def asm_set_context(event_source: _EventSource): + """Add asm specific items to the ExecutionContext. + + This allows the AppSecSpanProcessor to know information about the event + at the moment the span is created and skip it when not relevant. + """ + if event_source.event_type not in _http_event_types: + core.set_item("appsec_skip_next_lambda_event", True) + + def asm_start_request( span: Span, event: Dict[str, Any], diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index c7474f65..7abe0fc1 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -9,7 +9,7 @@ from importlib import import_module from time import time_ns -from datadog_lambda.asm import asm_start_response, asm_start_request +from datadog_lambda.asm import asm_set_context, asm_start_response, asm_start_request from datadog_lambda.dsm import set_dsm_context from datadog_lambda.extension import should_use_extension, flush_extension from datadog_lambda.cold_start import ( @@ -239,6 +239,10 @@ def _before(self, event, context): ) if config.data_streams_enabled: set_dsm_context(event, event_source) + + if config.appsec_enabled: + asm_set_context(event_source) + self.span = create_function_execution_span( context=context, function_name=config.function_name,