From 64f81daa97d589f1047d00000c31d85e24e4314d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?jordan=20gonz=C3=A1lez?= <30836115+duncanista@users.noreply.github.com> Date: Fri, 23 May 2025 16:15:23 +0200 Subject: [PATCH 01/10] fix: `http.url` tag to include protocol (#603) * update `http.url` in inferred spans * update `http.url` in trigger tags * fix `resource_names` update * update integration test --- datadog_lambda/tracing.py | 14 +++++--- datadog_lambda/trigger.py | 2 +- .../logs/async-metrics_python310.log | 12 +++---- .../logs/async-metrics_python311.log | 12 +++---- .../logs/async-metrics_python312.log | 12 +++---- .../logs/async-metrics_python313.log | 12 +++---- .../snapshots/logs/async-metrics_python38.log | 12 +++---- .../snapshots/logs/async-metrics_python39.log | 12 +++---- .../snapshots/logs/sync-metrics_python310.log | 12 +++---- .../snapshots/logs/sync-metrics_python311.log | 12 +++---- .../snapshots/logs/sync-metrics_python312.log | 12 +++---- .../snapshots/logs/sync-metrics_python313.log | 12 +++---- .../snapshots/logs/sync-metrics_python38.log | 12 +++---- .../snapshots/logs/sync-metrics_python39.log | 14 ++++---- tests/test_tracing.py | 36 +++++++++---------- tests/test_trigger.py | 12 +++---- tests/test_wrapper.py | 2 +- 17 files changed, 108 insertions(+), 104 deletions(-) diff --git a/datadog_lambda/tracing.py b/datadog_lambda/tracing.py index 9a27673c..4b6f300a 100644 --- a/datadog_lambda/tracing.py +++ b/datadog_lambda/tracing.py @@ -850,13 +850,14 @@ def create_inferred_span_from_lambda_function_url_event(event, context): http = request_context.get("http") method = http.get("method") if http else None path = http.get("path") if http else None + http_url = f"https://{domain}{path}" resource = f"{method} {path}" tags = { "operation_name": "aws.lambda.url", - "http.url": domain + path, + "http.url": http_url, "endpoint": path, "http.method": method, - "resource_names": domain + path, + "resource_names": resource, "request_id": context.aws_request_id, } request_time_epoch = request_context.get("timeEpoch") @@ -948,6 +949,7 @@ def create_inferred_span_from_api_gateway_websocket_event( request_context = event.get("requestContext") domain = request_context.get("domainName") endpoint = request_context.get("routeKey") + http_url = f"https://{domain}{endpoint}" api_id = request_context.get("apiId") service_name = determine_service_name( @@ -955,7 +957,7 @@ def create_inferred_span_from_api_gateway_websocket_event( ) tags = { "operation_name": "aws.apigateway.websocket", - "http.url": domain + endpoint, + "http.url": http_url, "endpoint": endpoint, "resource_names": endpoint, "apiid": api_id, @@ -1007,11 +1009,12 @@ def create_inferred_span_from_api_gateway_event( ) method = event.get("httpMethod") path = event.get("path") + http_url = f"https://{domain}{path}" resource_path = _get_resource_path(event, request_context) resource = f"{method} {resource_path}" tags = { "operation_name": "aws.apigateway.rest", - "http.url": domain + path, + "http.url": http_url, "endpoint": path, "http.method": method, "resource_names": resource, @@ -1073,12 +1076,13 @@ def create_inferred_span_from_http_api_event( http = request_context.get("http") or {} method = http.get("method") path = event.get("rawPath") + http_url = f"https://{domain}{path}" resource_path = _get_resource_path(event, request_context) resource = f"{method} {resource_path}" tags = { "operation_name": "aws.httpapi", "endpoint": path, - "http.url": domain + path, + "http.url": http_url, "http.method": http.get("method"), 
"http.protocol": http.get("protocol"), "http.source_ip": http.get("sourceIp"), diff --git a/datadog_lambda/trigger.py b/datadog_lambda/trigger.py index a2708a59..14cb06ac 100644 --- a/datadog_lambda/trigger.py +++ b/datadog_lambda/trigger.py @@ -301,7 +301,7 @@ def extract_http_tags(event): if request_context and request_context.get("stage"): domain_name = request_context.get("domainName") if domain_name: - http_tags["http.url"] = domain_name + http_tags["http.url"] = f"https://{domain_name}" path = request_context.get("path") method = request_context.get("httpMethod") diff --git a/tests/integration/snapshots/logs/async-metrics_python310.log b/tests/integration/snapshots/logs/async-metrics_python310.log index 24d3fb5b..0bd7237c 100644 --- a/tests/integration/snapshots/logs/async-metrics_python310.log +++ b/tests/integration/snapshots/logs/async-metrics_python310.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python311.log b/tests/integration/snapshots/logs/async-metrics_python311.log index e4fa66bc..8550a062 100644 --- 
a/tests/integration/snapshots/logs/async-metrics_python311.log +++ b/tests/integration/snapshots/logs/async-metrics_python311.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python312.log b/tests/integration/snapshots/logs/async-metrics_python312.log index 0d632c6c..57c318ab 100644 --- a/tests/integration/snapshots/logs/async-metrics_python312.log +++ b/tests/integration/snapshots/logs/async-metrics_python312.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": 
"XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python313.log b/tests/integration/snapshots/logs/async-metrics_python313.log index 09070709..9204499b 100644 --- a/tests/integration/snapshots/logs/async-metrics_python313.log +++ b/tests/integration/snapshots/logs/async-metrics_python313.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": 
"api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python38.log b/tests/integration/snapshots/logs/async-metrics_python38.log index 4a506930..e6df054c 100644 --- a/tests/integration/snapshots/logs/async-metrics_python38.log +++ b/tests/integration/snapshots/logs/async-metrics_python38.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: 
["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/async-metrics_python39.log b/tests/integration/snapshots/logs/async-metrics_python39.log index 54081402..9bcb7a85 100644 --- a/tests/integration/snapshots/logs/async-metrics_python39.log +++ b/tests/integration/snapshots/logs/async-metrics_python39.log @@ -55,7 +55,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -103,7 +103,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -594,7 +594,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -644,7 +644,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1436,7 +1436,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1486,7 +1486,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python310.log b/tests/integration/snapshots/logs/sync-metrics_python310.log index e2569775..40562a6d 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python310.log +++ b/tests/integration/snapshots/logs/sync-metrics_python310.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": 
"XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python311.log b/tests/integration/snapshots/logs/sync-metrics_python311.log index 69d4a695..52ec4c85 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python311.log +++ b/tests/integration/snapshots/logs/sync-metrics_python311.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: 
["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python312.log b/tests/integration/snapshots/logs/sync-metrics_python312.log index 49bae0a2..3ec0f01f 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python312.log +++ b/tests/integration/snapshots/logs/sync-metrics_python312.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", 
"http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python313.log b/tests/integration/snapshots/logs/sync-metrics_python313.log index 2f461f6f..d2c20dc0 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python313.log +++ b/tests/integration/snapshots/logs/sync-metrics_python313.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": 
"https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python38.log b/tests/integration/snapshots/logs/sync-metrics_python38.log index 83e33d33..57a354a6 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python38.log +++ b/tests/integration/snapshots/logs/sync-metrics_python38.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", "resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/integration/snapshots/logs/sync-metrics_python39.log b/tests/integration/snapshots/logs/sync-metrics_python39.log index 0a433c34..8b7bb31b 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python39.log +++ b/tests/integration/snapshots/logs/sync-metrics_python39.log @@ -35,7 +35,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.rest", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com/", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com/", "endpoint": "/", "http.method": "GET", 
"resource_names": "GET /", @@ -83,7 +83,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.us-east-2.amazonaws.com", + "http.url": "https://XXXX.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/Prod/", "http.method": "GET", "http.route": "/", @@ -377,7 +377,6 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A ] } HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate","Accept:*/*","Connection:keep-alive","Content-Encoding:deflate","Content-Length:XXXX","Content-Type:application/json","DD-API-KEY:XXXX","User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)","traceparent:XXX","tracestate:XXX -END Duration: XXXX ms Memory Used: XXXX MB { "traces": [ [ @@ -416,6 +415,7 @@ END Duration: XXXX ms Memory Used: XXXX MB ] ] } +END Duration: XXXX ms Memory Used: XXXX MB START { "m": "aws.lambda.enhanced.invocations", @@ -631,7 +631,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "_dd.origin": "lambda", "operation_name": "aws.httpapi", "endpoint": "/httpapi/get", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "XXXX", @@ -681,7 +681,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX$default", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", @@ -1568,7 +1568,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "runtime-id": "XXXX", "_dd.origin": "lambda", "operation_name": "aws.apigateway.websocket", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com$default", "endpoint": "$default", "resource_names": "$default", "apiid": "XXXX", @@ -1618,7 +1618,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate","A "span.name": "aws.lambda", "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "XXXX", - "http.url": "XXXX.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://XXXX.execute-api.eu-west-1.amazonaws.com", "http.status_code": "200", "_dd.base_service": "integration-tests-python" }, diff --git a/tests/test_tracing.py b/tests/test_tracing.py index 0a961a62..e38e4ecd 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -1730,7 +1730,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "1234567890", "endpoint": "/path/to/resource", "http.method": "POST", - "http.url": "70ixmpl4fl.execute-api.us-east-2.amazonaws.com/path/to/resource", + "http.url": "https://70ixmpl4fl.execute-api.us-east-2.amazonaws.com/path/to/resource", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "POST /{proxy+}", @@ -1752,7 +1752,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "lgxbo6a518", "endpoint": "/http/get", "http.method": 
"GET", - "http.url": "lgxbo6a518.execute-api.eu-west-1.amazonaws.com/http/get", + "http.url": "https://lgxbo6a518.execute-api.eu-west-1.amazonaws.com/http/get", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /http/get", @@ -1774,7 +1774,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "lgxbo6a518", "endpoint": "/http/get", "http.method": "GET", - "http.url": "lgxbo6a518.execute-api.eu-west-1.amazonaws.com/http/get", + "http.url": "https://lgxbo6a518.execute-api.eu-west-1.amazonaws.com/http/get", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /http/get", @@ -1798,7 +1798,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "http.method": "GET", "http.protocol": "HTTP/1.1", "http.source_ip": "38.122.226.210", - "http.url": "x02yirxc7a.execute-api.eu-west-1.amazonaws.com/httpapi/get", + "http.url": "https://x02yirxc7a.execute-api.eu-west-1.amazonaws.com/httpapi/get", "http.user_agent": "curl/7.64.1", "operation_name": "aws.httpapi", "request_id": "123", @@ -1821,7 +1821,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "mcwkra0ya4", "endpoint": "/user/42", "http.method": "GET", - "http.url": "mcwkra0ya4.execute-api.sa-east-1.amazonaws.com/user/42", + "http.url": "https://mcwkra0ya4.execute-api.sa-east-1.amazonaws.com/user/42", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /user/{id}", @@ -1843,7 +1843,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "9vj54we5ih", "endpoint": "/user/42", "http.method": "GET", - "http.url": "9vj54we5ih.execute-api.sa-east-1.amazonaws.com/user/42", + "http.url": "https://9vj54we5ih.execute-api.sa-east-1.amazonaws.com/user/42", "operation_name": "aws.httpapi", "request_id": "123", "resource_names": "GET /user/{id}", @@ -1866,7 +1866,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "Fc5SzcoYGjQCJlg=", "endpoint": "$default", "event_type": "MESSAGE", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com$default", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com$default", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", @@ -1890,7 +1890,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "Fc2tgfl3mjQCJfA=", "endpoint": "$connect", "event_type": "CONNECT", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com$connect", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com$connect", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", @@ -1914,7 +1914,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "Fc2tgfl3mjQCJfA=", "endpoint": "$disconnect", "event_type": "DISCONNECT", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com$disconnect", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com$disconnect", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", @@ -2112,7 +2112,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "None", "endpoint": "/path/to/resource", "http.method": "POST", - "http.url": "70ixmpl4fl.execute-api.us-east-2.amazonaws.com/path/to/resource", + "http.url": 
"https://70ixmpl4fl.execute-api.us-east-2.amazonaws.com/path/to/resource", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "POST /{proxy+}", @@ -2135,7 +2135,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /hello", @@ -2157,7 +2157,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /hello", @@ -2180,7 +2180,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /hello", @@ -2202,7 +2202,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.apigateway.rest", "request_id": "123", "resource_names": "GET /hello", @@ -2224,7 +2224,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.httpapi", "request_id": "123", "resource_names": "GET /hello", @@ -2246,7 +2246,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "apiname": "amddr1rix9", "endpoint": "/hello", "http.method": "GET", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com/hello", "operation_name": "aws.httpapi", "request_id": "123", "resource_names": "GET /hello", @@ -2270,7 +2270,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "ZLr9QeNLmjQCIZA=", "endpoint": "$connect", "event_type": "CONNECT", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.com$connect", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.com$connect", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", @@ -2294,7 +2294,7 @@ def __init__(self, service, start, span_type, parent_name=None, tags=None): "connection_id": "ZLwtceO1mjQCI8Q=", "endpoint": "main", "event_type": "MESSAGE", - "http.url": "amddr1rix9.execute-api.eu-west-1.amazonaws.commain", + "http.url": "https://amddr1rix9.execute-api.eu-west-1.amazonaws.commain", "message_direction": "IN", "operation_name": "aws.apigateway.websocket", "request_id": "123", diff --git a/tests/test_trigger.py b/tests/test_trigger.py index b4da7ff0..c12e8f5c 100644 --- 
a/tests/test_trigger.py +++ b/tests/test_trigger.py @@ -294,7 +294,7 @@ def test_extract_trigger_tags_api_gateway(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/1234567890/stages/prod", - "http.url": "70ixmpl4fl.execute-api.us-east-2.amazonaws.com", + "http.url": "https://70ixmpl4fl.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/prod/path/to/resource", "http.method": "POST", "http.route": "/{proxy+}", @@ -313,7 +313,7 @@ def test_extract_trigger_tags_api_gateway_non_proxy(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/lgxbo6a518/stages/dev", - "http.url": "lgxbo6a518.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://lgxbo6a518.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/dev/http/get", "http.method": "GET", "http.route": "/http/get", @@ -332,7 +332,7 @@ def test_extract_trigger_tags_api_gateway_websocket_connect(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/p62c47itsb/stages/dev", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com", }, ) @@ -348,7 +348,7 @@ def test_extract_trigger_tags_api_gateway_websocket_default(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/p62c47itsb/stages/dev", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com", }, ) @@ -364,7 +364,7 @@ def test_extract_trigger_tags_api_gateway_websocket_disconnect(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/p62c47itsb/stages/dev", - "http.url": "p62c47itsb.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://p62c47itsb.execute-api.eu-west-1.amazonaws.com", }, ) @@ -380,7 +380,7 @@ def test_extract_trigger_tags_api_gateway_http_api(self): { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/x02yirxc7a/stages/$default", - "http.url": "x02yirxc7a.execute-api.eu-west-1.amazonaws.com", + "http.url": "https://x02yirxc7a.execute-api.eu-west-1.amazonaws.com", "http.url_details.path": "/httpapi/get", "http.method": "GET", "http.route": "/httpapi/get", diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py index 4b243036..f46b365e 100644 --- a/tests/test_wrapper.py +++ b/tests/test_wrapper.py @@ -283,7 +283,7 @@ def test_5xx_sends_errors_metric_and_set_tags(self, mock_extract_trigger_tags): mock_extract_trigger_tags.return_value = { "function_trigger.event_source": "api-gateway", "function_trigger.event_source_arn": "arn:aws:apigateway:us-west-1::/restapis/1234567890/stages/prod", - "http.url": "70ixmpl4fl.execute-api.us-east-2.amazonaws.com", + "http.url": "https://70ixmpl4fl.execute-api.us-east-2.amazonaws.com", "http.url_details.path": "/prod/path/to/resource", "http.method": "GET", } From 87f2314928fb839886aefe09cf6a1b7c7a0a7988 Mon Sep 17 00:00:00 2001 From: happynancee <120061598+happynancee@users.noreply.github.com> Date: Mon, 2 Jun 2025 07:50:36 -0700 Subject: [PATCH 02/10] update codeowners file and make apm serverless co-owners of repo (#596) --- .github/CODEOWNERS | 7 +------ CODEOWNERS | 1 - 2 
files changed, 1 insertion(+), 7 deletions(-) delete mode 100644 CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 26b4b78e..a7f48dfe 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,6 +1 @@ -* @DataDog/serverless-aws -datadog_lambda/tracing.py @DataDog/apm-serverless -datadog_lambda/patch.py @DataDog/apm-serverless -datadog_lambda/span_points.py @DataDog/apm-serverless -datadog_lambda/cold_start.py @DataDog/apm-serverless -datadog_lambda/wrapper.py @DataDog/apm-serverless +* @DataDog/serverless-aws @DataDog/apm-serverless diff --git a/CODEOWNERS b/CODEOWNERS deleted file mode 100644 index e340f1ed..00000000 --- a/CODEOWNERS +++ /dev/null @@ -1 +0,0 @@ -* @DataDog/serverless \ No newline at end of file From 254466cb2c6d749211e74201572ce5a937ec82da Mon Sep 17 00:00:00 2001 From: Yiming Luo Date: Wed, 4 Jun 2025 12:34:10 -0400 Subject: [PATCH 03/10] chore: Use GitHub App for update-deps workflow (#605) --- .github/workflows/update_deps.yml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.github/workflows/update_deps.yml b/.github/workflows/update_deps.yml index 31025402..33a524b2 100644 --- a/.github/workflows/update_deps.yml +++ b/.github/workflows/update_deps.yml @@ -3,14 +3,24 @@ name: update-deps on: schedule: - cron: "0 10 * * *" # Run at 10 am every day + workflow_dispatch: jobs: check: runs-on: ubuntu-latest + environment: + name: protected-main-env steps: + - name: Generate token + id: generate_token + uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6 + with: + app-id: ${{ secrets.GH_APP_ID }} + private-key: ${{ secrets.GH_APP_PRIVATE_KEY }} + - uses: actions/checkout@v3 with: - ssh-key: ${{ secrets.SSH_PRIVATE_KEY }} + token: ${{ steps.generate_token.outputs.token }} - name: Set up Python uses: actions/setup-python@v4 From 6beb65d63c063ff6fc5e480e7bf076d29f2abf0c Mon Sep 17 00:00:00 2001 From: michael-zhao459 Date: Thu, 5 Jun 2025 15:24:42 -0400 Subject: [PATCH 04/10] feat: Enable sqs -> lambda support for DSM (#604) --------- Co-authored-by: Rey Abolofia --- datadog_lambda/dsm.py | 38 +++++++++++++ datadog_lambda/wrapper.py | 7 +++ tests/test_dsm.py | 112 ++++++++++++++++++++++++++++++++++++++ tests/test_wrapper.py | 60 ++++++++++++++++++++ 4 files changed, 217 insertions(+) create mode 100644 datadog_lambda/dsm.py create mode 100644 tests/test_dsm.py diff --git a/datadog_lambda/dsm.py b/datadog_lambda/dsm.py new file mode 100644 index 00000000..427f5e47 --- /dev/null +++ b/datadog_lambda/dsm.py @@ -0,0 +1,38 @@ +from datadog_lambda import logger +from datadog_lambda.trigger import EventTypes + + +def set_dsm_context(event, event_source): + + if event_source.equals(EventTypes.SQS): + _dsm_set_sqs_context(event) + + +def _dsm_set_sqs_context(event): + from datadog_lambda.wrapper import format_err_with_traceback + from ddtrace.internal.datastreams import data_streams_processor + from ddtrace.internal.datastreams.processor import DsmPathwayCodec + from ddtrace.internal.datastreams.botocore import ( + get_datastreams_context, + calculate_sqs_payload_size, + ) + + records = event.get("Records") + if records is None: + return + processor = data_streams_processor() + + for record in records: + try: + queue_arn = record.get("eventSourceARN", "") + + contextjson = get_datastreams_context(record) + payload_size = calculate_sqs_payload_size(record) + + ctx = DsmPathwayCodec.decode(contextjson, processor) + ctx.set_checkpoint( + ["direction:in", f"topic:{queue_arn}", "type:sqs"], + 
payload_size=payload_size, + ) + except Exception as e: + logger.error(format_err_with_traceback(e)) diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 86bbf04d..0e23b721 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -9,6 +9,7 @@ from importlib import import_module from time import time_ns +from datadog_lambda.dsm import set_dsm_context from datadog_lambda.extension import should_use_extension, flush_extension from datadog_lambda.cold_start import ( set_cold_start, @@ -79,6 +80,7 @@ DD_REQUESTS_SERVICE_NAME = "DD_REQUESTS_SERVICE_NAME" DD_SERVICE = "DD_SERVICE" DD_ENV = "DD_ENV" +DD_DATA_STREAMS_ENABLED = "DD_DATA_STREAMS_ENABLED" def get_env_as_int(env_key, default_value: int) -> int: @@ -190,6 +192,9 @@ def __init__(self, func): self.min_cold_start_trace_duration = get_env_as_int( DD_MIN_COLD_START_DURATION, 3 ) + self.data_streams_enabled = ( + os.environ.get(DD_DATA_STREAMS_ENABLED, "false").lower() == "true" + ) self.local_testing_mode = os.environ.get( DD_LOCAL_TEST, "false" ).lower() in ("true", "1") @@ -322,6 +327,8 @@ def _before(self, event, context): self.inferred_span = create_inferred_span( event, context, event_source, self.decode_authorizer_context ) + if self.data_streams_enabled: + set_dsm_context(event, event_source) self.span = create_function_execution_span( context=context, function_name=self.function_name, diff --git a/tests/test_dsm.py b/tests/test_dsm.py new file mode 100644 index 00000000..544212d8 --- /dev/null +++ b/tests/test_dsm.py @@ -0,0 +1,112 @@ +import unittest +from unittest.mock import patch, MagicMock + +from datadog_lambda.dsm import set_dsm_context, _dsm_set_sqs_context +from datadog_lambda.trigger import EventTypes, _EventSource + + +class TestDsmSQSContext(unittest.TestCase): + def setUp(self): + patcher = patch("datadog_lambda.dsm._dsm_set_sqs_context") + self.mock_dsm_set_sqs_context = patcher.start() + self.addCleanup(patcher.stop) + + patcher = patch("ddtrace.internal.datastreams.data_streams_processor") + self.mock_data_streams_processor = patcher.start() + self.addCleanup(patcher.stop) + + patcher = patch("ddtrace.internal.datastreams.botocore.get_datastreams_context") + self.mock_get_datastreams_context = patcher.start() + self.mock_get_datastreams_context.return_value = {} + self.addCleanup(patcher.stop) + + patcher = patch( + "ddtrace.internal.datastreams.botocore.calculate_sqs_payload_size" + ) + self.mock_calculate_sqs_payload_size = patcher.start() + self.mock_calculate_sqs_payload_size.return_value = 100 + self.addCleanup(patcher.stop) + + patcher = patch("ddtrace.internal.datastreams.processor.DsmPathwayCodec.decode") + self.mock_dsm_pathway_codec_decode = patcher.start() + self.addCleanup(patcher.stop) + + def test_non_sqs_event_source_does_nothing(self): + """Test that non-SQS event sources don't trigger DSM context setting""" + event = {} + # Use Unknown Event Source + event_source = _EventSource(EventTypes.UNKNOWN) + set_dsm_context(event, event_source) + + # DSM context should not be set for non-SQS events + self.mock_dsm_set_sqs_context.assert_not_called() + + def test_sqs_event_with_no_records_does_nothing(self): + """Test that events where Records is None don't trigger DSM processing""" + events_with_no_records = [ + {}, + {"Records": None}, + {"someOtherField": "value"}, + ] + + for event in events_with_no_records: + _dsm_set_sqs_context(event) + self.mock_data_streams_processor.assert_not_called() + + def test_sqs_event_triggers_dsm_sqs_context(self): + """Test that SQS event 
sources trigger the SQS-specific DSM context function""" + sqs_event = { + "Records": [ + { + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:my-queue", + "body": "Hello from SQS!", + } + ] + } + + event_source = _EventSource(EventTypes.SQS) + set_dsm_context(sqs_event, event_source) + + self.mock_dsm_set_sqs_context.assert_called_once_with(sqs_event) + + def test_sqs_multiple_records_process_each_record(self): + """Test that each record in an SQS event gets processed individually""" + multi_record_event = { + "Records": [ + { + "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:queue1", + "body": "Message 1", + }, + { + "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:queue2", + "body": "Message 2", + }, + { + "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:queue3", + "body": "Message 3", + }, + ] + } + + mock_context = MagicMock() + self.mock_dsm_pathway_codec_decode.return_value = mock_context + + _dsm_set_sqs_context(multi_record_event) + + self.assertEqual(mock_context.set_checkpoint.call_count, 3) + + calls = mock_context.set_checkpoint.call_args_list + expected_arns = [ + "arn:aws:sqs:us-east-1:123456789012:queue1", + "arn:aws:sqs:us-east-1:123456789012:queue2", + "arn:aws:sqs:us-east-1:123456789012:queue3", + ] + + for i, call in enumerate(calls): + args, kwargs = call + tags = args[0] + self.assertIn("direction:in", tags) + self.assertIn(f"topic:{expected_arns[i]}", tags) + self.assertIn("type:sqs", tags) + self.assertEqual(kwargs["payload_size"], 100) diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py index f46b365e..f482fa3d 100644 --- a/tests/test_wrapper.py +++ b/tests/test_wrapper.py @@ -76,6 +76,10 @@ def setUp(self): self.mock_dd_lambda_layer_tag = patcher.start() self.addCleanup(patcher.stop) + patcher = patch("datadog_lambda.wrapper.set_dsm_context") + self.mock_set_dsm_context = patcher.start() + self.addCleanup(patcher.stop) + def test_datadog_lambda_wrapper(self): wrapper.dd_tracing_enabled = False @@ -563,6 +567,62 @@ def return_type_test(event, context): self.assertEqual(result, test_result) self.assertFalse(MockPrintExc.called) + def test_set_dsm_context_called_when_DSM_and_tracing_enabled(self): + os.environ["DD_DATA_STREAMS_ENABLED"] = "true" + wrapper.dd_tracing_enabled = True + + @wrapper.datadog_lambda_wrapper + def lambda_handler(event, context): + return "ok" + + result = lambda_handler({}, get_mock_context()) + self.assertEqual(result, "ok") + self.mock_set_dsm_context.assert_called_once() + + del os.environ["DD_DATA_STREAMS_ENABLED"] + + def test_set_dsm_context_not_called_when_only_DSM_enabled(self): + os.environ["DD_DATA_STREAMS_ENABLED"] = "true" + wrapper.dd_tracing_enabled = False + + @wrapper.datadog_lambda_wrapper + def lambda_handler(event, context): + return "ok" + + result = lambda_handler({}, get_mock_context()) + self.assertEqual(result, "ok") + self.mock_set_dsm_context.assert_not_called() + + del os.environ["DD_DATA_STREAMS_ENABLED"] + + def test_set_dsm_context_not_called_when_only_tracing_enabled(self): + os.environ["DD_DATA_STREAMS_ENABLED"] = "false" + wrapper.dd_tracing_enabled = True + + @wrapper.datadog_lambda_wrapper + def lambda_handler(event, context): + return "ok" + + result = lambda_handler({}, get_mock_context()) + self.assertEqual(result, "ok") + self.mock_set_dsm_context.assert_not_called() + + del os.environ["DD_DATA_STREAMS_ENABLED"] + + def test_set_dsm_context_not_called_when_tracing_and_DSM_disabled(self): + os.environ["DD_DATA_STREAMS_ENABLED"] = "false" + 
wrapper.dd_tracing_enabled = False + + @wrapper.datadog_lambda_wrapper + def lambda_handler(event, context): + return "ok" + + result = lambda_handler({}, get_mock_context()) + self.assertEqual(result, "ok") + self.mock_set_dsm_context.assert_not_called() + + del os.environ["DD_DATA_STREAMS_ENABLED"] + class TestLambdaDecoratorSettings(unittest.TestCase): def test_some_envs_should_depend_on_dd_tracing_enabled(self): From b3cf1c2ee54db0c01470e8cdd858db85a1183c24 Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Wed, 11 Jun 2025 08:37:35 -0700 Subject: [PATCH 05/10] Consolidate env reading to single config object. (#600) --- datadog_lambda/api.py | 15 +- datadog_lambda/cold_start.py | 12 +- datadog_lambda/config.py | 145 +++++++++++++++++++ datadog_lambda/fips.py | 19 --- datadog_lambda/metric.py | 17 +-- datadog_lambda/patch.py | 12 +- datadog_lambda/span_pointers.py | 9 +- datadog_lambda/tag_object.py | 7 +- datadog_lambda/tracing.py | 32 ++--- datadog_lambda/wrapper.py | 163 ++++++---------------- tests/conftest.py | 8 ++ tests/test_api.py | 11 +- tests/test_cold_start.py | 9 +- tests/test_config.py | 240 ++++++++++++++++++++++++++++++++ tests/test_metric.py | 2 +- tests/test_patch.py | 8 ++ tests/test_tag_object.py | 8 +- tests/test_tracing.py | 22 ++- tests/test_wrapper.py | 86 +++++------- tests/utils.py | 1 + 20 files changed, 534 insertions(+), 292 deletions(-) create mode 100644 datadog_lambda/config.py delete mode 100644 datadog_lambda/fips.py create mode 100644 tests/conftest.py create mode 100644 tests/test_config.py diff --git a/datadog_lambda/api.py b/datadog_lambda/api.py index d1cee4e4..4921dae9 100644 --- a/datadog_lambda/api.py +++ b/datadog_lambda/api.py @@ -1,7 +1,7 @@ import logging import os -from datadog_lambda.fips import fips_mode_enabled +from datadog_lambda.config import config logger = logging.getLogger(__name__) KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName" @@ -29,7 +29,6 @@ def decrypt_kms_api_key(kms_client, ciphertext): is added. We need to try decrypting the API key both with and without the encryption context. """ # Try without encryption context, in case API key was encrypted using the AWS CLI - function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME") try: plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[ "Plaintext" @@ -43,7 +42,7 @@ def decrypt_kms_api_key(kms_client, ciphertext): plaintext = kms_client.decrypt( CiphertextBlob=decoded_bytes, EncryptionContext={ - KMS_ENCRYPTION_CONTEXT_KEY: function_name, + KMS_ENCRYPTION_CONTEXT_KEY: config.function_name, }, )["Plaintext"].decode("utf-8") @@ -66,7 +65,7 @@ def get_api_key() -> str: DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", "")) LAMBDA_REGION = os.environ.get("AWS_REGION", "") - if fips_mode_enabled: + if config.fips_mode_enabled: logger.debug( "FIPS mode is enabled, using FIPS endpoints for secrets management." 
) @@ -82,7 +81,7 @@ def get_api_key() -> str: return "" endpoint_url = ( f"https://secretsmanager-fips.{secrets_region}.amazonaws.com" - if fips_mode_enabled + if config.fips_mode_enabled else None ) secrets_manager_client = _boto3_client( @@ -95,7 +94,7 @@ def get_api_key() -> str: # SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html fips_endpoint = ( f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com" - if fips_mode_enabled + if config.fips_mode_enabled else None ) ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint) @@ -106,7 +105,7 @@ def get_api_key() -> str: # KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html fips_endpoint = ( f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com" - if fips_mode_enabled + if config.fips_mode_enabled else None ) kms_client = _boto3_client("kms", endpoint_url=fips_endpoint) @@ -118,7 +117,7 @@ def get_api_key() -> str: def init_api(): - if not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true": + if not config.flush_to_log: # Make sure that this package would always be lazy-loaded/outside from the critical path # since underlying packages are quite heavy to load # and useless with the extension unless sending metrics with timestamps diff --git a/datadog_lambda/cold_start.py b/datadog_lambda/cold_start.py index ea10ea20..a40e2fcb 100644 --- a/datadog_lambda/cold_start.py +++ b/datadog_lambda/cold_start.py @@ -1,8 +1,9 @@ import time -import os from typing import List, Hashable import logging +from datadog_lambda.config import config + logger = logging.getLogger(__name__) _cold_start = True @@ -86,14 +87,12 @@ def reset_node_stacks(): def push_node(module_name, file_path): node = ImportNode(module_name, file_path, time.time_ns()) - global import_stack if import_stack: import_stack[-1].children.append(node) import_stack.append(node) def pop_node(module_name): - global import_stack if not import_stack: return node = import_stack.pop() @@ -102,7 +101,6 @@ def pop_node(module_name): end_time_ns = time.time_ns() node.end_time_ns = end_time_ns if not import_stack: # import_stack empty, a root node has been found - global root_nodes root_nodes.append(node) @@ -147,11 +145,7 @@ def wrapped_find_spec(*args, **kwargs): def initialize_cold_start_tracing(): - if ( - is_new_sandbox() - and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true" - and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true" - ): + if is_new_sandbox() and config.cold_start_tracing: from sys import meta_path for importer in meta_path: diff --git a/datadog_lambda/config.py b/datadog_lambda/config.py new file mode 100644 index 00000000..7a08d8a7 --- /dev/null +++ b/datadog_lambda/config.py @@ -0,0 +1,145 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. 
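+
+# Every value below is resolved lazily: the `_get_env` property reads its
+# environment variable on first access, caches the result on the singleton
+# as a `_config_`-prefixed attribute, and serves the cached value afterwards.
+# Values declared with `depends_on_tracing=True` resolve to False whenever
+# DD_TRACE_ENABLED is false. `_reset()` clears the cache; the autouse
+# `reset_config` fixture in tests/conftest.py calls it between tests.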
+ +import logging +import os + +logger = logging.getLogger(__name__) + + +def _get_env(key, default=None, cast=None, depends_on_tracing=False): + @property + def _getter(self): + if not hasattr(self, prop_key): + val = self._resolve_env(key, default, cast, depends_on_tracing) + setattr(self, prop_key, val) + return getattr(self, prop_key) + + prop_key = f"_config_{key}" + return _getter + + +def as_bool(val): + return val.lower() == "true" or val == "1" + + +def as_list(val): + return [val.strip() for val in val.split(",") if val.strip()] + + +class Config: + def _resolve_env(self, key, default=None, cast=None, depends_on_tracing=False): + if depends_on_tracing and not self.trace_enabled: + return False + val = os.environ.get(key, default) + if cast is not None: + try: + val = cast(val) + except (ValueError, TypeError): + msg = ( + "Failed to cast environment variable '%s' with " + "value '%s' to type %s. Using default value '%s'." + ) + logger.warning(msg, key, val, cast.__name__, default) + val = default + return val + + service = _get_env("DD_SERVICE") + env = _get_env("DD_ENV") + + cold_start_tracing = _get_env( + "DD_COLD_START_TRACING", "true", as_bool, depends_on_tracing=True + ) + min_cold_start_trace_duration = _get_env("DD_MIN_COLD_START_DURATION", 3, int) + cold_start_trace_skip_lib = _get_env( + "DD_COLD_START_TRACE_SKIP_LIB", + "ddtrace.internal.compat,ddtrace.filters", + as_list, + ) + + capture_payload_max_depth = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH", 10, int) + capture_payload_enabled = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD", "false", as_bool) + + trace_enabled = _get_env("DD_TRACE_ENABLED", "true", as_bool) + make_inferred_span = _get_env( + "DD_TRACE_MANAGED_SERVICES", "true", as_bool, depends_on_tracing=True + ) + encode_authorizer_context = _get_env( + "DD_ENCODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True + ) + decode_authorizer_context = _get_env( + "DD_DECODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True + ) + add_span_pointers = _get_env("DD_BOTOCORE_ADD_SPAN_POINTERS", "true", as_bool) + trace_extractor = _get_env("DD_TRACE_EXTRACTOR") + + enhanced_metrics_enabled = _get_env("DD_ENHANCED_METRICS", "true", as_bool) + + flush_in_thread = _get_env("DD_FLUSH_IN_THREAD", "false", as_bool) + flush_to_log = _get_env("DD_FLUSH_TO_LOG", "false", as_bool) + logs_injection = _get_env("DD_LOGS_INJECTION", "true", as_bool) + merge_xray_traces = _get_env("DD_MERGE_XRAY_TRACES", "false", as_bool) + + telemetry_enabled = _get_env( + "DD_INSTRUMENTATION_TELEMETRY_ENABLED", + "false", + as_bool, + depends_on_tracing=True, + ) + otel_enabled = _get_env("DD_TRACE_OTEL_ENABLED", "false", as_bool) + profiling_enabled = _get_env("DD_PROFILING_ENABLED", "false", as_bool) + llmobs_enabled = _get_env("DD_LLMOBS_ENABLED", "false", as_bool) + exception_replay_enabled = _get_env("DD_EXCEPTION_REPLAY_ENABLED", "false", as_bool) + data_streams_enabled = _get_env( + "DD_DATA_STREAMS_ENABLED", "false", as_bool, depends_on_tracing=True + ) + + is_gov_region = _get_env("AWS_REGION", "", lambda x: x.startswith("us-gov-")) + + local_test = _get_env("DD_LOCAL_TEST", "false", as_bool) + integration_test = _get_env("DD_INTEGRATION_TEST", "false", as_bool) + + aws_lambda_function_name = _get_env("AWS_LAMBDA_FUNCTION_NAME") + + @property + def function_name(self): + if not hasattr(self, "_config_function_name"): + if self.aws_lambda_function_name is None: + self._config_function_name = "function" + else: + self._config_function_name = self.aws_lambda_function_name 
+ return self._config_function_name + + @property + def is_lambda_context(self): + if not hasattr(self, "_config_is_lambda_context"): + self._config_is_lambda_context = bool(self.aws_lambda_function_name) + return self._config_is_lambda_context + + @property + def fips_mode_enabled(self): + if not hasattr(self, "_config_fips_mode_enabled"): + self._config_fips_mode_enabled = ( + os.environ.get( + "DD_LAMBDA_FIPS_MODE", + "true" if self.is_gov_region else "false", + ).lower() + == "true" + ) + return self._config_fips_mode_enabled + + def _reset(self): + for attr in dir(self): + if attr.startswith("_config_"): + delattr(self, attr) + + +config = Config() + +if config.is_gov_region or config.fips_mode_enabled: + logger.debug( + "Python Lambda Layer FIPS mode is %s.", + "enabled" if config.fips_mode_enabled else "not enabled", + ) diff --git a/datadog_lambda/fips.py b/datadog_lambda/fips.py deleted file mode 100644 index 8442ddd9..00000000 --- a/datadog_lambda/fips.py +++ /dev/null @@ -1,19 +0,0 @@ -import logging -import os - -is_gov_region = os.environ.get("AWS_REGION", "").startswith("us-gov-") - -fips_mode_enabled = ( - os.environ.get( - "DD_LAMBDA_FIPS_MODE", - "true" if is_gov_region else "false", - ).lower() - == "true" -) - -if is_gov_region or fips_mode_enabled: - logger = logging.getLogger(__name__) - logger.debug( - "Python Lambda Layer FIPS mode is %s.", - "enabled" if fips_mode_enabled else "not enabled", - ) diff --git a/datadog_lambda/metric.py b/datadog_lambda/metric.py index c9b978d6..73bbeca3 100644 --- a/datadog_lambda/metric.py +++ b/datadog_lambda/metric.py @@ -5,14 +5,13 @@ import enum import logging -import os import time from datetime import datetime, timedelta import ujson as json +from datadog_lambda.config import config from datadog_lambda.extension import should_use_extension -from datadog_lambda.fips import fips_mode_enabled from datadog_lambda.tags import dd_lambda_layer_tag, get_enhanced_metrics_tags logger = logging.getLogger(__name__) @@ -28,10 +27,10 @@ class MetricsHandler(enum.Enum): def _select_metrics_handler(): if should_use_extension: return MetricsHandler.EXTENSION - if os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true": + if config.flush_to_log: return MetricsHandler.FORWARDER - if fips_mode_enabled: + if config.fips_mode_enabled: logger.debug( "With FIPS mode enabled, the Datadog API metrics handler is unavailable." ) @@ -58,14 +57,8 @@ def _select_metrics_handler(): from datadog_lambda.api import init_api from datadog_lambda.thread_stats_writer import ThreadStatsWriter - flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true" init_api() - lambda_stats = ThreadStatsWriter(flush_in_thread) - - -enhanced_metrics_enabled = ( - os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true" -) + lambda_stats = ThreadStatsWriter(config.flush_in_thread) def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=False): @@ -191,7 +184,7 @@ def submit_enhanced_metric(metric_name, lambda_context): metric_name (str): metric name w/o enhanced prefix i.e. 
"invocations" or "errors" lambda_context (object): Lambda context dict passed to the function by AWS """ - if not enhanced_metrics_enabled: + if not config.enhanced_metrics_enabled: logger.debug( "Not submitting enhanced metric %s because enhanced metrics are disabled", metric_name, diff --git a/datadog_lambda/patch.py b/datadog_lambda/patch.py index 5b8a92c5..6d2af0dc 100644 --- a/datadog_lambda/patch.py +++ b/datadog_lambda/patch.py @@ -3,7 +3,6 @@ # This product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2019 Datadog, Inc. -import os import sys import logging import zlib @@ -13,10 +12,8 @@ from wrapt.importer import when_imported from ddtrace import patch_all as patch_all_dd -from datadog_lambda.tracing import ( - get_dd_trace_context, - dd_tracing_enabled, -) +from datadog_lambda.config import config +from datadog_lambda.tracing import get_dd_trace_context from collections.abc import MutableMapping logger = logging.getLogger(__name__) @@ -32,7 +29,7 @@ def patch_all(): """ _patch_for_integration_tests() - if dd_tracing_enabled: + if config.trace_enabled: patch_all_dd() else: _patch_http() @@ -44,8 +41,7 @@ def _patch_for_integration_tests(): Patch `requests` to log the outgoing requests for integration tests. """ global _integration_tests_patched - is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true" - if not _integration_tests_patched and is_in_tests: + if not _integration_tests_patched and config.integration_test: wrap("requests", "Session.send", _log_request) _integration_tests_patched = True diff --git a/datadog_lambda/span_pointers.py b/datadog_lambda/span_pointers.py index 40d959e6..45925d92 100644 --- a/datadog_lambda/span_pointers.py +++ b/datadog_lambda/span_pointers.py @@ -1,12 +1,12 @@ from itertools import chain import logging -import os from typing import List from typing import Optional from ddtrace._trace._span_pointer import _SpanPointerDirection from ddtrace._trace._span_pointer import _SpanPointerDescription +from datadog_lambda.config import config from datadog_lambda.metric import submit_dynamodb_stream_type_metric from datadog_lambda.trigger import EventTypes @@ -14,15 +14,10 @@ logger = logging.getLogger(__name__) -dd_botocore_add_span_pointers = os.environ.get( - "DD_BOTOCORE_ADD_SPAN_POINTERS", "true" -).lower() in ("true", "1") - - def calculate_span_pointers( event_source, event, - botocore_add_span_pointers=dd_botocore_add_span_pointers, + botocore_add_span_pointers=config.add_span_pointers, ) -> List[_SpanPointerDescription]: try: if botocore_add_span_pointers: diff --git a/datadog_lambda/tag_object.py b/datadog_lambda/tag_object.py index 6d82f83b..744e4893 100644 --- a/datadog_lambda/tag_object.py +++ b/datadog_lambda/tag_object.py @@ -4,18 +4,17 @@ # Copyright 2021 Datadog, Inc. 
from decimal import Decimal -import logging import ujson as json +from datadog_lambda.config import config + redactable_keys = ["authorization", "x-authorization", "password", "token"] -max_depth = 10 -logger = logging.getLogger(__name__) def tag_object(span, key, obj, depth=0): if obj is None: return span.set_tag(key, obj) - if depth >= max_depth: + if depth >= config.capture_payload_max_depth: return span.set_tag(key, _redact_val(key, str(obj)[0:5000])) depth += 1 if _should_try_string(obj): diff --git a/datadog_lambda/tracing.py b/datadog_lambda/tracing.py index 4b6f300a..3d5f671e 100644 --- a/datadog_lambda/tracing.py +++ b/datadog_lambda/tracing.py @@ -32,6 +32,8 @@ from ddtrace import __version__ as ddtrace_version from ddtrace.propagation.http import HTTPPropagator from ddtrace.trace import Context, Span, tracer + +from datadog_lambda.config import config from datadog_lambda import __version__ as datadog_lambda_version from datadog_lambda.trigger import ( _EventSource, @@ -42,10 +44,7 @@ EventSubtypes, ) -dd_trace_otel_enabled = ( - os.environ.get("DD_TRACE_OTEL_ENABLED", "false").lower() == "true" -) -if dd_trace_otel_enabled: +if config.otel_enabled: from opentelemetry.trace import set_tracer_provider from ddtrace.opentelemetry import TracerProvider @@ -55,18 +54,11 @@ logger = logging.getLogger(__name__) dd_trace_context = None -dd_tracing_enabled = os.environ.get("DD_TRACE_ENABLED", "false").lower() == "true" -if dd_tracing_enabled: +if config.telemetry_enabled: # Enable the telemetry client if the user has opted in - if ( - os.environ.get("DD_INSTRUMENTATION_TELEMETRY_ENABLED", "false").lower() - == "true" - ): - from ddtrace.internal.telemetry import telemetry_writer + from ddtrace.internal.telemetry import telemetry_writer - telemetry_writer.enable() - -is_lambda_context = os.environ.get(XrayDaemon.FUNCTION_NAME_HEADER_NAME) != "" + telemetry_writer.enable() propagator = HTTPPropagator() @@ -97,7 +89,7 @@ def _convert_xray_sampling(xray_sampled): def _get_xray_trace_context(): - if not is_lambda_context: + if not config.is_lambda_context: return None xray_trace_entity = parse_xray_header( @@ -639,13 +631,11 @@ def get_dd_trace_context_obj(): automatically, but this function can be used to manually inject the trace context to an outgoing request. """ - if dd_tracing_enabled: + if config.trace_enabled: dd_trace_py_context = _get_dd_trace_py_context() if _is_context_complete(dd_trace_py_context): return dd_trace_py_context - global dd_trace_context - try: xray_context = _get_xray_trace_context() # xray (sub)segment except Exception as e: @@ -690,10 +680,10 @@ def set_correlation_ids(): TODO: Remove me when Datadog tracer is natively supported in Lambda. 
""" - if not is_lambda_context: + if not config.is_lambda_context: logger.debug("set_correlation_ids is only supported in LambdaContext") return - if dd_tracing_enabled: + if config.trace_enabled: logger.debug("using ddtrace implementation for spans") return @@ -1480,7 +1470,7 @@ def emit_telemetry_on_exception_outside_of_handler( Emit an enhanced error metric and create a span for exceptions occurring outside the handler """ submit_errors_metric(None) - if dd_tracing_enabled: + if config.trace_enabled: span = tracer.trace( "aws.lambda", service="aws.lambda", diff --git a/datadog_lambda/wrapper.py b/datadog_lambda/wrapper.py index 0e23b721..87063411 100644 --- a/datadog_lambda/wrapper.py +++ b/datadog_lambda/wrapper.py @@ -18,6 +18,7 @@ is_new_sandbox, ColdStartTracer, ) +from datadog_lambda.config import config from datadog_lambda.constants import ( TraceContextSource, XraySubsegment, @@ -26,11 +27,11 @@ from datadog_lambda.module_name import modify_module_name from datadog_lambda.patch import patch_all from datadog_lambda.span_pointers import calculate_span_pointers +from datadog_lambda.tag_object import tag_object from datadog_lambda.tracing import ( extract_dd_trace_context, create_dd_dummy_metadata_subsegment, inject_correlation_ids, - dd_tracing_enabled, mark_trace_as_error_for_5xx_responses, set_correlation_ids, set_dd_trace_py_root, @@ -46,65 +47,20 @@ extract_http_status_code_tag, ) -profiling_env_var = os.environ.get("DD_PROFILING_ENABLED", "false").lower() == "true" -if profiling_env_var: +if config.profiling_enabled: from ddtrace.profiling import profiler -llmobs_env_var = os.environ.get("DD_LLMOBS_ENABLED", "false").lower() in ("true", "1") -if llmobs_env_var: +if config.llmobs_enabled: from ddtrace.llmobs import LLMObs -exception_replay_env_var = os.environ.get( - "DD_EXCEPTION_REPLAY_ENABLED", "false" -).lower() in ("true", "1") -if exception_replay_env_var: +if config.exception_replay_enabled: from ddtrace.debugging._exception.replay import SpanExceptionHandler from ddtrace.debugging._uploader import LogsIntakeUploaderV1 logger = logging.getLogger(__name__) -DD_FLUSH_TO_LOG = "DD_FLUSH_TO_LOG" -DD_LOGS_INJECTION = "DD_LOGS_INJECTION" -DD_MERGE_XRAY_TRACES = "DD_MERGE_XRAY_TRACES" -AWS_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME" -DD_LOCAL_TEST = "DD_LOCAL_TEST" -DD_TRACE_EXTRACTOR = "DD_TRACE_EXTRACTOR" -DD_TRACE_MANAGED_SERVICES = "DD_TRACE_MANAGED_SERVICES" -DD_ENCODE_AUTHORIZER_CONTEXT = "DD_ENCODE_AUTHORIZER_CONTEXT" -DD_DECODE_AUTHORIZER_CONTEXT = "DD_DECODE_AUTHORIZER_CONTEXT" -DD_COLD_START_TRACING = "DD_COLD_START_TRACING" -DD_MIN_COLD_START_DURATION = "DD_MIN_COLD_START_DURATION" -DD_COLD_START_TRACE_SKIP_LIB = "DD_COLD_START_TRACE_SKIP_LIB" -DD_CAPTURE_LAMBDA_PAYLOAD = "DD_CAPTURE_LAMBDA_PAYLOAD" -DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH = "DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH" DD_REQUESTS_SERVICE_NAME = "DD_REQUESTS_SERVICE_NAME" DD_SERVICE = "DD_SERVICE" -DD_ENV = "DD_ENV" -DD_DATA_STREAMS_ENABLED = "DD_DATA_STREAMS_ENABLED" - - -def get_env_as_int(env_key, default_value: int) -> int: - try: - return int(os.environ.get(env_key, default_value)) - except Exception as e: - logger.warn( - f"Failed to parse {env_key} as int. Using default value: {default_value}. 
Error: {e}" - ) - return default_value - - -dd_capture_lambda_payload_enabled = ( - os.environ.get(DD_CAPTURE_LAMBDA_PAYLOAD, "false").lower() == "true" -) - -if dd_capture_lambda_payload_enabled: - import datadog_lambda.tag_object as tag_object - - tag_object.max_depth = get_env_as_int( - DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH, tag_object.max_depth - ) - -env_env_var = os.environ.get(DD_ENV, None) init_timestamp_ns = time_ns() @@ -161,59 +117,16 @@ def __init__(self, func): """Executes when the wrapped function gets wrapped""" try: self.func = func - self.flush_to_log = os.environ.get(DD_FLUSH_TO_LOG, "").lower() == "true" - self.logs_injection = ( - os.environ.get(DD_LOGS_INJECTION, "true").lower() == "true" - ) - self.merge_xray_traces = ( - os.environ.get(DD_MERGE_XRAY_TRACES, "false").lower() == "true" - ) - self.function_name = os.environ.get(AWS_LAMBDA_FUNCTION_NAME, "function") - self.service = os.environ.get(DD_SERVICE, None) - self.extractor_env = os.environ.get(DD_TRACE_EXTRACTOR, None) self.trace_extractor = None self.span = None self.inferred_span = None - depends_on_dd_tracing_enabled = ( - lambda original_boolean: dd_tracing_enabled and original_boolean - ) - self.make_inferred_span = depends_on_dd_tracing_enabled( - os.environ.get(DD_TRACE_MANAGED_SERVICES, "true").lower() == "true" - ) - self.encode_authorizer_context = depends_on_dd_tracing_enabled( - os.environ.get(DD_ENCODE_AUTHORIZER_CONTEXT, "true").lower() == "true" - ) - self.decode_authorizer_context = depends_on_dd_tracing_enabled( - os.environ.get(DD_DECODE_AUTHORIZER_CONTEXT, "true").lower() == "true" - ) - self.cold_start_tracing = depends_on_dd_tracing_enabled( - os.environ.get(DD_COLD_START_TRACING, "true").lower() == "true" - ) - self.min_cold_start_trace_duration = get_env_as_int( - DD_MIN_COLD_START_DURATION, 3 - ) - self.data_streams_enabled = ( - os.environ.get(DD_DATA_STREAMS_ENABLED, "false").lower() == "true" - ) - self.local_testing_mode = os.environ.get( - DD_LOCAL_TEST, "false" - ).lower() in ("true", "1") - self.cold_start_trace_skip_lib = [ - "ddtrace.internal.compat", - "ddtrace.filters", - ] - if DD_COLD_START_TRACE_SKIP_LIB in os.environ: - try: - self.cold_start_trace_skip_lib = os.environ[ - DD_COLD_START_TRACE_SKIP_LIB - ].split(",") - except Exception: - logger.debug(f"Malformatted for env {DD_COLD_START_TRACE_SKIP_LIB}") self.response = None - if profiling_env_var: - self.prof = profiler.Profiler(env=env_env_var, service=self.service) - if self.extractor_env: - extractor_parts = self.extractor_env.rsplit(".", 1) + + if config.profiling_enabled: + self.prof = profiler.Profiler(env=config.env, service=config.service) + + if config.trace_extractor: + extractor_parts = config.trace_extractor.rsplit(".", 1) if len(extractor_parts) == 2: (mod_name, extractor_name) = extractor_parts modified_extractor_name = modify_module_name(mod_name) @@ -221,7 +134,7 @@ def __init__(self, func): self.trace_extractor = getattr(extractor_module, extractor_name) # Inject trace correlation ids to logs - if self.logs_injection: + if config.logs_injection: inject_correlation_ids() # This prevents a breaking change in ddtrace v0.49 regarding the service name @@ -233,11 +146,11 @@ def __init__(self, func): patch_all() # Enable LLM Observability - if llmobs_env_var: + if config.llmobs_enabled: LLMObs.enable() # Enable Exception Replay - if exception_replay_env_var: + if config.exception_replay_enabled: logger.debug("Enabling exception replay") SpanExceptionHandler.enable() @@ -307,7 +220,7 @@ def _before(self, event, 
context): event, context, extractor=self.trace_extractor, - decode_authorizer_context=self.decode_authorizer_context, + decode_authorizer_context=config.decode_authorizer_context, ) self.event_source = event_source # Create a Datadog X-Ray subsegment with the trace context @@ -321,28 +234,28 @@ def _before(self, event, context): XraySubsegment.TRACE_KEY, ) - if dd_tracing_enabled: - set_dd_trace_py_root(trace_context_source, self.merge_xray_traces) - if self.make_inferred_span: + if config.trace_enabled: + set_dd_trace_py_root(trace_context_source, config.merge_xray_traces) + if config.make_inferred_span: self.inferred_span = create_inferred_span( - event, context, event_source, self.decode_authorizer_context + event, context, event_source, config.decode_authorizer_context ) - if self.data_streams_enabled: + if config.data_streams_enabled: set_dsm_context(event, event_source) self.span = create_function_execution_span( context=context, - function_name=self.function_name, + function_name=config.function_name, is_cold_start=is_cold_start(), is_proactive_init=is_proactive_init(), trace_context_source=trace_context_source, - merge_xray_traces=self.merge_xray_traces, + merge_xray_traces=config.merge_xray_traces, trigger_tags=self.trigger_tags, parent_span=self.inferred_span, span_pointers=calculate_span_pointers(event_source, event), ) else: set_correlation_ids() - if profiling_env_var and is_new_sandbox(): + if config.profiling_enabled and is_new_sandbox(): self.prof.start(stop_on_exit=False, profile_children=True) logger.debug("datadog_lambda_wrapper _before() done") except Exception as e: @@ -361,14 +274,14 @@ def _after(self, event, context): create_dd_dummy_metadata_subsegment( self.trigger_tags, XraySubsegment.LAMBDA_FUNCTION_TAGS_KEY ) - should_trace_cold_start = self.cold_start_tracing and is_new_sandbox() + should_trace_cold_start = config.cold_start_tracing and is_new_sandbox() if should_trace_cold_start: trace_ctx = tracer.current_trace_context() if self.span: - if dd_capture_lambda_payload_enabled: - tag_object.tag_object(self.span, "function.request", event) - tag_object.tag_object(self.span, "function.response", self.response) + if config.capture_payload_enabled: + tag_object(self.span, "function.request", event) + tag_object(self.span, "function.response", self.response) if status_code: self.span.set_tag("http.status_code", status_code) @@ -378,8 +291,8 @@ def _after(self, event, context): if status_code: self.inferred_span.set_tag("http.status_code", status_code) - if self.service: - self.inferred_span.set_tag("peer.service", self.service) + if config.service: + self.inferred_span.set_tag("peer.service", config.service) if InferredSpanInfo.is_async(self.inferred_span) and self.span: self.inferred_span.finish(finish_time=self.span.start) @@ -391,33 +304,35 @@ def _after(self, event, context): following_span = self.span or self.inferred_span ColdStartTracer( tracer, - self.function_name, + config.function_name, following_span.start_ns, trace_ctx, - self.min_cold_start_trace_duration, - self.cold_start_trace_skip_lib, + config.min_cold_start_trace_duration, + config.cold_start_trace_skip_lib, ).trace() except Exception as e: logger.debug("Failed to create cold start spans. 
%s", e) - if not self.flush_to_log or should_use_extension: + if not config.flush_to_log or should_use_extension: from datadog_lambda.metric import flush_stats flush_stats(context) - if should_use_extension and self.local_testing_mode: + if should_use_extension and config.local_test: # when testing locally, the extension does not know when an # invocation completes because it does not have access to the # logs api flush_extension() - if llmobs_env_var: + if config.llmobs_enabled: LLMObs.flush() # Flush exception replay - if exception_replay_env_var: + if config.exception_replay_enabled: LogsIntakeUploaderV1._instance.periodic() - if self.encode_authorizer_context and is_authorizer_response(self.response): + if config.encode_authorizer_context and is_authorizer_response( + self.response + ): self._inject_authorizer_span_headers( event.get("requestContext", {}).get("requestId") ) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..33869802 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,8 @@ +import pytest + +from datadog_lambda.config import config + + +@pytest.fixture(autouse=True) +def reset_config(): + config._reset() diff --git a/tests/test_api.py b/tests/test_api.py index 59ee4ee8..7fcc3c22 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -22,7 +22,10 @@ def setUp(self): ) self.env_patcher.start() - @patch("datadog_lambda.api.fips_mode_enabled", True) + def tearDown(self): + del os.environ["AWS_REGION"] + + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") def test_secrets_manager_fips_endpoint(self, mock_boto3_client): mock_client = MagicMock() @@ -63,7 +66,7 @@ def test_secrets_manager_different_region(self, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") - @patch("datadog_lambda.api.fips_mode_enabled", True) + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") def test_secrets_manager_different_region_but_still_fips(self, mock_boto3_client): mock_client = MagicMock() @@ -84,7 +87,7 @@ def test_secrets_manager_different_region_but_still_fips(self, mock_boto3_client ) self.assertEqual(api_key, "test-api-key") - @patch("datadog_lambda.api.fips_mode_enabled", True) + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") def test_ssm_fips_endpoint(self, mock_boto3_client): mock_client = MagicMock() @@ -103,7 +106,7 @@ def test_ssm_fips_endpoint(self, mock_boto3_client): ) self.assertEqual(api_key, "test-api-key") - @patch("datadog_lambda.api.fips_mode_enabled", True) + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("botocore.session.Session.create_client") @patch("datadog_lambda.api.decrypt_kms_api_key") def test_kms_fips_endpoint(self, mock_decrypt_kms, mock_boto3_client): diff --git a/tests/test_cold_start.py b/tests/test_cold_start.py index c7444c49..d75b5f43 100644 --- a/tests/test_cold_start.py +++ b/tests/test_cold_start.py @@ -8,6 +8,8 @@ import datadog_lambda.cold_start as cold_start import datadog_lambda.wrapper as wrapper +from tests.utils import get_mock_context + class TestColdStartTracingSetup(unittest.TestCase): def test_proactive_init(self): @@ -247,7 +249,7 @@ def finish(span): monkeypatch.setattr(wrapper.tracer, "_on_span_finish", finish) monkeypatch.setattr(wrapper, "is_new_sandbox", lambda: True) - monkeypatch.setattr("datadog_lambda.wrapper.dd_tracing_enabled", True) + 
monkeypatch.setattr("datadog_lambda.config.Config.trace_enabled", True) monkeypatch.setenv( "DD_COLD_START_TRACE_SKIP_LIB", "ddtrace.contrib.logging,datadog_lambda.wrapper" ) @@ -257,10 +259,7 @@ def finish(span): def handler(event, context): import tabnanny - lambda_context = MagicMock() - lambda_context.invoked_function_arn = ( - "arn:aws:lambda:us-west-1:123457598159:function:python-layer-test:1" - ) + lambda_context = get_mock_context() handler.cold_start_tracing = True handler({}, lambda_context) diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 00000000..92002439 --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,240 @@ +import pytest + +from datadog_lambda.config import config, _get_env, Config + + +@pytest.fixture +def setenv(monkeypatch): + def set_env(key, value): + if value is None: + monkeypatch.delenv(key, raising=False) + else: + monkeypatch.setenv(key, value) + + return set_env + + +def _test_as_bool(env_key, conf_key, default): + return ( + (env_key, conf_key, None, default), + (env_key, conf_key, "", False), + (env_key, conf_key, "true", True), + (env_key, conf_key, "TRUE", True), + (env_key, conf_key, "false", False), + (env_key, conf_key, "FALSE", False), + (env_key, conf_key, "1", True), + (env_key, conf_key, "0", False), + (env_key, conf_key, "purple", False), + ) + + +def _test_int(env_key, conf_key, default): + return ( + (env_key, conf_key, None, default), + (env_key, conf_key, "", default), + (env_key, conf_key, "5", 5), + (env_key, conf_key, "0", 0), + (env_key, conf_key, "2.5", default), + (env_key, conf_key, "-1", -1), + (env_key, conf_key, "purple", default), + ) + + +def _test_as_list(env_key, conf_key, default): + return ( + (env_key, conf_key, None, default.split(",")), + (env_key, conf_key, "", []), + (env_key, conf_key, " ", []), + (env_key, conf_key, ",", []), + (env_key, conf_key, " , ", []), + (env_key, conf_key, "a", ["a"]), + (env_key, conf_key, "a,", ["a"]), + (env_key, conf_key, "a, ", ["a"]), + (env_key, conf_key, "a,b", ["a", "b"]), + (env_key, conf_key, "a, b", ["a", "b"]), + ) + + +_test_config_from_environ = ( + *_test_as_bool("DD_FLUSH_TO_LOG", "flush_to_log", default=False), + *_test_as_bool("DD_LOGS_INJECTION", "logs_injection", default=True), + *_test_as_bool("DD_TRACE_ENABLED", "trace_enabled", default=True), + *_test_as_bool("DD_COLD_START_TRACING", "cold_start_tracing", default=True), + *_test_as_bool("DD_TRACE_MANAGED_SERVICES", "make_inferred_span", default=True), + *_test_as_bool( + "DD_ENCODE_AUTHORIZER_CONTEXT", "encode_authorizer_context", default=True + ), + *_test_as_bool( + "DD_DECODE_AUTHORIZER_CONTEXT", "decode_authorizer_context", default=True + ), + *_test_as_bool("DD_FLUSH_IN_THREAD", "flush_in_thread", default=False), + *_test_as_bool("DD_ENHANCED_METRICS", "enhanced_metrics_enabled", default=True), + *_test_as_bool("DD_INTEGRATION_TEST", "integration_test", default=False), + *_test_as_bool("DD_BOTOCORE_ADD_SPAN_POINTERS", "add_span_pointers", default=True), + *_test_as_bool("DD_TRACE_OTEL_ENABLED", "otel_enabled", default=False), + *_test_as_bool( + "DD_INSTRUMENTATION_TELEMETRY_ENABLED", "telemetry_enabled", default=False + ), + *_test_as_bool("DD_MERGE_XRAY_TRACES", "merge_xray_traces", default=False), + *_test_as_bool("DD_PROFILING_ENABLED", "profiling_enabled", default=False), + *_test_as_bool("DD_LLMOBS_ENABLED", "llmobs_enabled", default=False), + *_test_as_bool( + "DD_EXCEPTION_REPLAY_ENABLED", "exception_replay_enabled", default=False + ), + *_test_as_bool( + 
"DD_CAPTURE_LAMBDA_PAYLOAD", "capture_payload_enabled", default=False + ), + *_test_as_bool("DD_LOCAL_TEST", "local_test", default=False), + *_test_as_bool("DD_DATA_STREAMS_ENABLED", "data_streams_enabled", default=False), + *_test_int( + "DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH", "capture_payload_max_depth", default=10 + ), + *_test_int( + "DD_MIN_COLD_START_DURATION", "min_cold_start_trace_duration", default=3 + ), + *_test_as_list( + "DD_COLD_START_TRACE_SKIP_LIB", + "cold_start_trace_skip_lib", + default="ddtrace.internal.compat,ddtrace.filters", + ), + ("DD_SERVICE", "service", None, None), + ("DD_SERVICE", "service", "", ""), + ("DD_SERVICE", "service", "my_service", "my_service"), + ("AWS_LAMBDA_FUNCTION_NAME", "aws_lambda_function_name", None, None), + ("AWS_LAMBDA_FUNCTION_NAME", "aws_lambda_function_name", "", ""), + ( + "AWS_LAMBDA_FUNCTION_NAME", + "aws_lambda_function_name", + "my_function", + "my_function", + ), + ("AWS_LAMBDA_FUNCTION_NAME", "function_name", None, "function"), + ("AWS_LAMBDA_FUNCTION_NAME", "function_name", "", ""), + ("AWS_LAMBDA_FUNCTION_NAME", "function_name", "my_function", "my_function"), + ("AWS_LAMBDA_FUNCTION_NAME", "is_lambda_context", None, False), + ("AWS_LAMBDA_FUNCTION_NAME", "is_lambda_context", "", False), + ("AWS_LAMBDA_FUNCTION_NAME", "is_lambda_context", "my_function", True), + ("AWS_REGION", "is_gov_region", None, False), + ("AWS_REGION", "is_gov_region", "", False), + ("AWS_REGION", "is_gov_region", "us-gov-1", True), + ("AWS_REGION", "is_gov_region", "us-est-1", False), + ("DD_TRACE_EXTRACTOR", "trace_extractor", None, None), + ("DD_TRACE_EXTRACTOR", "trace_extractor", "", ""), + ("DD_TRACE_EXTRACTOR", "trace_extractor", "my_extractor", "my_extractor"), + ("DD_ENV", "env", None, None), + ("DD_ENV", "env", "", ""), + ("DD_ENV", "env", "my_env", "my_env"), +) + + +@pytest.mark.parametrize("env_key,conf_key,env_val,conf_val", _test_config_from_environ) +def test_config_from_environ(env_key, conf_key, env_val, conf_val, setenv): + setenv(env_key, env_val) + assert getattr(config, conf_key) == conf_val + + +_test_config_from_environ_depends_on_tracing = ( + *_test_as_bool("DD_COLD_START_TRACING", "cold_start_tracing", default=True), + *_test_as_bool("DD_TRACE_MANAGED_SERVICES", "make_inferred_span", default=True), + *_test_as_bool( + "DD_ENCODE_AUTHORIZER_CONTEXT", "encode_authorizer_context", default=True + ), + *_test_as_bool( + "DD_DECODE_AUTHORIZER_CONTEXT", "decode_authorizer_context", default=True + ), + *_test_as_bool("DD_DATA_STREAMS_ENABLED", "data_streams_enabled", default=False), + *_test_as_bool( + "DD_INSTRUMENTATION_TELEMETRY_ENABLED", "telemetry_enabled", default=False + ), +) + + +@pytest.mark.parametrize( + "env_key,conf_key,env_val,conf_val", _test_config_from_environ_depends_on_tracing +) +def test_config_from_environ_depends_on_tracing( + env_key, conf_key, env_val, conf_val, setenv +): + setenv(env_key, env_val) + setenv("DD_TRACE_ENABLED", "false") + assert getattr(config, conf_key) is False + + +def test_config_aws_lambda_function_name(setenv): + # these config values all access the same environment variable, test to + # ensure the wrong value is not cached + setenv("AWS_LAMBDA_FUNCTION_NAME", "my_function") + assert config.aws_lambda_function_name == "my_function" + assert config.function_name == "my_function" + assert config.is_lambda_context is True + + +_test_fips_mode_from_environ = ( + (None, None, False), + (None, "", False), + (None, "us-gov-1", True), + (None, "us-east-1", False), + ("", None, False), + ("", "", 
False), + ("", "us-gov-1", False), + ("", "us-east-1", False), + ("true", None, True), + ("true", "", True), + ("true", "us-gov-1", True), + ("true", "us-east-1", True), + ("TRUE", None, True), + ("TRUE", "", True), + ("TRUE", "us-gov-1", True), + ("TRUE", "us-east-1", True), + ("false", None, False), + ("false", "", False), + ("false", "us-gov-1", False), + ("false", "us-east-1", False), + ("FALSE", None, False), + ("FALSE", "", False), + ("FALSE", "us-gov-1", False), + ("FALSE", "us-east-1", False), + ("1", None, False), + ("1", "", False), + ("1", "us-gov-1", False), + ("1", "us-east-1", False), + ("0", None, False), + ("0", "", False), + ("0", "us-gov-1", False), + ("0", "us-east-1", False), +) + + +@pytest.mark.parametrize("fips_mode,region,conf_val", _test_fips_mode_from_environ) +def test_fips_mode_from_environ(fips_mode, region, conf_val, setenv): + setenv("DD_LAMBDA_FIPS_MODE", fips_mode) + setenv("AWS_REGION", region) + assert config.fips_mode_enabled == conf_val + + +def test__get_env_does_not_log_when_env_not_set(setenv, monkeypatch): + setenv("TEST_1", None) + setenv("TEST_2", None) + setenv("TEST_3", None) + setenv("TEST_4", None) + + class Testing(Config): + test_1 = _get_env("TEST_1") + test_2 = _get_env("TEST_2", "purple") + test_3 = _get_env("TEST_3", "true", bool) + test_4 = _get_env("TEST_4", "true", bool, depends_on_tracing=True) + + logs = [] + + def cap_warn(*args, **kwargs): + logs.append(args) + + monkeypatch.setattr("datadog_lambda.config.logger.warning", cap_warn) + + testing = Testing() + testing.test_1 + testing.test_2 + testing.test_3 + testing.test_4 + + assert not logs diff --git a/tests/test_metric.py b/tests/test_metric.py index e7dab2c3..aa537d34 100644 --- a/tests/test_metric.py +++ b/tests/test_metric.py @@ -62,7 +62,7 @@ def test_select_metrics_handler_dd_api_fallback(self): self.assertEqual(MetricsHandler.DATADOG_API, _select_metrics_handler()) del os.environ["DD_FLUSH_TO_LOG"] - @patch("datadog_lambda.metric.fips_mode_enabled", True) + @patch("datadog_lambda.config.Config.fips_mode_enabled", True) @patch("datadog_lambda.metric.should_use_extension", False) def test_select_metrics_handler_has_no_fallback_in_fips_mode(self): os.environ["DD_FLUSH_TO_LOG"] = "False" diff --git a/tests/test_patch.py b/tests/test_patch.py index bf924875..b03d2e23 100644 --- a/tests/test_patch.py +++ b/tests/test_patch.py @@ -1,3 +1,4 @@ +import pytest import unittest from unittest.mock import patch, MagicMock @@ -5,6 +6,13 @@ from datadog_lambda.patch import _patch_http, _ensure_patch_requests from datadog_lambda.constants import TraceHeader +from ddtrace.contrib.internal.requests.patch import unpatch as unpatch_requests + + +@pytest.fixture(scope="module", autouse=True) +def reset_patches(): + unpatch_requests() + class TestPatchHTTPClients(unittest.TestCase): def setUp(self): diff --git a/tests/test_tag_object.py b/tests/test_tag_object.py index 77512164..574bb331 100644 --- a/tests/test_tag_object.py +++ b/tests/test_tag_object.py @@ -29,6 +29,7 @@ def test_tag_object(self): True, ) + @patch("datadog_lambda.config.Config.capture_payload_max_depth", 2) def test_tag_object_max_depth(self): payload = { "hello": "world", @@ -41,11 +42,8 @@ def test_tag_object_max_depth(self): "vals": [{"thingOne": 1}, {"thingTwo": 2}], } spanMock = MagicMock() - import datadog_lambda.tag_object as lib_ref - lib_ref.max_depth = 2 # setting up the test tag_object(spanMock, "function.request", payload) - lib_ref.max_depth = 10 # revert the setup spanMock.set_tag.assert_has_calls( [ 
call("function.request.vals.0", "{'thingOne': 1}"), @@ -62,6 +60,7 @@ def test_tag_object_max_depth(self): True, ) + @patch("datadog_lambda.config.Config.capture_payload_max_depth", 0) def test_tag_object_max_depth_0(self): payload = { "hello": "world", @@ -74,11 +73,8 @@ def test_tag_object_max_depth_0(self): "vals": [{"thingOne": 1}, {"thingTwo": 2}], } spanMock = MagicMock() - import datadog_lambda.tag_object as lib_ref - lib_ref.max_depth = 0 # setting up the test tag_object(spanMock, "function.request", payload) - lib_ref.max_depth = 10 # revert the setup spanMock.set_tag.assert_has_calls( [ call( diff --git a/tests/test_tracing.py b/tests/test_tracing.py index e38e4ecd..a629343e 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -251,20 +251,16 @@ def test_extract_dd_trace_context(event, expect): class TestExtractAndGetDDTraceContext(unittest.TestCase): def setUp(self): - global dd_tracing_enabled - dd_tracing_enabled = False os.environ["_X_AMZN_TRACE_ID"] = fake_xray_header_value patcher = patch("datadog_lambda.tracing.send_segment") self.mock_send_segment = patcher.start() self.addCleanup(patcher.stop) - patcher = patch("datadog_lambda.tracing.is_lambda_context") + patcher = patch("datadog_lambda.config.Config.is_lambda_context") self.mock_is_lambda_context = patcher.start() self.mock_is_lambda_context.return_value = True self.addCleanup(patcher.stop) def tearDown(self): - global dd_tracing_enabled - dd_tracing_enabled = False del os.environ["_X_AMZN_TRACE_ID"] @with_trace_propagation_style("datadog") @@ -984,11 +980,12 @@ def setUp(self): ) self.addCleanup(patcher.stop) - patcher = patch("datadog_lambda.tracing.is_lambda_context") + patcher = patch("datadog_lambda.config.Config.is_lambda_context") self.mock_is_lambda_context = patcher.start() self.mock_is_lambda_context.return_value = True self.addCleanup(patcher.stop) + @patch("datadog_lambda.config.Config.trace_enabled", False) def test_set_correlation_ids(self): set_correlation_ids() span = tracer.current_span() @@ -1124,13 +1121,11 @@ def test_function_with_span_pointers(self): class TestSetTraceRootSpan(unittest.TestCase): def setUp(self): - global dd_tracing_enabled - dd_tracing_enabled = False os.environ["_X_AMZN_TRACE_ID"] = fake_xray_header_value patcher = patch("datadog_lambda.tracing.send_segment") self.mock_send_segment = patcher.start() self.addCleanup(patcher.stop) - patcher = patch("datadog_lambda.tracing.is_lambda_context") + patcher = patch("datadog_lambda.config.Config.is_lambda_context") self.mock_is_lambda_context = patcher.start() self.mock_is_lambda_context.return_value = True self.addCleanup(patcher.stop) @@ -1143,8 +1138,6 @@ def setUp(self): self.addCleanup(patcher.stop) def tearDown(self): - global dd_tracing_enabled - dd_tracing_enabled = False del os.environ["_X_AMZN_TRACE_ID"] def test_mixed_parent_context_when_merging(self): @@ -1245,6 +1238,7 @@ def test_get_service_mapping(self): create_service_mapping(os.environ["DD_SERVICE_MAPPING"]) ) self.assertEqual(self.get_service_mapping(), expected_output) + del os.environ["DD_SERVICE_MAPPING"] def test_set_service_mapping(self): new_service_mapping = {"api3": "service3", "api4": "service4"} @@ -1285,6 +1279,8 @@ def test_determine_service_name(self): "default", ) + del os.environ["DD_SERVICE_MAPPING"] + def test_remaps_all_inferred_span_service_names_from_api_gateway_event(self): new_service_mapping = {"lambda_api_gateway": "new-name"} self.set_service_mapping(new_service_mapping) @@ -2386,7 +2382,7 @@ def 
test_deterministic_m5_hash__always_leading_with_zero(self): class TestExceptionOutsideHandler(unittest.TestCase): - @patch("datadog_lambda.tracing.dd_tracing_enabled", True) + @patch("datadog_lambda.config.Config.trace_enabled", True) @patch("datadog_lambda.tracing.submit_errors_metric") @patch("time.time_ns", return_value=42) def test_exception_outside_handler_tracing_enabled( @@ -2427,7 +2423,7 @@ def test_exception_outside_handler_tracing_enabled( assert mock_span.error == 1 assert mock_span.start_ns == 42 - @patch("datadog_lambda.tracing.dd_tracing_enabled", False) + @patch("datadog_lambda.config.Config.trace_enabled", False) @patch("datadog_lambda.tracing.submit_errors_metric") @patch("time.time_ns", return_value=42) def test_exception_outside_handler_tracing_disabled( diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py index f482fa3d..f0240905 100644 --- a/tests/test_wrapper.py +++ b/tests/test_wrapper.py @@ -8,6 +8,8 @@ import datadog_lambda.wrapper as wrapper import datadog_lambda.xray as xray + +from datadog_lambda.config import config from datadog_lambda.metric import lambda_metric from datadog_lambda.thread_stats_writer import ThreadStatsWriter from ddtrace.trace import Span, tracer @@ -24,7 +26,6 @@ def setUp(self): patch("ddtrace.internal.writer.AgentWriter.flush_queue").start() wrapper.datadog_lambda_wrapper._force_wrap = True - wrapper.dd_tracing_enabled = True patcher = patch( "datadog.threadstats.reporters.HttpReporter.flush_distributions" ) @@ -80,9 +81,8 @@ def setUp(self): self.mock_set_dsm_context = patcher.start() self.addCleanup(patcher.stop) + @patch("datadog_lambda.config.Config.trace_enabled", False) def test_datadog_lambda_wrapper(self): - wrapper.dd_tracing_enabled = False - @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): lambda_metric("test.metric", 100) @@ -92,7 +92,6 @@ def lambda_handler(event, context): lambda_context = get_mock_context() lambda_handler(lambda_event, lambda_context) - wrapper.dd_tracing_enabled = True self.mock_threadstats_flush_distributions.assert_has_calls( [ call( @@ -189,9 +188,9 @@ def lambda_handler(event, context): metric_module.lambda_stats.stop() metric_module.lambda_stats = ThreadStatsWriter(False) + @patch("datadog_lambda.config.Config.trace_enabled", False) def test_datadog_lambda_wrapper_inject_correlation_ids(self): os.environ["DD_LOGS_INJECTION"] = "True" - wrapper.dd_tracing_enabled = False @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -199,7 +198,6 @@ def lambda_handler(event, context): lambda_event = {} lambda_handler(lambda_event, get_mock_context()) - wrapper.dd_tracing_enabled = True self.mock_set_correlation_ids.assert_called() self.mock_inject_correlation_ids.assert_called() @@ -457,11 +455,8 @@ def lambda_handler(event, context): ] ) + @patch("datadog_lambda.config.Config.enhanced_metrics_enabled", False) def test_no_enhanced_metrics_without_env_var(self): - patcher = patch("datadog_lambda.metric.enhanced_metrics_enabled", False) - patcher.start() - self.addCleanup(patcher.stop) - @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): raise RuntimeError() @@ -515,6 +510,7 @@ def lambda_handler(event, context): self.assertEqual(os.environ.get("DD_REQUESTS_SERVICE_NAME"), "myAwesomeService") del os.environ["DD_SERVICE"] + @patch("datadog_lambda.config.Config.make_inferred_span", False) def test_encode_authorizer_span(self): @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -541,7 +537,6 @@ def lambda_handler(event, context): 
trace_ctx.sampling_priority = 1 test_span.finish() lambda_handler.inferred_span = test_span - lambda_handler.make_inferred_span = False result = lambda_handler(lambda_event, lambda_context) raw_inject_data = result["context"]["_datadog"] self.assertIsInstance(raw_inject_data, str) @@ -569,7 +564,7 @@ def return_type_test(event, context): def test_set_dsm_context_called_when_DSM_and_tracing_enabled(self): os.environ["DD_DATA_STREAMS_ENABLED"] = "true" - wrapper.dd_tracing_enabled = True + os.environ["DD_TRACE_ENABLED"] = "true" @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -583,7 +578,7 @@ def lambda_handler(event, context): def test_set_dsm_context_not_called_when_only_DSM_enabled(self): os.environ["DD_DATA_STREAMS_ENABLED"] = "true" - wrapper.dd_tracing_enabled = False + os.environ["DD_TRACE_ENABLED"] = "false" @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -597,7 +592,7 @@ def lambda_handler(event, context): def test_set_dsm_context_not_called_when_only_tracing_enabled(self): os.environ["DD_DATA_STREAMS_ENABLED"] = "false" - wrapper.dd_tracing_enabled = True + os.environ["DD_TRACE_ENABLED"] = "true" @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -611,7 +606,7 @@ def lambda_handler(event, context): def test_set_dsm_context_not_called_when_tracing_and_DSM_disabled(self): os.environ["DD_DATA_STREAMS_ENABLED"] = "false" - wrapper.dd_tracing_enabled = False + os.environ["DD_TRACE_ENABLED"] = "false" @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): @@ -624,18 +619,6 @@ def lambda_handler(event, context): del os.environ["DD_DATA_STREAMS_ENABLED"] -class TestLambdaDecoratorSettings(unittest.TestCase): - def test_some_envs_should_depend_on_dd_tracing_enabled(self): - wrapper.dd_tracing_enabled = False - os.environ[wrapper.DD_TRACE_MANAGED_SERVICES] = "true" - os.environ[wrapper.DD_ENCODE_AUTHORIZER_CONTEXT] = "true" - os.environ[wrapper.DD_DECODE_AUTHORIZER_CONTEXT] = "true" - decorator = wrapper._LambdaDecorator(func=None) - self.assertFalse(decorator.make_inferred_span) - self.assertFalse(decorator.encode_authorizer_context) - self.assertFalse(decorator.decode_authorizer_context) - - class TestLambdaWrapperWithTraceContext(unittest.TestCase): xray_root = "1-5e272390-8c398be037738dc042009320" xray_parent = "94ae789b969f1cc5" @@ -706,14 +689,28 @@ def handler(event, context): class TestLambdaWrapperFlushExtension(unittest.TestCase): - def setUp(self): - self.orig_environ = os.environ + @patch("datadog_lambda.config.Config.local_test", True) + @patch("datadog_lambda.wrapper.should_use_extension", True) + def test_local_test_true_flushing(self): + flushes = [] + lambda_event = {} + lambda_context = get_mock_context() + + def flush(): + flushes.append(1) - def tearDown(self): - os.environ = self.orig_environ + @patch("datadog_lambda.wrapper.flush_extension", flush) + @wrapper.datadog_lambda_wrapper + def lambda_handler(event, context): + pass + lambda_handler(lambda_event, lambda_context) + + self.assertEqual(len(flushes), 1) + + @patch("datadog_lambda.config.Config.local_test", False) @patch("datadog_lambda.wrapper.should_use_extension", True) - def test_local_test_envvar_flushing(self): + def test_local_test_false_flushing(self): flushes = [] lambda_event = {} lambda_context = get_mock_context() @@ -721,24 +718,11 @@ def test_local_test_envvar_flushing(self): def flush(): flushes.append(1) - for environ, flush_called in ( - ({"DD_LOCAL_TEST": "True"}, True), - ({"DD_LOCAL_TEST": "true"}, True), - 
({"DD_LOCAL_TEST": "1"}, True), - ({"DD_LOCAL_TEST": "False"}, False), - ({"DD_LOCAL_TEST": "false"}, False), - ({"DD_LOCAL_TEST": "0"}, False), - ({"DD_LOCAL_TEST": ""}, False), - ({}, False), - ): - os.environ = environ - flushes.clear() - - @patch("datadog_lambda.wrapper.flush_extension", flush) - @wrapper.datadog_lambda_wrapper - def lambda_handler(event, context): - pass + @patch("datadog_lambda.wrapper.flush_extension", flush) + @wrapper.datadog_lambda_wrapper + def lambda_handler(event, context): + pass - lambda_handler(lambda_event, lambda_context) + lambda_handler(lambda_event, lambda_context) - self.assertEqual(flush_called, len(flushes) == 1) + self.assertEqual(len(flushes), 0) diff --git a/tests/utils.py b/tests/utils.py index 0f246e68..2d56ca0c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -22,6 +22,7 @@ def get_mock_context( lambda_context.invoked_function_arn = invoked_function_arn lambda_context.function_version = function_version lambda_context.function_name = function_name + lambda_context.get_remaining_time_in_millis = lambda: 100 lambda_context.client_context = ClientContext(custom) return lambda_context From e230d94554991136bc185bef2c46261f50073abc Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Wed, 11 Jun 2025 14:54:46 -0400 Subject: [PATCH 06/10] handle a case where the record is some customized item (#616) --- datadog_lambda/trigger.py | 2 +- tests/test_trigger.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/datadog_lambda/trigger.py b/datadog_lambda/trigger.py index 14cb06ac..bbd0d027 100644 --- a/datadog_lambda/trigger.py +++ b/datadog_lambda/trigger.py @@ -153,7 +153,7 @@ def parse_event_source(event: dict) -> _EventSource: event_source = _EventSource(EventTypes.STEPFUNCTIONS) event_record = get_first_record(event) - if event_record: + if event_record and isinstance(event_record, dict): aws_event_source = event_record.get("eventSource") or event_record.get( "EventSource" ) diff --git a/tests/test_trigger.py b/tests/test_trigger.py index c12e8f5c..15103937 100644 --- a/tests/test_trigger.py +++ b/tests/test_trigger.py @@ -280,6 +280,13 @@ def test_detect_lambda_function_url_domain_with_invalid_input(self): # Test with string that would normally cause an exception when split self.assertFalse(detect_lambda_function_url_domain("")) + def test_event_source_with_non_dict_event_record(self): + # Test with event_record that's not a dictionary + event = {"Records": "not_a_dict"} + event_source = parse_event_source(event) + # Should handle the first non-dict record gracefully and return unknown + self.assertEqual(event_source.to_string(), "unknown") + class GetTriggerTags(unittest.TestCase): def test_extract_trigger_tags_api_gateway(self): From 8f2a45af6d90845efaf69246349f572e0de0b445 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Jun 2025 16:01:59 -0400 Subject: [PATCH 07/10] chore(deps): bump protobuf from 5.29.3 to 5.29.5 (#619) Bumps [protobuf](https://github.com/protocolbuffers/protobuf) from 5.29.3 to 5.29.5. - [Release notes](https://github.com/protocolbuffers/protobuf/releases) - [Changelog](https://github.com/protocolbuffers/protobuf/blob/main/protobuf_release.bzl) - [Commits](https://github.com/protocolbuffers/protobuf/compare/v5.29.3...v5.29.5) --- updated-dependencies: - dependency-name: protobuf dependency-version: 5.29.5 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 145 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 83 insertions(+), 62 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3953f953..434f887c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,23 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. - -[[package]] -name = "boto3" -version = "1.36.8" -description = "The AWS SDK for Python" -optional = true -python-versions = ">=3.8" -files = [ - {file = "boto3-1.36.8-py3-none-any.whl", hash = "sha256:7f61c9d0ea64f484a17c1e3115fdf90fd7b17ab6771e07cb4549f42b9fd28fb9"}, - {file = "boto3-1.36.8.tar.gz", hash = "sha256:ac47215d320b0c2534340db58d6d5284cb1860b7bff172b4dd6eee2dee1d5779"}, -] - -[package.dependencies] -botocore = ">=1.36.8,<1.37.0" -jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.11.0,<0.12.0" - -[package.extras] -crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "botocore" @@ -25,6 +6,8 @@ version = "1.36.8" description = "Low-level, data-driven core of boto 3." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "botocore-1.36.8-py3-none-any.whl", hash = "sha256:59d3fdfbae6d916b046e973bebcbeb70a102f9e570ca86d5ba512f1854b78fc2"}, {file = "botocore-1.36.8.tar.gz", hash = "sha256:81c88e5566cf018e1411a68304dc1fb9e4156ca2b50a3a0f0befc274299e67fa"}, @@ -34,8 +17,8 @@ files = [ jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = [ - {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, ] [package.extras] @@ -47,6 +30,7 @@ version = "0.16.1" description = "Python module to generate and modify bytecode" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "bytecode-0.16.1-py3-none-any.whl", hash = "sha256:1d4b61ed6bade4bff44127c8283bef8131a664ce4dbe09d64a88caf329939f35"}, {file = "bytecode-0.16.1.tar.gz", hash = "sha256:8fbbb637c880f339e564858bc6c7984ede67ae97bc71343379a535a9a4baf398"}, @@ -61,6 +45,7 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -72,6 +57,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -173,6 +159,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "extra == \"dev\" and sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -184,6 +172,7 @@ version = "0.51.0" description = "The Datadog Python library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] files = [ {file = "datadog-0.51.0-py2.py3-none-any.whl", hash = "sha256:a9764f091c96af4e0996d4400b168fc5fba380f911d6d672c9dcd4773e29ea3f"}, {file = "datadog-0.51.0.tar.gz", hash = "sha256:3279534f831ae0b4ae2d8ce42ef038b4ab38e667d7ed6ff7437982d7a0cf5250"}, @@ -198,6 +187,7 @@ version = "2.20.0" description = "Datadog APM client library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "ddtrace-2.20.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e1dee099099b95acf7d0e552179925cfec58a52315cc914d153506367b195bc4"}, {file = "ddtrace-2.20.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:9d209bef14caafcd53be8c14e04741d86c08f76496c1bf755e2eaa38605ce3e0"}, @@ -271,10 +261,10 @@ files = [ [package.dependencies] bytecode = [ + {version = ">=0.13.0", markers = "python_version < \"3.11\""}, {version = ">=0.16.0", markers = "python_version >= \"3.13.0\""}, {version = ">=0.15.0", markers = "python_version ~= \"3.12.0\""}, {version = ">=0.14.0", markers = "python_version ~= \"3.11.0\""}, - {version = ">=0.13.0", markers = "python_version < \"3.11.0\""}, ] envier = ">=0.5,<1.0" legacy-cgi = {version = ">=2.0.0", markers = "python_version >= \"3.13.0\""} @@ -294,6 +284,7 @@ version = "1.2.18" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["main"] files = [ {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, @@ -303,7 +294,7 @@ files = [ wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] [[package]] name = "envier" @@ -311,6 +302,7 @@ version = "0.6.1" description = "Python application configuration via the environment" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "envier-0.6.1-py3-none-any.whl", hash = "sha256:73609040a76be48bbcb97074d9969666484aa0de706183a6e9ef773156a8a6a9"}, {file = "envier-0.6.1.tar.gz", hash = "sha256:3309a01bb3d8850c9e7a31a5166d5a836846db2faecb79b9cb32654dd50ca9f9"}, @@ -325,6 +317,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"dev\" and python_version <= \"3.10\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -339,6 +333,8 @@ version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" optional = true python-versions = ">=3.6.1" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, @@ -355,6 +351,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -369,6 +366,7 @@ version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, @@ -378,12 +376,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", 
"pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -392,6 +390,8 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -403,6 +403,8 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -414,6 +416,8 @@ version = "2.6.2" description = "Fork of the standard library cgi and cgitb modules, being deprecated in PEP-594" optional = false python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.13.0\"" files = [ {file = "legacy_cgi-2.6.2-py3-none-any.whl", hash = "sha256:a7b83afb1baf6ebeb56522537c5943ef9813cf933f6715e88a803f7edbce0bff"}, {file = "legacy_cgi-2.6.2.tar.gz", hash = "sha256:9952471ceb304043b104c22d00b4f333cac27a6abe446d8a528fc437cf13c85f"}, @@ -425,6 +429,8 @@ version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -436,6 +442,7 @@ version = "1.29.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "opentelemetry_api-1.29.0-py3-none-any.whl", hash = "sha256:5fcd94c4141cc49c736271f3e1efb777bebe9cc535759c54c936cca4f1b312b8"}, {file = "opentelemetry_api-1.29.0.tar.gz", hash = "sha256:d04a6cf78aad09614f52964ecb38021e248f5714dc32c2e0d8fd99517b4d69cf"}, @@ -451,6 +458,8 @@ version = "24.2" description = "Core utilities for Python packages" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -462,6 +471,8 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -473,22 +484,23 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "5.29.3" +version = "5.29.5" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"}, - {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = 
"sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"}, - {file = "protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e"}, - {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84"}, - {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f"}, - {file = "protobuf-5.29.3-cp38-cp38-win32.whl", hash = "sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252"}, - {file = "protobuf-5.29.3-cp38-cp38-win_amd64.whl", hash = "sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107"}, - {file = "protobuf-5.29.3-cp39-cp39-win32.whl", hash = "sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7"}, - {file = "protobuf-5.29.3-cp39-cp39-win_amd64.whl", hash = "sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da"}, - {file = "protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f"}, - {file = "protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620"}, + {file = "protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079"}, + {file = "protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc"}, + {file = "protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671"}, + {file = "protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015"}, + {file = "protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61"}, + {file = "protobuf-5.29.5-cp38-cp38-win32.whl", hash = "sha256:ef91363ad4faba7b25d844ef1ada59ff1604184c0bcd8b39b8a6bef15e1af238"}, + {file = "protobuf-5.29.5-cp38-cp38-win_amd64.whl", hash = "sha256:7318608d56b6402d2ea7704ff1e1e4597bee46d760e7e4dd42a3d45e24b87f2e"}, + {file = "protobuf-5.29.5-cp39-cp39-win32.whl", hash = "sha256:6f642dc9a61782fa72b90878af134c5afe1917c89a568cd3476d758d3c3a0736"}, + {file = "protobuf-5.29.5-cp39-cp39-win_amd64.whl", hash = "sha256:470f3af547ef17847a28e1f47200a1cbf0ba3ff57b7de50d22776607cd2ea353"}, + {file = "protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5"}, + {file = "protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84"}, ] [[package]] @@ -497,6 +509,8 @@ version = "9.0.0" description = "Get CPU info with pure Python" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, @@ -508,6 +522,8 @@ version = "2.9.1" description = "Python style guide checker" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = 
"sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, @@ -519,6 +535,8 @@ version = "2.5.0" description = "passive checker of Python programs" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, @@ -530,6 +548,8 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -552,6 +572,8 @@ version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"}, {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"}, @@ -572,6 +594,8 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -586,6 +610,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -601,29 +626,14 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "s3transfer" -version = "0.11.2" -description = "An Amazon S3 Transfer Manager" -optional = true -python-versions = ">=3.8" -files = [ - {file = "s3transfer-0.11.2-py3-none-any.whl", hash = "sha256:be6ecb39fadd986ef1701097771f87e4d2f821f27f6071c872143884d2950fbc"}, - {file = "s3transfer-0.11.2.tar.gz", hash = "sha256:3b39185cb72f5acc77db1a58b6e25b977f28d20496b6e58d6813d75f464d632f"}, -] - -[package.dependencies] -botocore = ">=1.36.0,<2.0a.0" - -[package.extras] -crt = ["botocore[crt] (>=1.36.0,<2.0a.0)"] - [[package]] name = "six" version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "extra == \"dev\"" files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -635,6 +645,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"dev\" and python_version <= \"3.10\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -676,6 +688,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -687,6 +700,7 @@ version = "5.10.0" description = "Ultra fast JSON encoder and decoder for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, @@ -774,14 +788,16 @@ version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -790,13 +806,15 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -807,6 +825,7 @@ version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -895,6 +914,7 @@ version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, @@ -906,23 +926,24 @@ version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [extras] -dev = ["boto3", "flake8", "pytest", "pytest-benchmark", "requests"] +dev = ["botocore", "flake8", "pytest", "pytest-benchmark", "requests"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.8.0,<4" -content-hash = "9128af5437fd535ec458c64280d8390574c632e704cace5ea783de3c5d453c8c" +content-hash = "f6a2f7355200da107aa5b027d6fe4fb6bdb5a898ce8298a56e6ac39fe8d8e34d" From f93bc0a88dff1cddb184938efddbb5289add21cc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Jun 2025 16:02:16 -0400 Subject: [PATCH 08/10] chore(deps): bump brace-expansion in /tests/integration (#617) Bumps [brace-expansion](https://github.com/juliangruber/brace-expansion) from 1.1.11 to 1.1.12. - [Release notes](https://github.com/juliangruber/brace-expansion/releases) - [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12) --- updated-dependencies: - dependency-name: brace-expansion dependency-version: 1.1.12 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- tests/integration/yarn.lock | 339 ++---------------------------------- 1 file changed, 16 insertions(+), 323 deletions(-) diff --git a/tests/integration/yarn.lock b/tests/integration/yarn.lock index f96feb52..37cb357d 100644 --- a/tests/integration/yarn.lock +++ b/tests/integration/yarn.lock @@ -2,28 +2,6 @@ # yarn lockfile v1 -"@iarna/toml@^2.2.5": - version "2.2.5" - resolved "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz" - integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== - -"@isaacs/cliui@^8.0.2": - version "8.0.2" - resolved "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz" - integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== - dependencies: - string-width "^5.1.2" - string-width-cjs "npm:string-width@^4.2.0" - strip-ansi "^7.0.1" - strip-ansi-cjs "npm:strip-ansi@^6.0.1" - wrap-ansi "^8.1.0" - wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" - -"@pkgjs/parseargs@^0.11.0": - version "0.11.0" - resolved "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz" - integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== - "2-thenable@^1.0.0": version "1.0.0" resolved "https://registry.npmjs.org/2-thenable/-/2-thenable-1.0.0.tgz" @@ -32,16 +10,16 @@ d "1" es5-ext "^0.10.47" +"@iarna/toml@^2.2.5": + version "2.2.5" + resolved "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz" + integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== + ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-regex@^6.0.1: - version "6.1.0" - resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz" - integrity sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA== - ansi-styles@^4.0.0: version "4.3.0" resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" @@ -49,37 +27,11 @@ ansi-styles@^4.0.0: dependencies: color-convert "^2.0.1" -ansi-styles@^6.1.0: - version "6.2.1" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz" - integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== - appdirectory@^0.1.0: version "0.1.0" resolved "https://registry.npmjs.org/appdirectory/-/appdirectory-0.1.0.tgz" integrity sha512-DJ5DV8vZXBbusyiyPlH28xppwS8eAMRuuyMo88xeEcf4bV64lbLtbxRxqixZuJBXsZzLtXFmA13GwVjJc7vdQw== -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" - integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== - -axios-proxy-builder@^0.1.2: - version "0.1.2" - resolved "https://registry.npmjs.org/axios-proxy-builder/-/axios-proxy-builder-0.1.2.tgz" - integrity sha512-6uBVsBZzkB3tCC8iyx59mCjQckhB8+GQrI9Cop8eC7ybIsvs/KtnNgEBfRMSEa7GqK2VBGUzgjNYMdPIfotyPA== - dependencies: - tunnel "^0.0.6" - -axios@^1.7.4: - version "1.7.9" - resolved "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz" - integrity sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw== - dependencies: - follow-redirects "^1.15.6" - form-data "^4.0.0" - 
proxy-from-env "^1.1.0" - balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" @@ -91,20 +43,13 @@ bluebird@^3.7.2: integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + version "1.1.12" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.12.tgz#ab9b454466e5a8cc3a187beaad580412a9c5b843" + integrity sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" -brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - camelcase@^5.0.0: version "5.3.1" resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" @@ -142,13 +87,6 @@ color-name@~1.1.4: resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -combined-stream@^1.0.8: - version "1.0.8" - resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - concat-map@0.0.1: version "0.0.1" resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" @@ -170,16 +108,7 @@ cross-spawn@^6.0.5: shebang-command "^1.2.0" which "^1.2.9" -cross-spawn@^7.0.0: - version "7.0.6" - resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz" - integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -d@^1.0.1, d@^1.0.2, d@1: +d@1, d@^1.0.1, d@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/d/-/d-1.0.2.tgz" integrity sha512-MOqHvMWF9/9MX6nza0KgvFH4HpMU0EF5uUDXqX/BtxtU8NfB0QzRtJ8Oe/6SuS4kbhyzVJwjd97EA4PKrzJ8bw== @@ -192,11 +121,6 @@ decamelize@^1.2.0: resolved "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz" integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - duration@^0.2.2: version "0.2.2" resolved "https://registry.npmjs.org/duration/-/duration-0.2.2.tgz" @@ -205,21 +129,11 @@ duration@^0.2.2: d "1" es5-ext "~0.10.46" -eastasianwidth@^0.2.0: - version "0.2.0" - resolved "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz" - integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== - emoji-regex@^8.0.0: version "8.0.0" resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== -emoji-regex@^9.2.2: - version "9.2.2" - resolved 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz" - integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== - es5-ext@^0.10.35, es5-ext@^0.10.47, es5-ext@^0.10.49, es5-ext@^0.10.53, es5-ext@^0.10.62, es5-ext@^0.10.64, es5-ext@~0.10.14, es5-ext@~0.10.46: version "0.10.64" resolved "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.64.tgz" @@ -280,28 +194,6 @@ find-up@^4.1.0: locate-path "^5.0.0" path-exists "^4.0.0" -follow-redirects@^1.15.6: - version "1.15.9" - resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz" - integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== - -foreground-child@^3.1.0: - version "3.3.0" - resolved "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz" - integrity sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg== - dependencies: - cross-spawn "^7.0.0" - signal-exit "^4.0.1" - -form-data@^4.0.0: - version "4.0.1" - resolved "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz" - integrity sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - fs-extra@^10.1.0: version "10.1.0" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" @@ -329,31 +221,7 @@ glob-all@^3.3.1: glob "^7.2.3" yargs "^15.3.1" -glob@^10.3.7: - version "10.4.5" - resolved "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz" - integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== - dependencies: - foreground-child "^3.1.0" - jackspeak "^3.1.2" - minimatch "^9.0.4" - minipass "^7.1.2" - package-json-from-dist "^1.0.0" - path-scurry "^1.11.1" - -glob@^7.1.3: - version "7.2.3" - resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@^7.2.3: +glob@^7.1.3, glob@^7.2.3: version "7.2.3" resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -383,7 +251,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@^2.0.3, inherits@~2.0.3, inherits@2: +inherits@2, inherits@^2.0.3, inherits@~2.0.3: version "2.0.4" resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -437,15 +305,6 @@ isobject@^3.0.1: resolved "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== -jackspeak@^3.1.2: - version "3.4.3" - resolved "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz" - integrity sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw== - dependencies: - "@isaacs/cliui" "^8.0.2" - optionalDependencies: - "@pkgjs/parseargs" "^0.11.0" - jsonfile@^6.0.1: version "6.1.0" resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz" @@ -507,23 +366,6 @@ log@^6.0.0: type "^2.7.3" uni-global "^1.0.0" -lru-cache@^10.2.0: - version "10.4.3" - resolved 
"https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz" - integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== - -mime-db@1.52.0: - version "1.52.0" - resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12: - version "2.1.35" - resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - minimatch@^3.1.1: version "3.1.2" resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" @@ -531,18 +373,6 @@ minimatch@^3.1.1: dependencies: brace-expansion "^1.1.7" -minimatch@^9.0.4: - version "9.0.5" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz" - integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== - dependencies: - brace-expansion "^2.0.1" - -"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: - version "7.1.2" - resolved "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz" - integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== - next-tick@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz" @@ -579,11 +409,6 @@ p-try@^2.0.0: resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== -package-json-from-dist@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz" - integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw== - pako@~1.0.2: version "1.0.11" resolved "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz" @@ -604,29 +429,11 @@ path-key@^2.0.1: resolved "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz" integrity sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw== -path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" - integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-scurry@^1.11.1: - version "1.11.1" - resolved "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz" - integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== - dependencies: - lru-cache "^10.2.0" - minipass "^5.0.0 || ^6.0.2 || ^7.0.0" - process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== -proxy-from-env@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz" - integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== - readable-stream@^3.0.0: version "3.6.2" resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz" @@ -666,23 +473,11 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" -rimraf@^5.0.5: - version "5.0.10" - resolved "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz" - integrity 
sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ== - dependencies: - glob "^10.3.7" - safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -sax@>=0.6.0: - version "1.4.1" - resolved "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz" - integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== - semver@^5.5.0: version "5.7.2" resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" @@ -720,16 +515,6 @@ serverless-python-requirements@^6.1.1: sha256-file "1.0.0" shell-quote "^1.8.1" -serverless@>=2.32: - version "4.5.0" - resolved "https://registry.npmjs.org/serverless/-/serverless-4.5.0.tgz" - integrity sha512-msbs5I/fuPiW0ZfBuFA7lpKazCTFtF0AhOCA0HsrJBGVaOrNbwzlC/krZKXn1YgDR2+cw/izKRNupZJXtpyxJQ== - dependencies: - axios "^1.7.4" - axios-proxy-builder "^0.1.2" - rimraf "^5.0.5" - xml2js "0.6.2" - set-blocking@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz" @@ -760,33 +545,16 @@ shebang-command@^1.2.0: dependencies: shebang-regex "^1.0.0" -shebang-command@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz" integrity sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ== -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - shell-quote@^1.8.1: version "1.8.2" resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.2.tgz" integrity sha512-AzqKpGKjrj7EM6rKVQEPpB288oCfnrEIuyoT9cyF4nmGa7V8Zk6f7RRqYisX8X9m+Q7bd632aZW4ky7EhbQztA== -signal-exit@^4.0.1: - version "4.1.0" - resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz" - integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== - split2@^3.1.1: version "3.2.2" resolved "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz" @@ -810,22 +578,6 @@ stream-promise@^3.2.0: es5-ext "^0.10.49" is-stream "^1.1.0" -string_decoder@^1.1.1, string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -"string-width-cjs@npm:string-width@^4.2.0": - version "4.2.3" - resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - string-width@^4.1.0, string-width@^4.2.0: version "4.2.3" resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" @@ -835,21 +587,12 @@ string-width@^4.1.0, string-width@^4.2.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string-width@^5.0.1, 
string-width@^5.1.2: - version "5.1.2" - resolved "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz" - integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== - dependencies: - eastasianwidth "^0.2.0" - emoji-regex "^9.2.2" - strip-ansi "^7.0.1" - -"strip-ansi-cjs@npm:strip-ansi@^6.0.1": - version "6.0.1" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== +string_decoder@^1.1.1, string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: - ansi-regex "^5.0.1" + safe-buffer "~5.1.0" strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" @@ -858,18 +601,6 @@ strip-ansi@^6.0.0, strip-ansi@^6.0.1: dependencies: ansi-regex "^5.0.1" -strip-ansi@^7.0.1: - version "7.1.0" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz" - integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== - dependencies: - ansi-regex "^6.0.1" - -tunnel@^0.0.6: - version "0.0.6" - resolved "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz" - integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg== - type@^2.5.0, type@^2.7.2, type@^2.7.3: version "2.7.3" resolved "https://registry.npmjs.org/type/-/type-2.7.3.tgz" @@ -904,22 +635,6 @@ which@^1.2.9: dependencies: isexe "^2.0.0" -which@^2.0.1: - version "2.0.2" - resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz" - integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": - version "7.0.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - wrap-ansi@^6.2.0: version "6.2.0" resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz" @@ -929,33 +644,11 @@ wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" -wrap-ansi@^8.1.0: - version "8.1.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz" - integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== - dependencies: - ansi-styles "^6.1.0" - string-width "^5.0.1" - strip-ansi "^7.0.1" - wrappy@1: version "1.0.2" resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== -xml2js@0.6.2: - version "0.6.2" - resolved "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz" - integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA== - dependencies: - sax ">=0.6.0" - xmlbuilder "~11.0.0" - -xmlbuilder@~11.0.0: - version "11.0.1" - resolved "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz" - integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== - y18n@^4.0.0: version "4.0.3" resolved "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz" From 267a5429a99e85746537a1585608497a9d355fef Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Jun 2025 23:51:26 -0400 Subject: [PATCH 09/10] chore(deps): bump requests from 2.32.3 to 2.32.4 (#615) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 434f887c..4c37bf87 100644 --- a/poetry.lock +++ b/poetry.lock @@ -606,19 +606,19 @@ six = ">=1.5" [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" From b08b7635675826278170dbb570e2d9307ae02cf9 Mon Sep 17 00:00:00 2001 From: Joey Zhao <5253430+joeyzhao2018@users.noreply.github.com> Date: Mon, 23 Jun 2025 12:46:01 -0400 Subject: [PATCH 10/10] release v6.111.0 (#625) --- datadog_lambda/version.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_lambda/version.py b/datadog_lambda/version.py index 9534f0c7..2bb1df7f 100644 --- a/datadog_lambda/version.py +++ b/datadog_lambda/version.py @@ -1 +1 @@ -__version__ = "6.110.0" +__version__ = "6.111.0" diff --git a/pyproject.toml b/pyproject.toml index ba5bcb17..1d5feb7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "6.110.0" +version = "6.111.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. "] license = "Apache-2.0"
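
The one functional change in this series is the guard patch 06 (#616) adds to `parse_event_source`: `get_first_record` can hand back a non-dict value when a caller passes a customized `Records` payload, and calling `.get()` on that value raised AttributeError. Below is a minimal, self-contained sketch of the failure mode and the fix. The `classify` name and the body of `get_first_record` are illustrative simplifications, not the library's actual implementation (the real helper lives in datadog_lambda/trigger.py).

def get_first_record(event):
    # Simplified stand-in for the library helper: return the first
    # entry of event["Records"] if one exists.
    records = event.get("Records")
    if records and len(records) > 0:
        return records[0]


def classify(event):
    # Sketch of the #616 guard: with event = {"Records": "not_a_dict"},
    # get_first_record returns the string "n" (the first character of
    # the string), and calling .get() on it would raise AttributeError.
    # The isinstance check skips such records, so the event source
    # falls through to "unknown", matching the new unit test.
    event_record = get_first_record(event)
    if event_record and isinstance(event_record, dict):
        return event_record.get("eventSource") or event_record.get("EventSource")
    return None


assert classify({"Records": "not_a_dict"}) is None
assert classify({"Records": [{"eventSource": "aws:sqs"}]}) == "aws:sqs"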