diff --git a/CHANGELOG.md b/CHANGELOG.md index 5947f4b8847..d617264b275 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Unreleased +- Update OTLP gRPC/HTTP exporters: the export timeout is now inclusive of all retries and backoffs. + A +/-20% jitter was added to all backoffs. A pointless 32 second sleep that occurred after all retries + had completed/failed was removed. + ([#4564](https://github.com/open-telemetry/opentelemetry-python/pull/4564)). - Update ConsoleLogExporter.export to handle LogRecord's containing bytes type in the body ([#4614](https://github.com/open-telemetry/opentelemetry-python/pull/4614/)). - opentelemetry-sdk: Fix invalid `type: ignore` that causes mypy to ignore the whole file @@ -18,9 +22,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - typecheck: add sdk/resources and drop mypy ([#4578](https://github.com/open-telemetry/opentelemetry-python/pull/4578)) -- Refactor `BatchLogRecordProcessor` to simplify code and make the control flow more - clear ([#4562](https://github.com/open-telemetry/opentelemetry-python/pull/4562/) - and [#4535](https://github.com/open-telemetry/opentelemetry-python/pull/4535)). - Use PEP702 for marking deprecations ([#4522](https://github.com/open-telemetry/opentelemetry-python/pull/4522)) - Refactor `BatchLogRecordProcessor` and `BatchSpanProcessor` to simplify code diff --git a/exporter/opentelemetry-exporter-otlp-proto-common/src/opentelemetry/exporter/otlp/proto/common/_internal/__init__.py b/exporter/opentelemetry-exporter-otlp-proto-common/src/opentelemetry/exporter/otlp/proto/common/_internal/__init__.py index 2f49502cf1d..200644368df 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-common/src/opentelemetry/exporter/otlp/proto/common/_internal/__init__.py +++ b/exporter/opentelemetry-exporter-otlp-proto-common/src/opentelemetry/exporter/otlp/proto/common/_internal/__init__.py @@ -17,12 +17,10 @@ import logging from collections.abc import Sequence -from itertools import count from typing import ( Any, Callable, Dict, - Iterator, List, Mapping, Optional, @@ -177,38 +175,3 @@ def _get_resource_data( ) ) return resource_data - - -def _create_exp_backoff_generator(max_value: int = 0) -> Iterator[int]: - """ - Generates an infinite sequence of exponential backoff values. The sequence starts - from 1 (2^0) and doubles each time (2^1, 2^2, 2^3, ...). If a max_value is specified - and non-zero, the generated values will not exceed this maximum, capping at max_value - instead of growing indefinitely. - - Parameters: - - max_value (int, optional): The maximum value to yield. If 0 or not provided, the - sequence grows without bound. - - Returns: - Iterator[int]: An iterator that yields the exponential backoff values, either uncapped or - capped at max_value. - - Example: - ``` - gen = _create_exp_backoff_generator(max_value=10) - for _ in range(5): - print(next(gen)) - ``` - This will print: - 1 - 2 - 4 - 8 - 10 - - Note: this functionality used to be handled by the 'backoff' package. 
- """ - for i in count(0): - out = 2**i - yield min(out, max_value) if max_value else out diff --git a/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_backoff.py b/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_backoff.py deleted file mode 100644 index 789a184ad04..00000000000 --- a/exporter/opentelemetry-exporter-otlp-proto-common/tests/test_backoff.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright The OpenTelemetry Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from unittest import TestCase - -from opentelemetry.exporter.otlp.proto.common._internal import ( - _create_exp_backoff_generator, -) - - -class TestBackoffGenerator(TestCase): - def test_exp_backoff_generator(self): - generator = _create_exp_backoff_generator() - self.assertEqual(next(generator), 1) - self.assertEqual(next(generator), 2) - self.assertEqual(next(generator), 4) - self.assertEqual(next(generator), 8) - self.assertEqual(next(generator), 16) - - def test_exp_backoff_generator_with_max(self): - generator = _create_exp_backoff_generator(max_value=4) - self.assertEqual(next(generator), 1) - self.assertEqual(next(generator), 2) - self.assertEqual(next(generator), 4) - self.assertEqual(next(generator), 4) - self.assertEqual(next(generator), 4) - - def test_exp_backoff_generator_with_odd_max(self): - # use a max_value that's not in the set - generator = _create_exp_backoff_generator(max_value=11) - self.assertEqual(next(generator), 1) - self.assertEqual(next(generator), 2) - self.assertEqual(next(generator), 4) - self.assertEqual(next(generator), 8) - self.assertEqual(next(generator), 11) diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/_log_exporter/__init__.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/_log_exporter/__init__.py index 8f629899d77..e66f9dbcca7 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/_log_exporter/__init__.py +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/_log_exporter/__init__.py @@ -58,7 +58,7 @@ def __init__( headers: Optional[ Union[TypingSequence[Tuple[str, str]], Dict[str, str], str] ] = None, - timeout: Optional[int] = None, + timeout: Optional[float] = None, compression: Optional[Compression] = None, ): if insecure is None: @@ -79,7 +79,7 @@ def __init__( environ_timeout = environ.get(OTEL_EXPORTER_OTLP_LOGS_TIMEOUT) environ_timeout = ( - int(environ_timeout) if environ_timeout is not None else None + float(environ_timeout) if environ_timeout is not None else None ) compression = ( diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/exporter.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/exporter.py index 259f1ddb91b..cb4a234e7e7 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/exporter.py +++ 
b/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/exporter.py @@ -14,12 +14,13 @@ """OTLP Exporter""" +import random import threading from abc import ABC, abstractmethod from collections.abc import Sequence # noqa: F401 from logging import getLogger from os import environ -from time import sleep +from time import sleep, time from typing import ( # noqa: F401 Any, Callable, @@ -47,7 +48,6 @@ ssl_channel_credentials, ) from opentelemetry.exporter.otlp.proto.common._internal import ( - _create_exp_backoff_generator, _get_resource_data, ) from opentelemetry.exporter.otlp.proto.grpc import ( @@ -74,6 +74,18 @@ from opentelemetry.sdk.trace import ReadableSpan from opentelemetry.util.re import parse_env_headers +_RETRYABLE_ERROR_CODES = frozenset( + [ + StatusCode.CANCELLED, + StatusCode.DEADLINE_EXCEEDED, + StatusCode.RESOURCE_EXHAUSTED, + StatusCode.ABORTED, + StatusCode.OUT_OF_RANGE, + StatusCode.UNAVAILABLE, + StatusCode.DATA_LOSS, + ] +) +_MAX_RETRYS = 6 logger = getLogger(__name__) SDKDataT = TypeVar("SDKDataT") ResourceDataT = TypeVar("ResourceDataT") @@ -186,8 +198,6 @@ class OTLPExporterMixin( compression: gRPC compression method to use """ - _MAX_RETRY_TIMEOUT = 64 - def __init__( self, endpoint: Optional[str] = None, @@ -196,7 +206,7 @@ def __init__( headers: Optional[ Union[TypingSequence[Tuple[str, str]], Dict[str, str], str] ] = None, - timeout: Optional[int] = None, + timeout: Optional[float] = None, compression: Optional[Compression] = None, ): super().__init__() @@ -233,7 +243,7 @@ def __init__( else: self._headers = tuple(self._headers) + tuple(_OTLP_GRPC_HEADERS) - self._timeout = timeout or int( + self._timeout = timeout or float( environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, 10) ) self._collector_kwargs = None @@ -246,7 +256,8 @@ def __init__( if insecure: self._channel = insecure_channel( - self._endpoint, compression=compression + self._endpoint, + compression=compression, ) else: credentials = _get_credentials( @@ -256,7 +267,9 @@ def __init__( OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, ) self._channel = secure_channel( - self._endpoint, credentials, compression=compression + self._endpoint, + credentials, + compression=compression, ) self._client = self._stub(self._channel) @@ -270,10 +283,9 @@ def _translate_data( pass def _export( - self, data: Union[TypingSequence[ReadableSpan], MetricsData] + self, + data: Union[TypingSequence[ReadableSpan], MetricsData], ) -> ExportResultT: - # After the call to shutdown, subsequent calls to Export are - # not allowed and should return a Failure result. if self._shutdown: logger.warning("Exporter already shutdown, ignoring batch") return self._result.FAILURE @@ -281,65 +293,34 @@ def _export( # FIXME remove this check if the export type for traces # gets updated to a class that represents the proto # TracesData and use the code below instead. - # logger.warning( - # "Transient error %s encountered while exporting %s, retrying in %ss.", - # error.code(), - # data.__class__.__name__, - # delay, - # ) - # expo returns a generator that yields delay values which grow - # exponentially. Once delay is greater than max_value, the yielded - # value will remain constant. 
- for delay in _create_exp_backoff_generator( - max_value=self._MAX_RETRY_TIMEOUT - ): - if delay == self._MAX_RETRY_TIMEOUT or self._shutdown: - return self._result.FAILURE - - with self._export_lock: + with self._export_lock: + deadline_sec = time() + self._timeout + for retry_num in range(_MAX_RETRYS): try: self._client.Export( request=self._translate_data(data), metadata=self._headers, - timeout=self._timeout, + timeout=deadline_sec - time(), ) - return self._result.SUCCESS - except RpcError as error: - if error.code() in [ - StatusCode.CANCELLED, - StatusCode.DEADLINE_EXCEEDED, - StatusCode.RESOURCE_EXHAUSTED, - StatusCode.ABORTED, - StatusCode.OUT_OF_RANGE, - StatusCode.UNAVAILABLE, - StatusCode.DATA_LOSS, - ]: - retry_info_bin = dict(error.trailing_metadata()).get( - "google.rpc.retryinfo-bin" - ) - if retry_info_bin is not None: - retry_info = RetryInfo() - retry_info.ParseFromString(retry_info_bin) - delay = ( - retry_info.retry_delay.seconds - + retry_info.retry_delay.nanos / 1.0e9 - ) - - logger.warning( - ( - "Transient error %s encountered while exporting " - "%s to %s, retrying in %ss." - ), - error.code(), - self._exporting, - self._endpoint, - delay, + retry_info_bin = dict(error.trailing_metadata()).get( + "google.rpc.retryinfo-bin" + ) + # multiplying by a random number between .8 and 1.2 introduces a +/-20% jitter to each backoff. + backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2) + if retry_info_bin is not None: + retry_info = RetryInfo() + retry_info.ParseFromString(retry_info_bin) + backoff_seconds = ( + retry_info.retry_delay.seconds + + retry_info.retry_delay.nanos / 1.0e9 ) - sleep(delay) - continue - else: + if ( + error.code() not in _RETRYABLE_ERROR_CODES + or retry_num + 1 == _MAX_RETRYS + or backoff_seconds > (deadline_sec - time()) + ): logger.error( "Failed to export %s to %s, error code: %s", self._exporting, @@ -347,12 +328,16 @@ def _export( error.code(), exc_info=error.code() == StatusCode.UNKNOWN, ) - - if error.code() == StatusCode.OK: - return self._result.SUCCESS - - return self._result.FAILURE - + return self._result.FAILURE + logger.warning( + "Transient error %s encountered while exporting %s to %s, retrying in %.2fs.", + error.code(), + self._exporting, + self._endpoint, + backoff_seconds, + ) + sleep(backoff_seconds) + # Not possible to reach here but the linter is complaining.
return self._result.FAILURE def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None: diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/metric_exporter/__init__.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/metric_exporter/__init__.py index 8580dbb7386..dbb2a8e1dee 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/metric_exporter/__init__.py +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/metric_exporter/__init__.py @@ -99,7 +99,7 @@ def __init__( credentials: ChannelCredentials | None = None, headers: Union[TypingSequence[Tuple[str, str]], dict[str, str], str] | None = None, - timeout: int | None = None, + timeout: float | None = None, compression: Compression | None = None, preferred_temporality: dict[type, AggregationTemporality] | None = None, @@ -124,7 +124,7 @@ def __init__( environ_timeout = environ.get(OTEL_EXPORTER_OTLP_METRICS_TIMEOUT) environ_timeout = ( - int(environ_timeout) if environ_timeout is not None else None + float(environ_timeout) if environ_timeout is not None else None ) compression = ( @@ -172,7 +172,6 @@ def export( if split_export_result is MetricExportResult.FAILURE: export_result = MetricExportResult.FAILURE - return export_result def _split_metrics_data( diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/trace_exporter/__init__.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/trace_exporter/__init__.py index c78c1b81bb6..7aef65a2ca9 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/trace_exporter/__init__.py +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/trace_exporter/__init__.py @@ -91,7 +91,7 @@ def __init__( headers: Optional[ Union[TypingSequence[Tuple[str, str]], Dict[str, str], str] ] = None, - timeout: Optional[int] = None, + timeout: Optional[float] = None, compression: Optional[Compression] = None, ): if insecure is None: @@ -112,7 +112,7 @@ def __init__( environ_timeout = environ.get(OTEL_EXPORTER_OTLP_TRACES_TIMEOUT) environ_timeout = ( - int(environ_timeout) if environ_timeout is not None else None + float(environ_timeout) if environ_timeout is not None else None ) compression = ( diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements.txt b/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements.txt index e11dad64b40..5e6e2fcff12 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements.txt +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/test-requirements.txt @@ -1,5 +1,4 @@ asgiref==3.7.2 -googleapis-common-protos==1.63.2 grpcio==1.66.2 importlib-metadata==6.11.0 iniconfig==2.0.0 @@ -12,6 +11,7 @@ tomli==2.0.1 typing_extensions==4.10.0 wrapt==1.16.0 zipp==3.19.2 +googleapis-common-protos==1.63.2 -e opentelemetry-api -e tests/opentelemetry-test-utils -e exporter/opentelemetry-exporter-otlp-proto-common diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_exporter_mixin.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_exporter_mixin.py index 656d9a6cb79..ae4944456eb 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_exporter_mixin.py +++ 
b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_exporter_mixin.py @@ -14,8 +14,10 @@ import threading import time +import unittest from concurrent.futures import ThreadPoolExecutor -from logging import WARNING +from logging import WARNING, getLogger +from platform import system from typing import Any, Optional, Sequence from unittest import TestCase from unittest.mock import Mock, patch @@ -55,6 +57,8 @@ SpanExportResult, ) +logger = getLogger(__name__) + # The below tests use this test SpanExporter and Spans, but are testing the # underlying behavior in the mixin. A MetricExporter or LogExporter could @@ -88,33 +92,27 @@ class TraceServiceServicerWithExportParams(TraceServiceServicer): def __init__( self, export_result: StatusCode, + optional_retry_nanos: Optional[int] = None, optional_export_sleep: Optional[float] = None, - optional_export_retry_millis: Optional[float] = None, ): self.export_result = export_result self.optional_export_sleep = optional_export_sleep - self.optional_export_retry_millis = optional_export_retry_millis + self.optional_retry_nanos = optional_retry_nanos + self.num_requests = 0 # pylint: disable=invalid-name,unused-argument def Export(self, request, context): + self.num_requests += 1 if self.optional_export_sleep: time.sleep(self.optional_export_sleep) - if self.optional_export_retry_millis: - context.send_initial_metadata( - ( - ( - "google.rpc.retryinfo-bin", - RetryInfo().SerializeToString(), - ), - ) - ) + if self.export_result != StatusCode.OK and self.optional_retry_nanos: context.set_trailing_metadata( ( ( "google.rpc.retryinfo-bin", RetryInfo( retry_delay=Duration( - nanos=int(self.optional_export_retry_millis) + nanos=self.optional_retry_nanos ) ).SerializeToString(), ), @@ -268,7 +266,8 @@ def test_otlp_exporter_otlp_compression_unspecified( """No env or kwarg should be NoCompression""" OTLPSpanExporterForTesting(insecure=True) mock_insecure_channel.assert_called_once_with( - "localhost:4317", compression=Compression.NoCompression + "localhost:4317", + compression=Compression.NoCompression, ) # pylint: disable=no-self-use, disable=unused-argument @@ -372,53 +371,86 @@ def test_export_over_closed_grpc_channel(self): str(err.exception), "Cannot invoke RPC on closed channel!" 
) - @patch( - "opentelemetry.exporter.otlp.proto.grpc.exporter._create_exp_backoff_generator" + @unittest.skipIf( + system() == "Windows", + "For gRPC + windows there's some added delay in the RPCs which breaks the assertion over amount of time passed.", ) - @patch("opentelemetry.exporter.otlp.proto.grpc.exporter.sleep") - def test_unavailable(self, mock_sleep, mock_expo): - mock_expo.configure_mock(**{"return_value": [0.01]}) - - add_TraceServiceServicer_to_server( - TraceServiceServicerWithExportParams(StatusCode.UNAVAILABLE), - self.server, + def test_retry_info_is_respected(self): + mock_trace_service = TraceServiceServicerWithExportParams( + StatusCode.UNAVAILABLE, + optional_retry_nanos=200000000, # .2 seconds ) - result = self.exporter.export([self.span]) - self.assertEqual(result, SpanExportResult.FAILURE) - mock_sleep.assert_called_with(0.01) - - @patch("opentelemetry.exporter.otlp.proto.grpc.exporter.sleep") - def test_unavailable_delay(self, mock_sleep): add_TraceServiceServicer_to_server( - TraceServiceServicerWithExportParams( - StatusCode.UNAVAILABLE, - optional_export_sleep=None, - optional_export_retry_millis=1e7, - ), + mock_trace_service, self.server, ) - with self.assertLogs(level=WARNING) as warning: - self.assertEqual( - self.exporter.export([self.span]), SpanExportResult.FAILURE - ) - mock_sleep.assert_called_with(0.01) - - self.assertEqual( - warning.records[0].message, - ( - "Transient error StatusCode.UNAVAILABLE encountered " - "while exporting traces to localhost:4317, retrying in 0.01s." - ), - ) - - def test_success(self): + exporter = OTLPSpanExporterForTesting(insecure=True, timeout=10) + before = time.time() + self.assertEqual( + exporter.export([self.span]), + SpanExportResult.FAILURE, + ) + after = time.time() + self.assertEqual(mock_trace_service.num_requests, 6) + # 1 second plus wiggle room so the test passes consistently. + self.assertAlmostEqual(after - before, 1, 1) + + @unittest.skipIf( + system() == "Windows", + "For gRPC + windows there's some added delay in the RPCs which breaks the assertion over amount of time passed.", + ) + def test_retry_not_made_if_would_exceed_timeout(self): + mock_trace_service = TraceServiceServicerWithExportParams( + StatusCode.UNAVAILABLE + ) add_TraceServiceServicer_to_server( - TraceServiceServicerWithExportParams(StatusCode.OK), + mock_trace_service, self.server, ) + exporter = OTLPSpanExporterForTesting(insecure=True, timeout=4) + before = time.time() self.assertEqual( - self.exporter.export([self.span]), SpanExportResult.SUCCESS + exporter.export([self.span]), + SpanExportResult.FAILURE, ) + after = time.time() + # Our retry starts with a 1 second backoff then doubles. + # First call at time 0, second at time 1, third at time 3, fourth would exceed timeout. + self.assertEqual(mock_trace_service.num_requests, 3) + # There's a +/-20% jitter on each backoff. + self.assertTrue(2.35 < after - before < 3.65) + + @unittest.skipIf( + system() == "Windows", + "For gRPC + windows there's some added delay in the RPCs which breaks the assertion over amount of time passed.", + ) + def test_timeout_set_correctly(self): + mock_trace_service = TraceServiceServicerWithExportParams( + StatusCode.UNAVAILABLE, optional_export_sleep=0.25 + ) + add_TraceServiceServicer_to_server( + mock_trace_service, + self.server, + ) + exporter = OTLPSpanExporterForTesting(insecure=True, timeout=1.4) + # Should timeout after 1.4 seconds. 
First attempt takes .25 seconds + # Then a 1 second sleep, then deadline exceeded after .15 seconds, + # mid way through second call. + with self.assertLogs(level=WARNING) as warning: + before = time.time() + # Eliminate the jitter. + with patch("random.uniform", return_value=1): + self.assertEqual( + exporter.export([self.span]), + SpanExportResult.FAILURE, + ) + after = time.time() + self.assertEqual( + "Failed to export traces to localhost:4317, error code: StatusCode.DEADLINE_EXCEEDED", + warning.records[-1].message, + ) + self.assertEqual(mock_trace_service.num_requests, 2) + self.assertAlmostEqual(after - before, 1.4, 1) def test_otlp_headers_from_env(self): # pylint: disable=protected-access @@ -440,6 +472,6 @@ def test_permanent_failure(self): self.exporter.export([self.span]), SpanExportResult.FAILURE ) self.assertEqual( - warning.records[0].message, + warning.records[-1].message, "Failed to export traces to localhost:4317, error code: StatusCode.ALREADY_EXISTS", ) diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_metrics_exporter.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_metrics_exporter.py index 2ea12f660fb..c7dc83ef32b 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_metrics_exporter.py +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_metrics_exporter.py @@ -297,7 +297,8 @@ def test_otlp_exporter_otlp_compression_kwarg(self, mock_insecure_channel): insecure=True, compression=Compression.NoCompression ) mock_insecure_channel.assert_called_once_with( - "localhost:4317", compression=Compression.NoCompression + "localhost:4317", + compression=Compression.NoCompression, ) def test_split_metrics_data_many_data_points(self): diff --git a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_trace_exporter.py b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_trace_exporter.py index 73d8d6c7a20..7a609c34867 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_trace_exporter.py +++ b/exporter/opentelemetry-exporter-otlp-proto-grpc/tests/test_otlp_trace_exporter.py @@ -333,7 +333,8 @@ def test_otlp_exporter_otlp_compression_kwarg(self, mock_insecure_channel): """Specifying kwarg should take precedence over env""" OTLPSpanExporter(insecure=True, compression=Compression.NoCompression) mock_insecure_channel.assert_called_once_with( - "localhost:4317", compression=Compression.NoCompression + "localhost:4317", + compression=Compression.NoCompression, ) # pylint: disable=no-self-use @@ -350,7 +351,8 @@ def test_otlp_exporter_otlp_compression_precendence( """ OTLPSpanExporter(insecure=True) mock_insecure_channel.assert_called_once_with( - "localhost:4317", compression=Compression.Gzip + "localhost:4317", + compression=Compression.Gzip, ) def test_translate_spans(self): diff --git a/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/_common/__init__.py b/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/_common/__init__.py new file mode 100644 index 00000000000..b1ed46d28b7 --- /dev/null +++ b/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/_common/__init__.py @@ -0,0 +1,23 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import requests + + +def _is_retryable(resp: requests.Response) -> bool: + if resp.status_code == 408: + return True + if resp.status_code >= 500 and resp.status_code <= 599: + return True + return False diff --git a/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/_log_exporter/__init__.py b/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/_log_exporter/__init__.py index f86f0113833..c64f269b9ed 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/_log_exporter/__init__.py +++ b/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/_log_exporter/__init__.py @@ -14,23 +14,24 @@ import gzip import logging +import random import zlib from io import BytesIO from os import environ -from time import sleep +from time import sleep, time from typing import Dict, Optional, Sequence import requests from requests.exceptions import ConnectionError -from opentelemetry.exporter.otlp.proto.common._internal import ( - _create_exp_backoff_generator, -) from opentelemetry.exporter.otlp.proto.common._log_encoder import encode_logs from opentelemetry.exporter.otlp.proto.http import ( _OTLP_HTTP_HEADERS, Compression, ) +from opentelemetry.exporter.otlp.proto.http._common import ( + _is_retryable, +) from opentelemetry.sdk._logs import LogData from opentelemetry.sdk._logs.export import ( LogExporter, @@ -61,11 +62,10 @@ DEFAULT_ENDPOINT = "http://localhost:4318/" DEFAULT_LOGS_EXPORT_PATH = "v1/logs" DEFAULT_TIMEOUT = 10 # in seconds +_MAX_RETRYS = 6 class OTLPLogExporter(LogExporter): - _MAX_RETRY_TIMEOUT = 64 - def __init__( self, endpoint: Optional[str] = None, @@ -73,7 +73,7 @@ def __init__( client_key_file: Optional[str] = None, client_certificate_file: Optional[str] = None, headers: Optional[Dict[str, str]] = None, - timeout: Optional[int] = None, + timeout: Optional[float] = None, compression: Optional[Compression] = None, session: Optional[requests.Session] = None, ): @@ -108,7 +108,7 @@ def __init__( self._headers = headers or parse_env_headers( headers_string, liberal=True ) - self._timeout = timeout or int( + self._timeout = timeout or float( environ.get( OTEL_EXPORTER_OTLP_LOGS_TIMEOUT, environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, DEFAULT_TIMEOUT), @@ -124,7 +124,7 @@ def __init__( ) self._shutdown = False - def _export(self, serialized_data: bytes): + def _export(self, serialized_data: bytes, timeout_sec: float): data = serialized_data if self._compression == Compression.Gzip: gzip_data = BytesIO() @@ -143,7 +143,7 @@ def _export(self, serialized_data: bytes): url=self._endpoint, data=data, verify=self._certificate_file, - timeout=self._timeout, + timeout=timeout_sec, cert=self._client_cert, ) except ConnectionError: @@ -151,53 +151,42 @@ def _export(self, serialized_data: bytes): url=self._endpoint, data=data, verify=self._certificate_file, - timeout=self._timeout, + timeout=timeout_sec, cert=self._client_cert, ) return resp - @staticmethod - def _retryable(resp: requests.Response) -> bool: - if resp.status_code == 408: - 
return True - if resp.status_code >= 500 and resp.status_code <= 599: - return True - return False - def export(self, batch: Sequence[LogData]) -> LogExportResult: - # After the call to Shutdown subsequent calls to Export are - # not allowed and should return a Failure result. if self._shutdown: _logger.warning("Exporter already shutdown, ignoring batch") return LogExportResult.FAILURE serialized_data = encode_logs(batch).SerializeToString() - - for delay in _create_exp_backoff_generator( - max_value=self._MAX_RETRY_TIMEOUT - ): - if delay == self._MAX_RETRY_TIMEOUT: - return LogExportResult.FAILURE - - resp = self._export(serialized_data) - # pylint: disable=no-else-return + deadline_sec = time() + self._timeout + for retry_num in range(_MAX_RETRYS): + resp = self._export(serialized_data, deadline_sec - time()) if resp.ok: return LogExportResult.SUCCESS - elif self._retryable(resp): - _logger.warning( - "Transient error %s encountered while exporting logs batch, retrying in %ss.", - resp.reason, - delay, - ) - sleep(delay) - continue - else: + # multiplying by a random number between .8 and 1.2 introduces a +/-20% jitter to each backoff. + backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2) + if ( + not _is_retryable(resp) + or retry_num + 1 == _MAX_RETRYS + or backoff_seconds > (deadline_sec - time()) + ): _logger.error( "Failed to export logs batch code: %s, reason: %s", resp.status_code, resp.text, ) return LogExportResult.FAILURE + _logger.warning( + "Transient error %s encountered while exporting logs batch, retrying in %.2fs.", + resp.reason, + backoff_seconds, + ) + sleep(backoff_seconds) + # Not possible to reach here but the linter is complaining. return LogExportResult.FAILURE def force_flush(self, timeout_millis: float = 10_000) -> bool: diff --git a/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/metric_exporter/__init__.py b/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/metric_exporter/__init__.py index 1bdcc13c16a..7ee0aa79132 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/metric_exporter/__init__.py +++ b/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/metric_exporter/__init__.py @@ -14,16 +14,18 @@ import gzip import logging +import random import zlib from io import BytesIO from os import environ -from time import sleep +from time import sleep, time from typing import ( # noqa: F401 Any, Callable, Dict, List, Mapping, + Optional, Sequence, ) @@ -32,7 +34,6 @@ from typing_extensions import deprecated from opentelemetry.exporter.otlp.proto.common._internal import ( - _create_exp_backoff_generator, _get_resource_data, ) from opentelemetry.exporter.otlp.proto.common._internal.metrics_encoder import ( @@ -45,10 +46,13 @@ _OTLP_HTTP_HEADERS, Compression, ) +from opentelemetry.exporter.otlp.proto.http._common import ( + _is_retryable, +) from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import ( # noqa: F401 ExportMetricsServiceRequest, ) -from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401 # noqa: F401 +from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401 AnyValue, ArrayValue, InstrumentationScope, @@ -98,11 +102,10 @@ DEFAULT_ENDPOINT = "http://localhost:4318/" DEFAULT_METRICS_EXPORT_PATH = "v1/metrics" DEFAULT_TIMEOUT = 10 # in seconds +_MAX_RETRYS = 6 class OTLPMetricExporter(MetricExporter, OTLPMetricExporterMixin): - _MAX_RETRY_TIMEOUT = 
64 - def __init__( self, endpoint: str | None = None, @@ -110,7 +113,7 @@ def __init__( client_key_file: str | None = None, client_certificate_file: str | None = None, headers: dict[str, str] | None = None, - timeout: int | None = None, + timeout: float | None = None, compression: Compression | None = None, session: requests.Session | None = None, preferred_temporality: dict[type, AggregationTemporality] | None = None, @@ -147,7 +150,7 @@ def __init__( self._headers = headers or parse_env_headers( headers_string, liberal=True ) - self._timeout = timeout or int( + self._timeout = timeout or float( environ.get( OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, DEFAULT_TIMEOUT), @@ -165,8 +168,9 @@ def __init__( self._common_configuration( preferred_temporality, preferred_aggregation ) + self._shutdown = False - def _export(self, serialized_data: bytes): + def _export(self, serialized_data: bytes, timeout_sec: float): data = serialized_data if self._compression == Compression.Gzip: gzip_data = BytesIO() @@ -185,7 +189,7 @@ def _export(self, serialized_data: bytes): url=self._endpoint, data=data, verify=self._certificate_file, - timeout=self._timeout, + timeout=timeout_sec, cert=self._client_cert, ) except ConnectionError: @@ -193,55 +197,54 @@ def _export(self, serialized_data: bytes): url=self._endpoint, data=data, verify=self._certificate_file, - timeout=self._timeout, + timeout=timeout_sec, cert=self._client_cert, ) return resp - @staticmethod - def _retryable(resp: requests.Response) -> bool: - if resp.status_code == 408: - return True - if resp.status_code >= 500 and resp.status_code <= 599: - return True - return False - def export( self, metrics_data: MetricsData, - timeout_millis: float = 10_000, + timeout_millis: Optional[float] = 10000, **kwargs, ) -> MetricExportResult: - serialized_data = encode_metrics(metrics_data) - for delay in _create_exp_backoff_generator( - max_value=self._MAX_RETRY_TIMEOUT - ): - if delay == self._MAX_RETRY_TIMEOUT: - return MetricExportResult.FAILURE - - resp = self._export(serialized_data.SerializeToString()) - # pylint: disable=no-else-return + if self._shutdown: + _logger.warning("Exporter already shutdown, ignoring batch") + return MetricExportResult.FAILURE + serialized_data = encode_metrics(metrics_data).SerializeToString() + deadline_sec = time() + self._timeout + for retry_num in range(_MAX_RETRYS): + resp = self._export(serialized_data, deadline_sec - time()) if resp.ok: return MetricExportResult.SUCCESS - elif self._retryable(resp): - _logger.warning( - "Transient error %s encountered while exporting metric batch, retrying in %ss.", - resp.reason, - delay, - ) - sleep(delay) - continue - else: + # multiplying by a random number between .8 and 1.2 introduces a +/-20% jitter to each backoff. + backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2) + if ( + not _is_retryable(resp) + or retry_num + 1 == _MAX_RETRYS + or backoff_seconds > (deadline_sec - time()) + ): _logger.error( - "Failed to export batch code: %s, reason: %s", + "Failed to export metrics batch code: %s, reason: %s", resp.status_code, resp.text, ) return MetricExportResult.FAILURE + _logger.warning( + "Transient error %s encountered while exporting metrics batch, retrying in %.2fs.", + resp.reason, + backoff_seconds, + ) + sleep(backoff_seconds) + # Not possible to reach here but the linter is complaining. 
return MetricExportResult.FAILURE def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None: - pass + if self._shutdown: + _logger.warning("Exporter already shutdown, ignoring call") + return + self._session.close() + self._shutdown = True @property def _exporting(self) -> str: diff --git a/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/trace_exporter/__init__.py b/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/trace_exporter/__init__.py index 1841e5210a4..9f9baf31150 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/trace_exporter/__init__.py +++ b/exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/trace_exporter/__init__.py @@ -14,18 +14,16 @@ import gzip import logging +import random import zlib from io import BytesIO from os import environ -from time import sleep -from typing import Dict, Optional +from time import sleep, time +from typing import Dict, Optional, Sequence import requests from requests.exceptions import ConnectionError -from opentelemetry.exporter.otlp.proto.common._internal import ( - _create_exp_backoff_generator, -) from opentelemetry.exporter.otlp.proto.common.trace_encoder import ( encode_spans, ) @@ -33,6 +31,9 @@ _OTLP_HTTP_HEADERS, Compression, ) +from opentelemetry.exporter.otlp.proto.http._common import ( + _is_retryable, +) from opentelemetry.sdk.environment_variables import ( OTEL_EXPORTER_OTLP_CERTIFICATE, OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, @@ -49,6 +50,7 @@ OTEL_EXPORTER_OTLP_TRACES_HEADERS, OTEL_EXPORTER_OTLP_TRACES_TIMEOUT, ) +from opentelemetry.sdk.trace import ReadableSpan from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult from opentelemetry.util.re import parse_env_headers @@ -59,11 +61,10 @@ DEFAULT_ENDPOINT = "http://localhost:4318/" DEFAULT_TRACES_EXPORT_PATH = "v1/traces" DEFAULT_TIMEOUT = 10 # in seconds +_MAX_RETRYS = 6 class OTLPSpanExporter(SpanExporter): - _MAX_RETRY_TIMEOUT = 64 - def __init__( self, endpoint: Optional[str] = None, @@ -71,7 +72,7 @@ def __init__( client_key_file: Optional[str] = None, client_certificate_file: Optional[str] = None, headers: Optional[Dict[str, str]] = None, - timeout: Optional[int] = None, + timeout: Optional[float] = None, compression: Optional[Compression] = None, session: Optional[requests.Session] = None, ): @@ -105,7 +106,7 @@ def __init__( self._headers = headers or parse_env_headers( headers_string, liberal=True ) - self._timeout = timeout or int( + self._timeout = timeout or float( environ.get( OTEL_EXPORTER_OTLP_TRACES_TIMEOUT, environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, DEFAULT_TIMEOUT), @@ -121,7 +122,7 @@ def __init__( ) self._shutdown = False - def _export(self, serialized_data: bytes): + def _export(self, serialized_data: bytes, timeout_sec: float): data = serialized_data if self._compression == Compression.Gzip: gzip_data = BytesIO() @@ -140,7 +141,7 @@ def _export(self, serialized_data: bytes): url=self._endpoint, data=data, verify=self._certificate_file, - timeout=self._timeout, + timeout=timeout_sec, cert=self._client_cert, ) except ConnectionError: @@ -148,61 +149,44 @@ def _export(self, serialized_data: bytes): url=self._endpoint, data=data, verify=self._certificate_file, - timeout=self._timeout, + timeout=timeout_sec, cert=self._client_cert, ) return resp - @staticmethod - def _retryable(resp: requests.Response) -> bool: - if resp.status_code == 408: - return True - if 
resp.status_code >= 500 and resp.status_code <= 599: - return True - return False - - def _serialize_spans(self, spans): - return encode_spans(spans).SerializePartialToString() - - def _export_serialized_spans(self, serialized_data): - for delay in _create_exp_backoff_generator( - max_value=self._MAX_RETRY_TIMEOUT - ): - if delay == self._MAX_RETRY_TIMEOUT: - return SpanExportResult.FAILURE + def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: + if self._shutdown: + _logger.warning("Exporter already shutdown, ignoring batch") + return SpanExportResult.FAILURE - resp = self._export(serialized_data) - # pylint: disable=no-else-return + serialized_data = encode_spans(spans).SerializePartialToString() + deadline_sec = time() + self._timeout + for retry_num in range(_MAX_RETRYS): + resp = self._export(serialized_data, deadline_sec - time()) if resp.ok: return SpanExportResult.SUCCESS - elif self._retryable(resp): - _logger.warning( - "Transient error %s encountered while exporting span batch, retrying in %ss.", - resp.reason, - delay, - ) - sleep(delay) - continue - else: + # multiplying by a random number between .8 and 1.2 introduces a +/-20% jitter to each backoff. + backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2) + if ( + not _is_retryable(resp) + or retry_num + 1 == _MAX_RETRYS + or backoff_seconds > (deadline_sec - time()) + ): _logger.error( - "Failed to export batch code: %s, reason: %s", + "Failed to export span batch code: %s, reason: %s", resp.status_code, resp.text, ) return SpanExportResult.FAILURE + _logger.warning( + "Transient error %s encountered while exporting span batch, retrying in %.2fs.", + resp.reason, + backoff_seconds, + ) + sleep(backoff_seconds) + # Not possible to reach here but the linter is complaining. return SpanExportResult.FAILURE - def export(self, spans) -> SpanExportResult: - # After the call to Shutdown subsequent calls to Export are - # not allowed and should return a Failure result. - if self._shutdown: - _logger.warning("Exporter already shutdown, ignoring batch") - return SpanExportResult.FAILURE - - serialized_data = self._serialize_spans(spans) - - return self._export_serialized_spans(serialized_data) - def shutdown(self): if self._shutdown: _logger.warning("Exporter already shutdown, ignoring call") diff --git a/exporter/opentelemetry-exporter-otlp-proto-http/tests/metrics/test_otlp_metrics_exporter.py b/exporter/opentelemetry-exporter-otlp-proto-http/tests/metrics/test_otlp_metrics_exporter.py index 16bb3e54286..1b5e9cc5f92 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-http/tests/metrics/test_otlp_metrics_exporter.py +++ b/exporter/opentelemetry-exporter-otlp-proto-http/tests/metrics/test_otlp_metrics_exporter.py @@ -12,14 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +import time from logging import WARNING from os import environ from unittest import TestCase -from unittest.mock import MagicMock, Mock, call, patch +from unittest.mock import ANY, MagicMock, Mock, patch from requests import Session from requests.models import Response -from responses import POST, activate, add from opentelemetry.exporter.otlp.proto.common.metrics_encoder import ( encode_metrics, @@ -327,31 +327,10 @@ def test_serialization(self, mock_post): url=exporter._endpoint, data=serialized_data.SerializeToString(), verify=exporter._certificate_file, - timeout=exporter._timeout, + timeout=ANY, # Timeout is a float based on real time, can't put an exact value here. 
cert=exporter._client_cert, ) - @activate - @patch("opentelemetry.exporter.otlp.proto.http.metric_exporter.sleep") - def test_exponential_backoff(self, mock_sleep): - # return a retryable error - add( - POST, - "http://metrics.example.com/export", - json={"error": "something exploded"}, - status=500, - ) - - exporter = OTLPMetricExporter( - endpoint="http://metrics.example.com/export" - ) - metrics_data = self.metrics["sum_int"] - - exporter.export(metrics_data) - mock_sleep.assert_has_calls( - [call(1), call(2), call(4), call(8), call(16), call(32)] - ) - def test_aggregation_temporality(self): otlp_metric_exporter = OTLPMetricExporter() @@ -523,3 +502,42 @@ def test_preferred_aggregation_override(self): self.assertEqual( exporter._preferred_aggregation[Histogram], histogram_aggregation ) + + @patch.object(Session, "post") + def test_retry_timeout(self, mock_post): + exporter = OTLPMetricExporter(timeout=1.5) + + resp = Response() + resp.status_code = 503 + resp.reason = "UNAVAILABLE" + mock_post.return_value = resp + with self.assertLogs(level=WARNING) as warning: + before = time.time() + self.assertEqual( + exporter.export(self.metrics["sum_int"]), + MetricExportResult.FAILURE, + ) + after = time.time() + + # First call at time 0, second at time 1, then an early return before the second backoff sleep b/c it would exceed timeout. + self.assertEqual(mock_post.call_count, 2) + # There's a +/-20% jitter on each backoff. + self.assertTrue(0.75 < after - before < 1.25) + self.assertIn( + "Transient error UNAVAILABLE encountered while exporting metrics batch, retrying in", + warning.records[0].message, + ) + + @patch.object(Session, "post") + def test_timeout_set_correctly(self, mock_post): + resp = Response() + resp.status_code = 200 + + def export_side_effect(*args, **kwargs): + # Timeout should be set to something slightly less than 400 milliseconds depending on how much time has passed. 
+ self.assertAlmostEqual(0.4, kwargs["timeout"], 2) + return resp + + mock_post.side_effect = export_side_effect + exporter = OTLPMetricExporter(timeout=0.4) + exporter.export(self.metrics["sum_int"]) diff --git a/exporter/opentelemetry-exporter-otlp-proto-http/tests/test_proto_log_exporter.py b/exporter/opentelemetry-exporter-otlp-proto-http/tests/test_proto_log_exporter.py index 66b0f890d76..04c37743860 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-http/tests/test_proto_log_exporter.py +++ b/exporter/opentelemetry-exporter-otlp-proto-http/tests/test_proto_log_exporter.py @@ -14,13 +14,16 @@ # pylint: disable=protected-access +import time import unittest +from logging import WARNING from typing import List -from unittest.mock import MagicMock, Mock, call, patch +from unittest.mock import MagicMock, Mock, patch import requests -import responses from google.protobuf.json_format import MessageToDict +from requests import Session +from requests.models import Response from opentelemetry._logs import SeverityNumber from opentelemetry.exporter.otlp.proto.http import Compression @@ -267,25 +270,6 @@ def test_exported_log_without_span_id(self): else: self.fail("No log records found") - @responses.activate - @patch("opentelemetry.exporter.otlp.proto.http._log_exporter.sleep") - def test_exponential_backoff(self, mock_sleep): - # return a retryable error - responses.add( - responses.POST, - "http://logs.example.com/export", - json={"error": "something exploded"}, - status=500, - ) - - exporter = OTLPLogExporter(endpoint="http://logs.example.com/export") - logs = self._get_sdk_log_data() - - exporter.export(logs) - mock_sleep.assert_has_calls( - [call(1), call(2), call(4), call(8), call(16), call(32)] - ) - @staticmethod def _get_sdk_log_data() -> List[LogData]: log1 = LogData( @@ -365,3 +349,42 @@ def test_2xx_status_code(self, mock_otlp_metric_exporter): self.assertEqual( OTLPLogExporter().export(MagicMock()), LogExportResult.SUCCESS ) + + @patch.object(Session, "post") + def test_retry_timeout(self, mock_post): + exporter = OTLPLogExporter(timeout=1.5) + + resp = Response() + resp.status_code = 503 + resp.reason = "UNAVAILABLE" + mock_post.return_value = resp + with self.assertLogs(level=WARNING) as warning: + before = time.time() + # Set timeout to 1.5 seconds + self.assertEqual( + exporter.export(self._get_sdk_log_data()), + LogExportResult.FAILURE, + ) + after = time.time() + # First call at time 0, second at time 1, then an early return before the second backoff sleep b/c it would exceed timeout. + self.assertEqual(mock_post.call_count, 2) + # There's a +/-20% jitter on each backoff. + self.assertTrue(0.75 < after - before < 1.25) + self.assertIn( + "Transient error UNAVAILABLE encountered while exporting logs batch, retrying in", + warning.records[0].message, + ) + + @patch.object(Session, "post") + def test_timeout_set_correctly(self, mock_post): + resp = Response() + resp.status_code = 200 + + def export_side_effect(*args, **kwargs): + # Timeout should be set to something slightly less than 400 milliseconds depending on how much time has passed. 
+ self.assertAlmostEqual(0.4, kwargs["timeout"], 2) + return resp + + mock_post.side_effect = export_side_effect + exporter = OTLPLogExporter(timeout=0.4) + exporter.export(self._get_sdk_log_data()) diff --git a/exporter/opentelemetry-exporter-otlp-proto-http/tests/test_proto_span_exporter.py b/exporter/opentelemetry-exporter-otlp-proto-http/tests/test_proto_span_exporter.py index 8d8ff6037aa..16d40e3f3fd 100644 --- a/exporter/opentelemetry-exporter-otlp-proto-http/tests/test_proto_span_exporter.py +++ b/exporter/opentelemetry-exporter-otlp-proto-http/tests/test_proto_span_exporter.py @@ -12,11 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +import time import unittest -from unittest.mock import MagicMock, Mock, call, patch +from logging import WARNING +from unittest.mock import MagicMock, Mock, patch import requests -import responses +from requests import Session +from requests.models import Response from opentelemetry.exporter.otlp.proto.http import Compression from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( @@ -52,6 +55,16 @@ OS_ENV_CLIENT_KEY = "os/env/client-key.pem" OS_ENV_HEADERS = "envHeader1=val1,envHeader2=val2" OS_ENV_TIMEOUT = "30" +BASIC_SPAN = _Span( + "abc", + context=Mock( + **{ + "trace_state": {"a": "b", "c": "d"}, + "span_id": 10217189687419569865, + "trace_id": 67545097771067222548457157018666467027, + } + ), +) # pylint: disable=protected-access @@ -227,37 +240,6 @@ def test_headers_parse_from_env(self): ), ) - # pylint: disable=no-self-use - @responses.activate - @patch("opentelemetry.exporter.otlp.proto.http.trace_exporter.sleep") - def test_exponential_backoff(self, mock_sleep): - # return a retryable error - responses.add( - responses.POST, - "http://traces.example.com/export", - json={"error": "something exploded"}, - status=500, - ) - - exporter = OTLPSpanExporter( - endpoint="http://traces.example.com/export" - ) - span = _Span( - "abc", - context=Mock( - **{ - "trace_state": {"a": "b", "c": "d"}, - "span_id": 10217189687419569865, - "trace_id": 67545097771067222548457157018666467027, - } - ), - ) - - exporter.export([span]) - mock_sleep.assert_has_calls( - [call(1), call(2), call(4), call(8), call(16), call(32)] - ) - @patch.object(OTLPSpanExporter, "_export", return_value=Mock(ok=True)) def test_2xx_status_code(self, mock_otlp_metric_exporter): """ @@ -267,3 +249,42 @@ def test_2xx_status_code(self, mock_otlp_metric_exporter): self.assertEqual( OTLPSpanExporter().export(MagicMock()), SpanExportResult.SUCCESS ) + + @patch.object(Session, "post") + def test_retry_timeout(self, mock_post): + exporter = OTLPSpanExporter(timeout=1.5) + + resp = Response() + resp.status_code = 503 + resp.reason = "UNAVAILABLE" + mock_post.return_value = resp + with self.assertLogs(level=WARNING) as warning: + before = time.time() + # Set timeout to 1.5 seconds + self.assertEqual( + exporter.export([BASIC_SPAN]), + SpanExportResult.FAILURE, + ) + after = time.time() + # First call at time 0, second at time 1, then an early return before the second backoff sleep b/c it would exceed timeout. + self.assertEqual(mock_post.call_count, 2) + # There's a +/-20% jitter on each backoff. 
+        self.assertTrue(0.75 < after - before < 1.25)
+        self.assertIn(
+            "Transient error UNAVAILABLE encountered while exporting span batch, retrying in",
+            warning.records[0].message,
+        )
+
+    @patch.object(Session, "post")
+    def test_timeout_set_correctly(self, mock_post):
+        resp = Response()
+        resp.status_code = 200
+
+        def export_side_effect(*args, **kwargs):
+            # Timeout should be set to something slightly less than 400 milliseconds depending on how much time has passed.
+            self.assertAlmostEqual(0.4, kwargs["timeout"], 2)
+            return resp
+
+        mock_post.side_effect = export_side_effect
+        exporter = OTLPSpanExporter(timeout=0.4)
+        exporter.export([BASIC_SPAN])
diff --git a/uv.lock b/uv.lock
index 98c5dae6f6f..26c44026f7a 100644
--- a/uv.lock
+++ b/uv.lock
@@ -3,8 +3,8 @@ revision = 1
 requires-python = ">=3.9"
 resolution-markers = [
     "python_full_version >= '3.13'",
-    "python_full_version >= '3.9' and python_full_version < '3.13'",
-    "python_full_version < '3.9'",
+    "python_full_version < '3.13'",
+    "python_version < '0'",
 ]
 
 [manifest]
@@ -104,19 +104,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 },
     { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 },
     { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 },
-    { url = "https://files.pythonhosted.org/packages/10/bd/6517ea94f2672e801011d50b5d06be2a0deaf566aea27bcdcd47e5195357/charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c", size = 195653 },
-    { url = "https://files.pythonhosted.org/packages/e5/0d/815a2ba3f283b4eeaa5ece57acade365c5b4135f65a807a083c818716582/charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9", size = 140701 },
-    { url = "https://files.pythonhosted.org/packages/aa/17/c94be7ee0d142687e047fe1de72060f6d6837f40eedc26e87e6e124a3fc6/charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8", size = 150495 },
-    { url = "https://files.pythonhosted.org/packages/f7/33/557ac796c47165fc141e4fb71d7b0310f67e05cb420756f3a82e0a0068e0/charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6", size = 142946 },
-    { url = "https://files.pythonhosted.org/packages/1e/0d/38ef4ae41e9248d63fc4998d933cae22473b1b2ac4122cf908d0f5eb32aa/charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c", size = 144737 },
-    { url = "https://files.pythonhosted.org/packages/43/01/754cdb29dd0560f58290aaaa284d43eea343ad0512e6ad3b8b5c11f08592/charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a", size = 147471 },
-    { url = "https://files.pythonhosted.org/packages/ba/cd/861883ba5160c7a9bd242c30b2c71074cda2aefcc0addc91118e0d4e0765/charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd", size = 140801 },
-    { url = "https://files.pythonhosted.org/packages/6f/7f/0c0dad447819e90b93f8ed238cc8f11b91353c23c19e70fa80483a155bed/charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd", size = 149312 },
-    { url = "https://files.pythonhosted.org/packages/8e/09/9f8abcc6fff60fb727268b63c376c8c79cc37b833c2dfe1f535dfb59523b/charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824", size = 152347 },
-    { url = "https://files.pythonhosted.org/packages/be/e5/3f363dad2e24378f88ccf63ecc39e817c29f32e308ef21a7a6d9c1201165/charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca", size = 149888 },
-    { url = "https://files.pythonhosted.org/packages/e4/10/a78c0e91f487b4ad0ef7480ac765e15b774f83de2597f1b6ef0eaf7a2f99/charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b", size = 145169 },
-    { url = "https://files.pythonhosted.org/packages/d3/81/396e7d7f5d7420da8273c91175d2e9a3f569288e3611d521685e4b9ac9cc/charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e", size = 95094 },
-    { url = "https://files.pythonhosted.org/packages/40/bb/20affbbd9ea29c71ea123769dc568a6d42052ff5089c5fe23e21e21084a6/charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4", size = 102139 },
     { url = "https://files.pythonhosted.org/packages/7f/c0/b913f8f02836ed9ab32ea643c6fe4d3325c3d8627cf6e78098671cafff86/charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41", size = 197867 },
     { url = "https://files.pythonhosted.org/packages/0f/6c/2bee440303d705b6fb1e2ec789543edec83d32d258299b16eed28aad48e0/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f", size = 141385 },
     { url = "https://files.pythonhosted.org/packages/3d/04/cb42585f07f6f9fd3219ffb6f37d5a39b4fd2db2355b23683060029c35f7/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2", size = 151367 },
@@ -187,15 +174,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/79/6a/5df64b6df405a1ed1482cb6c10044b06ec47fd28e87c2232dbcf435ecb33/grpcio-1.70.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:0a5c78d5198a1f0aa60006cd6eb1c912b4a1520b6a3968e677dbcba215fabb40", size = 6190982 },
     { url = "https://files.pythonhosted.org/packages/42/aa/aeaac87737e6d25d1048c53b8ec408c056d3ed0c922e7c5efad65384250c/grpcio-1.70.0-cp313-cp313-win32.whl", hash = "sha256:fe9dbd916df3b60e865258a8c72ac98f3ac9e2a9542dcb72b7a34d236242a5ce", size = 3598359 },
     { url = "https://files.pythonhosted.org/packages/1f/79/8edd2442d2de1431b4a3de84ef91c37002f12de0f9b577fb07b452989dbc/grpcio-1.70.0-cp313-cp313-win_amd64.whl", hash = "sha256:4119fed8abb7ff6c32e3d2255301e59c316c22d31ab812b3fbcbaf3d0d87cc68", size = 4293938 },
-    { url = "https://files.pythonhosted.org/packages/38/5f/d7fe323c18a2ec98a2a9b38fb985f5e843f76990298d7c4ce095f44b46a7/grpcio-1.70.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:8058667a755f97407fca257c844018b80004ae8035565ebc2812cc550110718d", size = 5232027 },
-    { url = "https://files.pythonhosted.org/packages/d4/4b/3d3b5548575b635f51883212a482cd237e8525535d4591b9dc7e5b2c2ddc/grpcio-1.70.0-cp38-cp38-macosx_10_14_universal2.whl", hash = "sha256:879a61bf52ff8ccacbedf534665bb5478ec8e86ad483e76fe4f729aaef867cab", size = 11448811 },
-    { url = "https://files.pythonhosted.org/packages/8a/d7/9a0922fc12d339271c7e4e6691470172b7c13715fed7bd934274803f1527/grpcio-1.70.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:0ba0a173f4feacf90ee618fbc1a27956bfd21260cd31ced9bc707ef551ff7dc7", size = 5711890 },
-    { url = "https://files.pythonhosted.org/packages/1e/ae/d4dbf8bff0f1d270f118d08558bc8dc0489e026d6620a4e3ee2d79d79041/grpcio-1.70.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558c386ecb0148f4f99b1a65160f9d4b790ed3163e8610d11db47838d452512d", size = 6331933 },
-    { url = "https://files.pythonhosted.org/packages/2c/64/66a74c02b00e00b919c245ca9da8e5c44e8692bf3fe7f27efbc97572566c/grpcio-1.70.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:412faabcc787bbc826f51be261ae5fa996b21263de5368a55dc2cf824dc5090e", size = 5950685 },
-    { url = "https://files.pythonhosted.org/packages/b0/64/e992ac693118c37164e085676216d258804d7a5bbf3581d3f989c843a9a5/grpcio-1.70.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3b0f01f6ed9994d7a0b27eeddea43ceac1b7e6f3f9d86aeec0f0064b8cf50fdb", size = 6640974 },
-    { url = "https://files.pythonhosted.org/packages/57/17/34d0a6af4477fd48b8b41d13782fb1e35b8841b17d6ac7a3eb24d2f3b17e/grpcio-1.70.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7385b1cb064734005204bc8994eed7dcb801ed6c2eda283f613ad8c6c75cf873", size = 6204792 },
-    { url = "https://files.pythonhosted.org/packages/d3/e5/e45d8eb81929c0becd5bda413b60262f79d862e19cff632d496909aa3bd0/grpcio-1.70.0-cp38-cp38-win32.whl", hash = "sha256:07269ff4940f6fb6710951116a04cd70284da86d0a4368fd5a3b552744511f5a", size = 3620015 },
-    { url = "https://files.pythonhosted.org/packages/87/7d/36009c38093e62969c708f20b86ab6761c2ba974b12ff10def6f397f24fa/grpcio-1.70.0-cp38-cp38-win_amd64.whl", hash = "sha256:aba19419aef9b254e15011b230a180e26e0f6864c90406fdbc255f01d83bc83c", size = 4307043 },
     { url = "https://files.pythonhosted.org/packages/9d/0e/64061c9746a2dd6e07cb0a0f3829f0a431344add77ec36397cc452541ff6/grpcio-1.70.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:4f1937f47c77392ccd555728f564a49128b6a197a05a5cd527b796d36f3387d0", size = 5231123 },
     { url = "https://files.pythonhosted.org/packages/72/9f/c93501d5f361aecee0146ab19300d5acb1c2747b00217c641f06fffbcd62/grpcio-1.70.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:0cd430b9215a15c10b0e7d78f51e8a39d6cf2ea819fd635a7214fae600b1da27", size = 11467217 },
     { url = "https://files.pythonhosted.org/packages/0a/1a/980d115b701023450a304881bf3f6309f6fb15787f9b78d2728074f3bf86/grpcio-1.70.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:e27585831aa6b57b9250abaf147003e126cd3a6c6ca0c531a01996f31709bed1", size = 5710913 },
@@ -216,31 +194,12 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
 ]
 
-[[package]]
-name = "importlib-metadata"
-version = "8.5.0"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
-    "python_full_version < '3.9'",
-]
-dependencies = [
-    { name = "zipp", version = "3.20.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514 },
-]
-
 [[package]]
 name = "importlib-metadata"
 version = "8.6.1"
 source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
-    "python_full_version >= '3.13'",
-    "python_full_version >= '3.9' and python_full_version < '3.13'",
-]
 dependencies = [
-    { name = "zipp", version = "3.21.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
+    { name = "zipp" },
 ]
 sdist = { url = "https://files.pythonhosted.org/packages/33/08/c1395a292bb23fd03bdf572a1357c5a733d3eecbab877641ceacab23db6e/importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580", size = 55767 }
 wheels = [
@@ -251,15 +210,14 @@
 name = "opentelemetry-api"
 source = { editable = "opentelemetry-api" }
 dependencies = [
-    { name = "importlib-metadata", version = "8.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
-    { name = "importlib-metadata", version = "8.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
+    { name = "importlib-metadata" },
     { name = "typing-extensions" },
 ]
 
 [package.metadata]
 requires-dist = [
-    { name = "typing-extensions", specifier = ">=4.5.0" },
     { name = "importlib-metadata", specifier = ">=6.0,<8.8.0" },
+    { name = "typing-extensions", specifier = ">=4.5.0" },
 ]
 
 [[package]]
@@ -502,8 +460,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/dd/04/3eaedc2ba17a088961d0e3bd396eac764450f431621b58a04ce898acd126/protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e", size = 417825 },
     { url = "https://files.pythonhosted.org/packages/4f/06/7c467744d23c3979ce250397e26d8ad8eeb2bea7b18ca12ad58313c1b8d5/protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84", size = 319573 },
     { url = "https://files.pythonhosted.org/packages/a8/45/2ebbde52ad2be18d3675b6bee50e68cd73c9e0654de77d595540b5129df8/protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f", size = 319672 },
-    { url = "https://files.pythonhosted.org/packages/ce/06/18efd22aaefbc444a96a68390fd66aacd40d6791637e86dd6fea3164975d/protobuf-5.29.3-cp38-cp38-win32.whl", hash = "sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252", size = 422593 },
-    { url = "https://files.pythonhosted.org/packages/c6/36/37425a115a95e35a1d8dff686ac2488718a40f07d498edfd89eb40ee3c5d/protobuf-5.29.3-cp38-cp38-win_amd64.whl", hash = "sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107", size = 434517 },
     { url = "https://files.pythonhosted.org/packages/85/a6/bf65a38f8be5ab8c3b575822acfd338702fdf7ac9abd8c81630cc7c9f4bd/protobuf-5.29.3-cp39-cp39-win32.whl", hash = "sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7", size = 422676 },
     { url = "https://files.pythonhosted.org/packages/ac/e2/48d46adc86369ff092eaece3e537f76b3baaab45ca3dde257838cde831d2/protobuf-5.29.3-cp39-cp39-win_amd64.whl", hash = "sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da", size = 434593 },
     { url = "https://files.pythonhosted.org/packages/fd/b2/ab07b09e0f6d143dfb839693aa05765257bceaa13d03bf1a696b78323e7a/protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f", size = 172550 },
@@ -517,8 +473,7 @@ dependencies = [
     { name = "certifi" },
     { name = "charset-normalizer" },
     { name = "idna" },
-    { name = "urllib3", version = "2.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
-    { name = "urllib3", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
+    { name = "urllib3" },
 ]
 sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
 wheels = [
@@ -534,51 +489,19 @@
     { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
 ]
 
-[[package]]
-name = "urllib3"
-version = "2.2.3"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
-    "python_full_version < '3.9'",
-]
-sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 },
-]
-
 [[package]]
 name = "urllib3"
 version = "2.3.0"
 source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
-    "python_full_version >= '3.13'",
-    "python_full_version >= '3.9' and python_full_version < '3.13'",
-]
 sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 }
 wheels = [
     { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 },
 ]
 
-[[package]]
-name = "zipp"
-version = "3.20.2"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
-    "python_full_version < '3.9'",
-]
-sdist = { url = "https://files.pythonhosted.org/packages/54/bf/5c0000c44ebc80123ecbdddba1f5dcd94a5ada602a9c225d84b5aaa55e86/zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29", size = 24199 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/62/8b/5ba542fa83c90e09eac972fc9baca7a88e7e7ca4b221a89251954019308b/zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", size = 9200 },
-]
-
 [[package]]
 name = "zipp"
 version = "3.21.0"
 source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
-    "python_full_version >= '3.13'",
-    "python_full_version >= '3.9' and python_full_version < '3.13'",
-]
 sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545 }
 wheels = [
     { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630 },