diff --git a/README.md b/README.md
index e9c47a6..6764736 100644
--- a/README.md
+++ b/README.md
@@ -47,11 +47,11 @@ with tracer.start_as_current_span('hello'):
     print('Hello World!')
 ```
 
-#### Integrations
+#### Instrumentations
 
-OpenTelemetry also supports several [integrations](https://github.com/open-telemetry/opentelemetry-python/tree/master/ext) which allows to integrate with third party libraries.
+OpenTelemetry also supports several [instrumentations](https://github.com/open-telemetry/opentelemetry-python/tree/master/instrumentation), which allow you to instrument third-party libraries.
 
-This example shows how to integrate with the [requests](https://2.python-requests.org/en/master/)_ library.
+This example shows how to instrument the [requests](https://2.python-requests.org/en/master/) library.
 
 * Create an Azure Monitor resource and get the instrumentation key, more information can be found [here](https://docs.microsoft.com/azure/azure-monitor/app/create-new-resource).
 * Install the `requests` integration package using ``pip install opentelemetry-ext-http-requests``.
diff --git a/azure_monitor/CHANGELOG.md b/azure_monitor/CHANGELOG.md
index c81c6c8..8845db2 100644
--- a/azure_monitor/CHANGELOG.md
+++ b/azure_monitor/CHANGELOG.md
@@ -2,6 +2,9 @@
 
 ## Unreleased
 
+- Remove request metrics from auto-collection
+  ([#124](https://github.com/microsoft/opentelemetry-azure-monitor-python/pull/124))
+
 ## 0.5b.0
 
 Released 2020-09-24
diff --git a/azure_monitor/examples/metrics/auto_collector.py b/azure_monitor/examples/metrics/auto_collector.py
index 95c060c..74fcd3a 100644
--- a/azure_monitor/examples/metrics/auto_collector.py
+++ b/azure_monitor/examples/metrics/auto_collector.py
@@ -19,7 +19,7 @@
 
 testing_label_set = {"environment": "testing"}
 
-# Automatically collect standard metrics
+# Automatically collect performance counters
 auto_collection = AutoCollection(meter=meter, labels=testing_label_set)
 
 metrics.get_meter_provider().start_pipeline(meter, exporter, 2)
diff --git a/azure_monitor/src/azure_monitor/export/trace/__init__.py b/azure_monitor/src/azure_monitor/export/trace/__init__.py
index dd433be..bc06989 100644
--- a/azure_monitor/src/azure_monitor/export/trace/__init__.py
+++ b/azure_monitor/src/azure_monitor/export/trace/__init__.py
@@ -27,6 +27,10 @@ class AzureMonitorSpanExporter(BaseExporter, SpanExporter):
         options: :doc:`export.options` to allow configuration for the exporter
     """
 
+    def __init__(self, **options):
+        super().__init__(**options)
+        self.add_telemetry_processor(indicate_processed_by_metric_extractors)
+
     def export(self, spans: Sequence[Span]) -> SpanExportResult:
         envelopes = list(map(self._span_to_envelope, spans))
         envelopes = list(
@@ -122,6 +126,7 @@ def convert_span_to_envelope(span: Span) -> protocol.Envelope:
             "component" in span.attributes
             and span.attributes["component"] == "http"
         ):
+            # TODO: check other component types (e.g. db)
             data.type = "HTTP"
             if "http.url" in span.attributes:
                 url = span.attributes["http.url"]
@@ -157,3 +162,12 @@ def convert_span_to_envelope(span: Span) -> protocol.Envelope:
             data.properties["_MS.links"] = json.dumps(links)
     # TODO: tracestate, tags
     return envelope
+
+
+def indicate_processed_by_metric_extractors(envelope):
+    name = "Requests"
+    if envelope.data.base_type == "RemoteDependencyData":
+        name = "Dependencies"
+    envelope.data.base_data.properties["_MS.ProcessedByMetricExtractors"] = (
+        "(Name:'" + name + "',Ver:'1.1')"
+    )
diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py
index 3753983..f6773ef 100644
--- a/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py
+++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py
@@ -24,8 +24,7 @@
 
 
 class AutoCollection:
-    """Starts auto collection of standard metrics, including performance,
-    dependency and request metrics.
+    """Starts auto collection of performance counters
 
     Args:
         meter: OpenTelemetry Meter
@@ -35,4 +34,3 @@ class AutoCollection:
     def __init__(self, meter: Meter, labels: Dict[str, str]):
         col_type = AutoCollectionType.PERF_COUNTER
         self._performance_metrics = PerformanceMetrics(meter, labels, col_type)
-        self._request_metrics = RequestMetrics(meter, labels, col_type)
diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py
index 6229351..1afc829 100644
--- a/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py
+++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py
@@ -91,7 +91,7 @@ def __init__(self, meter: Meter, labels: Dict[str, str]):
         )
 
     def _track_dependency_rate(self, observer: Observer) -> None:
-        """ Track Dependency rate
+        """Track Dependency rate
 
         Calculated by obtaining the number of outgoing requests made
         using the requests library within an elapsed time and dividing
@@ -121,7 +121,7 @@ def _track_dependency_rate(self, observer: Observer) -> None:
         observer.observe(last_result, self._labels)
 
     def _track_dependency_duration(self, observer: Observer) -> None:
-        """ Track Dependency average duration
+        """Track Dependency average duration
 
         Calculated by getting the time it takes to make an outgoing request
         and dividing over the amount of outgoing requests over an elapsed time.
@@ -149,7 +149,7 @@ def _track_dependency_duration(self, observer: Observer) -> None:
         observer.observe(last_average_duration, self._labels)
 
     def _track_failure_rate(self, observer: Observer) -> None:
-        """ Track Failed Dependency rate
+        """Track Failed Dependency rate
 
         Calculated by obtaining the number of failed outgoing requests made
         using the requests library within an elapsed time and dividing
diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/__init__.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/__init__.py
index 9e41b00..a4850bb 100644
--- a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/__init__.py
+++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/__init__.py
@@ -42,7 +42,7 @@ def __init__(
         col_type = AutoCollectionType.LIVE_METRICS
         self._performance_metrics = PerformanceMetrics(meter, labels, col_type)
         self._dependency_metrics = DependencyMetrics(meter, labels)
-        self._request_metrics = RequestMetrics(meter, labels, col_type)
+        self._request_metrics = RequestMetrics(meter, labels)
         self._manager = LiveMetricsManager(
             meter, instrumentation_key, span_processor
         )
diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py
index 417e068..a1abcc2 100644
--- a/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py
+++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py
@@ -79,7 +79,7 @@ def __init__(
         )
 
     def _track_cpu(self, observer: Observer) -> None:
-        """ Track CPU time
+        """Track CPU time
 
         Processor time is defined as a float representing the current system
         wide CPU utilization minus idle CPU time as a percentage. Idle CPU
@@ -90,7 +90,7 @@ def _track_cpu(self, observer: Observer) -> None:
         observer.observe(100.0 - cpu_times_percent.idle, self._labels)
 
     def _track_memory(self, observer: Observer) -> None:
-        """ Track Memory
+        """Track Memory
 
         Available memory is defined as memory that can be given instantly to
         processes without the system going into swap.
@@ -98,7 +98,7 @@ def _track_memory(self, observer: Observer) -> None:
         observer.observe(psutil.virtual_memory().available, self._labels)
 
     def _track_process_cpu(self, observer: Observer) -> None:
-        """ Track Process CPU time
+        """Track Process CPU time
 
         Returns a derived gauge for the CPU usage for the current process.
         Return values range from 0.0 to 100.0 inclusive.
@@ -113,7 +113,7 @@ def _track_process_cpu(self, observer: Observer) -> None:
             logger.warning("Error handling get process cpu usage.")
 
     def _track_process_memory(self, observer: Observer) -> None:
-        """ Track Memory
+        """Track Memory
 
         Available memory is defined as memory that can be given instantly to
         processes without the system going into swap.
@@ -124,7 +124,7 @@ def _track_process_memory(self, observer: Observer) -> None:
             logger.warning("Error handling get process private bytes.")
 
     def _track_commited_memory(self, observer: Observer) -> None:
-        """ Track Commited Memory
+        """Track Commited Memory
 
         Available commited memory is defined as total memory minus available
         memory.
""" diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/request_metrics.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/request_metrics.py index 2cbfab4..71bca8f 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/request_metrics.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/request_metrics.py @@ -9,8 +9,6 @@ from opentelemetry.metrics import Meter, Observer from opentelemetry.sdk.metrics import UpDownSumObserver -from azure_monitor.sdk.auto_collection.utils import AutoCollectionType - _requests_lock = threading.Lock() logger = logging.getLogger(__name__) requests_map = dict() @@ -79,29 +77,24 @@ class RequestMetrics: Args: meter: OpenTelemetry Meter labels: Dictionary of labels - collection_type: Standard or Live Metrics """ def __init__( - self, - meter: Meter, - labels: Dict[str, str], - collection_type: AutoCollectionType, + self, meter: Meter, labels: Dict[str, str], ): self._meter = meter self._labels = labels # Patch the HTTPServer handler to track request information HTTPServer.__init__ = server_patch - if collection_type == AutoCollectionType.LIVE_METRICS: - meter.register_observer( - callback=self._track_request_failed_rate, - name="\\ApplicationInsights\\Requests Failed/Sec", - description="Incoming Requests Failed Rate", - unit="rps", - value_type=float, - observer_type=UpDownSumObserver, - ) + meter.register_observer( + callback=self._track_request_failed_rate, + name="\\ApplicationInsights\\Requests Failed/Sec", + description="Incoming Requests Failed Rate", + unit="rps", + value_type=float, + observer_type=UpDownSumObserver, + ) meter.register_observer( callback=self._track_request_duration, name="\\ApplicationInsights\\Request Duration", @@ -120,7 +113,7 @@ def __init__( ) def _track_request_duration(self, observer: Observer) -> None: - """ Track Request execution time + """Track Request execution time Calculated by getting the time it takes to make an incoming request and dividing over the amount of incoming requests over an elapsed time. 
@@ -144,7 +137,7 @@ def _track_request_duration(self, observer: Observer) -> None:
         observer.observe(last_average_duration, self._labels)
 
     def _track_request_rate(self, observer: Observer) -> None:
-        """ Track Request execution rate
+        """Track Request execution rate
 
         Calculated by obtaining by getting the number of incoming requests
         made to an HTTPServer within an elapsed time and dividing that value
@@ -174,7 +167,7 @@ def _track_request_rate(self, observer: Observer) -> None:
         observer.observe(last_rate, self._labels)
 
     def _track_request_failed_rate(self, observer: Observer) -> None:
-        """ Track Request failed execution rate
+        """Track Request failed execution rate
 
         Calculated by obtaining by getting the number of failed incoming requests
         made to an HTTPServer within an elapsed time and dividing that value
diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/utils.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/utils.py
index 8883b11..6ca330c 100644
--- a/azure_monitor/src/azure_monitor/sdk/auto_collection/utils.py
+++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/utils.py
@@ -4,8 +4,7 @@
 
 
 class AutoCollectionType(Enum):
-    """Automatic collection of metrics type
-    """
+    """Automatic collection of metrics type"""
 
     PERF_COUNTER = 0
     LIVE_METRICS = 1
diff --git a/azure_monitor/tests/auto_collection/test_auto_collection.py b/azure_monitor/tests/auto_collection/test_auto_collection.py
index 6c85d12..c59a035 100644
--- a/azure_monitor/tests/auto_collection/test_auto_collection.py
+++ b/azure_monitor/tests/auto_collection/test_auto_collection.py
@@ -25,16 +25,10 @@ def tearDownClass(cls):
     @mock.patch(
         "azure_monitor.sdk.auto_collection.PerformanceMetrics", autospec=True
     )
-    @mock.patch(
-        "azure_monitor.sdk.auto_collection.RequestMetrics", autospec=True
-    )
-    def test_constructor(self, mock_requests, mock_performance):
+    def test_constructor(self, mock_performance):
         """Test the constructor."""
         AutoCollection(meter=self._meter, labels=self._test_labels)
         self.assertEqual(mock_performance.called, True)
-        self.assertEqual(mock_requests.called, True)
         self.assertEqual(mock_performance.call_args[0][0], self._meter)
         self.assertEqual(mock_performance.call_args[0][1], self._test_labels)
-        self.assertEqual(mock_requests.call_args[0][0], self._meter)
-        self.assertEqual(mock_requests.call_args[0][1], self._test_labels)
diff --git a/azure_monitor/tests/auto_collection/test_request_metrics.py b/azure_monitor/tests/auto_collection/test_request_metrics.py
index 2ec6a85..62b1209 100644
--- a/azure_monitor/tests/auto_collection/test_request_metrics.py
+++ b/azure_monitor/tests/auto_collection/test_request_metrics.py
@@ -9,7 +9,6 @@
 from opentelemetry.sdk.metrics import MeterProvider, Observer
 
 from azure_monitor.sdk.auto_collection import request_metrics
-from azure_monitor.sdk.auto_collection.utils import AutoCollectionType
 
 ORIGINAL_CONS = HTTPServer.__init__
 
@@ -33,13 +32,11 @@ def setUp(self):
     def test_constructor(self):
         mock_meter = mock.Mock()
         request_metrics_collector = request_metrics.RequestMetrics(
-            meter=mock_meter,
-            labels=self._test_labels,
-            collection_type=AutoCollectionType.PERF_COUNTER,
+            meter=mock_meter, labels=self._test_labels,
         )
         self.assertEqual(request_metrics_collector._meter, mock_meter)
         self.assertEqual(request_metrics_collector._labels, self._test_labels)
-        self.assertEqual(mock_meter.register_observer.call_count, 2)
+        self.assertEqual(mock_meter.register_observer.call_count, 3)
         create_metric_calls = mock_meter.register_observer.call_args_list
         create_metric_calls[0].assert_called_with(
             callback=request_metrics_collector._track_request_duration,
             name="\\ApplicationInsights\\Request Duration",
@@ -57,11 +54,17 @@ def test_constructor(self):
             value_type=float,
         )
 
+        create_metric_calls[2].assert_called_with(
+            callback=request_metrics_collector._track_request_failed_rate,
+            name="\\ApplicationInsights\\Requests Failed/Sec",
+            description="Incoming Requests Failed Rate",
+            unit="rps",
+            value_type=float,
+        )
+
     def test_track_request_duration(self):
         request_metrics_collector = request_metrics.RequestMetrics(
-            meter=self._meter,
-            labels=self._test_labels,
-            collection_type=AutoCollectionType.PERF_COUNTER,
+            meter=self._meter, labels=self._test_labels,
         )
         request_metrics.requests_map["duration"] = 100
         request_metrics.requests_map["count"] = 10
@@ -80,9 +83,7 @@ def test_track_request_duration(self):
 
     def test_track_request_duration_error(self):
         request_metrics_collector = request_metrics.RequestMetrics(
-            meter=self._meter,
-            labels=self._test_labels,
-            collection_type=AutoCollectionType.PERF_COUNTER,
+            meter=self._meter, labels=self._test_labels,
         )
         request_metrics.requests_map["duration"] = 100
         request_metrics.requests_map["count"] = 10
@@ -102,9 +103,7 @@ def test_track_request_duration_error(self):
     @mock.patch("azure_monitor.sdk.auto_collection.request_metrics.time")
     def test_track_request_rate(self, time_mock):
         request_metrics_collector = request_metrics.RequestMetrics(
-            meter=self._meter,
-            labels=self._test_labels,
-            collection_type=AutoCollectionType.PERF_COUNTER,
+            meter=self._meter, labels=self._test_labels,
         )
         time_mock.time.return_value = 100
         request_metrics.requests_map["last_time"] = 98
@@ -125,9 +124,7 @@ def test_track_request_rate(self, time_mock):
     def test_track_request_rate_time_none(self, time_mock):
         time_mock.time.return_value = 100
         request_metrics_collector = request_metrics.RequestMetrics(
-            meter=self._meter,
-            labels=self._test_labels,
-            collection_type=AutoCollectionType.PERF_COUNTER,
+            meter=self._meter, labels=self._test_labels,
        )
         request_metrics.requests_map["last_time"] = None
         obs = Observer(
@@ -145,9 +142,7 @@ def test_track_request_rate_time_none(self, time_mock):
     @mock.patch("azure_monitor.sdk.auto_collection.request_metrics.time")
     def test_track_request_rate_error(self, time_mock):
         request_metrics_collector = request_metrics.RequestMetrics(
-            meter=self._meter,
-            labels=self._test_labels,
-            collection_type=AutoCollectionType.PERF_COUNTER,
+            meter=self._meter, labels=self._test_labels,
         )
         time_mock.time.return_value = 100
         request_metrics.requests_map["last_rate"] = 5.0
@@ -164,6 +159,47 @@ def test_track_request_rate_error(self, time_mock):
             obs.aggregators[tuple(self._test_labels.items())].current, 5.0
         )
 
+    @mock.patch("azure_monitor.sdk.auto_collection.request_metrics.time")
+    def test_track_request_rate_failed(self, time_mock):
+        request_metrics_collector = request_metrics.RequestMetrics(
+            meter=self._meter, labels=self._test_labels,
+        )
+        time_mock.time.return_value = 100
+        request_metrics.requests_map["last_failed_count"] = 5.0
+        request_metrics.requests_map["failed_count"] = 25.0
+        request_metrics.requests_map["last_time"] = 98
+        obs = Observer(
+            callback=request_metrics_collector._track_request_failed_rate,
+            name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec",
+            description="Incoming Requests Average Execution Rate",
+            unit="rps",
+            value_type=float,
+        )
+        request_metrics_collector._track_request_failed_rate(obs)
+        self.assertEqual(
+            obs.aggregators[tuple(self._test_labels.items())].current, 10.0
+        )
+
@mock.patch("azure_monitor.sdk.auto_collection.request_metrics.time") + def test_track_request_rate_failed_error(self, time_mock): + request_metrics_collector = request_metrics.RequestMetrics( + meter=self._meter, labels=self._test_labels, + ) + time_mock.time.return_value = 100 + request_metrics.requests_map["last_rate"] = 5.0 + request_metrics.requests_map["last_time"] = 100 + obs = Observer( + callback=request_metrics_collector._track_request_failed_rate, + name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec", + description="Incoming Requests Average Execution Rate", + unit="rps", + value_type=float, + ) + request_metrics_collector._track_request_failed_rate(obs) + self.assertEqual( + obs.aggregators[tuple(self._test_labels.items())].current, 5.0 + ) + def test_request_patch(self): map = request_metrics.requests_map # pylint: disable=redefined-builtin func = mock.Mock() diff --git a/azure_monitor/tests/trace/test_trace.py b/azure_monitor/tests/trace/test_trace.py index e918b92..bea8033 100644 --- a/azure_monitor/tests/trace/test_trace.py +++ b/azure_monitor/tests/trace/test_trace.py @@ -15,7 +15,10 @@ from opentelemetry.trace.status import Status, StatusCanonicalCode from azure_monitor.export import ExportResult -from azure_monitor.export.trace import AzureMonitorSpanExporter +from azure_monitor.export.trace import ( + AzureMonitorSpanExporter, + indicate_processed_by_metric_extractors, +) from azure_monitor.options import ExporterOptions TEST_FOLDER = os.path.abspath(".test") @@ -67,6 +70,11 @@ def test_constructor(self): exporter = AzureMonitorSpanExporter( instrumentation_key="4321abcd-5678-4efa-8abc-1234567890ab", storage_path=os.path.join(TEST_FOLDER, self.id()), + storage_max_size=50, + storage_maintenance_period=100, + storage_retention_period=200, + proxies={"asd": "123"}, + timeout=5.0, ) self.assertIsInstance(exporter.options, ExporterOptions) self.assertEqual( @@ -74,7 +82,16 @@ def test_constructor(self): "4321abcd-5678-4efa-8abc-1234567890ab", ) self.assertEqual( - exporter.options.storage_path, os.path.join(TEST_FOLDER, self.id()) + exporter.storage.path, os.path.join(TEST_FOLDER, self.id()) + ) + self.assertEqual(exporter.storage.max_size, 50) + self.assertEqual(exporter.storage.maintenance_period, 100) + self.assertEqual(exporter.storage.retention_period, 200) + self.assertEqual(exporter.options.proxies, {"asd": "123"}) + self.assertEqual(exporter.options.timeout, 5.0) + self.assertEqual( + exporter._telemetry_processors[0], + indicate_processed_by_metric_extractors, ) def test_export_empty(self): @@ -121,6 +138,7 @@ def test_export_success(self): storage_mock = mock.Mock() exporter._transmit_from_storage = storage_mock exporter.export([test_span]) + self.assertEqual(len(exporter._telemetry_processors), 1) self.assertEqual(storage_mock.call_count, 1) self.assertEqual(len(os.listdir(exporter.storage.path)), 0) @@ -164,6 +182,26 @@ def test_export_not_retryable(self): result = exporter.export([test_span]) self.assertEqual(result, SpanExportResult.FAILURE) + def test_indicate_processed_by_metric_extractors(self): + envelope = mock.Mock() + envelope.data.base_type = "RemoteDependencyData" + envelope.data.base_data.properties = {} + indicate_processed_by_metric_extractors(envelope) + self.assertEqual( + envelope.data.base_data.properties[ + "_MS.ProcessedByMetricExtractors" + ], + "(Name:'Dependencies',Ver:'1.1')", + ) + envelope.data.base_type = "RequestData" + indicate_processed_by_metric_extractors(envelope) + self.assertEqual( + 
+            envelope.data.base_data.properties[
+                "_MS.ProcessedByMetricExtractors"
+            ],
+            "(Name:'Requests',Ver:'1.1')",
+        )
+
     def test_span_to_envelope_none(self):
         exporter = self._exporter
         self.assertIsNone(exporter._span_to_envelope(None))
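
For readers following the change, here is a minimal, illustrative sketch of what the telemetry processor introduced in this patch does to an exported envelope. It assumes the `azure_monitor` package from this repository (with this patch applied) is importable; the `SimpleNamespace` objects are hypothetical stand-ins for the real `protocol.Envelope`, used only to show the stamped property.

```python
from types import SimpleNamespace

from azure_monitor.export.trace import indicate_processed_by_metric_extractors

# Minimal stand-in for a protocol.Envelope with just the fields the processor reads.
envelope = SimpleNamespace(
    data=SimpleNamespace(
        base_type="RemoteDependencyData",
        base_data=SimpleNamespace(properties={}),
    )
)

# AzureMonitorSpanExporter.__init__ now registers this processor automatically,
# so every exported envelope is stamped before transmission; calling it directly
# shows the effect.
indicate_processed_by_metric_extractors(envelope)
print(envelope.data.base_data.properties["_MS.ProcessedByMetricExtractors"])
# (Name:'Dependencies',Ver:'1.1')
```

Envelopes whose `base_type` is anything other than `RemoteDependencyData` (e.g. `RequestData`) are stamped with `(Name:'Requests',Ver:'1.1')` instead, which is what the new `test_indicate_processed_by_metric_extractors` test in `test_trace.py` verifies.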