diff --git a/.travis.yml b/.travis.yml index 2cfc918..c48fd48 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,6 @@ dist: xenial language: python python: - - '3.4' - '3.5' - '3.6' - '3.7' diff --git a/azure_monitor/CHANGELOG.md b/azure_monitor/CHANGELOG.md index b8c03a5..e9a25f2 100644 --- a/azure_monitor/CHANGELOG.md +++ b/azure_monitor/CHANGELOG.md @@ -2,6 +2,14 @@ ## Unreleased +## 0.5b.0 +Released 2020-09-24 + +- Change epoch for live metrics + ([#115](https://github.com/microsoft/opentelemetry-azure-monitor-python/pull/115)) +- Dropping support for Python 3.4 + ([#117](https://github.com/microsoft/opentelemetry-azure-monitor-python/pull/117)) + ## 0.4b.0 Released 2020-06-29 diff --git a/azure_monitor/README.md b/azure_monitor/README.md index 98b692d..b417715 100644 --- a/azure_monitor/README.md +++ b/azure_monitor/README.md @@ -1,7 +1,9 @@ -# OpenTelemetry Azure Monitor SDKs and Exporters +# OpenTelemetry Azure Monitor SDKs and Exporters (Private preview) [![PyPI version](https://badge.fury.io/py/opentelemetry-azure-monitor.svg)](https://badge.fury.io/py/opentelemetry-azure-monitor) +The OpenTelemetry Azure Monitor SDK and exporter are in private preview. They are not recommended for a production environment. + ## Installation ```sh diff --git a/azure_monitor/examples/metrics/auto_collector.py b/azure_monitor/examples/metrics/auto_collector.py index fd7617c..95c060c 100644 --- a/azure_monitor/examples/metrics/auto_collector.py +++ b/azure_monitor/examples/metrics/auto_collector.py @@ -5,16 +5,11 @@ from opentelemetry.sdk.trace import TracerProvider from azure_monitor import AzureMonitorMetricsExporter -from azure_monitor.sdk.auto_collection import ( - AutoCollection, - AzureMetricsSpanProcessor, -) +from azure_monitor.sdk.auto_collection import AutoCollection # Add Span Processor to get metrics about traces trace.set_tracer_provider(TracerProvider()) tracer = trace.get_tracer_provider().get_tracer(__name__) -span_processor = AzureMetricsSpanProcessor() -trace.get_tracer_provider().add_span_processor(span_processor) metrics.set_meter_provider(MeterProvider()) meter = metrics.get_meter(__name__) @@ -25,9 +20,7 @@ testing_label_set = {"environment": "testing"} # Automatically collect standard metrics -auto_collection = AutoCollection( - meter=meter, labels=testing_label_set, span_processor=span_processor -) +auto_collection = AutoCollection(meter=meter, labels=testing_label_set) metrics.get_meter_provider().start_pipeline(meter, exporter, 2) diff --git a/azure_monitor/examples/metrics/observer.py b/azure_monitor/examples/metrics/observer.py index 52f21ac..66b1c64 100644 --- a/azure_monitor/examples/metrics/observer.py +++ b/azure_monitor/examples/metrics/observer.py @@ -48,4 +48,4 @@ def get_ram_usage_callback(observer): label_keys=(), ) -input("Metrics will be printed soon. 
Press a key to finish...\n") +input("Press any key to exit...") diff --git a/azure_monitor/examples/metrics/simple.py b/azure_monitor/examples/metrics/simple.py index 34c7fd1..363047c 100644 --- a/azure_monitor/examples/metrics/simple.py +++ b/azure_monitor/examples/metrics/simple.py @@ -18,7 +18,6 @@ unit="1", value_type=int, metric_type=Counter, - label_keys=("environment",), ) testing_labels = {"environment": "testing"} diff --git a/azure_monitor/examples/traces/client.py b/azure_monitor/examples/traces/client.py index 027f8dc..ad322a7 100644 --- a/azure_monitor/examples/traces/client.py +++ b/azure_monitor/examples/traces/client.py @@ -5,7 +5,7 @@ # pylint: disable=no-name-in-module import requests from opentelemetry import trace -from opentelemetry.ext.requests import RequestsInstrumentor +from opentelemetry.instrumentation.requests import RequestsInstrumentor from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchExportSpanProcessor @@ -21,6 +21,6 @@ ) trace.get_tracer_provider().add_span_processor(span_processor) -response = requests.get(url="http://127.0.0.1:8080/") +response = requests.get(url="http://example.com/") input("Press any key to exit...") diff --git a/azure_monitor/examples/traces/request.py b/azure_monitor/examples/traces/request.py deleted file mode 100644 index b9942a0..0000000 --- a/azure_monitor/examples/traces/request.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -# pylint: disable=import-error -# pylint: disable=no-member -# pylint: disable=no-name-in-module -import requests -from opentelemetry import trace -from opentelemetry.ext.requests import RequestsInstrumentor -from opentelemetry.sdk.trace import TracerProvider -from opentelemetry.sdk.trace.export import SimpleExportSpanProcessor - -from azure_monitor import AzureMonitorSpanExporter - -trace.set_tracer_provider(TracerProvider()) - -RequestsInstrumentor().instrument() -span_processor = SimpleExportSpanProcessor( - AzureMonitorSpanExporter( - connection_string="InstrumentationKey=" - ) -) -trace.get_tracer_provider().add_span_processor(span_processor) -tracer = trace.get_tracer(__name__) - -with tracer.start_as_current_span("parent"): - response = requests.get("https://azure.microsoft.com/", timeout=5) - -input("Press any key to exit...") diff --git a/azure_monitor/examples/traces/server.py b/azure_monitor/examples/traces/server.py index 51039c8..be08550 100644 --- a/azure_monitor/examples/traces/server.py +++ b/azure_monitor/examples/traces/server.py @@ -5,8 +5,8 @@ # pylint: disable=no-name-in-module import requests from opentelemetry import trace -from opentelemetry.ext.flask import FlaskInstrumentor -from opentelemetry.ext.requests import RequestsInstrumentor +from opentelemetry.instrumentation.flask import FlaskInstrumentor +from opentelemetry.instrumentation.requests import RequestsInstrumentor from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchExportSpanProcessor diff --git a/azure_monitor/setup.cfg b/azure_monitor/setup.cfg index b0f48bb..9e5d68c 100644 --- a/azure_monitor/setup.cfg +++ b/azure_monitor/setup.cfg @@ -16,20 +16,19 @@ classifiers = License :: OSI Approved :: MIT License Programming Language :: Python Programming Language :: Python :: 3 - Programming Language :: Python :: 3.4 Programming Language :: Python :: 3.5 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: 
Python :: 3.8 [options] -python_requires = >=3.4 +python_requires = >=3.5 package_dir= =src packages=find_namespace: install_requires = - opentelemetry-api == 0.10b0 - opentelemetry-sdk == 0.10b0 + opentelemetry-api == 0.13b0 + opentelemetry-sdk == 0.13b0 psutil >= 5.6.3 requests ~= 2.0 diff --git a/azure_monitor/src/azure_monitor/options.py b/azure_monitor/src/azure_monitor/options.py index 36e2630..0764ef7 100644 --- a/azure_monitor/src/azure_monitor/options.py +++ b/azure_monitor/src/azure_monitor/options.py @@ -135,6 +135,7 @@ def parse_connection_string(connection_string) -> typing.Dict: # Convert keys to lower-case due to case type-insensitive checking result = {key.lower(): value for key, value in result.items()} except Exception: + # pylint: disable=raise-missing-from raise ValueError("Invalid connection string") # Validate authorization auth = result.get("authorization") diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py index 59e0705..3753983 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py @@ -5,9 +5,6 @@ from opentelemetry.metrics import Meter -from azure_monitor.sdk.auto_collection.dependency_metrics import ( - DependencyMetrics, -) from azure_monitor.sdk.auto_collection.metrics_span_processor import ( AzureMetricsSpanProcessor, ) @@ -21,7 +18,6 @@ "AutoCollection", "AutoCollectionType", "AzureMetricsSpanProcessor", - "DependencyMetrics", "RequestMetrics", "PerformanceMetrics", ] @@ -36,14 +32,7 @@ class AutoCollection: labels: Dictionary of labels """ - def __init__( - self, - meter: Meter, - labels: Dict[str, str], - span_processor: AzureMetricsSpanProcessor, - ): - col_type = AutoCollectionType.STANDARD_METRICS + def __init__(self, meter: Meter, labels: Dict[str, str]): + col_type = AutoCollectionType.PERF_COUNTER self._performance_metrics = PerformanceMetrics(meter, labels, col_type) - self._request_metrics = RequestMetrics( - meter, labels, span_processor, col_type - ) + self._request_metrics = RequestMetrics(meter, labels, col_type) diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py index b6dea9e..6229351 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py @@ -1,16 +1,53 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
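With the span-processor argument gone from `AutoCollection`, standard metric collection is wired up from just a meter and a label set. A minimal sketch of the new call pattern, mirroring `examples/metrics/auto_collector.py` from this diff (the connection string below is a placeholder, not a working key):

```python
from opentelemetry import metrics
from opentelemetry.sdk.metrics import MeterProvider

from azure_monitor import AzureMonitorMetricsExporter
from azure_monitor.sdk.auto_collection import AutoCollection

metrics.set_meter_provider(MeterProvider())
meter = metrics.get_meter(__name__)

# Placeholder value -- a real connection string / instrumentation key is required.
exporter = AzureMonitorMetricsExporter(
    connection_string="InstrumentationKey=<your-instrumentation-key>"
)

# No AzureMetricsSpanProcessor argument anymore; incoming-request counters
# now come from the http.server patch inside RequestMetrics.
auto_collection = AutoCollection(meter=meter, labels={"environment": "testing"})

metrics.get_meter_provider().start_pipeline(meter, exporter, 2)
```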
+import logging +import threading import time from typing import Dict +import requests +from opentelemetry import context from opentelemetry.metrics import Meter, Observer from opentelemetry.sdk.metrics import UpDownSumObserver -from azure_monitor.sdk.auto_collection.metrics_span_processor import ( - AzureMetricsSpanProcessor, -) - +_dependency_lock = threading.Lock() +logger = logging.getLogger(__name__) dependency_map = dict() +ORIGINAL_REQUEST = requests.Session.request + + +def dependency_patch(*args, **kwargs) -> None: + start_time = time.time() + + try: + result = ORIGINAL_REQUEST(*args, **kwargs) + except Exception as exc: # pylint: disable=broad-except + exception = exc + result = getattr(exc, "response", None) + end_time = time.time() + + # Only collect request metric if sent from non-exporter thread + if context.get_value("suppress_instrumentation") is None: + # We don't want multiple threads updating this at once + with _dependency_lock: + try: + # Update duration + duration = dependency_map.get("duration", 0) + dependency_map["duration"] = duration + (end_time - start_time) + # Update count + count = dependency_map.get("count", 0) + dependency_map["count"] = count + 1 + # Update failed count + if ( + result is not None + and result.status_code < 200 + and result.status_code >= 300 + ) or exception is not None: + failed_count = dependency_map.get("failed_count", 0) + dependency_map["failed_count"] = failed_count + 1 + except Exception: # pylint: disable=broad-except + logger.warning("Error handling failed dependency metrics.") + return result class DependencyMetrics: @@ -20,26 +57,20 @@ class DependencyMetrics: Args: meter: OpenTelemetry Meter labels: Dictionary of labels - span_processor: Azure Metrics Span Processor - collection_type: Standard or Live Metrics """ - def __init__( - self, - meter: Meter, - labels: Dict[str, str], - span_processor: AzureMetricsSpanProcessor, - ): + def __init__(self, meter: Meter, labels: Dict[str, str]): self._meter = meter self._labels = labels - self._span_processor = span_processor + # Patch requests + requests.Session.request = dependency_patch meter.register_observer( callback=self._track_dependency_duration, name="\\ApplicationInsights\\Dependency Call Duration", description="Average Outgoing Requests duration", unit="milliseconds", - value_type=int, + value_type=float, observer_type=UpDownSumObserver, ) meter.register_observer( @@ -66,11 +97,11 @@ def _track_dependency_rate(self, observer: Observer) -> None: using the requests library within an elapsed time and dividing that value over the elapsed time. """ - current_count = self._span_processor.dependency_count + current_count = dependency_map.get("count", 0) current_time = time.time() last_count = dependency_map.get("last_count", 0) last_time = dependency_map.get("last_time") - last_result = dependency_map.get("last_result", 0) + last_result = dependency_map.get("last_result", 0.0) try: # last_time is None the very first time this function is called @@ -95,29 +126,27 @@ def _track_dependency_duration(self, observer: Observer) -> None: Calculated by getting the time it takes to make an outgoing request and dividing over the amount of outgoing requests over an elapsed time. 
""" - last_average_duration = dependency_map.get("last_average_duration", 0) - interval_duration = ( - self._span_processor.dependency_duration - - dependency_map.get("last_duration", 0) + last_average_duration = dependency_map.get( + "last_average_duration", 0.0 ) - interval_count = ( - self._span_processor.dependency_count - - dependency_map.get("last_count", 0) + interval_duration = dependency_map.get( + "duration", 0.0 + ) - dependency_map.get("last_duration", 0.0) + interval_count = dependency_map.get("count", 0) - dependency_map.get( + "last_count", 0 ) try: result = interval_duration / interval_count - dependency_map[ - "last_count" - ] = self._span_processor.dependency_count + dependency_map["last_count"] = dependency_map.get("count", 0) dependency_map["last_average_duration"] = result - dependency_map[ - "last_duration" - ] = self._span_processor.dependency_duration - observer.observe(int(result), self._labels) + dependency_map["last_duration"] = dependency_map.get( + "duration", 0.0 + ) + observer.observe(result, self._labels) except ZeroDivisionError: # If interval_count is 0, exporter call made too close to previous # Return the previous result if this is the case - observer.observe(int(last_average_duration), self._labels) + observer.observe(last_average_duration, self._labels) def _track_failure_rate(self, observer: Observer) -> None: """ Track Failed Dependency rate @@ -126,11 +155,11 @@ def _track_failure_rate(self, observer: Observer) -> None: using the requests library within an elapsed time and dividing that value over the elapsed time. """ - current_failed_count = self._span_processor.failed_dependency_count + current_failed_count = dependency_map.get("failed_count", 0) current_time = time.time() last_failed_count = dependency_map.get("last_failed_count", 0) last_time = dependency_map.get("last_time") - last_result = dependency_map.get("last_result", 0) + last_result = dependency_map.get("last_result", 0.0) try: # last_time is None the very first time this function is called diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/__init__.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/__init__.py index 0bd1428..9e41b00 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/__init__.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/__init__.py @@ -41,12 +41,8 @@ def __init__( ): col_type = AutoCollectionType.LIVE_METRICS self._performance_metrics = PerformanceMetrics(meter, labels, col_type) - self._dependency_metrics = DependencyMetrics( - meter, labels, span_processor - ) - self._request_metrics = RequestMetrics( - meter, labels, span_processor, col_type - ) + self._dependency_metrics = DependencyMetrics(meter, labels) + self._request_metrics = RequestMetrics(meter, labels, col_type) self._manager = LiveMetricsManager( meter, instrumentation_key, span_processor ) diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/exporter.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/exporter.py index 4c9e859..b200b05 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/exporter.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/exporter.py @@ -61,7 +61,7 @@ def export( return MetricsExportResult.SUCCESS except Exception: # pylint: disable=broad-except - logger.exception("Exception occurred while exporting the data.") + logger.warning("Exception occurred while exporting the data.") return 
MetricsExportResult.FAILURE diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/sender.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/sender.py index 27f39e3..9872efb 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/sender.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/sender.py @@ -3,7 +3,6 @@ # import json import logging -import time import requests @@ -42,7 +41,7 @@ def _send_request(self, data: str, request_type: str) -> requests.Response: "Expect": "100-continue", "Content-Type": "application/json; charset=utf-8", utils.LIVE_METRICS_TRANSMISSION_TIME_HEADER: str( - round(time.time()) * 1000 + utils.get_time_since_epoch() ), }, ) diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/utils.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/utils.py index cfba984..5893497 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/utils.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/live_metrics/utils.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. # +import datetime import time import uuid @@ -26,3 +27,13 @@ def create_metric_envelope(instrumentation_key: str): version=azure_monitor_context.get("ai.internal.sdkVersion"), ) return envelope + + +def get_time_since_epoch(): + now = datetime.datetime.now() + # epoch is defined as 12:00:00 midnight on January 1, 0001 for Microsoft + epoch = datetime.datetime(1, 1, 1) + delta = (now - epoch).total_seconds() + # return the number of 100-nanosecond intervals + delta = round(delta * 10000000) + return delta diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/metrics_span_processor.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/metrics_span_processor.py index 660809e..0ef5908 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/metrics_span_processor.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/metrics_span_processor.py @@ -4,7 +4,6 @@ import logging from opentelemetry.sdk.trace import Span, SpanProcessor -from opentelemetry.trace import SpanKind from azure_monitor.export.trace import convert_span_to_envelope @@ -13,52 +12,24 @@ class AzureMetricsSpanProcessor(SpanProcessor): """AzureMetricsSpanProcessor is an implementation of `SpanProcessor` used - to generate Azure specific metrics, including dependencies/requests rate, average duration - and failed dependencies/requests. + to generate documents for Live Metrics. 
""" def __init__(self): self.is_collecting_documents = False self.documents = collections.deque() - self.request_count = 0 - self.dependency_count = 0 - self.failed_request_count = 0 - self.failed_dependency_count = 0 - self.request_duration = 0 - self.dependency_duration = 0 def on_start(self, span: Span) -> None: pass def on_end(self, span: Span) -> None: try: - if span.kind == SpanKind.SERVER: - self.request_count = self.request_count + 1 - duration = ( - span.end_time - span.start_time - ) / 1000000 # Convert to milliseconds - self.request_duration = self.request_duration + duration + if self.is_collecting_documents: if not span.status.is_ok: - self.failed_request_count = self.failed_request_count + 1 - if self.is_collecting_documents: - self.documents.append(convert_span_to_envelope(span)) - - elif span.kind == SpanKind.CLIENT: - self.dependency_count = self.dependency_count + 1 - duration = ( - span.end_time - span.start_time - ) / 1000000 # Convert to milliseconds - self.dependency_duration = self.dependency_duration + duration - if not span.status.is_ok: - self.failed_dependency_count = ( - self.failed_dependency_count + 1 - ) - if self.is_collecting_documents: - self.documents.append(convert_span_to_envelope(span)) - + self.documents.append(convert_span_to_envelope(span)) # pylint: disable=broad-except except Exception: - logger.exception("Exception while processing Span.") + logger.warning("Exception while processing Span.") def shutdown(self) -> None: pass diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py index 7c5f458..417e068 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py @@ -43,7 +43,7 @@ def __init__( observer_type=UpDownSumObserver, ) - if collection_type == AutoCollectionType.STANDARD_METRICS: + if collection_type == AutoCollectionType.PERF_COUNTER: self._meter.register_observer( callback=self._track_memory, name="\\Memory\\Available Bytes", @@ -110,7 +110,7 @@ def _track_process_cpu(self, observer: Observer) -> None: cpu_count = psutil.cpu_count(logical=True) observer.observe(PROCESS.cpu_percent() / cpu_count, self._labels) except Exception: # pylint: disable=broad-except - logger.exception("Error handling get process cpu usage.") + logger.warning("Error handling get process cpu usage.") def _track_process_memory(self, observer: Observer) -> None: """ Track Memory @@ -121,7 +121,7 @@ def _track_process_memory(self, observer: Observer) -> None: try: observer.observe(PROCESS.memory_info().rss, self._labels) except Exception: # pylint: disable=broad-except - logger.exception("Error handling get process private bytes.") + logger.warning("Error handling get process private bytes.") def _track_commited_memory(self, observer: Observer) -> None: """ Track Commited Memory diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/request_metrics.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/request_metrics.py index c78294d..2cbfab4 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/request_metrics.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/request_metrics.py @@ -1,19 +1,74 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
import logging +import threading import time +from http.server import HTTPServer from typing import Dict from opentelemetry.metrics import Meter, Observer from opentelemetry.sdk.metrics import UpDownSumObserver -from azure_monitor.sdk.auto_collection.metrics_span_processor import ( - AzureMetricsSpanProcessor, -) from azure_monitor.sdk.auto_collection.utils import AutoCollectionType +_requests_lock = threading.Lock() logger = logging.getLogger(__name__) requests_map = dict() +ORIGINAL_CONSTRUCTOR = HTTPServer.__init__ + + +def request_patch(func): + def wrapper(self=None): + start_time = time.time() + func(self) + end_time = time.time() + + with _requests_lock: + # Update Count + count = requests_map.get("count", 0) + requests_map["count"] = count + 1 + # Update duration + duration = requests_map.get("duration", 0) + requests_map["duration"] = duration + (end_time - start_time) + + return wrapper + + +def send_response_patch(func): + def wrapper(self, code, message=None): + func(self, code, message) + if code < 200 >= 300: + with _requests_lock: + # Update Count + failed_count = requests_map.get("failed_count", 0) + requests_map["failed_count"] = failed_count + 1 + + return wrapper + + +def server_patch(*args, **kwargs): + if len(args) >= 3: + handler = args[2] + if handler: + # Patch the handler methods if they exist + if "do_DELETE" in dir(handler): + handler.do_DELETE = request_patch(handler.do_DELETE) + if "do_GET" in dir(handler): + handler.do_GET = request_patch(handler.do_GET) + if "do_HEAD" in dir(handler): + handler.do_HEAD = request_patch(handler.do_HEAD) + if "do_OPTIONS" in dir(handler): + handler.do_OPTIONS = request_patch(handler.do_OPTIONS) + if "do_POST" in dir(handler): + handler.do_POST = request_patch(handler.do_POST) + if "do_PUT" in dir(handler): + handler.do_PUT = request_patch(handler.do_PUT) + if "send_response" in dir(handler): + handler.send_response = send_response_patch( + handler.send_response + ) + result = ORIGINAL_CONSTRUCTOR(*args, **kwargs) + return result class RequestMetrics: @@ -24,7 +79,6 @@ class RequestMetrics: Args: meter: OpenTelemetry Meter labels: Dictionary of labels - span_processor: Azure Metrics Span Processor collection_type: Standard or Live Metrics """ @@ -32,12 +86,12 @@ def __init__( self, meter: Meter, labels: Dict[str, str], - span_processor: AzureMetricsSpanProcessor, collection_type: AutoCollectionType, ): self._meter = meter self._labels = labels - self._span_processor = span_processor + # Patch the HTTPServer handler to track request information + HTTPServer.__init__ = server_patch if collection_type == AutoCollectionType.LIVE_METRICS: meter.register_observer( @@ -50,15 +104,15 @@ def __init__( ) meter.register_observer( callback=self._track_request_duration, - name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time", + name="\\ApplicationInsights\\Request Duration", description="Incoming Requests Average Execution Time", unit="milliseconds", - value_type=int, + value_type=float, observer_type=UpDownSumObserver, ) meter.register_observer( callback=self._track_request_rate, - name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec", + name="\\ApplicationInsights\\Requests/Sec", description="Incoming Requests Rate", unit="rps", value_type=float, @@ -71,26 +125,23 @@ def _track_request_duration(self, observer: Observer) -> None: Calculated by getting the time it takes to make an incoming request and dividing over the amount of incoming requests over an elapsed time. 
""" - last_average_duration = requests_map.get("last_average_duration", 0) - interval_duration = ( - self._span_processor.request_duration - - requests_map.get("last_duration", 0) - ) - interval_count = self._span_processor.request_count - requests_map.get( + last_average_duration = requests_map.get("last_average_duration", 0.0) + interval_duration = requests_map.get( + "duration", 0.0 + ) - requests_map.get("last_duration", 0.0) + interval_count = requests_map.get("count", 0) - requests_map.get( "last_count", 0 ) try: result = interval_duration / interval_count - requests_map["last_count"] = self._span_processor.request_count + requests_map["last_count"] = requests_map.get("count", 0) requests_map["last_average_duration"] = result - requests_map[ - "last_duration" - ] = self._span_processor.request_duration - observer.observe(int(result), self._labels) + requests_map["last_duration"] = requests_map.get("duration", 0.0) + observer.observe(result, self._labels) except ZeroDivisionError: # If interval_count is 0, exporter call made too close to previous # Return the previous result if this is the case - observer.observe(int(last_average_duration), self._labels) + observer.observe(last_average_duration, self._labels) def _track_request_rate(self, observer: Observer) -> None: """ Track Request execution rate @@ -100,22 +151,21 @@ def _track_request_rate(self, observer: Observer) -> None: over the elapsed time. """ current_time = time.time() - last_rate = requests_map.get("last_rate", 0) + last_rate = requests_map.get("last_rate", 0.0) last_time = requests_map.get("last_time") try: # last_rate_time is None the first time this function is called if last_time is not None: interval_time = current_time - requests_map.get("last_time", 0) - interval_count = ( - self._span_processor.request_count - - requests_map.get("last_count", 0) - ) + interval_count = requests_map.get( + "count", 0 + ) - requests_map.get("last_count", 0) result = interval_count / interval_time else: result = 0.0 requests_map["last_time"] = current_time - requests_map["last_count"] = self._span_processor.request_count + requests_map["last_count"] = requests_map.get("count", 0) requests_map["last_rate"] = result observer.observe(result, self._labels) except ZeroDivisionError: @@ -131,24 +181,23 @@ def _track_request_failed_rate(self, observer: Observer) -> None: over the elapsed time. 
""" current_time = time.time() - last_rate = requests_map.get("last_rate", 0) + last_rate = requests_map.get("last_rate", 0.0) last_time = requests_map.get("last_time") try: # last_rate_time is None the first time this function is called if last_time is not None: interval_time = current_time - requests_map.get("last_time", 0) - interval_count = ( - self._span_processor.failed_request_count - - requests_map.get("last_failed_count", 0) - ) + interval_count = requests_map.get( + "failed_count", 0 + ) - requests_map.get("last_failed_count", 0) result = interval_count / interval_time else: result = 0.0 requests_map["last_time"] = current_time - requests_map[ - "last_failed_count" - ] = self._span_processor.failed_request_count + requests_map["last_failed_count"] = requests_map.get( + "failed_count", 0 + ) requests_map["last_rate"] = result observer.observe(result, self._labels) except ZeroDivisionError: diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/utils.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/utils.py index acbc811..8883b11 100644 --- a/azure_monitor/src/azure_monitor/sdk/auto_collection/utils.py +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/utils.py @@ -7,5 +7,5 @@ class AutoCollectionType(Enum): """Automatic collection of metrics type """ - STANDARD_METRICS = 0 + PERF_COUNTER = 0 LIVE_METRICS = 1 diff --git a/azure_monitor/src/azure_monitor/storage.py b/azure_monitor/src/azure_monitor/storage.py index 714aed7..4a22bf5 100644 --- a/azure_monitor/src/azure_monitor/storage.py +++ b/azure_monitor/src/azure_monitor/storage.py @@ -198,12 +198,14 @@ def _check_storage_size(self): ) continue if size >= self.max_size: + # pylint: disable=logging-format-interpolation logger.warning( "Persistent storage max capacity has been " - "reached. Currently at %fKB. Telemetry will be " + "reached. Currently at {}KB. Telemetry will be " "lost. Please consider increasing the value of " - "'storage_max_size' in exporter config.", - format(size / 1024), + "'storage_max_size' in exporter config.".format( + str(size / 1024) + ) ) return False return True diff --git a/azure_monitor/src/azure_monitor/utils.py b/azure_monitor/src/azure_monitor/utils.py index c2f4998..b8da222 100644 --- a/azure_monitor/src/azure_monitor/utils.py +++ b/azure_monitor/src/azure_monitor/utils.py @@ -58,7 +58,7 @@ class PeriodicTask(threading.Thread): """ def __init__(self, interval, function, args=None, kwargs=None): - super(PeriodicTask, self).__init__() + super().__init__() self.interval = interval self.function = function self.args = args or [] diff --git a/azure_monitor/src/azure_monitor/version.py b/azure_monitor/src/azure_monitor/version.py index 58e6682..d460fce 100644 --- a/azure_monitor/src/azure_monitor/version.py +++ b/azure_monitor/src/azure_monitor/version.py @@ -1,3 +1,3 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
-__version__ = "0.4b.0" +__version__ = "0.5b.0" diff --git a/azure_monitor/tests/auto_collection/live_metrics/test_exporter.py b/azure_monitor/tests/auto_collection/live_metrics/test_exporter.py index c480a0d..c376e71 100644 --- a/azure_monitor/tests/auto_collection/live_metrics/test_exporter.py +++ b/azure_monitor/tests/auto_collection/live_metrics/test_exporter.py @@ -44,7 +44,7 @@ def func(*_args, **_kwargs): class TestLiveMetricsExporter(unittest.TestCase): @classmethod def setUpClass(cls): - cls._instrumentation_key = "99c42f65-1656-4c41-afde-bd86b709a4a7" + cls._instrumentation_key = "12345678-1234-5678-abcd-12345678abcd" metrics.set_meter_provider(MeterProvider()) cls._meter = metrics.get_meter(__name__) cls._test_metric = cls._meter.create_metric( @@ -140,7 +140,7 @@ def test_live_metric_envelope_observer(self): self.assertIsInstance(envelope, LiveMetricEnvelope) self.assertEqual( envelope.instrumentation_key, - "99c42f65-1656-4c41-afde-bd86b709a4a7", + "12345678-1234-5678-abcd-12345678abcd", ) self.assertEqual(envelope.documents, []) self.assertEqual(envelope.metrics[0].name, "testname") diff --git a/azure_monitor/tests/auto_collection/live_metrics/test_manager.py b/azure_monitor/tests/auto_collection/live_metrics/test_manager.py index 7b9dc1d..90fcc92 100644 --- a/azure_monitor/tests/auto_collection/live_metrics/test_manager.py +++ b/azure_monitor/tests/auto_collection/live_metrics/test_manager.py @@ -32,7 +32,7 @@ def setUpClass(cls): ) testing_labels = {"environment": "testing"} cls._test_metric.add(5, testing_labels) - cls._instrumentation_key = "99c42f65-1656-4c41-afde-bd86b709a4a7" + cls._instrumentation_key = "12345678-1234-5678-abcd-12345678abcd" cls._manager = None cls._ping = None cls._post = None diff --git a/azure_monitor/tests/auto_collection/live_metrics/test_sender.py b/azure_monitor/tests/auto_collection/live_metrics/test_sender.py index dadf95c..b29859d 100644 --- a/azure_monitor/tests/auto_collection/live_metrics/test_sender.py +++ b/azure_monitor/tests/auto_collection/live_metrics/test_sender.py @@ -14,7 +14,7 @@ class TestLiveMetricsSender(unittest.TestCase): @classmethod def setUpClass(cls): - cls._instrumentation_key = "99c42f65-1656-4c41-afde-bd86b709a4a7" + cls._instrumentation_key = "12345678-1234-5678-abcd-12345678abcd" def test_constructor(self): """Test the constructor.""" diff --git a/azure_monitor/tests/auto_collection/test_auto_collection.py b/azure_monitor/tests/auto_collection/test_auto_collection.py index 40c2f8d..6c85d12 100644 --- a/azure_monitor/tests/auto_collection/test_auto_collection.py +++ b/azure_monitor/tests/auto_collection/test_auto_collection.py @@ -7,10 +7,7 @@ from opentelemetry import metrics from opentelemetry.sdk.metrics import MeterProvider -from azure_monitor.sdk.auto_collection import ( - AutoCollection, - AzureMetricsSpanProcessor, -) +from azure_monitor.sdk.auto_collection import AutoCollection # pylint: disable=protected-access @@ -20,7 +17,6 @@ def setUpClass(cls): metrics.set_meter_provider(MeterProvider()) cls._meter = metrics.get_meter(__name__) cls._test_labels = tuple({"environment": "staging"}.items()) - cls._span_processor = AzureMetricsSpanProcessor() @classmethod def tearDownClass(cls): @@ -35,11 +31,7 @@ def tearDownClass(cls): def test_constructor(self, mock_requests, mock_performance): """Test the constructor.""" - AutoCollection( - meter=self._meter, - labels=self._test_labels, - span_processor=self._span_processor, - ) + AutoCollection(meter=self._meter, labels=self._test_labels) 
self.assertEqual(mock_performance.called, True) self.assertEqual(mock_requests.called, True) self.assertEqual(mock_performance.call_args[0][0], self._meter) diff --git a/azure_monitor/tests/auto_collection/test_dependency_metrics.py b/azure_monitor/tests/auto_collection/test_dependency_metrics.py index 7ea3413..702d990 100644 --- a/azure_monitor/tests/auto_collection/test_dependency_metrics.py +++ b/azure_monitor/tests/auto_collection/test_dependency_metrics.py @@ -2,15 +2,17 @@ # Licensed under the MIT License. import unittest +from http.server import HTTPServer from unittest import mock +import requests from opentelemetry import metrics from opentelemetry.sdk.metrics import MeterProvider, Observer from azure_monitor.sdk.auto_collection import dependency_metrics -from azure_monitor.sdk.auto_collection.metrics_span_processor import ( - AzureMetricsSpanProcessor, -) + +ORIGINAL_FUNCTION = requests.Session.request +ORIGINAL_CONS = HTTPServer.__init__ # pylint: disable=protected-access @@ -20,21 +22,22 @@ def setUpClass(cls): metrics.set_meter_provider(MeterProvider()) cls._meter = metrics.get_meter(__name__) cls._test_labels = {"environment": "staging"} - cls._span_processor = AzureMetricsSpanProcessor() @classmethod def tearDown(cls): + requests.Session.request = ORIGINAL_FUNCTION + dependency_metrics.ORIGINAL_CONSTRUCTOR = ORIGINAL_CONS metrics._METER_PROVIDER = None def setUp(self): dependency_metrics.dependency_map.clear() + requests.Session.request = ORIGINAL_FUNCTION + dependency_metrics.ORIGINAL_CONSTRUCTOR = ORIGINAL_CONS def test_constructor(self): mock_meter = mock.Mock() metrics_collector = dependency_metrics.DependencyMetrics( - meter=mock_meter, - labels=self._test_labels, - span_processor=self._span_processor, + meter=mock_meter, labels=self._test_labels ) self.assertEqual(metrics_collector._meter, mock_meter) self.assertEqual(metrics_collector._labels, self._test_labels) @@ -46,7 +49,7 @@ def test_constructor(self): name="\\ApplicationInsights\\Dependency Call Duration", description="Average Outgoing Requests duration", unit="milliseconds", - value_type=int, + value_type=float, ) create_metric_calls[1].assert_called_with( callback=metrics_collector._track_failure_rate, @@ -67,9 +70,7 @@ def test_constructor(self): def test_track_dependency_rate(self, time_mock): time_mock.time.return_value = 100 metrics_collector = dependency_metrics.DependencyMetrics( - meter=self._meter, - labels=self._test_labels, - span_processor=self._span_processor, + meter=self._meter, labels=self._test_labels ) obs = Observer( callback=metrics_collector._track_dependency_rate, @@ -77,10 +78,9 @@ def test_track_dependency_rate(self, time_mock): description="Outgoing Requests per second", unit="rps", value_type=float, - meter=self._meter, ) dependency_metrics.dependency_map["last_time"] = 98.0 - self._span_processor.dependency_count = 4 + dependency_metrics.dependency_map["count"] = 4 metrics_collector._track_dependency_rate(obs) self.assertEqual( obs.aggregators[tuple(self._test_labels.items())].current, 2 @@ -90,9 +90,7 @@ def test_track_dependency_rate(self, time_mock): def test_track_dependency_rate_time_none(self, time_mock): time_mock.time.return_value = 100 metrics_collector = dependency_metrics.DependencyMetrics( - meter=self._meter, - labels=self._test_labels, - span_processor=self._span_processor, + meter=self._meter, labels=self._test_labels ) dependency_metrics.dependency_map["last_time"] = None obs = Observer( @@ -101,7 +99,6 @@ def test_track_dependency_rate_time_none(self, time_mock): 
description="Outgoing Requests per second", unit="rps", value_type=float, - meter=self._meter, ) metrics_collector._track_dependency_rate(obs) self.assertEqual( @@ -112,9 +109,7 @@ def test_track_dependency_rate_time_none(self, time_mock): def test_track_dependency_rate_error(self, time_mock): time_mock.time.return_value = 100 metrics_collector = dependency_metrics.DependencyMetrics( - meter=self._meter, - labels=self._test_labels, - span_processor=self._span_processor, + meter=self._meter, labels=self._test_labels ) dependency_metrics.dependency_map["last_time"] = 100 dependency_metrics.dependency_map["last_result"] = 5.0 @@ -124,7 +119,6 @@ def test_track_dependency_rate_error(self, time_mock): description="Outgoing Requests per second", unit="rps", value_type=float, - meter=self._meter, ) metrics_collector._track_dependency_rate(obs) self.assertEqual( @@ -135,9 +129,7 @@ def test_track_dependency_rate_error(self, time_mock): def test_track_failed_dependency_rate(self, time_mock): time_mock.time.return_value = 100 metrics_collector = dependency_metrics.DependencyMetrics( - meter=self._meter, - labels=self._test_labels, - span_processor=self._span_processor, + meter=self._meter, labels=self._test_labels ) obs = Observer( callback=metrics_collector._track_failure_rate, @@ -145,10 +137,9 @@ def test_track_failed_dependency_rate(self, time_mock): description="test", unit="test", value_type=float, - meter=self._meter, ) dependency_metrics.dependency_map["last_time"] = 98 - self._span_processor.failed_dependency_count = 4 + dependency_metrics.dependency_map["failed_count"] = 4 metrics_collector._track_failure_rate(obs) self.assertEqual( obs.aggregators[tuple(self._test_labels.items())].current, 2.0 @@ -158,9 +149,7 @@ def test_track_failed_dependency_rate(self, time_mock): def test_track_failed_dependency_rate_time_none(self, time_mock): time_mock.time.return_value = 100 metrics_collector = dependency_metrics.DependencyMetrics( - meter=self._meter, - labels=self._test_labels, - span_processor=self._span_processor, + meter=self._meter, labels=self._test_labels ) dependency_metrics.dependency_map["last_time"] = None obs = Observer( @@ -169,7 +158,6 @@ def test_track_failed_dependency_rate_time_none(self, time_mock): description="test", unit="test", value_type=float, - meter=self._meter, ) metrics_collector._track_failure_rate(obs) self.assertEqual( @@ -180,9 +168,7 @@ def test_track_failed_dependency_rate_time_none(self, time_mock): def test_track_failed_dependency_rate_error(self, time_mock): time_mock.time.return_value = 100 metrics_collector = dependency_metrics.DependencyMetrics( - meter=self._meter, - labels=self._test_labels, - span_processor=self._span_processor, + meter=self._meter, labels=self._test_labels ) dependency_metrics.dependency_map["last_time"] = 100 dependency_metrics.dependency_map["last_result"] = 5.0 @@ -192,7 +178,6 @@ def test_track_failed_dependency_rate_error(self, time_mock): description="test", unit="test", value_type=float, - meter=self._meter, ) metrics_collector._track_failure_rate(obs) self.assertEqual( @@ -201,20 +186,17 @@ def test_track_failed_dependency_rate_error(self, time_mock): def test_track_dependency_duration(self): metrics_collector = dependency_metrics.DependencyMetrics( - meter=self._meter, - labels=self._test_labels, - span_processor=self._span_processor, + meter=self._meter, labels=self._test_labels ) - self._span_processor.dependency_duration = 100 - self._span_processor.dependency_count = 10 + dependency_metrics.dependency_map["duration"] = 100 
+ dependency_metrics.dependency_map["count"] = 10 dependency_metrics.dependency_map["last_count"] = 5 obs = Observer( callback=metrics_collector._track_dependency_duration, name="test", description="test", unit="test", - value_type=int, - meter=self._meter, + value_type=float, ) metrics_collector._track_dependency_duration(obs) self.assertEqual( @@ -223,22 +205,39 @@ def test_track_dependency_duration(self): def test_track_dependency_duration_error(self): metrics_collector = dependency_metrics.DependencyMetrics( - meter=self._meter, - labels=self._test_labels, - span_processor=self._span_processor, + meter=self._meter, labels=self._test_labels ) - self._span_processor.dependency_duration = 100 - self._span_processor.dependency_count = 10 + dependency_metrics.dependency_map["duration"] = 100 + dependency_metrics.dependency_map["count"] = 10 dependency_metrics.dependency_map["last_count"] = 10 obs = Observer( callback=metrics_collector._track_dependency_duration, name="test", description="test", unit="test", - value_type=int, - meter=self._meter, + value_type=float, ) metrics_collector._track_dependency_duration(obs) self.assertEqual( obs.aggregators[tuple(self._test_labels.items())].current, 0 ) + + @mock.patch( + "azure_monitor.sdk.auto_collection.dependency_metrics.ORIGINAL_REQUEST" + ) + def test_dependency_patch(self, request_mock): + session = requests.Session() + dependency_metrics.dependency_patch(session) + self.assertEqual(dependency_metrics.dependency_map["count"], 1) + request_mock.assert_called_with(session) + + @mock.patch( + "azure_monitor.sdk.auto_collection.dependency_metrics.ORIGINAL_REQUEST" + ) + @mock.patch("azure_monitor.sdk.auto_collection.dependency_metrics.context") + def test_dependency_patch_suppress(self, context_mock, request_mock): + context_mock.get_value.return_value = {} + session = requests.Session() + dependency_metrics.dependency_patch(session) + self.assertEqual(dependency_metrics.dependency_map.get("count"), None) + request_mock.assert_called_with(session) diff --git a/azure_monitor/tests/auto_collection/test_metrics_span_processor.py b/azure_monitor/tests/auto_collection/test_metrics_span_processor.py index 216ca34..7c880be 100644 --- a/azure_monitor/tests/auto_collection/test_metrics_span_processor.py +++ b/azure_monitor/tests/auto_collection/test_metrics_span_processor.py @@ -13,107 +13,7 @@ # pylint: disable=protected-access -class TestAutoCollection(unittest.TestCase): - def test_constructor(self): - """Test the constructor.""" - span_processor = AzureMetricsSpanProcessor() - self.assertEqual(span_processor.dependency_count, 0) - self.assertEqual(span_processor.dependency_duration, 0) - self.assertEqual(span_processor.failed_dependency_count, 0) - self.assertEqual(span_processor.request_count, 0) - self.assertEqual(span_processor.request_duration, 0) - self.assertEqual(span_processor.failed_request_count, 0) - - def test_ok_dependency(self): - """Test the functionality when Client Span is ended.""" - span_processor = AzureMetricsSpanProcessor() - test_span = Span( - name="test", - kind=SpanKind.CLIENT, - context=SpanContext( - trace_id=36873507687745823477771305566750195431, - span_id=12030755672171557338, - is_remote=False, - ), - ) - test_span._start_time = 5000000 - test_span._end_time = 15000000 - span_processor.on_end(test_span) - self.assertEqual(span_processor.request_count, 0) - self.assertEqual(span_processor.request_duration, 0) - self.assertEqual(span_processor.failed_request_count, 0) - self.assertEqual(span_processor.dependency_count, 
1) - self.assertEqual(span_processor.dependency_duration, 10) - self.assertEqual(span_processor.failed_dependency_count, 0) - - def test_failed_dependency(self): - """Test the functionality when Client Span is ended.""" - span_processor = AzureMetricsSpanProcessor() - test_span = Span( - name="test", - kind=SpanKind.CLIENT, - context=SpanContext( - trace_id=36873507687745823477771305566750195431, - span_id=12030755672171557338, - is_remote=False, - ), - ) - test_span.set_status(Status(StatusCanonicalCode.INTERNAL, "test")) - test_span._start_time = 5000000 - test_span._end_time = 15000000 - span_processor.on_end(test_span) - self.assertEqual(span_processor.request_count, 0) - self.assertEqual(span_processor.request_duration, 0) - self.assertEqual(span_processor.failed_request_count, 0) - self.assertEqual(span_processor.dependency_count, 1) - self.assertEqual(span_processor.dependency_duration, 10) - self.assertEqual(span_processor.failed_dependency_count, 1) - - def test_ok_request(self): - """Test the functionality when Server Span is ended.""" - span_processor = AzureMetricsSpanProcessor() - test_span = Span( - name="test", - kind=SpanKind.SERVER, - context=SpanContext( - trace_id=36873507687745823477771305566750195431, - span_id=12030755672171557338, - is_remote=False, - ), - ) - test_span._start_time = 5000000 - test_span._end_time = 15000000 - span_processor.on_end(test_span) - self.assertEqual(span_processor.dependency_count, 0) - self.assertEqual(span_processor.dependency_duration, 0) - self.assertEqual(span_processor.failed_dependency_count, 0) - self.assertEqual(span_processor.request_count, 1) - self.assertEqual(span_processor.request_duration, 10) - self.assertEqual(span_processor.failed_request_count, 0) - - def test_failed_request(self): - """Test the functionality when Server Span is ended.""" - span_processor = AzureMetricsSpanProcessor() - test_span = Span( - name="test", - kind=SpanKind.SERVER, - context=SpanContext( - trace_id=36873507687745823477771305566750195431, - span_id=12030755672171557338, - is_remote=False, - ), - ) - test_span.set_status(Status(StatusCanonicalCode.INTERNAL, "test")) - test_span._start_time = 5000000 - test_span._end_time = 15000000 - span_processor.on_end(test_span) - self.assertEqual(span_processor.dependency_count, 0) - self.assertEqual(span_processor.dependency_duration, 0) - self.assertEqual(span_processor.failed_dependency_count, 0) - self.assertEqual(span_processor.request_count, 1) - self.assertEqual(span_processor.request_duration, 10) - self.assertEqual(span_processor.failed_request_count, 1) - +class TestMetricsSpanProcessor(unittest.TestCase): def test_document_collection(self): """Test the document collection.""" span_processor = AzureMetricsSpanProcessor() diff --git a/azure_monitor/tests/auto_collection/test_performance_metrics.py b/azure_monitor/tests/auto_collection/test_performance_metrics.py index 0474a22..ef5b6d4 100644 --- a/azure_monitor/tests/auto_collection/test_performance_metrics.py +++ b/azure_monitor/tests/auto_collection/test_performance_metrics.py @@ -31,12 +31,12 @@ def setUpClass(cls): def tearDownClass(cls): metrics._METER_PROVIDER = None - def test_constructor_standard_metrics(self): + def test_constructor_perf_counters(self): mock_meter = mock.Mock() performance_metrics_collector = PerformanceMetrics( meter=mock_meter, labels=self._test_labels, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) self.assertEqual(performance_metrics_collector._meter, 
mock_meter) self.assertEqual( @@ -105,7 +105,7 @@ def test_track_cpu(self): performance_metrics_collector = PerformanceMetrics( meter=self._meter, labels=self._test_labels, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) with mock.patch("psutil.cpu_times_percent") as processor_mock: cpu = collections.namedtuple("cpu", "idle") @@ -117,7 +117,6 @@ def test_track_cpu(self): description="Processor time as a percentage", unit="percentage", value_type=float, - meter=self._meter, ) performance_metrics_collector._track_cpu(obs) self.assertEqual( @@ -129,7 +128,7 @@ def test_track_memory(self, psutil_mock): performance_metrics_collector = PerformanceMetrics( meter=self._meter, labels=self._test_labels, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) memory = collections.namedtuple("memory", "available") vmem = memory(available=100) @@ -140,7 +139,6 @@ def test_track_memory(self, psutil_mock): description="Amount of available memory in bytes", unit="byte", value_type=int, - meter=self._meter, ) performance_metrics_collector._track_memory(obs) self.assertEqual( @@ -163,7 +161,6 @@ def test_track_commited_memory(self, psutil_mock): description="Amount of available memory in bytes", unit="byte", value_type=int, - meter=self._meter, ) performance_metrics_collector._track_commited_memory(obs) self.assertEqual( @@ -178,7 +175,7 @@ def test_track_process_cpu(self, psutil_mock): performance_metrics_collector = PerformanceMetrics( meter=self._meter, labels=self._test_labels, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) process_mock.cpu_percent.return_value = 44.4 psutil_mock.cpu_count.return_value = 2 @@ -188,7 +185,6 @@ def test_track_process_cpu(self, psutil_mock): description="Process CPU usage as a percentage", unit="percentage", value_type=float, - meter=self._meter, ) performance_metrics_collector._track_process_cpu(obs) self.assertEqual( @@ -203,7 +199,7 @@ def test_track_process_cpu_exception(self, logger_mock): performance_metrics_collector = PerformanceMetrics( meter=self._meter, labels=self._test_labels, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) psutil_mock.cpu_count.return_value = None obs = Observer( @@ -212,10 +208,9 @@ def test_track_process_cpu_exception(self, logger_mock): description="Process CPU usage as a percentage", unit="percentage", value_type=float, - meter=self._meter, ) performance_metrics_collector._track_process_cpu(obs) - self.assertEqual(logger_mock.exception.called, True) + self.assertEqual(logger_mock.warning.called, True) def test_track_process_memory(self): with mock.patch( @@ -224,7 +219,7 @@ def test_track_process_memory(self): performance_metrics_collector = PerformanceMetrics( meter=self._meter, labels=self._test_labels, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) memory = collections.namedtuple("memory", "rss") pmem = memory(rss=100) @@ -235,7 +230,6 @@ def test_track_process_memory(self): description="Amount of memory process has used in bytes", unit="byte", value_type=int, - meter=self._meter, ) performance_metrics_collector._track_process_memory(obs) self.assertEqual( @@ -251,7 +245,7 @@ def test_track_process_memory_exception(self, logger_mock): performance_metrics_collector = PerformanceMetrics( meter=self._meter, labels=self._test_labels, - 
collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) obs = Observer( callback=performance_metrics_collector._track_process_memory, @@ -259,7 +253,6 @@ def test_track_process_memory_exception(self, logger_mock): description="Amount of memory process has used in bytes", unit="byte", value_type=int, - meter=self._meter, ) performance_metrics_collector._track_process_memory(obs) - self.assertEqual(logger_mock.exception.called, True) + self.assertEqual(logger_mock.warning.called, True) diff --git a/azure_monitor/tests/auto_collection/test_request_metrics.py b/azure_monitor/tests/auto_collection/test_request_metrics.py index a367834..2ec6a85 100644 --- a/azure_monitor/tests/auto_collection/test_request_metrics.py +++ b/azure_monitor/tests/auto_collection/test_request_metrics.py @@ -2,17 +2,17 @@ # Licensed under the MIT License. import unittest +from http.server import HTTPServer from unittest import mock from opentelemetry import metrics from opentelemetry.sdk.metrics import MeterProvider, Observer from azure_monitor.sdk.auto_collection import request_metrics -from azure_monitor.sdk.auto_collection.metrics_span_processor import ( - AzureMetricsSpanProcessor, -) from azure_monitor.sdk.auto_collection.utils import AutoCollectionType +ORIGINAL_CONS = HTTPServer.__init__ + # pylint: disable=protected-access class TestRequestMetrics(unittest.TestCase): @@ -21,7 +21,6 @@ def setUpClass(cls): metrics.set_meter_provider(MeterProvider()) cls._meter = metrics.get_meter(__name__) cls._test_labels = {"environment": "staging"} - cls._span_processor = AzureMetricsSpanProcessor() @classmethod def tearDown(cls): @@ -29,14 +28,14 @@ def tearDown(cls): def setUp(self): request_metrics.requests_map.clear() + request_metrics.ORIGINAL_CONSTRUCTOR = ORIGINAL_CONS def test_constructor(self): mock_meter = mock.Mock() request_metrics_collector = request_metrics.RequestMetrics( meter=mock_meter, labels=self._test_labels, - span_processor=self._span_processor, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) self.assertEqual(request_metrics_collector._meter, mock_meter) self.assertEqual(request_metrics_collector._labels, self._test_labels) @@ -44,15 +43,15 @@ def test_constructor(self): create_metric_calls = mock_meter.register_observer.call_args_list create_metric_calls[0].assert_called_with( callback=request_metrics_collector._track_request_duration, - name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time", + name="\\ApplicationInsights\\Request Duration", description="Incoming Requests Average Execution Time", unit="milliseconds", - value_type=int, + value_type=float, ) create_metric_calls[1].assert_called_with( callback=request_metrics_collector._track_request_rate, - name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec", + name="\\ApplicationInsights\\Requests/Sec", description="Incoming Requests Rate", unit="rps", value_type=float, @@ -62,19 +61,17 @@ def test_track_request_duration(self): request_metrics_collector = request_metrics.RequestMetrics( meter=self._meter, labels=self._test_labels, - span_processor=self._span_processor, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) - self._span_processor.request_duration = 100 - self._span_processor.request_count = 10 + request_metrics.requests_map["duration"] = 100 + request_metrics.requests_map["count"] = 10 request_metrics.requests_map["last_count"] = 5 obs = Observer( 
callback=request_metrics_collector._track_request_duration, name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time", description="Incoming Requests Average Execution Time", unit="milliseconds", - value_type=int, - meter=self._meter, + value_type=float, ) request_metrics_collector._track_request_duration(obs) self.assertEqual( @@ -85,19 +82,17 @@ def test_track_request_duration_error(self): request_metrics_collector = request_metrics.RequestMetrics( meter=self._meter, labels=self._test_labels, - span_processor=self._span_processor, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) - self._span_processor.request_duration = 100 - self._span_processor.request_count = 10 + request_metrics.requests_map["duration"] = 100 + request_metrics.requests_map["count"] = 10 request_metrics.requests_map["last_count"] = 10 obs = Observer( callback=request_metrics_collector._track_request_duration, name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time", description="Incoming Requests Average Execution Time", unit="milliseconds", - value_type=int, - meter=self._meter, + value_type=float, ) request_metrics_collector._track_request_duration(obs) self.assertEqual( @@ -109,19 +104,17 @@ def test_track_request_rate(self, time_mock): request_metrics_collector = request_metrics.RequestMetrics( meter=self._meter, labels=self._test_labels, - span_processor=self._span_processor, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) time_mock.time.return_value = 100 request_metrics.requests_map["last_time"] = 98 - self._span_processor.request_count = 4 + request_metrics.requests_map["count"] = 4 obs = Observer( callback=request_metrics_collector._track_request_rate, name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec", description="Incoming Requests Average Execution Rate", unit="rps", value_type=float, - meter=self._meter, ) request_metrics_collector._track_request_rate(obs) self.assertEqual( @@ -134,8 +127,7 @@ def test_track_request_rate_time_none(self, time_mock): request_metrics_collector = request_metrics.RequestMetrics( meter=self._meter, labels=self._test_labels, - span_processor=self._span_processor, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) request_metrics.requests_map["last_time"] = None obs = Observer( @@ -144,7 +136,6 @@ def test_track_request_rate_time_none(self, time_mock): description="Incoming Requests Average Execution Rate", unit="rps", value_type=float, - meter=self._meter, ) request_metrics_collector._track_request_rate(obs) self.assertEqual( @@ -156,8 +147,7 @@ def test_track_request_rate_error(self, time_mock): request_metrics_collector = request_metrics.RequestMetrics( meter=self._meter, labels=self._test_labels, - span_processor=self._span_processor, - collection_type=AutoCollectionType.STANDARD_METRICS, + collection_type=AutoCollectionType.PERF_COUNTER, ) time_mock.time.return_value = 100 request_metrics.requests_map["last_rate"] = 5.0 @@ -168,9 +158,69 @@ def test_track_request_rate_error(self, time_mock): description="Incoming Requests Average Execution Rate", unit="rps", value_type=float, - meter=self._meter, ) request_metrics_collector._track_request_rate(obs) self.assertEqual( obs.aggregators[tuple(self._test_labels.items())].current, 5.0 ) + + def test_request_patch(self): + map = request_metrics.requests_map # pylint: disable=redefined-builtin + func = mock.Mock() + 
new_func = request_metrics.request_patch(func) + new_func() + + self.assertEqual(map["count"], 1) + self.assertIsNotNone(map["duration"]) + self.assertEqual(len(func.call_args_list), 1) + + def test_server_patch(self): + request_metrics.ORIGINAL_CONSTRUCTOR = lambda x, y, z: None + with mock.patch( + "azure_monitor.sdk.auto_collection.request_metrics.request_patch" + ) as request_mock: + handler = mock.Mock() + handler.do_DELETE.return_value = None + handler.do_GET.return_value = None + handler.do_HEAD.return_value = None + handler.do_OPTIONS.return_value = None + handler.do_POST.return_value = None + handler.do_PUT.return_value = None + result = request_metrics.server_patch(None, None, handler) + handler.do_DELETE() + handler.do_GET() + handler.do_HEAD() + handler.do_OPTIONS() + handler.do_POST() + handler.do_PUT() + + self.assertEqual(result, None) + self.assertEqual(len(request_mock.call_args_list), 6) + + def test_server_patch_no_methods(self): + request_metrics.ORIGINAL_CONSTRUCTOR = lambda x, y, z: None + with mock.patch( + "azure_monitor.sdk.auto_collection.request_metrics.request_patch" + ) as request_mock: + handler = mock.Mock() + result = request_metrics.server_patch(None, None, handler) + handler.do_DELETE() + handler.do_GET() + handler.do_HEAD() + handler.do_OPTIONS() + handler.do_POST() + handler.do_PUT() + + self.assertEqual(result, None) + self.assertEqual(len(request_mock.call_args_list), 0) + + def test_server_patch_no_args(self): + request_metrics.ORIGINAL_CONSTRUCTOR = lambda x, y: None + req = request_metrics.server_patch(None, None) + + self.assertEqual(req, None) + + def test_server_patch_no_handler(self): + request_metrics.ORIGINAL_CONSTRUCTOR = lambda x, y, z: None + req = request_metrics.server_patch(None, None, None) + self.assertEqual(req, None) diff --git a/azure_monitor/tests/test_base_exporter.py b/azure_monitor/tests/test_base_exporter.py index d197d47..b939d7a 100644 --- a/azure_monitor/tests/test_base_exporter.py +++ b/azure_monitor/tests/test_base_exporter.py @@ -80,7 +80,7 @@ def test_constructor(self): "4321abcd-5678-4efa-8abc-1234567890ab", ) self.assertEqual( - base.options.proxies, {"https": "https://test-proxy.com"}, + base.options.proxies, {"https": "https://test-proxy.com"} ) self.assertEqual(base.options.storage_maintenance_period, 2) self.assertEqual(base.options.storage_max_size, 3) diff --git a/docs/conf.py b/docs/conf.py index c72e032..d688cbc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -28,7 +28,7 @@ author = "Microsoft" # The full version, including alpha/beta/rc tags -release = "0.4b.0" +release = "0.5b.0" # -- General configuration --------------------------------------------------- diff --git a/tox.ini b/tox.ini index 9966fa7..553dd55 100644 --- a/tox.ini +++ b/tox.ini @@ -2,8 +2,8 @@ skipsdist = True skip_missing_interpreters = True envlist = - py3{4,5,6,7,8}-test-{azure_monitor} - py3{4,5,6,7,8}-coverage + py3{5,6,7,8}-test-{azure_monitor} + py3{5,6,7,8}-coverage lint docs
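For completeness, a rough sketch of how the reworked dependency tracking behaves at runtime. Constructing `DependencyMetrics` replaces `requests.Session.request` with `dependency_patch`, so later outgoing calls update the module-level `dependency_map` that the observers read. The URL and label values below are placeholders:

```python
import requests
from opentelemetry import metrics
from opentelemetry.sdk.metrics import MeterProvider

from azure_monitor.sdk.auto_collection import dependency_metrics

metrics.set_meter_provider(MeterProvider())
meter = metrics.get_meter(__name__)

# Constructing DependencyMetrics patches requests.Session.request and
# registers the duration / rate / failure-rate observers on the meter.
dependency_metrics.DependencyMetrics(
    meter=meter, labels={"environment": "testing"}
)

requests.get("https://example.com/", timeout=5)  # placeholder URL

# The patched request updated the shared map that the observers consume.
print(dependency_metrics.dependency_map.get("count"))     # 1
print(dependency_metrics.dependency_map.get("duration"))  # elapsed seconds
```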
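And the incoming-request side, which now hooks `http.server` instead of reading counters off the span processor. Constructing `RequestMetrics` swaps `HTTPServer.__init__` for `server_patch`, so any stdlib HTTP server created afterwards has its `do_*` handler methods wrapped by `request_patch`. The handler, address, and port below are made up for illustration:

```python
from http.server import BaseHTTPRequestHandler, HTTPServer

from opentelemetry import metrics
from opentelemetry.sdk.metrics import MeterProvider

from azure_monitor.sdk.auto_collection import request_metrics
from azure_monitor.sdk.auto_collection.utils import AutoCollectionType

metrics.set_meter_provider(MeterProvider())
meter = metrics.get_meter(__name__)

# Constructing RequestMetrics swaps HTTPServer.__init__ for server_patch.
request_metrics.RequestMetrics(
    meter=meter,
    labels={"environment": "testing"},
    collection_type=AutoCollectionType.PERF_COUNTER,
)


class Handler(BaseHTTPRequestHandler):
    def do_GET(self):  # wrapped by request_patch when the server is created
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b"ok")


# Creating the server triggers server_patch; every request served from here
# on updates count and duration in request_metrics.requests_map.
HTTPServer(("127.0.0.1", 8080), Handler).serve_forever()  # blocks
```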