datadog-checks-base 37.13.0__py2.py3-none-any.whl → 37.16.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- datadog_checks/base/__about__.py +1 -1
- datadog_checks/base/checks/_config_ast.py +84 -0
- datadog_checks/base/checks/base.py +40 -44
- datadog_checks/base/checks/kube_leader/base_check.py +2 -1
- datadog_checks/base/checks/kube_leader/mixins.py +1 -1
- datadog_checks/base/checks/kubelet_base/base.py +2 -2
- datadog_checks/base/checks/network.py +1 -1
- datadog_checks/base/checks/openmetrics/base_check.py +4 -3
- datadog_checks/base/checks/openmetrics/mixins.py +6 -7
- datadog_checks/base/checks/openmetrics/v2/base.py +4 -3
- datadog_checks/base/checks/openmetrics/v2/labels.py +1 -1
- datadog_checks/base/checks/openmetrics/v2/scraper/__init__.py +8 -0
- datadog_checks/base/checks/openmetrics/v2/{scraper.py → scraper/base_scraper.py} +19 -16
- datadog_checks/base/checks/openmetrics/v2/scraper/decorators.py +48 -0
- datadog_checks/base/checks/openmetrics/v2/transform.py +2 -1
- datadog_checks/base/checks/openmetrics/v2/transformers/histogram.py +2 -2
- datadog_checks/base/checks/openmetrics/v2/transformers/service_check.py +1 -1
- datadog_checks/base/checks/openmetrics/v2/transformers/temporal_percent.py +2 -2
- datadog_checks/base/checks/openmetrics/v2/transformers/time_elapsed.py +1 -1
- datadog_checks/base/checks/prometheus/base_check.py +4 -3
- datadog_checks/base/checks/prometheus/mixins.py +7 -7
- datadog_checks/base/checks/prometheus/prometheus_base.py +3 -2
- datadog_checks/base/checks/win/winpdh_base.py +2 -2
- datadog_checks/base/checks/win/wmi/base.py +13 -12
- datadog_checks/base/checks/win/wmi/sampler.py +10 -10
- datadog_checks/base/checks/windows/perf_counters/base.py +5 -4
- datadog_checks/base/checks/windows/perf_counters/connection.py +2 -2
- datadog_checks/base/checks/windows/perf_counters/counter.py +2 -1
- datadog_checks/base/checks/windows/perf_counters/transformers/service_check.py +2 -2
- datadog_checks/base/checks/windows/perf_counters/transformers/temporal_percent.py +3 -3
- datadog_checks/base/checks/windows/perf_counters/transformers/time_elapsed.py +1 -1
- datadog_checks/base/stubs/aggregator.py +21 -4
- datadog_checks/base/stubs/datadog_agent.py +5 -5
- datadog_checks/base/stubs/log.py +1 -1
- datadog_checks/base/utils/db/core.py +2 -2
- datadog_checks/base/utils/db/query.py +1 -3
- datadog_checks/base/utils/db/transform.py +6 -8
- datadog_checks/base/utils/db/utils.py +1 -2
- datadog_checks/base/utils/http.py +170 -71
- datadog_checks/base/utils/metadata/core.py +1 -1
- datadog_checks/base/utils/metadata/version.py +1 -1
- datadog_checks/base/utils/prometheus/metrics_pb2.py +2 -1
- datadog_checks/base/utils/replay/execute.py +2 -2
- datadog_checks/base/utils/replay/redirect.py +5 -6
- datadog_checks/base/utils/subprocess_output.py +2 -2
- datadog_checks/base/utils/tagging.py +1 -1
- datadog_checks/base/utils/tailfile.py +0 -2
- datadog_checks/base/utils/tls.py +96 -54
- datadog_checks/base/utils/tracing.py +5 -6
- datadog_checks/checks/libs/wmi/sampler.py +1 -0
- {datadog_checks_base-37.13.0.dist-info → datadog_checks_base-37.16.0.dist-info}/METADATA +13 -13
- {datadog_checks_base-37.13.0.dist-info → datadog_checks_base-37.16.0.dist-info}/RECORD +53 -51
- datadog_checks/base/utils/network.py +0 -49
- {datadog_checks_base-37.13.0.dist-info → datadog_checks_base-37.16.0.dist-info}/WHEEL +0 -0
datadog_checks/base/__about__.py
CHANGED
datadog_checks/base/checks/_config_ast.py
ADDED
@@ -0,0 +1,84 @@
+import ast
+from enum import Enum
+from typing import Any
+
+# This module is used to parse and modify the config AST before it is loaded.
+# It is used to handle special float values (inf, -inf, nan) and replace them with placeholders since those
+# are not valid Python literals.
+
+
+class SpecialFloatPlaceholder(str, Enum):
+    INF = '__PYTHON_INF__'
+    NEG_INF = '__PYTHON_NEG_INF__'
+    NAN = '__PYTHON_NAN__'
+
+
+class _SpecialFloatValuesTransformer(ast.NodeTransformer):
+    def visit_Name(self, node: ast.Name) -> ast.AST:
+        """
+        Processes named constants like 'inf' and 'nan'.
+        If the name is 'inf', it's replaced with a placeholder for positive infinity.
+        If the name is 'nan', it's replaced with a placeholder for Not a Number.
+        Other names are returned unchanged.
+        """
+        if node.id == 'inf':
+            return ast.Constant(value=SpecialFloatPlaceholder.INF.value)
+        elif node.id == 'nan':
+            return ast.Constant(value=SpecialFloatPlaceholder.NAN.value)
+        return node  # Leaf node, no children to visit
+
+    def visit_UnaryOp(self, node: ast.UnaryOp) -> ast.AST:
+        """
+        Processes unary operations like negation.
+        If the operation is a negation ('-') applied to the name 'inf',
+        it's replaced with a placeholder for negative infinity.
+        We can't use visit_Name for this because the constant is 'inf' and not '-inf'.
+        Other unary operations are processed as normal.
+        """
+        if isinstance(node.op, ast.USub) and isinstance(node.operand, ast.Name) and node.operand.id == 'inf':
+            return ast.Constant(value=SpecialFloatPlaceholder.NEG_INF.value)
+        return self.generic_visit(node)
+
+
+def _restore_special_floats(data: Any) -> Any:
+    """
+    Restores placeholders for special float values (inf, -inf, nan) to their actual
+    float values in a nested data structure.
+    """
+    if isinstance(data, dict):
+        return {key: _restore_special_floats(value) for key, value in data.items()}
+    elif isinstance(data, list):
+        return [_restore_special_floats(item) for item in data]
+    elif isinstance(data, str):
+        if data == SpecialFloatPlaceholder.INF.value:
+            return float('inf')
+        elif data == SpecialFloatPlaceholder.NEG_INF.value:
+            return float('-inf')
+        elif data == SpecialFloatPlaceholder.NAN.value:
+            return float('nan')
+    return data
+
+
+def parse(object_string: str) -> Any:
+    """
+    Parses a printed Python object, handling special float values (inf, -inf, nan).
+    If any error occurs, the original string is returned.
+    """
+    try:
+        if not object_string:
+            return None
+
+        # Parse the string as a Python expression
+        ast_node = ast.parse(object_string, mode='eval').body
+
+        # Replace inf/nan with placeholders
+        transformer = _SpecialFloatValuesTransformer()
+        transformed_ast_node = transformer.visit(ast_node)
+
+        # Evaluate the AST node to get the actual value.
+        data_with_placeholders = ast.literal_eval(transformed_ast_node)
+
+        # Restore placeholders to actual float values
+        return _restore_special_floats(data_with_placeholders)
+    except Exception:
+        return object_string
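The new helper centralizes the special-float handling that base.py previously did with a bare eval (see the -1488 hunk below). A minimal usage sketch, not part of the diff; the literal passed to parse() is an invented example:

from datadog_checks.base.checks._config_ast import parse

# 'inf', '-inf' and 'nan' are not valid Python literals, so ast.literal_eval alone
# would reject this string; parse() swaps them for placeholders and restores them.
config = parse("{'timeout': inf, 'floor': -inf, 'sample_rate': nan, 'retries': 3}")
assert config['timeout'] == float('inf')
assert config['floor'] == float('-inf')
assert config['retries'] == 3
# On any parsing error the original string is returned unchanged.
assert parse('not a python literal') == 'not a python literal'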
datadog_checks/base/checks/base.py
CHANGED
@@ -29,31 +29,32 @@ from typing import (  # noqa: F401
 import lazy_loader
 
 from datadog_checks.base.agent import AGENT_RUNNING, aggregator, datadog_agent
+from datadog_checks.base.config import is_affirmative
+from datadog_checks.base.constants import ServiceCheck
+from datadog_checks.base.errors import ConfigurationError
+from datadog_checks.base.types import (
+    AgentConfigType,
+    Event,
+    ExternalTagType,
+    InitConfigType,
+    InstanceType,
+    ProxySettings,
+    ServiceCheckStatus,
+)
+from datadog_checks.base.utils.agent.utils import should_profile_memory
+from datadog_checks.base.utils.common import ensure_bytes, to_native_string
+from datadog_checks.base.utils.fips import enable_fips
 from datadog_checks.base.utils.format import json
+from datadog_checks.base.utils.tagging import GENERIC_TAGS
+from datadog_checks.base.utils.tracing import traced_class
 
-from
-from ..constants import ServiceCheck
-from ..errors import ConfigurationError
-from ..types import (
-    AgentConfigType,  # noqa: F401
-    Event,  # noqa: F401
-    ExternalTagType,  # noqa: F401
-    InitConfigType,  # noqa: F401
-    InstanceType,  # noqa: F401
-    ProxySettings,  # noqa: F401
-    ServiceCheckStatus,  # noqa: F401
-)
-from ..utils.agent.utils import should_profile_memory
-from ..utils.common import ensure_bytes, to_native_string
-from ..utils.fips import enable_fips
-from ..utils.tagging import GENERIC_TAGS
-from ..utils.tracing import traced_class
+from ._config_ast import parse as _parse_ast_config
 
 if AGENT_RUNNING:
-    from
+    from datadog_checks.base.log import CheckLoggingAdapter, init_logging
 
 else:
-    from
+    from datadog_checks.base.stubs.log import CheckLoggingAdapter, init_logging
 
 init_logging()
@@ -146,12 +147,12 @@ class AgentCheck(object):
     # a mapping type, then each key will be considered a `name` and will be sent with its (str) value.
     METADATA_TRANSFORMERS = None
 
-    FIRST_CAP_RE = re.compile(
-    ALL_CAP_RE = re.compile(
-    METRIC_REPLACEMENT = re.compile(
-    TAG_REPLACEMENT = re.compile(
-    MULTIPLE_UNDERSCORE_CLEANUP = re.compile(
-    DOT_UNDERSCORE_CLEANUP = re.compile(
+    FIRST_CAP_RE = re.compile(rb'(.)([A-Z][a-z]+)')
+    ALL_CAP_RE = re.compile(rb'([a-z0-9])([A-Z])')
+    METRIC_REPLACEMENT = re.compile(rb'([^a-zA-Z0-9_.]+)|(^[^a-zA-Z]+)')
+    TAG_REPLACEMENT = re.compile(rb'[,\+\*\-/()\[\]{}\s]')
+    MULTIPLE_UNDERSCORE_CLEANUP = re.compile(rb'__+')
+    DOT_UNDERSCORE_CLEANUP = re.compile(rb'_*\._*')
 
     # allows to set a limit on the number of metric name and tags combination
     # this check can send per run. This is useful for checks that have an unbounded
@@ -1145,10 +1146,10 @@ class AgentCheck(object):
         And substitute illegal metric characters
         """
         name = ensure_bytes(name)
-        metric_name = self.FIRST_CAP_RE.sub(
-        metric_name = self.ALL_CAP_RE.sub(
-        metric_name = self.METRIC_REPLACEMENT.sub(
-        return self.DOT_UNDERSCORE_CLEANUP.sub(
+        metric_name = self.FIRST_CAP_RE.sub(rb'\1_\2', name)
+        metric_name = self.ALL_CAP_RE.sub(rb'\1_\2', metric_name).lower()
+        metric_name = self.METRIC_REPLACEMENT.sub(rb'_', metric_name)
+        return self.DOT_UNDERSCORE_CLEANUP.sub(rb'.', metric_name).strip(b'_')
 
     def warning(self, warning_message, *args, **kwargs):
         # type: (str, *Any, **Any) -> None
@@ -1242,10 +1243,10 @@ class AgentCheck(object):
         if prefix is not None:
             prefix = self.convert_to_underscore_separated(prefix)
         else:
-            name = self.METRIC_REPLACEMENT.sub(
-            name = self.DOT_UNDERSCORE_CLEANUP.sub(
+            name = self.METRIC_REPLACEMENT.sub(rb'_', metric)
+            name = self.DOT_UNDERSCORE_CLEANUP.sub(rb'.', name).strip(b'_')
 
-        name = self.MULTIPLE_UNDERSCORE_CLEANUP.sub(
+        name = self.MULTIPLE_UNDERSCORE_CLEANUP.sub(rb'_', name)
 
         if prefix is not None:
             name = ensure_bytes(prefix) + b"." + name
@@ -1261,9 +1262,9 @@ class AgentCheck(object):
         """
         if isinstance(tag, str):
             tag = tag.encode('utf-8', 'ignore')
-        tag = self.TAG_REPLACEMENT.sub(
-        tag = self.MULTIPLE_UNDERSCORE_CLEANUP.sub(
-        tag = self.DOT_UNDERSCORE_CLEANUP.sub(
+        tag = self.TAG_REPLACEMENT.sub(rb'_', tag)
+        tag = self.MULTIPLE_UNDERSCORE_CLEANUP.sub(rb'_', tag)
+        tag = self.DOT_UNDERSCORE_CLEANUP.sub(rb'.', tag).strip(b'_')
         return to_native_string(tag)
 
     def check(self, instance):
@@ -1286,7 +1287,7 @@ class AgentCheck(object):
         self._clear_diagnosis()
         # Ignore check initializations if running in a separate process
         if is_affirmative(self.instance.get('process_isolation', self.init_config.get('process_isolation', False))):
-            from
+            from datadog_checks.base.utils.replay.execute import run_with_isolation
 
             run_with_isolation(self, aggregator, datadog_agent)
         else:
@@ -1301,7 +1302,7 @@ class AgentCheck(object):
             instance = copy.deepcopy(self.instances[0])
 
             if 'set_breakpoint' in self.init_config:
-                from
+                from datadog_checks.base.utils.agent.debug import enter_pdb
 
                 enter_pdb(self.check, line=self.init_config['set_breakpoint'], args=(instance,))
             elif self.should_profile_memory():
@@ -1456,7 +1457,7 @@ class AgentCheck(object):
 
     def profile_memory(self, func, namespaces=None, args=(), kwargs=None, extra_tags=None):
         # type: (Callable[..., Any], Optional[Sequence[str]], Sequence[Any], Optional[Dict[str, Any]], Optional[List[str]]) -> None  # noqa: E501
-        from
+        from datadog_checks.base.utils.agent.memory import profile_memory
 
         if namespaces is None:
            namespaces = self.check_id.split(':', 1)
@@ -1488,9 +1489,4 @@ class AgentCheck(object):
         if process.returncode != 0:
             raise ValueError(f'Failed to load config: {stderr.decode()}')
 
-
-        try:
-            return eval(decoded)
-        # a single, literal unquoted string
-        except Exception:
-            return decoded
+        return _parse_ast_config(stdout.strip().decode())
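For reference, the byte patterns that are now visible in the class body drive the same name normalization as before. A standalone sketch, with the patterns copied from the hunk above and an invented sample metric name, of the transformation performed by convert_to_underscore_separated:

import re

FIRST_CAP_RE = re.compile(rb'(.)([A-Z][a-z]+)')
ALL_CAP_RE = re.compile(rb'([a-z0-9])([A-Z])')
METRIC_REPLACEMENT = re.compile(rb'([^a-zA-Z0-9_.]+)|(^[^a-zA-Z]+)')
DOT_UNDERSCORE_CLEANUP = re.compile(rb'_*\._*')

name = b'MyCheck.RequestCount'
name = FIRST_CAP_RE.sub(rb'\1_\2', name)        # b'My_Check._RequestCount'
name = ALL_CAP_RE.sub(rb'\1_\2', name).lower()  # b'my_check._request_count'
name = METRIC_REPLACEMENT.sub(rb'_', name)      # unchanged, no illegal characters
print(DOT_UNDERSCORE_CLEANUP.sub(rb'.', name).strip(b'_').decode())  # my_check.request_count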
datadog_checks/base/checks/kube_leader/base_check.py
CHANGED
@@ -2,8 +2,8 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 from datadog_checks.base.agent import datadog_agent
+from datadog_checks.base.checks import AgentCheck
 
-from .. import AgentCheck
 from .record import ElectionRecordAnnotation, ElectionRecordLease
 
 # Import lazily to reduce memory footprint
datadog_checks/base/checks/kubelet_base/base.py
CHANGED
@@ -4,8 +4,8 @@
 import json
 from datetime import datetime, timedelta, timezone
 
-from
-from
+from datadog_checks.base.checks import AgentCheck
+from datadog_checks.base.utils.date import parse_rfc3339
 
 try:
     from datadog_agent import get_config
datadog_checks/base/checks/network.py
CHANGED
@@ -33,7 +33,7 @@ class NetworkCheck(AgentCheck):
         try:
             statuses = self._check(instance)
         except Exception:
-            self.log.exception(
+            self.log.exception("Failed to run instance '%s'.", instance.get('name', ""))
         else:
             if isinstance(statuses, tuple):
                 # Assume the check only returns one service check
datadog_checks/base/checks/openmetrics/base_check.py
CHANGED
@@ -5,9 +5,10 @@ from copy import deepcopy
 
 import requests
 
-from
-from
-from
+from datadog_checks.base.checks import AgentCheck
+from datadog_checks.base.errors import CheckException
+from datadog_checks.base.utils.tracing import traced_class
+
 from .mixins import OpenMetricsScraperMixin
 
 STANDARD_FIELDS = [
datadog_checks/base/checks/openmetrics/mixins.py
CHANGED
@@ -14,13 +14,12 @@ import requests
 from prometheus_client.samples import Sample
 
 from datadog_checks.base.agent import datadog_agent
-
-from
-from
-from
-from
-from
-from ..libs.prometheus import text_fd_to_metric_families
+from datadog_checks.base.checks import AgentCheck
+from datadog_checks.base.checks.libs.prometheus import text_fd_to_metric_families
+from datadog_checks.base.config import is_affirmative
+from datadog_checks.base.errors import CheckException
+from datadog_checks.base.utils.common import to_native_string
+from datadog_checks.base.utils.http import RequestsWrapper
 
 
 class OpenMetricsScraperMixin(object):
datadog_checks/base/checks/openmetrics/v2/base.py
CHANGED
@@ -6,9 +6,10 @@ from contextlib import contextmanager
 
 from requests.exceptions import RequestException
 
-from
-from
-from
+from datadog_checks.base.checks import AgentCheck
+from datadog_checks.base.errors import ConfigurationError
+from datadog_checks.base.utils.tracing import traced_class
+
 from .scraper import OpenMetricsScraper
 
 
datadog_checks/base/checks/openmetrics/v2/scraper/__init__.py
ADDED
@@ -0,0 +1,8 @@
+# (C) Datadog, Inc. 2025-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+
+from . import decorators
+from .base_scraper import OpenMetricsCompatibilityScraper, OpenMetricsScraper
+
+__all__ = ["OpenMetricsScraper", "OpenMetricsCompatibilityScraper", "decorators"]
datadog_checks/base/checks/openmetrics/v2/{scraper.py → scraper/base_scraper.py}
CHANGED
@@ -4,25 +4,26 @@
 import fnmatch
 import inspect
 import re
+from collections.abc import Generator
 from copy import copy, deepcopy
 from itertools import chain
 from math import isinf, isnan
 from typing import List  # noqa: F401
 
+from prometheus_client import Metric
 from prometheus_client.openmetrics.parser import text_fd_to_metric_families as parse_openmetrics
 from prometheus_client.parser import text_fd_to_metric_families as parse_prometheus
 from requests.exceptions import ConnectionError
 
 from datadog_checks.base.agent import datadog_agent
-
-from
-from
-from
-from
-from
-from .
-from .
-from .transform import MetricTransformer
+from datadog_checks.base.checks.openmetrics.v2.first_scrape_handler import first_scrape_handler
+from datadog_checks.base.checks.openmetrics.v2.labels import LabelAggregator, get_label_normalizer
+from datadog_checks.base.checks.openmetrics.v2.transform import MetricTransformer
+from datadog_checks.base.config import is_affirmative
+from datadog_checks.base.constants import ServiceCheck
+from datadog_checks.base.errors import ConfigurationError
+from datadog_checks.base.utils.functions import no_op, return_true
+from datadog_checks.base.utils.http import RequestsWrapper
 
 
 class OpenMetricsScraper:
@@ -238,19 +239,21 @@ class OpenMetricsScraper:
         """
         runtime_data = {'flush_first_value': bool(self.flush_first_value), 'static_tags': self.static_tags}
 
-
-        if self.target_info:
-            consume_method = self.consume_metrics_w_target_info
-        else:
-            consume_method = self.consume_metrics
-
-        for metric in consume_method(runtime_data):
+        for metric in self.yield_metrics(runtime_data):
             transformer = self.metric_transformer.get(metric)
             if transformer is None:
                 continue
 
             transformer(metric, self.generate_sample_data(metric), runtime_data)
 
+    def yield_metrics(self, runtime_data: dict) -> Generator[Metric]:
+        if self.target_info:
+            consume_method = self.consume_metrics_w_target_info
+        else:
+            consume_method = self.consume_metrics
+
+        yield from consume_method(runtime_data)
+
     def scrape(self):
         try:
             self._scrape()
datadog_checks/base/checks/openmetrics/v2/scraper/decorators.py
ADDED
@@ -0,0 +1,48 @@
+# (C) Datadog, Inc. 2025-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+
+from __future__ import annotations
+
+from functools import partial
+from typing import TYPE_CHECKING, Any
+
+from .base_scraper import OpenMetricsScraper
+
+if TYPE_CHECKING:
+    from collections.abc import Generator
+
+    from prometheus_client.metrics_core import Metric
+
+
+class WithHttpCodeClass(OpenMetricsScraper):
+    """
+    Scraper decorator that parses the HTTP status code from the metric and adds a new tag named
+    `code_class` to the metric.
+
+    The HTTP status code is parsed and a new tag named `code_class` is added to the metric
+    stating whether the status code is in the 1xx, 2xx, 3xx, 4xx, or 5xx range.
+    """
+
+    def __init__(self, scraper: OpenMetricsScraper, http_status_tag: str):
+        self.scraper = scraper
+        self.http_status_tag = http_status_tag
+        self.decorated_methods = {"yield_metrics": self.yield_metrics}
+
+    def __getattr__(self, name: str) -> Any:
+        return self.decorated_methods.get(name, getattr(self.scraper, name))
+
+    def _add_http_code_class(self, metric: Metric, http_status_tag: str) -> Metric:
+        for sample in metric.samples:
+            if (
+                (code := sample.labels.get(http_status_tag))
+                and isinstance(code, str)
+                and len(code) == 3
+                and code.isdigit()
+            ):
+                sample.labels["code_class"] = f"{code[0]}xx"
+        return metric
+
+    def yield_metrics(self, runtime_data: dict[str, Any]) -> Generator[Metric]:
+        add_http_code_class_func = partial(self._add_http_code_class, http_status_tag=self.http_status_tag)
+        yield from map(add_http_code_class_func, self.scraper.yield_metrics(runtime_data))
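An illustrative sketch of how the decorator composes with a scraper through the yield_metrics hook introduced above. FakeScraper is a stand-in object invented for the example; the import path follows the new module location in this diff:

from prometheus_client.core import CounterMetricFamily

from datadog_checks.base.checks.openmetrics.v2.scraper.decorators import WithHttpCodeClass


class FakeScraper:
    # Minimal stand-in exposing only the yield_metrics() hook the decorator wraps.
    def yield_metrics(self, runtime_data):
        family = CounterMetricFamily('http_requests', 'Requests served', labels=['code'])
        family.add_metric(['503'], 7)
        yield family


decorated = WithHttpCodeClass(FakeScraper(), http_status_tag='code')
for metric in decorated.yield_metrics({}):
    for sample in metric.samples:
        print(sample.labels)  # {'code': '503', 'code_class': '5xx'}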
datadog_checks/base/checks/openmetrics/v2/transformers/histogram.py
CHANGED
@@ -1,8 +1,8 @@
 # (C) Datadog, Inc. 2020-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-from
-from
+from datadog_checks.base.checks.openmetrics.v2.labels import canonicalize_numeric_label
+from datadog_checks.base.checks.openmetrics.v2.utils import decumulate_histogram_buckets
 
 
 def get_histogram(check, metric_name, modifiers, global_options):
datadog_checks/base/checks/openmetrics/v2/transformers/service_check.py
CHANGED
@@ -1,7 +1,7 @@
 # (C) Datadog, Inc. 2020-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-from
+from datadog_checks.base.constants import ServiceCheck
 
 
 def get_service_check(check, metric_name, modifiers, global_options):
datadog_checks/base/checks/openmetrics/v2/transformers/temporal_percent.py
CHANGED
@@ -1,8 +1,8 @@
 # (C) Datadog, Inc. 2020-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-from
-from
+from datadog_checks.base.utils.common import total_time_to_temporal_percent
+from datadog_checks.base.utils.constants import TIME_UNITS
 
 
 def get_temporal_percent(check, metric_name, modifiers, global_options):
datadog_checks/base/checks/openmetrics/v2/transformers/time_elapsed.py
CHANGED
@@ -1,7 +1,7 @@
 # (C) Datadog, Inc. 2020-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-from
+from datadog_checks.base.utils.time import get_timestamp
 
 
 def get_time_elapsed(check, metric_name, modifiers, global_options):
datadog_checks/base/checks/prometheus/base_check.py
CHANGED
@@ -1,9 +1,10 @@
 # (C) Datadog, Inc. 2018-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-from
-from
-from
+from datadog_checks.base.checks import AgentCheck
+from datadog_checks.base.errors import CheckException
+from datadog_checks.base.utils.common import to_native_string
+
 from .mixins import PrometheusScraperMixin
 
 
datadog_checks/base/checks/prometheus/mixins.py
CHANGED
@@ -11,11 +11,11 @@ from math import isinf, isnan
 import requests
 from google.protobuf.internal.decoder import _DecodeVarint32  # pylint: disable=E0611,E0401
 
-from
-from
-from
-from
-from
+from datadog_checks.base.checks import AgentCheck
+from datadog_checks.base.checks.libs.prometheus import text_fd_to_metric_families
+from datadog_checks.base.config import is_affirmative
+from datadog_checks.base.utils.http import RequestsWrapper
+from datadog_checks.base.utils.prometheus import metrics_pb2
 
 
 class PrometheusFormat:
@@ -561,7 +561,7 @@ class PrometheusScraperMixin(object):
         headers['Accept-Encoding'] = 'gzip'
         if pFormat == PrometheusFormat.PROTOBUF:
             headers['accept'] = (
-                'application/vnd.google.protobuf;
+                'application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited'
             )
         handler = self.get_http_handler(endpoint, instance)
         if (
@@ -569,7 +569,7 @@ class PrometheusScraperMixin(object):
             and not handler.ignore_tls_warning
            and not is_affirmative(handler.options.get('ssl_verify', True))
        ):
-            self.log.debug(
+            self.log.debug('An unverified HTTPS request is being made to %s', endpoint)
 
         try:
             response = handler.get(endpoint, extra_headers=headers, stream=False)
datadog_checks/base/checks/prometheus/prometheus_base.py
CHANGED
@@ -2,8 +2,9 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
-from
-from
+from datadog_checks.base.checks import AgentCheck
+from datadog_checks.base.utils.common import to_native_string
+
 from .mixins import PrometheusScraperMixin
 
 # Prometheus check is a parent class providing a structure and some helpers
datadog_checks/base/checks/win/winpdh_base.py
CHANGED
@@ -6,8 +6,8 @@ from typing import Callable, Dict, List, Optional, Tuple  # noqa: F401
 
 import win32wnet
 
-from
-from
+from datadog_checks.base import AgentCheck, is_affirmative
+from datadog_checks.base.utils.containers import hash_mutable
 
 try:
     from .winpdh import DATA_TYPE_DOUBLE, DATA_TYPE_INT, WinPDHCounter
|