datadog-checks-base 37.21.1__py2.py3-none-any.whl → 37.22.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datadog_checks/base/__about__.py +1 -1
- datadog_checks/base/utils/db/health.py +59 -6
- datadog_checks/base/utils/db/utils.py +53 -0
- datadog_checks/base/utils/tls.py +65 -26
- {datadog_checks_base-37.21.1.dist-info → datadog_checks_base-37.22.0.dist-info}/METADATA +1 -1
- {datadog_checks_base-37.21.1.dist-info → datadog_checks_base-37.22.0.dist-info}/RECORD +7 -7
- {datadog_checks_base-37.21.1.dist-info → datadog_checks_base-37.22.0.dist-info}/WHEEL +0 -0
datadog_checks/base/__about__.py
CHANGED
datadog_checks/base/utils/db/health.py
CHANGED

@@ -9,16 +9,20 @@ from __future__ import annotations
 import time
 from typing import TYPE_CHECKING
 
+from cachetools import TLRUCache
+
 from datadog_checks.base.utils.serialization import json
 
 if TYPE_CHECKING:
     from datadog_checks.base import DatabaseCheck
 try:
-    import datadog_agent
+    import datadog_agent  # type: ignore
 except ImportError:
     from datadog_checks.base.stubs import datadog_agent
 
 
+import threading
+import traceback
 from enum import Enum
 
 
@@ -28,6 +32,8 @@ class HealthEvent(Enum):
     """
 
     INITIALIZATION = 'initialization'
+    UNKNOWN_ERROR = 'unknown_error'
+    MISSED_COLLECTION = 'missed_collection'
 
 
 class HealthStatus(Enum):
@@ -40,6 +46,13 @@ class HealthStatus(Enum):
     ERROR = 'error'
 
 
+DEFAULT_COOLDOWN = 60 * 5
+
+
+def ttl(_key, value, now):
+    return now + value
+
+
 class Health:
     def __init__(self, check: DatabaseCheck):
         """
@@ -49,8 +62,18 @@ class Health:
             The check instance that will be used to submit health events.
         """
         self.check = check
-
-
+        self._cache_lock = threading.Lock()
+        self._ttl_cache = TLRUCache(maxsize=1000, ttu=ttl)
+
+    def submit_health_event(
+        self,
+        name: HealthEvent,
+        status: HealthStatus,
+        tags: list[str] = None,
+        cooldown_time: int = None,
+        cooldown_values: list[str] = None,
+        data: dict = None,
+    ):
         """
         Submit a health event to the aggregator.
 
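The cooldown machinery added above stores the cooldown period itself as the cache value: the `ttl` time-to-use callback returns `now + value`, so an entry expires exactly `cooldown_time` seconds after it is written. A minimal standalone sketch of the same idea (the `should_submit` helper and the example key parts are illustrative, not part of the package):

import threading

from cachetools import TLRUCache


def ttl(_key, value, now):
    # The cached value is the cooldown in seconds; expire the entry that far in the future.
    return now + value


_cache = TLRUCache(maxsize=1000, ttu=ttl)
_lock = threading.Lock()


def should_submit(category, name, status, cooldown_time):
    key = "|".join([category, name, status])
    with _lock:
        if _cache.get(key, None):
            return False  # an identical event was submitted within the cooldown window
        _cache[key] = cooldown_time
        return True


print(should_submit("postgres", "missed_collection", "warning", 300))  # True
print(should_submit("postgres", "missed_collection", "warning", 300))  # False until 5 minutes pass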
@@ -60,22 +83,52 @@ class Health:
             The health status to submit.
         :param tags: list of str
             Tags to associate with the health event.
-        :param
+        :param cooldown_time: int
+            The cooldown period in seconds to prevent the events with the same name and status
+            from being submitted again. If None there is no cooldown.
+        :param cooldown_values: list of str
+            Additional values to include in the cooldown key.
+        :param data: A dictionary to be submitted as `data`. Must be JSON serializable.
         """
+        category = self.check.__NAMESPACE__ or self.check.__class__.__name__.lower()
+        if cooldown_time:
+            cooldown_key = "|".join([category, name.value, status.value])
+            if cooldown_values:
+                cooldown_key = "|".join([cooldown_key, "|".join([f"{v}" for v in cooldown_values])])
+            with self._cache_lock:
+                if self._ttl_cache.get(cooldown_key, None):
+                    return
+                self._ttl_cache[cooldown_key] = cooldown_time
         self.check.event_platform_event(
             json.dumps(
                 {
                     'timestamp': time.time() * 1000,
                     'version': 1,
                     'check_id': self.check.check_id,
-                    'category':
+                    'category': category,
                     'name': name,
                     'status': status,
                     'tags': tags or [],
                     'ddagentversion': datadog_agent.get_version(),
                     'ddagenthostname': datadog_agent.get_hostname(),
-                    'data':
+                    'data': data,
                 }
             ),
             "dbm-health",
         )
+
+    def submit_exception_health_event(self, exception: Exception, data: dict):
+        trace = traceback.extract_tb(exception.__traceback__)
+        exc = trace.pop()
+        if exc:
+            self.submit_health_event(
+                name=HealthEvent.UNKNOWN_ERROR,
+                status=HealthStatus.ERROR,
+                data={
+                    "file": exc.filename,
+                    "line": exc.lineno,
+                    "function": exc.name,
+                    "exception_type": type(exception).__name__,
+                    **(data or {}),
+                },
+            )
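submit_exception_health_event reads the last frame of the exception's traceback with `traceback.extract_tb`, which is where the `file`, `line`, and `function` fields in the event payload come from. A small sketch of what that frame contains (the `divide` helper is only for illustration):

import traceback


def divide(a, b):
    return a / b  # raises ZeroDivisionError when b == 0


try:
    divide(1, 0)
except Exception as e:
    frames = traceback.extract_tb(e.__traceback__)
    last = frames.pop()  # the frame where the exception was raised
    print(last.filename, last.lineno, last.name, type(e).__name__)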
datadog_checks/base/utils/db/utils.py
CHANGED

@@ -21,6 +21,7 @@ from datadog_checks.base import is_affirmative
 from datadog_checks.base.agent import datadog_agent
 from datadog_checks.base.log import get_check_logger
 from datadog_checks.base.utils.common import to_native_string
+from datadog_checks.base.utils.db.health import DEFAULT_COOLDOWN, HealthEvent, HealthStatus
 from datadog_checks.base.utils.db.types import Transformer  # noqa: F401
 from datadog_checks.base.utils.format import json
 from datadog_checks.base.utils.tracing import INTEGRATION_TRACING_SERVICE_NAME, tracing_enabled
@@ -293,6 +294,14 @@ class DBMAsyncJob(object):
         expected_db_exceptions=(),
         shutdown_callback=None,
         job_name=None,
+        # Some users may want to disable the missed collection event,
+        # for example if they set the collection interval intentionally low
+        # to effectively run the job in a loop
+        enable_missed_collection_event=True,
+        # List of features depenedent on the job running
+        # Defaults to [None] during init so that if no features are specified there will
+        # still be health events submitted for the job
+        features=None,
     ):
         self._check = check
         self._config_host = config_host
@@ -314,6 +323,10 @@ class DBMAsyncJob(object):
         self._enabled = enabled
         self._expected_db_exceptions = expected_db_exceptions
         self._job_name = job_name
+        self._enable_missed_collection_event = enable_missed_collection_event
+        self._features = features
+        if self._features is None:
+            self._features = [None]
 
     def cancel(self):
         """
@@ -342,6 +355,37 @@ class DBMAsyncJob(object):
         elif self._job_loop_future is None or not self._job_loop_future.running():
             self._job_loop_future = DBMAsyncJob.executor.submit(self._job_loop)
         else:
+            if (
+                hasattr(self._check, 'health')
+                and self._enable_missed_collection_event
+                and self._min_collection_interval >= 1
+                and self._last_run_start
+            ):
+                # Assume a collection interval of less than 1 second is an attempt to run the job in a loop
+                elapsed_time = time.time() - self._last_run_start
+                if elapsed_time > self._min_collection_interval:
+                    # Missed a collection interval, submit a health event for each feature that depends on this job
+                    for feature in self._features:
+                        self._check.health.submit_health_event(
+                            name=HealthEvent.MISSED_COLLECTION,
+                            status=HealthStatus.WARNING,
+                            tags=self._job_tags,
+                            # Use a cooldown to avoid spamming if the job is missing the collection interval
+                            # in a flappy manner
+                            cooldown_time=DEFAULT_COOLDOWN,
+                            cooldown_values=[self._dbms, self._job_name],
+                            data={
+                                "dbms": self._dbms,
+                                "job_name": self._job_name,
+                                "last_run_start": self._last_run_start,
+                                "elapsed_time": (time.time() - self._last_run_start) * 1000,
+                                "feature": feature,
+                            },
+                        )
+                    self._check.count(
+                        "dd.{}.async_job.missed_collection".format(self._dbms), 1, tags=self._job_tags, raw=True
+                    )
+
             self._log.debug("Job loop already running. job=%s", self._job_name)
 
     def _job_loop(self):
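The missed-collection detection is a plain elapsed-time comparison against the job's minimum collection interval, with intervals under one second treated as an intentional tight loop. A self-contained sketch of that timing logic (the interval value and the 25-second gap are made up for the example):

import time

MIN_COLLECTION_INTERVAL = 10  # seconds, per-job configuration
DEFAULT_COOLDOWN = 60 * 5     # matches the constant added to health.py

last_run_start = time.time() - 25  # pretend the previous run started 25 seconds ago

elapsed_time = time.time() - last_run_start
if MIN_COLLECTION_INTERVAL >= 1 and elapsed_time > MIN_COLLECTION_INTERVAL:
    # A collection interval was missed; the real code emits one MISSED_COLLECTION
    # health event per dependent feature, rate limited by DEFAULT_COOLDOWN.
    print("missed collection, elapsed_ms=%d" % (elapsed_time * 1000))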
@@ -394,6 +438,14 @@ class DBMAsyncJob(object):
                 tags=self._job_tags + ["error:crash-{}".format(type(e))],
                 raw=True,
             )
+
+            if hasattr(self._check, 'health'):
+                try:
+                    self._check.health.submit_exception_health_event(e, data={"job_name": self._job_name})
+                except Exception as health_error:
+                    self._log.exception(
+                        "[%s] Failed to submit error health event", self._job_tags_str, health_error
+                    )
         finally:
             self._log.info("[%s] Shutting down job loop", self._job_tags_str)
             if self._shutdown_callback:
@@ -410,6 +462,7 @@ class DBMAsyncJob(object):
 
     def _run_job_rate_limited(self):
         try:
+            self._last_run_start = time.time()
             self._run_job_traced()
         except:
             raise
datadog_checks/base/utils/tls.py
CHANGED

@@ -56,6 +56,67 @@ class TlsConfig(BaseModel, frozen=True):
     tls_verify: bool = True
 
 
+def _load_certifi_fallback(context):
+    """Attempt to load CA certificates from certifi as a fallback."""
+    try:
+        import certifi
+    except ImportError:
+        LOGGER.warning('Failed to import certifi, TLS verification may fail.')
+        return
+
+    try:
+        certifi_path = certifi.where()
+        context.load_verify_locations(cafile=certifi_path)
+        LOGGER.info('Successfully loaded CA certificates from certifi bundle: %s', certifi_path)
+    except (FileNotFoundError, IOError) as e:
+        LOGGER.error('Failed to load CA certificates from certifi bundle: %s. TLS verification may fail.', e)
+    except Exception as e:
+        LOGGER.error('Unexpected error loading certifi certificates: %s', e)
+
+
+def _load_ca_certs(context, config):
+    # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations
+    # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs
+    ca_cert = config.get('tls_ca_cert')
+
+    # Handle user-provided CA cert
+    if ca_cert:
+        ca_cert = os.path.expanduser(ca_cert)
+        try:
+            if os.path.isdir(ca_cert):
+                context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None)
+            else:
+                context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None)
+        except FileNotFoundError:
+            LOGGER.warning(
+                'TLS CA certificate file not found: %s. Please check the `tls_ca_cert` configuration option.',
+                ca_cert,
+            )
+            return
+    else:
+        # Try to load system default certs
+        try:
+            context.load_default_certs(ssl.Purpose.SERVER_AUTH)
+        except Exception as e:
+            LOGGER.debug('Failed to load default CA certificates: %s', e)
+
+        # Check if any certs were actually loaded
+        if not context.get_ca_certs():
+            LOGGER.info('No CA certificates loaded from system default paths, attempting certifi fallback.')
+            _load_certifi_fallback(context)
+
+    # Load intermediate CA certs if provided
+    intermediate_ca_certs = config.get('tls_intermediate_ca_certs')
+    if intermediate_ca_certs:
+        try:
+            context.load_verify_locations(cadata='\n'.join(intermediate_ca_certs))
+        except ssl.SSLError:
+            LOGGER.warning(
+                "TLS intermediate CA certificate(s) could not be loaded: %s. ",
+                intermediate_ca_certs,
+            )
+
+
 def create_ssl_context(config):
     # https://docs.python.org/3/library/ssl.html#ssl.SSLContext
     # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT
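The certifi fallback only kicks in when the system trust store yields nothing. A rough standalone equivalent of the default path in `_load_ca_certs` (assumes the optional `certifi` package may or may not be installed):

import ssl

context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.load_default_certs(ssl.Purpose.SERVER_AUTH)

if not context.get_ca_certs():
    # Nothing came from the system trust store; fall back to certifi's bundled CAs.
    try:
        import certifi

        context.load_verify_locations(cafile=certifi.where())
    except ImportError:
        print('certifi not installed; TLS verification may fail')

print('CA certificates loaded:', len(context.get_ca_certs()))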
@@ -78,32 +139,10 @@ def create_ssl_context(config):
         LOGGER.debug('Setting TLS ciphers to: %s', configured_ciphers)
         context.set_ciphers(configured_ciphers)
 
-
-
-
-
-        if ca_cert:
-            ca_cert = os.path.expanduser(ca_cert)
-            if os.path.isdir(ca_cert):
-                context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None)
-            else:
-                context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None)
-        else:
-            context.load_default_certs(ssl.Purpose.SERVER_AUTH)
-    except FileNotFoundError:
-        LOGGER.warning(
-            'TLS CA certificate file not found: %s. Please check the `tls_ca_cert` configuration option.',
-            ca_cert,
-        )
-    intermediate_ca_certs = config.get('tls_intermediate_ca_certs')
-    try:
-        if intermediate_ca_certs:
-            context.load_verify_locations(cadata='\n'.join(intermediate_ca_certs))
-    except ssl.SSLError:
-        LOGGER.warning(
-            "TLS intermediate CA certificate(s) could not be loaded: %s. ",
-            intermediate_ca_certs,
-        )
+    if context.verify_mode == ssl.CERT_NONE:
+        LOGGER.debug('TLS verification is disabled; skipping CA certificate configuration.')
+    else:
+        _load_ca_certs(context, config)
 
     # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain
     client_cert, client_key = config.get('tls_cert'), config.get('tls_private_key')
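The new guard skips CA loading entirely when verification is off, since a context whose `verify_mode` is `ssl.CERT_NONE` will not validate peer certificates anyway. A small sketch of that branch outside the check's config plumbing (the `tls_verify` flag here stands in for the instance option of the same name):

import ssl

context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)

tls_verify = False  # what `tls_verify: false` in an instance config ends up meaning
if not tls_verify:
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE

if context.verify_mode == ssl.CERT_NONE:
    print('TLS verification disabled; skipping CA certificate configuration')
else:
    context.load_default_certs(ssl.Purpose.SERVER_AUTH)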
{datadog_checks_base-37.21.1.dist-info → datadog_checks_base-37.22.0.dist-info}/RECORD
CHANGED

@@ -3,7 +3,7 @@ datadog_checks/config.py,sha256=PrAXGdlLnoV2VMQff_noSaSJJ0wg4BAiGnw7jCQLSik,196
 datadog_checks/errors.py,sha256=eFwmnrX-batIgbu-iJyseqAPNO_4rk1UuaKK89evLhg,155
 datadog_checks/log.py,sha256=orvOgMKGNEsqSTLalCAQpWP-ouorpG1A7Gn-j2mRD80,301
 datadog_checks/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
-datadog_checks/base/__about__.py,sha256=
+datadog_checks/base/__about__.py,sha256=JP31zlaMNXd1vaMqwEefR96nAQreufUfXwUgr65aLNU,139
 datadog_checks/base/__init__.py,sha256=yWegSLE-TZWIGSvAiJj9PSrUxzlOo_UVJLt2zORZ8Ek,363
 datadog_checks/base/__init__.pyi,sha256=a4Y1JIcPJ8pz9tRkBAvjWdtvSQwZxbMZBuRmIiSs_4E,1031
 datadog_checks/base/agent.py,sha256=nX9x_BYYizRKGNYfXq5z7S0FZ9xcX_wd2tuxpGe3_8k,350
@@ -127,7 +127,7 @@ datadog_checks/base/utils/tagging.py,sha256=a-0_pXGOwg4ewmDiJon7rM_96ZJvVpHVMwhy
 datadog_checks/base/utils/tailfile.py,sha256=QZmH8eCxeClFWRMZn6zDwgOx1y5dAXc5yI_uzECj0ak,3892
 datadog_checks/base/utils/time.py,sha256=cNy7CtsJzSUMi7J-3WReZVUvNyYOGkJwItqJMY01qqA,1373
 datadog_checks/base/utils/timeout.py,sha256=eOBZofFN-hOu5xJeMOF3ac_ofcy9EY6-kvmBvP4QEFg,2140
-datadog_checks/base/utils/tls.py,sha256=
+datadog_checks/base/utils/tls.py,sha256=to_33MaSEjvo6zvqRm9gKRx_Zk9Ayq53tnZiuKvNy24,8922
 datadog_checks/base/utils/tracing.py,sha256=2QZjQKNKvowRYFpqOQc4sUoYUWu6JJKJofk-rvstUgY,6076
 datadog_checks/base/utils/tracking.py,sha256=FYIouqu3KB-JsxgxM1iX5Ipv_cWhs8zQOTN4SxXOkJ4,3701
 datadog_checks/base/utils/agent/__init__.py,sha256=o3aWvy3PhykD_h7YT3s628O0W2YpHis0NlQsSV1PI04,115
@@ -141,7 +141,7 @@ datadog_checks/base/utils/concurrency/limiter.py,sha256=is2ZpUEjfsI4nBGtXG2D0Zgv
 datadog_checks/base/utils/db/__init__.py,sha256=EVTc2FtnHWLHXI3M79jyMn9ypZAMa9eqG3EKLAiMF-M,211
 datadog_checks/base/utils/db/__init__.pyi,sha256=ewmGxxyJ52wAaYxNZahi2koEUnddfvHcn3HYxQ3RUr0,240
 datadog_checks/base/utils/db/core.py,sha256=bYanwXIqBzsSxK7b-Ofb0W1WiHbFBtKyYdUBonBLe_Q,11165
-datadog_checks/base/utils/db/health.py,sha256=
+datadog_checks/base/utils/db/health.py,sha256=riJaJInOuYFK3y0wNH19HojIkXPwQaMtvxuyPCRxnZY,4182
 datadog_checks/base/utils/db/query.py,sha256=-PyxdqpbShkQ78h7sWnghQZVtjdLGVrm71n8OpHuPW4,14432
 datadog_checks/base/utils/db/sql.py,sha256=oiEzQa_vC_w3U65VFrFCoQHWj5GQLLRlSO0CfiSlp4A,2490
 datadog_checks/base/utils/db/sql_commenter.py,sha256=r_efK6TGRQxM_-Qj-ndEJdECk47J4nCFjkVyxu1XmvU,1522
@@ -149,7 +149,7 @@ datadog_checks/base/utils/db/statement_metrics.py,sha256=U7EtERkmFzfCtfyd3094fBa
 datadog_checks/base/utils/db/timed_cache.py,sha256=a9Ks5KKUvExB6GOATXTSCLamVtLD919Dn6HpweGKtFw,2114
 datadog_checks/base/utils/db/transform.py,sha256=H3JN8_MF0Pk0HaXvIZeX1A8iQrP8KBgS741MPeBiWDo,23969
 datadog_checks/base/utils/db/types.py,sha256=OLX2Oq58JQPFBD4oqUpCLkAP7ovRGN_i1vFk1E0N8Lg,267
-datadog_checks/base/utils/db/utils.py,sha256=
+datadog_checks/base/utils/db/utils.py,sha256=GhfrWmuVTOhNtBvPjLLM5t2nOW7HwtpLqBPpWOMV9vU,26092
 datadog_checks/base/utils/discovery/__init__.py,sha256=vPCOdsThBcBjFJRPhDm6IsZGOwk8HlvciwCe_l8dKLk,211
 datadog_checks/base/utils/discovery/__init__.pyi,sha256=ScVLU1Njj9ekZmewltb0cULI6BylssVHfn4CcPNeyr8,173
 datadog_checks/base/utils/discovery/cache.py,sha256=f9L3A7YZpZ-mpZpFIwjsa5ab9cZMGkqdetdr9EpalbI,887
@@ -223,6 +223,6 @@ datadog_checks/utils/tracing.py,sha256=HQbQakKM-Lw75MDkItaYJYipS6YO24Z_ymDVxDsx5
 datadog_checks/utils/prometheus/__init__.py,sha256=8WwXnM9g1sfS5267QYCJX_hd8MZl5kRgBgQ_SzdNdXs,161
 datadog_checks/utils/prometheus/functions.py,sha256=4vWsTGLgujHwdYZo0tlAQkqDPHofqUJM3k9eItJqERQ,197
 datadog_checks/utils/prometheus/metrics_pb2.py,sha256=xg3UdUHe4TjeR4s13LUKZ2U1WVSt6U6zjsVRG6lX6dc,173
-datadog_checks_base-37.
-datadog_checks_base-37.
-datadog_checks_base-37.
+datadog_checks_base-37.22.0.dist-info/METADATA,sha256=KkZTTQeulKtyL6BaH_OnKuBa0IH9qdE2jRHBPVUylSs,4245
+datadog_checks_base-37.22.0.dist-info/WHEEL,sha256=tkmg4JIqwd9H8mL30xA7crRmoStyCtGp0VWshokd1Jc,105
+datadog_checks_base-37.22.0.dist-info/RECORD,,
{datadog_checks_base-37.21.1.dist-info → datadog_checks_base-37.22.0.dist-info}/WHEEL
File without changes