datadog-checks-base 37.21.0__py2.py3-none-any.whl → 37.22.0__py2.py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

--- datadog_checks/base/__about__.py
+++ datadog_checks/base/__about__.py
@@ -1,4 +1,4 @@
  # (C) Datadog, Inc. 2018-present
  # All rights reserved
  # Licensed under a 3-clause BSD style license (see LICENSE)
- __version__ = "37.21.0"
+ __version__ = "37.22.0"

--- datadog_checks/base/checks/base.py
+++ datadog_checks/base/checks/base.py
@@ -1094,6 +1094,9 @@ class AgentCheck(object):
          return entrypoint

      def __initialize_persistent_cache_key_prefix(self):
+         if self.__persistent_cache_key_prefix:
+             return
+
          namespace = ':'.join(self.check_id.split(':')[:-1])
          self.__persistent_cache_key_prefix = f'{namespace}:{self.persistent_cache_id()}_'


--- datadog_checks/base/utils/containers.py
+++ datadog_checks/base/utils/containers.py
@@ -1,6 +1,7 @@
  # (C) Datadog, Inc. 2010-present
  # All rights reserved
  # Licensed under Simplified BSD License (see LICENSE)
+ from typing import Any


  class _FreezeKey(object):
@@ -30,8 +31,9 @@ class _FreezeKey(object):
                  if other.value is None:
                      # `x < None` -> `False`
                      return False
-                 # ...But we let other cases bubble through.
-                 raise
+
+                 # If we get 2 types that cannot be compared, we compare the string representation of the types.
+                 return str(type(self.value)) < str(type(other.value))
              else:
                  # We're on Python 2, where `a < b` never fails (returns `False` by default), or
                  # we're on Python 3 and values have the same type.
@@ -65,6 +67,17 @@ def hash_mutable(m):
      return hash(freeze(m))


+ def hash_mutable_stable(m: Any) -> str:
+     """
+     This method provides a way of hashing a mutable object ensuring that the same object always
+     provides the same hash even in different processes.
+     """
+     from datadog_checks.base.utils.hashing import HashMethod
+
+     algorithm = HashMethod.secure()
+     return algorithm(str(freeze(m)).encode()).hexdigest()
+
+
  def iter_unique(*iterables):
      seen = set()

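A usage sketch for the new helper (values are illustrative): unlike `hash_mutable`, which goes through Python's per-process salted `hash()`, the result is a sha256 hex digest that stays identical across interpreter processes.

```python
# Illustrative usage of hash_mutable_stable: the digest is a deterministic
# function of the frozen value, so it does not change between processes.
from datadog_checks.base.utils.containers import hash_mutable_stable

config = {"host": "localhost", "port": 5432, "tags": ["env:prod"]}
digest = hash_mutable_stable(config)
print(digest)       # 64-character sha256 hex string, stable across runs
print(len(digest))  # 64
```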

--- datadog_checks/base/utils/db/health.py
+++ datadog_checks/base/utils/db/health.py
@@ -9,16 +9,20 @@ from __future__ import annotations
  import time
  from typing import TYPE_CHECKING

+ from cachetools import TLRUCache
+
  from datadog_checks.base.utils.serialization import json

  if TYPE_CHECKING:
      from datadog_checks.base import DatabaseCheck
  try:
-     import datadog_agent
+     import datadog_agent  # type: ignore
  except ImportError:
      from datadog_checks.base.stubs import datadog_agent


+ import threading
+ import traceback
  from enum import Enum


@@ -28,6 +32,8 @@ class HealthEvent(Enum):
      """

      INITIALIZATION = 'initialization'
+     UNKNOWN_ERROR = 'unknown_error'
+     MISSED_COLLECTION = 'missed_collection'


  class HealthStatus(Enum):
@@ -40,6 +46,13 @@ class HealthStatus(Enum):
      ERROR = 'error'


+ DEFAULT_COOLDOWN = 60 * 5
+
+
+ def ttl(_key, value, now):
+     return now + value
+
+
  class Health:
      def __init__(self, check: DatabaseCheck):
          """
@@ -49,8 +62,18 @@ class Health:
              The check instance that will be used to submit health events.
          """
          self.check = check
-
-     def submit_health_event(self, name: HealthEvent, status: HealthStatus, tags: list[str] = None, **kwargs):
+         self._cache_lock = threading.Lock()
+         self._ttl_cache = TLRUCache(maxsize=1000, ttu=ttl)
+
+     def submit_health_event(
+         self,
+         name: HealthEvent,
+         status: HealthStatus,
+         tags: list[str] = None,
+         cooldown_time: int = None,
+         cooldown_values: list[str] = None,
+         data: dict = None,
+     ):
          """
          Submit a health event to the aggregator.

@@ -60,22 +83,52 @@ class Health:
              The health status to submit.
          :param tags: list of str
              Tags to associate with the health event.
-         :param kwargs: Additional keyword arguments to include in the event under `data`.
+         :param cooldown_time: int
+             The cooldown period in seconds to prevent the events with the same name and status
+             from being submitted again. If None there is no cooldown.
+         :param cooldown_values: list of str
+             Additional values to include in the cooldown key.
+         :param data: A dictionary to be submitted as `data`. Must be JSON serializable.
          """
+         category = self.check.__NAMESPACE__ or self.check.__class__.__name__.lower()
+         if cooldown_time:
+             cooldown_key = "|".join([category, name.value, status.value])
+             if cooldown_values:
+                 cooldown_key = "|".join([cooldown_key, "|".join([f"{v}" for v in cooldown_values])])
+             with self._cache_lock:
+                 if self._ttl_cache.get(cooldown_key, None):
+                     return
+                 self._ttl_cache[cooldown_key] = cooldown_time
          self.check.event_platform_event(
              json.dumps(
                  {
                      'timestamp': time.time() * 1000,
                      'version': 1,
                      'check_id': self.check.check_id,
-                     'category': self.check.__NAMESPACE__ or self.check.__class__.__name__.lower(),
+                     'category': category,
                      'name': name,
                      'status': status,
                      'tags': tags or [],
                      'ddagentversion': datadog_agent.get_version(),
                      'ddagenthostname': datadog_agent.get_hostname(),
-                     'data': {**kwargs},
+                     'data': data,
                  }
              ),
              "dbm-health",
          )
+
+     def submit_exception_health_event(self, exception: Exception, data: dict):
+         trace = traceback.extract_tb(exception.__traceback__)
+         exc = trace.pop()
+         if exc:
+             self.submit_health_event(
+                 name=HealthEvent.UNKNOWN_ERROR,
+                 status=HealthStatus.ERROR,
+                 data={
+                     "file": exc.filename,
+                     "line": exc.lineno,
+                     "function": exc.name,
+                     "exception_type": type(exception).__name__,
+                     **(data or {}),
+                 },
+             )
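A hypothetical caller's view of the new parameters (`check` stands in for any DatabaseCheck instance; values are illustrative): two identical submissions inside the cooldown window produce a single platform event, because the second one finds the cooldown key still cached.

```python
# Hypothetical usage sketch; only the keyword arguments come from this diff.
from datadog_checks.base.utils.db.health import DEFAULT_COOLDOWN, Health, HealthEvent, HealthStatus

health = Health(check)
for _ in range(2):
    health.submit_health_event(
        name=HealthEvent.MISSED_COLLECTION,
        status=HealthStatus.WARNING,
        tags=["dbms:postgres"],
        cooldown_time=DEFAULT_COOLDOWN,                 # 300 seconds
        cooldown_values=["postgres", "query-metrics"],  # extends the cooldown key
        data={"job_name": "query-metrics"},
    )
# Only the first call reaches check.event_platform_event(); the second is dropped
# until the "<category>|missed_collection|warning|postgres|query-metrics" key expires.
```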

--- datadog_checks/base/utils/db/utils.py
+++ datadog_checks/base/utils/db/utils.py
@@ -21,6 +21,7 @@ from datadog_checks.base import is_affirmative
  from datadog_checks.base.agent import datadog_agent
  from datadog_checks.base.log import get_check_logger
  from datadog_checks.base.utils.common import to_native_string
+ from datadog_checks.base.utils.db.health import DEFAULT_COOLDOWN, HealthEvent, HealthStatus
  from datadog_checks.base.utils.db.types import Transformer  # noqa: F401
  from datadog_checks.base.utils.format import json
  from datadog_checks.base.utils.tracing import INTEGRATION_TRACING_SERVICE_NAME, tracing_enabled
@@ -293,6 +294,14 @@ class DBMAsyncJob(object):
          expected_db_exceptions=(),
          shutdown_callback=None,
          job_name=None,
+         # Some users may want to disable the missed collection event,
+         # for example if they set the collection interval intentionally low
+         # to effectively run the job in a loop
+         enable_missed_collection_event=True,
+         # List of features depenedent on the job running
+         # Defaults to [None] during init so that if no features are specified there will
+         # still be health events submitted for the job
+         features=None,
      ):
          self._check = check
          self._config_host = config_host
@@ -314,6 +323,10 @@ class DBMAsyncJob(object):
          self._enabled = enabled
          self._expected_db_exceptions = expected_db_exceptions
          self._job_name = job_name
+         self._enable_missed_collection_event = enable_missed_collection_event
+         self._features = features
+         if self._features is None:
+             self._features = [None]

      def cancel(self):
          """
@@ -342,6 +355,37 @@ class DBMAsyncJob(object):
          elif self._job_loop_future is None or not self._job_loop_future.running():
              self._job_loop_future = DBMAsyncJob.executor.submit(self._job_loop)
          else:
+             if (
+                 hasattr(self._check, 'health')
+                 and self._enable_missed_collection_event
+                 and self._min_collection_interval >= 1
+                 and self._last_run_start
+             ):
+                 # Assume a collection interval of less than 1 second is an attempt to run the job in a loop
+                 elapsed_time = time.time() - self._last_run_start
+                 if elapsed_time > self._min_collection_interval:
+                     # Missed a collection interval, submit a health event for each feature that depends on this job
+                     for feature in self._features:
+                         self._check.health.submit_health_event(
+                             name=HealthEvent.MISSED_COLLECTION,
+                             status=HealthStatus.WARNING,
+                             tags=self._job_tags,
+                             # Use a cooldown to avoid spamming if the job is missing the collection interval
+                             # in a flappy manner
+                             cooldown_time=DEFAULT_COOLDOWN,
+                             cooldown_values=[self._dbms, self._job_name],
+                             data={
+                                 "dbms": self._dbms,
+                                 "job_name": self._job_name,
+                                 "last_run_start": self._last_run_start,
+                                 "elapsed_time": (time.time() - self._last_run_start) * 1000,
+                                 "feature": feature,
+                             },
+                         )
+                     self._check.count(
+                         "dd.{}.async_job.missed_collection".format(self._dbms), 1, tags=self._job_tags, raw=True
+                     )
+
              self._log.debug("Job loop already running. job=%s", self._job_name)

      def _job_loop(self):
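A sketch of how a DBM integration might wire up the new constructor knobs; only `enable_missed_collection_event` and `features` come from this diff, the subclass and values are hypothetical.

```python
# Hypothetical subclass; run_job() is the hook DBMAsyncJob subclasses implement.
from datadog_checks.base.utils.db.utils import DBMAsyncJob


class QueryMetricsJob(DBMAsyncJob):
    def __init__(self, check):
        super().__init__(
            check,
            job_name="query-metrics",
            # Skip the missed-collection health event, e.g. when the collection
            # interval is intentionally tiny and the job effectively runs in a loop.
            enable_missed_collection_event=False,
            # One health event is emitted per feature that depends on this job.
            features=["query_metrics"],
        )

    def run_job(self):
        ...  # collect and submit query metrics for the check
```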
@@ -394,6 +438,14 @@ class DBMAsyncJob(object):
                      tags=self._job_tags + ["error:crash-{}".format(type(e))],
                      raw=True,
                  )
+
+             if hasattr(self._check, 'health'):
+                 try:
+                     self._check.health.submit_exception_health_event(e, data={"job_name": self._job_name})
+                 except Exception as health_error:
+                     self._log.exception(
+                         "[%s] Failed to submit error health event", self._job_tags_str, health_error
+                     )
          finally:
              self._log.info("[%s] Shutting down job loop", self._job_tags_str)
              if self._shutdown_callback:
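For reference, the fields reported by `submit_exception_health_event` above come from the innermost frame of the exception's traceback; a standalone sketch, outside the diff:

```python
# Standalone sketch of what traceback.extract_tb(...).pop() yields.
import traceback

try:
    1 / 0
except Exception as e:
    frame = traceback.extract_tb(e.__traceback__).pop()  # innermost FrameSummary
    print(frame.filename, frame.lineno, frame.name, type(e).__name__)
    # -> this file, the line of `1 / 0`, '<module>', 'ZeroDivisionError'
```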
@@ -410,6 +462,7 @@ class DBMAsyncJob(object):

      def _run_job_rate_limited(self):
          try:
+             self._last_run_start = time.time()
              self._run_job_traced()
          except:
              raise

--- /dev/null
+++ datadog_checks/base/utils/hashing.py
@@ -0,0 +1,75 @@
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any, Protocol
+
+ if TYPE_CHECKING:
+     from collections.abc import Buffer
+
+
+ class Hash(Protocol):
+     def digest(self) -> bytes: ...
+     def hexdigest(self) -> str: ...
+     def update(self, obj: Buffer, /) -> None: ...
+     def copy(self) -> Hash: ...
+
+
+ class HashingAlgorithm(Protocol):
+     def __call__(self, data: bytes, *args: Any, **kwargs: Any) -> Hash: ...
+
+
+ class HashMethod:
+     """
+     Singleton class used to provide hashing algorithms
+     """
+
+     _architecture: str | None = None
+     _secure: HashingAlgorithm | None = None
+     _fast: HashingAlgorithm | None = None
+
+     @classmethod
+     def secure(cls) -> HashingAlgorithm:
+         """
+         Provides a secure hashing algorithm.
+
+         This algorithm is compliant with the FIPS 140-2 standard.
+         """
+         if cls._secure is not None:
+             return cls._secure
+
+         from hashlib import sha256
+
+         def secure(data: bytes, *args: Any, **kwargs: Any) -> Hash:
+             return sha256(data, *args, **kwargs)
+
+         cls._secure = secure
+         return cls._secure
+
+     @classmethod
+     def fast(cls) -> HashingAlgorithm:
+         """
+         Provides a fast hashing algorithm.
+
+         If the platform is 64bit, it will use the blake2b algorithm, otherwise it will use the blake2s algorithm.
+         """
+         if cls._fast is not None:
+             return cls._fast
+
+         from hashlib import blake2b, blake2s
+
+         selected_blake = blake2b if cls.architecture() == "64bit" else blake2s
+
+         def blake(data: bytes, *args: Any, **kwargs: Any) -> Hash:
+             return selected_blake(data, *args, **kwargs)
+
+         cls._fast = blake
+         return cls._fast
+
+     @classmethod
+     def architecture(cls) -> str:
+         if cls._architecture is not None:
+             return cls._architecture
+
+         from datadog_checks.base.utils.platform import Platform
+
+         cls._architecture = Platform().python_architecture()
+         return cls._architecture
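A usage sketch for the new module: both accessors build their callable lazily on first use and cache it on the class.

```python
# Usage sketch for datadog_checks.base.utils.hashing (illustrative input).
from datadog_checks.base.utils.hashing import HashMethod

secure = HashMethod.secure()            # sha256-backed, FIPS 140-2 friendly
print(secure(b"payload").hexdigest())   # 64-character hex digest

fast = HashMethod.fast()                # blake2b on 64-bit Python, blake2s otherwise
print(fast(b"payload").hexdigest())
```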

--- datadog_checks/base/utils/persistent_cache.py
+++ datadog_checks/base/utils/persistent_cache.py
@@ -1,7 +1,7 @@
  from collections.abc import Collection

  from datadog_checks.base import AgentCheck
- from datadog_checks.base.utils.containers import hash_mutable
+ from datadog_checks.base.utils.containers import hash_mutable_stable


  def config_set_persistent_cache_id(
@@ -30,4 +30,5 @@ def config_set_persistent_cache_id(
      instance_config_values = tuple(value for key, value in check.instance.items() if key in set_instance_config_options)

      selected_values = init_config_values + instance_config_values
-     return str(hash_mutable(selected_values)).replace("-", "")
+
+     return hash_mutable_stable(selected_values)
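The switch matters because `str(hash_mutable(...))` goes through Python's built-in `hash()`, which is salted per process for strings, so the cache id could change on every Agent restart; the new helper yields the same sha256 digest everywhere. An illustrative comparison, outside the diff:

```python
# Illustrative comparison (config values are made up); only the new return path is in the diff.
from datadog_checks.base.utils.containers import hash_mutable, hash_mutable_stable

selected_values = (("min_collection_interval", 15), ("reported_hostname", "db-1"))
old_id = str(hash_mutable(selected_values)).replace("-", "")  # varies across processes
new_id = hash_mutable_stable(selected_values)                 # same 64-char hex everywhere
```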

--- datadog_checks/base/utils/tls.py
+++ datadog_checks/base/utils/tls.py
@@ -56,6 +56,67 @@ class TlsConfig(BaseModel, frozen=True):
      tls_verify: bool = True


+ def _load_certifi_fallback(context):
+     """Attempt to load CA certificates from certifi as a fallback."""
+     try:
+         import certifi
+     except ImportError:
+         LOGGER.warning('Failed to import certifi, TLS verification may fail.')
+         return
+
+     try:
+         certifi_path = certifi.where()
+         context.load_verify_locations(cafile=certifi_path)
+         LOGGER.info('Successfully loaded CA certificates from certifi bundle: %s', certifi_path)
+     except (FileNotFoundError, IOError) as e:
+         LOGGER.error('Failed to load CA certificates from certifi bundle: %s. TLS verification may fail.', e)
+     except Exception as e:
+         LOGGER.error('Unexpected error loading certifi certificates: %s', e)
+
+
+ def _load_ca_certs(context, config):
+     # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations
+     # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs
+     ca_cert = config.get('tls_ca_cert')
+
+     # Handle user-provided CA cert
+     if ca_cert:
+         ca_cert = os.path.expanduser(ca_cert)
+         try:
+             if os.path.isdir(ca_cert):
+                 context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None)
+             else:
+                 context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None)
+         except FileNotFoundError:
+             LOGGER.warning(
+                 'TLS CA certificate file not found: %s. Please check the `tls_ca_cert` configuration option.',
+                 ca_cert,
+             )
+             return
+     else:
+         # Try to load system default certs
+         try:
+             context.load_default_certs(ssl.Purpose.SERVER_AUTH)
+         except Exception as e:
+             LOGGER.debug('Failed to load default CA certificates: %s', e)
+
+         # Check if any certs were actually loaded
+         if not context.get_ca_certs():
+             LOGGER.info('No CA certificates loaded from system default paths, attempting certifi fallback.')
+             _load_certifi_fallback(context)
+
+     # Load intermediate CA certs if provided
+     intermediate_ca_certs = config.get('tls_intermediate_ca_certs')
+     if intermediate_ca_certs:
+         try:
+             context.load_verify_locations(cadata='\n'.join(intermediate_ca_certs))
+         except ssl.SSLError:
+             LOGGER.warning(
+                 "TLS intermediate CA certificate(s) could not be loaded: %s. ",
+                 intermediate_ca_certs,
+             )
+
+
  def create_ssl_context(config):
      # https://docs.python.org/3/library/ssl.html#ssl.SSLContext
      # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT
@@ -78,32 +139,10 @@ def create_ssl_context(config):
      LOGGER.debug('Setting TLS ciphers to: %s', configured_ciphers)
      context.set_ciphers(configured_ciphers)

-     # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations
-     # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs
-     ca_cert = config.get('tls_ca_cert')
-     try:
-         if ca_cert:
-             ca_cert = os.path.expanduser(ca_cert)
-             if os.path.isdir(ca_cert):
-                 context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None)
-             else:
-                 context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None)
-         else:
-             context.load_default_certs(ssl.Purpose.SERVER_AUTH)
-     except FileNotFoundError:
-         LOGGER.warning(
-             'TLS CA certificate file not found: %s. Please check the `tls_ca_cert` configuration option.',
-             ca_cert,
-         )
-     intermediate_ca_certs = config.get('tls_intermediate_ca_certs')
-     try:
-         if intermediate_ca_certs:
-             context.load_verify_locations(cadata='\n'.join(intermediate_ca_certs))
-     except ssl.SSLError:
-         LOGGER.warning(
-             "TLS intermediate CA certificate(s) could not be loaded: %s. ",
-             intermediate_ca_certs,
-         )
+     if context.verify_mode == ssl.CERT_NONE:
+         LOGGER.debug('TLS verification is disabled; skipping CA certificate configuration.')
+     else:
+         _load_ca_certs(context, config)

      # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain
      client_cert, client_key = config.get('tls_cert'), config.get('tls_private_key')
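Taken together, CA loading now runs only when verification is enabled and falls through in order: the explicit `tls_ca_cert` path, then the system default store, then the bundled certifi store. A hedged usage sketch (the keys shown are the ones the function reads via `config.get(...)`; handling of `tls_verify` earlier in the function is assumed from the `verify_mode` check above):

```python
# Illustrative call; values are made up and the optional keys can be omitted.
from datadog_checks.base.utils.tls import create_ssl_context

context = create_ssl_context(
    {
        "tls_verify": True,
        "tls_ca_cert": "~/certs/internal-ca.pem",  # omit to use system certs, then certifi
        "tls_cert": "~/certs/client.pem",          # optional client certificate
        "tls_private_key": "~/certs/client.key",
    }
)
# With verification disabled the context keeps ssl.CERT_NONE and the CA block above is skipped.
```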

--- datadog_checks_base-37.21.0.dist-info/METADATA
+++ datadog_checks_base-37.22.0.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datadog-checks-base
- Version: 37.21.0
+ Version: 37.22.0
  Summary: The Datadog Check Toolkit
  Project-URL: Source, https://github.com/DataDog/integrations-core
  Author-email: Datadog <packages@datadoghq.com>

--- datadog_checks_base-37.21.0.dist-info/RECORD
+++ datadog_checks_base-37.22.0.dist-info/RECORD
@@ -3,7 +3,7 @@ datadog_checks/config.py,sha256=PrAXGdlLnoV2VMQff_noSaSJJ0wg4BAiGnw7jCQLSik,196
  datadog_checks/errors.py,sha256=eFwmnrX-batIgbu-iJyseqAPNO_4rk1UuaKK89evLhg,155
  datadog_checks/log.py,sha256=orvOgMKGNEsqSTLalCAQpWP-ouorpG1A7Gn-j2mRD80,301
  datadog_checks/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
- datadog_checks/base/__about__.py,sha256=pvycJFU0hkYwQYew20pwmnwFDOFZoWPr3etBDGeoKzI,139
+ datadog_checks/base/__about__.py,sha256=JP31zlaMNXd1vaMqwEefR96nAQreufUfXwUgr65aLNU,139
  datadog_checks/base/__init__.py,sha256=yWegSLE-TZWIGSvAiJj9PSrUxzlOo_UVJLt2zORZ8Ek,363
  datadog_checks/base/__init__.pyi,sha256=a4Y1JIcPJ8pz9tRkBAvjWdtvSQwZxbMZBuRmIiSs_4E,1031
  datadog_checks/base/agent.py,sha256=nX9x_BYYizRKGNYfXq5z7S0FZ9xcX_wd2tuxpGe3_8k,350
@@ -15,7 +15,7 @@ datadog_checks/base/types.py,sha256=anajZS0W0TsxUHJQw-JHOP2NSeuC9BisXSy9mAStlxQ,
  datadog_checks/base/checks/__init__.py,sha256=q7V6v-FwQWkQC1QWaVzKaPjZMaxPJHJcLd71C0uM7bA,211
  datadog_checks/base/checks/__init__.pyi,sha256=ydetl6kEFCSChppYQhs8mvIP5l6vnZD5AbLABGhbFcM,309
  datadog_checks/base/checks/_config_ast.py,sha256=v1rAhwORF80b3kfZKhf6zXZ7S5D3A2QPUK4tSo8eo-Y,3268
- datadog_checks/base/checks/base.py,sha256=xP56kjtCvW-CMWrdVN3ZC1yAhQr4jad9ORPDuOdz-eM,60654
+ datadog_checks/base/checks/base.py,sha256=mnh61HRvHFisSP4IvUtoDPswxB3_ybX-nWkfXMyzWJE,60721
  datadog_checks/base/checks/db.py,sha256=HzEOH4uZaMDAaUTJYy0K5wV9FryNQDXsSMAOHXPVaf4,794
  datadog_checks/base/checks/network.py,sha256=UkgqkVHaoX7Hqi0WKEx-TvaFiF6-37VyF9A3m2aSaJM,1966
  datadog_checks/base/checks/kube_leader/__init__.py,sha256=q7V6v-FwQWkQC1QWaVzKaPjZMaxPJHJcLd71C0uM7bA,211
@@ -109,15 +109,16 @@ datadog_checks/base/utils/_http_utils.py,sha256=w8whzycmGzVKlCUv703rgg2qMAF0T3d5
  datadog_checks/base/utils/aws.py,sha256=wxFLWlVFtv5_EURdsXzDhORcTpM0jBlAokcMiPV1xD8,1945
  datadog_checks/base/utils/common.py,sha256=OtmKd5FKjlwJwRbhE4aImOBWPOzNPVqfXXmHXk4MMIw,3803
  datadog_checks/base/utils/constants.py,sha256=QwTey4CWB0NAxm2rcD-wPYRkEyglekQIrAzFMabEa38,306
- datadog_checks/base/utils/containers.py,sha256=LBqUzERHxx5TdgcWVQTq1A1WaGCMhzXT9k4Gn0fX3J8,2753
+ datadog_checks/base/utils/containers.py,sha256=S6c9kvmDJuiHh9hO62Pbdqrnhm6e2R3O5gGH3l5jtNo,3243
  datadog_checks/base/utils/date.py,sha256=JJmqP84CgVcFJ0cvAmMu8EtM6v96tIESucQNm9eKeEc,2780
  datadog_checks/base/utils/diagnose.py,sha256=eLMe0tISpkzS3yxVR83IHxorQJfHT_Xi6Cq4zzRNxVI,5285
  datadog_checks/base/utils/fips.py,sha256=vxm3K7wTQKYenP1LbXk7pmJ8WA6l367BTJGBNO0DkvQ,2490
  datadog_checks/base/utils/functions.py,sha256=iGlybxR6aPPElNxNb2ELOzbk328j9OVBAxredJxdCRw,695
+ datadog_checks/base/utils/hashing.py,sha256=dNAe3RUvd7TE3WmcDX6SYQhTaMXsxxDHue4fPkFuL5Q,2015
  datadog_checks/base/utils/headers.py,sha256=0SSdC71jwaB61BODfusahCVr1c56GvT9iwt7cidcHP0,1779
  datadog_checks/base/utils/http.py,sha256=ePSzL7x_xsb1dnXJyU2fBXIgOU0p5UFZIpG2AX_jZZA,44651
  datadog_checks/base/utils/limiter.py,sha256=YRTrPCX1S5EtHLVcP_-GEfzRots_LTcy1f_uHZVs90g,3027
- datadog_checks/base/utils/persistent_cache.py,sha256=i5I9BKIb5V2BFEs0sVTf8bBQUIH21G2nL_tRQHC9GMk,1538
+ datadog_checks/base/utils/persistent_cache.py,sha256=1Tk0dkWZH8yH4I_bMZgnhhz2jcZcIyu85dNpBvy5lsM,1531
  datadog_checks/base/utils/platform.py,sha256=wW8f6XKo4JHxvu1sN0DpLDmYjS_cCu8GoKvfTjIj4yM,2499
  datadog_checks/base/utils/secrets.py,sha256=Tj5MBOoyGXXDWB3Hr-7UKDy5GV1NZJkFPY4T4v9PHHg,551
  datadog_checks/base/utils/serialization.py,sha256=pcRUzZIUZkOsfnGDGbxeUwGXrSsFl_9rLhA0ekD_AZ8,975
@@ -140,7 +141,7 @@ datadog_checks/base/utils/concurrency/limiter.py,sha256=is2ZpUEjfsI4nBGtXG2D0Zgv
  datadog_checks/base/utils/db/__init__.py,sha256=EVTc2FtnHWLHXI3M79jyMn9ypZAMa9eqG3EKLAiMF-M,211
  datadog_checks/base/utils/db/__init__.pyi,sha256=ewmGxxyJ52wAaYxNZahi2koEUnddfvHcn3HYxQ3RUr0,240
  datadog_checks/base/utils/db/core.py,sha256=bYanwXIqBzsSxK7b-Ofb0W1WiHbFBtKyYdUBonBLe_Q,11165
- datadog_checks/base/utils/db/health.py,sha256=rdcZPdlLMT4g9tepkietuViKtmP4gwPsjMGCvqC5s54,2366
+ datadog_checks/base/utils/db/health.py,sha256=riJaJInOuYFK3y0wNH19HojIkXPwQaMtvxuyPCRxnZY,4182
  datadog_checks/base/utils/db/query.py,sha256=-PyxdqpbShkQ78h7sWnghQZVtjdLGVrm71n8OpHuPW4,14432
  datadog_checks/base/utils/db/sql.py,sha256=oiEzQa_vC_w3U65VFrFCoQHWj5GQLLRlSO0CfiSlp4A,2490
  datadog_checks/base/utils/db/sql_commenter.py,sha256=r_efK6TGRQxM_-Qj-ndEJdECk47J4nCFjkVyxu1XmvU,1522
@@ -148,7 +149,7 @@ datadog_checks/base/utils/db/statement_metrics.py,sha256=U7EtERkmFzfCtfyd3094fBa
  datadog_checks/base/utils/db/timed_cache.py,sha256=a9Ks5KKUvExB6GOATXTSCLamVtLD919Dn6HpweGKtFw,2114
  datadog_checks/base/utils/db/transform.py,sha256=H3JN8_MF0Pk0HaXvIZeX1A8iQrP8KBgS741MPeBiWDo,23969
  datadog_checks/base/utils/db/types.py,sha256=OLX2Oq58JQPFBD4oqUpCLkAP7ovRGN_i1vFk1E0N8Lg,267
- datadog_checks/base/utils/db/utils.py,sha256=g6GAG1OGvo4O-GrTB6nETvUZGfVUSpqYYJMBuhLs2pM,23130
+ datadog_checks/base/utils/db/utils.py,sha256=GhfrWmuVTOhNtBvPjLLM5t2nOW7HwtpLqBPpWOMV9vU,26092
  datadog_checks/base/utils/discovery/__init__.py,sha256=vPCOdsThBcBjFJRPhDm6IsZGOwk8HlvciwCe_l8dKLk,211
  datadog_checks/base/utils/discovery/__init__.pyi,sha256=ScVLU1Njj9ekZmewltb0cULI6BylssVHfn4CcPNeyr8,173
  datadog_checks/base/utils/discovery/cache.py,sha256=f9L3A7YZpZ-mpZpFIwjsa5ab9cZMGkqdetdr9EpalbI,887
@@ -222,6 +223,6 @@ datadog_checks/utils/tracing.py,sha256=HQbQakKM-Lw75MDkItaYJYipS6YO24Z_ymDVxDsx5
  datadog_checks/utils/prometheus/__init__.py,sha256=8WwXnM9g1sfS5267QYCJX_hd8MZl5kRgBgQ_SzdNdXs,161
  datadog_checks/utils/prometheus/functions.py,sha256=4vWsTGLgujHwdYZo0tlAQkqDPHofqUJM3k9eItJqERQ,197
  datadog_checks/utils/prometheus/metrics_pb2.py,sha256=xg3UdUHe4TjeR4s13LUKZ2U1WVSt6U6zjsVRG6lX6dc,173
- datadog_checks_base-37.21.0.dist-info/METADATA,sha256=RngK4W5OJrfoUb7udQ1t_kuEqdSP0_6Mk6FecbRTVyo,4245
- datadog_checks_base-37.21.0.dist-info/WHEEL,sha256=tkmg4JIqwd9H8mL30xA7crRmoStyCtGp0VWshokd1Jc,105
- datadog_checks_base-37.21.0.dist-info/RECORD,,
+ datadog_checks_base-37.22.0.dist-info/METADATA,sha256=KkZTTQeulKtyL6BaH_OnKuBa0IH9qdE2jRHBPVUylSs,4245
+ datadog_checks_base-37.22.0.dist-info/WHEEL,sha256=tkmg4JIqwd9H8mL30xA7crRmoStyCtGp0VWshokd1Jc,105
+ datadog_checks_base-37.22.0.dist-info/RECORD,,