datadog-checks-base 37.7.0__py2.py3-none-any.whl → 37.8.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. datadog_checks/base/__about__.py +1 -1
  2. datadog_checks/base/__init__.py +3 -43
  3. datadog_checks/base/__init__.pyi +31 -0
  4. datadog_checks/base/checks/__init__.py +2 -4
  5. datadog_checks/base/checks/__init__.pyi +7 -0
  6. datadog_checks/base/checks/base.py +58 -27
  7. datadog_checks/base/checks/kube_leader/__init__.py +2 -5
  8. datadog_checks/base/checks/kube_leader/__init__.pyi +8 -0
  9. datadog_checks/base/checks/kubelet_base/__init__.py +6 -0
  10. datadog_checks/base/checks/kubelet_base/__init__.pyi +6 -0
  11. datadog_checks/base/checks/openmetrics/__init__.py +2 -4
  12. datadog_checks/base/checks/openmetrics/__init__.pyi +6 -0
  13. datadog_checks/base/checks/openmetrics/v2/scraper.py +0 -2
  14. datadog_checks/base/checks/openmetrics/v2/transformers/__init__.py +3 -10
  15. datadog_checks/base/checks/openmetrics/v2/transformers/__init__.pyi +26 -0
  16. datadog_checks/base/checks/prometheus/__init__.py +2 -5
  17. datadog_checks/base/checks/prometheus/__init__.pyi +8 -0
  18. datadog_checks/base/checks/prometheus/mixins.py +1 -1
  19. datadog_checks/base/checks/win/__init__.py +2 -4
  20. datadog_checks/base/checks/win/__init__.pyi +7 -0
  21. datadog_checks/base/checks/win/wmi/__init__.py +2 -386
  22. datadog_checks/base/checks/win/wmi/__init__.pyi +32 -0
  23. datadog_checks/base/checks/win/wmi/base.py +390 -0
  24. datadog_checks/base/log.py +10 -8
  25. datadog_checks/base/stubs/datadog_agent.py +5 -6
  26. datadog_checks/base/utils/_http_utils.py +51 -0
  27. datadog_checks/base/utils/db/__init__.py +3 -2
  28. datadog_checks/base/utils/db/__init__.pyi +7 -0
  29. datadog_checks/base/utils/db/sql.py +2 -2
  30. datadog_checks/base/utils/db/utils.py +3 -3
  31. datadog_checks/base/utils/discovery/__init__.py +3 -1
  32. datadog_checks/base/utils/discovery/__init__.pyi +6 -0
  33. datadog_checks/base/utils/format/__init__.py +3 -0
  34. datadog_checks/base/utils/format/_json.py +43 -0
  35. datadog_checks/base/utils/format/json.py +30 -0
  36. datadog_checks/base/utils/http.py +37 -66
  37. datadog_checks/base/utils/metadata/__init__.py +3 -1
  38. datadog_checks/base/utils/metadata/__init__.pyi +6 -0
  39. datadog_checks/base/utils/prometheus/__init__.py +2 -1
  40. datadog_checks/base/utils/prometheus/__init__.pyi +6 -0
  41. datadog_checks/base/utils/replay/execute.py +6 -5
  42. datadog_checks/base/utils/replay/redirect.py +8 -7
  43. datadog_checks/base/utils/serialization.py +5 -0
  44. datadog_checks/base/utils/tracing.py +10 -1
  45. {datadog_checks_base-37.7.0.dist-info → datadog_checks_base-37.8.0.dist-info}/METADATA +18 -8
  46. {datadog_checks_base-37.7.0.dist-info → datadog_checks_base-37.8.0.dist-info}/RECORD +47 -29
  47. {datadog_checks_base-37.7.0.dist-info → datadog_checks_base-37.8.0.dist-info}/WHEEL +0 -0
datadog_checks/base/checks/win/wmi/base.py
@@ -0,0 +1,390 @@
+ # (C) Datadog, Inc. 2025-present
+ # All rights reserved
+ # Licensed under a 3-clause BSD style license (see LICENSE)
+ from typing import Any, Dict, List, Optional, Tuple  # noqa: F401
+
+ from ... import AgentCheck
+ from .sampler import WMISampler
+ from .types import TagQuery, WMIFilter, WMIMetric, WMIObject, WMIProperties  # noqa: F401
+
+
+ class InvalidWMIQuery(Exception):
+     """
+     Invalid WMI Query.
+     """
+
+     pass
+
+
+ class MissingTagBy(Exception):
+     """
+     WMI query returned multiple rows but no `tag_by` value was given.
+     """
+
+     pass
+
+
+ class TagQueryUniquenessFailure(Exception):
+     """
+     'Tagging query' did not return or returned multiple results.
+     """
+
+     pass
+
+
+ class WinWMICheck(AgentCheck):
+     """
+     WMI check.
+
+     Windows only.
+     """
+
+     def __init__(self, *args, **kwargs):  # To support optional agentConfig
+         # type: (*Any, **Any) -> None
+         super(WinWMICheck, self).__init__(*args, **kwargs)
+
+         # Connection information
+         self.host = self.instance.get('host', "localhost")  # type: str
+         self.namespace = self.instance.get('namespace', "root\\cimv2")  # type: str
+         self.provider = self.instance.get('provider')  # type: Optional[int]
+         self.username = self.instance.get('username', "")  # type: str
+         self.password = self.instance.get('password', "")  # type: str
+
+         # WMI instance
+         self.wmi_class = self.instance.get('class', '')  # type: str
+
+         self._wmi_sampler = None  # type: Optional[WMISampler]
+         self._wmi_props = None  # type: Optional[WMIProperties]
+
+     def _format_tag_query(self, sampler, wmi_obj, tag_query):
+         # type: (WMISampler, WMIObject, TagQuery) -> Tuple[str, str, List[Dict]]
+         """
+         Format `tag_query` or raise on incorrect parameters.
+         """
+         try:
+             link_source_property = int(wmi_obj[tag_query[0]])
+             target_class = tag_query[1]
+             link_target_class_property = tag_query[2]
+             target_property = tag_query[3]
+         except IndexError:
+             self.log.error(
+                 u"Wrong `tag_queries` parameter format. " "Please refer to the configuration file for more information."
+             )
+             raise
+         except TypeError:
+             wmi_property = tag_query[0]
+             wmi_class = sampler.class_name
+             self.log.error(
+                 u"Incorrect 'link source property' in `tag_queries` parameter: `%s` is not a property of `%s`",
+                 wmi_property,
+                 wmi_class,
+             )
+             raise
+
+         return target_class, target_property, [{link_target_class_property: link_source_property}]
+
+     def _raise_on_invalid_tag_query_result(self, sampler, wmi_obj, tag_query):
+         # type: (WMISampler, WMIObject, TagQuery) -> None
+         target_property = sampler.property_names[0]
+         target_class = sampler.class_name
+
+         if len(sampler) != 1:
+             message = "no result was returned"
+             if len(sampler):
+                 message = "multiple results returned (one expected)"
+
+             self.log.warning(
+                 u"Failed to extract a tag from `tag_queries` parameter: %s. wmi_object=%s - query=%s",
+                 message,
+                 wmi_obj,
+                 tag_query,
+             )
+             raise TagQueryUniquenessFailure
+
+         if sampler[0][target_property] is None:
+             self.log.error(
+                 u"Incorrect 'target property' in `tag_queries` parameter: `%s` is empty or is not a property of `%s`",
+                 target_property,
+                 target_class,
+             )
+             raise TypeError
+
+     def _get_tag_query_tag(self, sampler, wmi_obj, tag_query):
+         # type: (WMISampler, WMIObject, TagQuery) -> str
+         """
+         Design a query based on the given WMIObject to extract a tag.
+
+         Returns: tag or TagQueryUniquenessFailure exception.
+         """
+         self.log.debug(u"`tag_queries` parameter found. wmi_object=%s - query=%s", wmi_obj, tag_query)
+
+         # Extract query information
+         target_class, target_property, filters = self._format_tag_query(sampler, wmi_obj, tag_query)
+
+         # Create a specific sampler
+         with WMISampler(
+             self.log, target_class, [target_property], filters=filters, **sampler.connection
+         ) as tag_query_sampler:
+             tag_query_sampler.sample()
+
+             # Extract tag
+             self._raise_on_invalid_tag_query_result(tag_query_sampler, wmi_obj, tag_query)
+
+             link_value = str(tag_query_sampler[0][target_property]).lower()
+
+         tag = "{tag_name}:{tag_value}".format(tag_name=target_property.lower(), tag_value="_".join(link_value.split()))
+
+         self.log.debug(u"Extracted `tag_queries` tag: '%s'", tag)
+         return tag
+
+     def _extract_metrics(self, wmi_sampler, tag_by, tag_queries, constant_tags):
+         # type: (WMISampler, str, List[List[str]], List[str]) -> List[WMIMetric]
+         """
+         Extract and tag metrics from the WMISampler.
+
+         Raise when multiple WMIObject were returned by the sampler with no `tag_by` specified.
+
+         Returns: List of WMIMetric
+         ```
+         [
+             WMIMetric("freemegabytes", 19742, ["name:_total"]),
+             WMIMetric("avgdiskbytesperwrite", 1536, ["name:c:"]),
+         ]
+         ```
+         """
+         if len(wmi_sampler) > 1 and not tag_by:
+             raise MissingTagBy(
+                 u"WMI query returned multiple rows but no `tag_by` value was given."
+                 " class={wmi_class} - properties={wmi_properties} - filters={filters}".format(
+                     wmi_class=wmi_sampler.class_name,
+                     wmi_properties=wmi_sampler.property_names,
+                     filters=wmi_sampler.filters,
+                 )
+             )
+
+         extracted_metrics = []
+         tag_by = tag_by.lower()
+
+         for wmi_obj in wmi_sampler:
+             tags = list(constant_tags) if constant_tags else []
+
+             # Tag with `tag_queries` parameter
+             for query in tag_queries:
+                 try:
+                     tags.append(self._get_tag_query_tag(wmi_sampler, wmi_obj, query))
+                 except TagQueryUniquenessFailure:
+                     continue
+
+             for wmi_property, wmi_value in wmi_obj.items():
+                 # skips any property not in arguments since SWbemServices.ExecQuery will return key prop properties
+                 # https://msdn.microsoft.com/en-us/library/aa393866(v=vs.85).aspx
+
+                 # skip wmi_property "foo,bar"; there will be a separate wmi_property for each "foo" and "bar"
+                 if ',' in wmi_property:
+                     continue
+
+                 normalized_wmi_property = wmi_property.lower()
+                 for s in wmi_sampler.property_names:
+                     if normalized_wmi_property in s.lower():
+                         # wmi_property: "foo" should be found in property_names ["foo,bar", "name"]
+                         break
+                 else:
+                     continue
+                 # Tag with `tag_by` parameter
+                 for t in tag_by.split(','):
+                     t = t.strip()
+                     if wmi_property == t:
+                         tag_value = str(wmi_value).lower()
+                         if tag_queries and tag_value.find("#") > 0:
+                             tag_value = tag_value[: tag_value.find("#")]
+
+                         tags.append("{name}:{value}".format(name=t, value=tag_value))
+                         continue
+
+                 # No metric extraction on 'Name' and properties in tag_by
+                 if wmi_property == 'name' or normalized_wmi_property in tag_by.lower():
+                     continue
+
+                 try:
+                     extracted_metrics.append(WMIMetric(wmi_property, float(wmi_value), tags))
+                 except ValueError:
+                     self.log.warning(
+                         u"When extracting metrics with WMI, found a non digit value for property '%s'.", wmi_property
+                     )
+                     continue
+                 except TypeError:
+                     self.log.warning(u"When extracting metrics with WMI, found a missing property '%s'", wmi_property)
+                     continue
+         return extracted_metrics
+
+     def _submit_metrics(self, metrics, metric_name_and_type_by_property):
+         # type: (List[WMIMetric], Dict[str, Tuple[str, str]]) -> None
+         """
+         Resolve metric names and types and submit it.
+         """
+         for metric in metrics:
+             if (
+                 metric.name not in metric_name_and_type_by_property
+                 and metric.name.lower() not in metric_name_and_type_by_property
+             ):
+                 # Only report the metrics that were specified in the configuration
+                 # Ignore added properties like 'Timestamp_Sys100NS', `Frequency_Sys100NS`, etc ...
+                 continue
+
+             if metric_name_and_type_by_property.get(metric.name):
+                 metric_name, metric_type = metric_name_and_type_by_property[metric.name]
+             elif metric_name_and_type_by_property.get(metric.name.lower()):
+                 metric_name, metric_type = metric_name_and_type_by_property[metric.name.lower()]
+             else:
+                 continue
+
+             try:
+                 func = getattr(self, metric_type.lower())
+             except AttributeError:
+                 raise Exception(u"Invalid metric type: {0}".format(metric_type))
+
+             func(metric_name, metric.value, metric.tags)
+
+     def _get_instance_key(self, host, namespace, wmi_class, other=None):
+         # type: (str, str, str, Any) -> str
+         """
+         Return an index key for a given instance. Useful for caching.
+         """
+         if other:
+             return "{host}:{namespace}:{wmi_class}-{other}".format(
+                 host=host, namespace=namespace, wmi_class=wmi_class, other=other
+             )
+         return "{host}:{namespace}:{wmi_class}".format(host=host, namespace=namespace, wmi_class=wmi_class)
+
+     def get_running_wmi_sampler(self, properties, filters, **kwargs):
+         # type: (List[str], List[Dict[str, WMIFilter]], **Any) -> WMISampler
+         tag_by = kwargs.pop('tag_by', "")
+         return self._get_running_wmi_sampler(
+             instance_key=None,
+             wmi_class=self.wmi_class,
+             properties=properties,
+             filters=filters,
+             host=self.host,
+             namespace=self.namespace,
+             provider=self.provider,
+             username=self.username,
+             password=self.password,
+             tag_by=tag_by,
+             **kwargs
+         )
+
+     def _get_running_wmi_sampler(self, instance_key, wmi_class, properties, tag_by="", **kwargs):
+         # type: (Any, str, List[str], str, Any) -> WMISampler
+         """
+         Return a running WMISampler for the given (class, properties).
+
+         If no matching WMISampler is running yet, start one and cache it.
+         """
+         if self._wmi_sampler is None:
+             property_list = list(properties) + [tag_by] if tag_by else list(properties)
+             self._wmi_sampler = WMISampler(self.log, wmi_class, property_list, **kwargs)
+             self._wmi_sampler.start()
+
+         return self._wmi_sampler
+
+     def _get_wmi_properties(self, instance_key, metrics, tag_queries):
+         # type: (Any, List[List[str]], List[List[str]]) -> WMIProperties
+         """
+         Create and cache a (metric name, metric type) by WMI property map and a property list.
+         """
+         if not self._wmi_props:
+             metric_name_by_property = dict(  # noqa: C402
+                 (wmi_property.lower(), (metric_name, metric_type)) for wmi_property, metric_name, metric_type in metrics
+             )  # type: Dict[str, Tuple[str, str]]
+             properties = map(lambda x: x[0], metrics + tag_queries)  # type: List[str]  # noqa: C417
+
+             self._wmi_props = (metric_name_by_property, properties)
+
+         return self._wmi_props
+
+
+ def from_time(
+     year=None, month=None, day=None, hours=None, minutes=None, seconds=None, microseconds=None, timezone=None
+ ):
+     # type: (int, int, int, int, int, int, int, int) -> str
+     """Convenience wrapper to take a series of date/time elements and return a WMI time
+     of the form `yyyymmddHHMMSS.mmmmmm+UUU`. All elements may be int, string or
+     omitted altogether. If omitted, they will be replaced in the output string
+     by a series of stars of the appropriate length.
+     :param year: The year element of the date/time
+     :param month: The month element of the date/time
+     :param day: The day element of the date/time
+     :param hours: The hours element of the date/time
+     :param minutes: The minutes element of the date/time
+     :param seconds: The seconds element of the date/time
+     :param microseconds: The microseconds element of the date/time
+     :param timezone: The timezone element of the date/time
+     :returns: A WMI datetime string of the form: `yyyymmddHHMMSS.mmmmmm+UUU`
+     """
+
+     def str_or_stars(i, length):
+         # type: (Optional[int], int) -> str
+         if i is None:
+             return "*" * length
+         else:
+             return str(i).rjust(length, "0")
+
+     wmi_time = ""
+     wmi_time += str_or_stars(year, 4)
+     wmi_time += str_or_stars(month, 2)
+     wmi_time += str_or_stars(day, 2)
+     wmi_time += str_or_stars(hours, 2)
+     wmi_time += str_or_stars(minutes, 2)
+     wmi_time += str_or_stars(seconds, 2)
+     wmi_time += "."
+     wmi_time += str_or_stars(microseconds, 6)
+     if timezone is None:
+         wmi_time += "+"
+     else:
+         try:
+             int(timezone)
+         except ValueError:
+             wmi_time += "+"
+         else:
+             if timezone >= 0:
+                 wmi_time += "+"
+             else:
+                 wmi_time += "-"
+                 timezone = abs(timezone)
+             wmi_time += str_or_stars(timezone, 3)
+
+     return wmi_time
+
+
+ def to_time(wmi_time):
+     # type: (str) -> Tuple[Optional[int], Optional[int], Optional[int], Optional[int], Optional[int], Optional[int], Optional[int], Optional[str]]
+     """Convenience wrapper to take a WMI datetime string of the form
+     yyyymmddHHMMSS.mmmmmm+UUU and return a 9-tuple containing the
+     individual elements, or None where string contains placeholder
+     stars.
+
+     :param wmi_time: The WMI datetime string in `yyyymmddHHMMSS.mmmmmm+UUU` format
+
+     :returns: A 9-tuple of (year, month, day, hours, minutes, seconds, microseconds, timezone)
+     """
+
+     def int_or_none(s, start, end):
+         # type: (str, int, int) -> Optional[int]
+         try:
+             return int(s[start:end])
+         except ValueError:
+             return None
+
+     year = int_or_none(wmi_time, 0, 4)
+     month = int_or_none(wmi_time, 4, 6)
+     day = int_or_none(wmi_time, 6, 8)
+     hours = int_or_none(wmi_time, 8, 10)
+     minutes = int_or_none(wmi_time, 10, 12)
+     seconds = int_or_none(wmi_time, 12, 14)
+     microseconds = int_or_none(wmi_time, 15, 21)
+     timezone = wmi_time[22:]  # type: Optional[str]
+
+     if timezone == "***":
+         timezone = None
+
+     return year, month, day, hours, minutes, seconds, microseconds, timezone
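
The `from_time`/`to_time` helpers at the end of the new module are plain string formatting around the WMI datetime layout, so their behavior can be checked without touching WMI itself. A minimal round-trip sketch (not part of the diff; the import path and a Windows host where the `wmi` subpackage imports cleanly are assumptions):

```python
# Hypothetical usage of the WMI datetime helpers added in base.py (illustrative only).
from datadog_checks.base.checks.win.wmi.base import from_time, to_time

# Omitted elements are rendered as '*' padding; a numeric timezone becomes a signed 3-digit offset.
wmi_ts = from_time(year=2025, month=1, day=2, hours=3, minutes=4, seconds=5, microseconds=6, timezone=60)
assert wmi_ts == "20250102030405.000006+060"

# to_time() slices the fixed-width fields back out; the timezone comes back as the raw string "060".
assert to_time(wmi_ts) == (2025, 1, 2, 3, 4, 5, 6, "060")
```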
datadog_checks/base/log.py
@@ -1,18 +1,24 @@
  # (C) Datadog, Inc. 2018-present
  # All rights reserved
  # Licensed under a 3-clause BSD style license (see LICENSE)
+ from __future__ import annotations
+
  import logging
  import sys
- import warnings
- from typing import Callable  # noqa: F401
+ from typing import TYPE_CHECKING, Callable

- from urllib3.exceptions import InsecureRequestWarning
+ import lazy_loader

  from datadog_checks.base.agent import datadog_agent

  from .utils.common import to_native_string
  from .utils.tracing import tracing_enabled

+ if TYPE_CHECKING:
+     import inspect as _module_inspect
+
+ inspect: _module_inspect = lazy_loader.load('inspect')
+
  # Arbitrary number less than 10 (DEBUG)
  TRACE_LEVEL = 7

@@ -33,8 +39,7 @@ class CheckLoggingAdapter(logging.LoggerAdapter):
          self.check = check
          self.check_id = self.check.check_id

-     def setup_sanitization(self, sanitize):
-         # type: (Callable[[str], str]) -> None
+     def setup_sanitization(self, sanitize: Callable[[str], str]) -> None:
          for handler in self.logger.handlers:
              if isinstance(handler, AgentLogHandler):
                  handler.setFormatter(SanitizationFormatter(handler.formatter, sanitize=sanitize))
@@ -158,9 +163,6 @@ def init_logging():
      rootLogger.addHandler(AgentLogHandler())
      rootLogger.setLevel(_get_py_loglevel(datadog_agent.get_config('log_level')))

-     # We log instead of emit warnings for unintentionally insecure HTTPS requests
-     warnings.simplefilter('ignore', InsecureRequestWarning)
-
      # `requests` (used in a lot of checks) imports `urllib3`, which logs a bunch of stuff at the info level
      # Therefore, pre emptively increase the default level of that logger to `WARN`
      urllib_logger = logging.getLogger("requests.packages.urllib3")
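
In the `log.py` change above, `lazy_loader.load('inspect')` replaces an eager `import inspect`: a proxy is bound immediately and the real module is only imported on first attribute access, while the `TYPE_CHECKING` alias keeps the annotation resolvable for type checkers. A small sketch of the same pattern in isolation (names here are illustrative, not from the diff):

```python
import lazy_loader

# Bound immediately as a lazy proxy; `import inspect` actually runs on first attribute access.
inspect = lazy_loader.load('inspect')


def calling_function_name() -> str:
    # Touching inspect.stack() here is what triggers the deferred import.
    return inspect.stack()[1].function
```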
datadog_checks/base/stubs/datadog_agent.py
@@ -1,11 +1,10 @@
  # (C) Datadog, Inc. 2018-present
  # All rights reserved
  # Licensed under a 3-clause BSD style license (see LICENSE)
- import json
  import re
  from collections import defaultdict

- from datadog_checks.base.utils.serialization import from_json, to_json
+ from datadog_checks.base.utils.format import json


  class DatadogAgentStub(object):
@@ -102,7 +101,7 @@ class DatadogAgentStub(object):
          return self._host_tags

      def _set_host_tags(self, tags_dict):
-         self._host_tags = json.dumps(tags_dict)
+         self._host_tags = json.encode(tags_dict)

      def _reset_host_tags(self):
          self._host_tags = "{}"
@@ -120,7 +119,7 @@ class DatadogAgentStub(object):
          self._metadata[(check_id, name)] = value

      def send_log(self, log_line, check_id):
-         self._sent_logs[check_id].append(from_json(log_line))
+         self._sent_logs[check_id].append(json.decode(log_line))

      def set_external_tags(self, external_tags):
          self._external_tags = external_tags
@@ -139,8 +138,8 @@ class DatadogAgentStub(object):
          if options:
              # Options provided is a JSON string because the Go stub requires it, whereas
              # the python stub does not for things such as testing.
-             if from_json(options).get('return_json_metadata', False):
-                 return to_json({'query': re.sub(r'\s+', ' ', query or '').strip(), 'metadata': {}})
+             if json.decode(options).get('return_json_metadata', False):
+                 return json.encode({'query': re.sub(r'\s+', ' ', query or '').strip(), 'metadata': {}})
          return re.sub(r'\s+', ' ', query or '').strip()

      def obfuscate_sql_exec_plan(self, plan, normalize=False):
datadog_checks/base/utils/_http_utils.py
@@ -0,0 +1,51 @@
+ # (C) Datadog, Inc. 2025-present
+ # All rights reserved
+ # Licensed under a 3-clause BSD style license (see LICENSE)
+ from __future__ import annotations
+
+ from typing import Any
+
+
+ def __getattr__(name: str) -> Any:
+     # This is used to lazily load imports when the path contains subpackages
+     if name == 'HostHeaderSSLAdapter':
+         from requests_toolbelt.adapters.host_header_ssl import HostHeaderSSLAdapter
+
+         return HostHeaderSSLAdapter
+
+     if name == 'BotoAWSRequestsAuth':
+         from aws_requests_auth.boto_utils import BotoAWSRequestsAuth
+
+         return BotoAWSRequestsAuth
+
+     if name == 'oauth2':
+         from oauthlib import oauth2
+
+         return oauth2
+
+     if name == 'cryptography_serialization':
+         from cryptography.hazmat.primitives import serialization
+
+         return serialization
+
+     if name == 'cryptography_x509_load_certificate':
+         from cryptography.x509 import load_der_x509_certificate
+
+         return load_der_x509_certificate
+
+     if name == 'cryptography_x509_ExtensionNotFound':
+         from cryptography.x509.extensions import ExtensionNotFound
+
+         return ExtensionNotFound
+
+     if name == 'cryptography_x509_AuthorityInformationAccessOID':
+         from cryptography.x509.oid import AuthorityInformationAccessOID
+
+         return AuthorityInformationAccessOID
+
+     if name == 'cryptography_x509_ExtensionOID':
+         from cryptography.x509.oid import ExtensionOID
+
+         return ExtensionOID
+
+     raise AttributeError(f'`{__name__}` object has no attribute `{name}`')
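
Callers of `_http_utils` import the module itself and only touch an attribute when the corresponding feature is configured, so the heavy optional dependencies (requests_toolbelt, aws_requests_auth, oauthlib, cryptography) stay unimported on the default path. A hedged usage sketch (the helper function below is hypothetical, not part of the diff):

```python
from datadog_checks.base.utils import _http_utils


def make_aws_auth(aws_host: str, aws_region: str, aws_service: str):
    # Attribute access runs the module-level __getattr__ above, which imports
    # aws_requests_auth only at this point.
    return _http_utils.BotoAWSRequestsAuth(aws_host=aws_host, aws_region=aws_region, aws_service=aws_service)
```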
datadog_checks/base/utils/db/__init__.py
@@ -1,5 +1,6 @@
  # (C) Datadog, Inc. 2019-present
  # All rights reserved
  # Licensed under a 3-clause BSD style license (see LICENSE)
- from .core import QueryExecutor, QueryManager
- from .query import Query
+ import lazy_loader
+
+ __getattr__, __dir__, __all__ = lazy_loader.attach_stub(__name__, __file__)
datadog_checks/base/utils/db/__init__.pyi
@@ -0,0 +1,7 @@
+ # (C) Datadog, Inc. 2025-present
+ # All rights reserved
+ # Licensed under a 3-clause BSD style license (see LICENSE)
+ from .core import QueryExecutor, QueryManager
+ from .query import Query
+
+ __all__ = ['Query', 'QueryExecutor', 'QueryManager']
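
`lazy_loader.attach_stub(__name__, __file__)` reads the adjacent `__init__.pyi` stub shown above to learn which names the package exports and from which submodules, then returns a `(__getattr__, __dir__, __all__)` triple that performs those imports only on first access. A rough hand-written equivalent for this particular package (a sketch under that assumption, not the library's actual implementation):

```python
# Hand-rolled PEP 562 equivalent of what attach_stub derives from __init__.pyi (sketch only).
from typing import Any

__all__ = ['Query', 'QueryExecutor', 'QueryManager']


def __getattr__(name: str) -> Any:
    if name in ('QueryExecutor', 'QueryManager'):
        from . import core

        return getattr(core, name)
    if name == 'Query':
        from .query import Query

        return Query
    raise AttributeError(f'module {__name__!r} has no attribute {name!r}')


def __dir__() -> list:
    return __all__
```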
datadog_checks/base/utils/db/sql.py
@@ -7,7 +7,7 @@ from __future__ import unicode_literals
  import mmh3

  from datadog_checks.base import ensure_bytes
- from datadog_checks.base.utils.serialization import json, sort_keys_kwargs
+ from datadog_checks.base.utils.format import json

  # Unicode character "Arabic Decimal Separator" (U+066B) is a character which looks like an ascii
  # comma, but is not treated like a comma when parsing metrics tags. This is used to replace
@@ -52,5 +52,5 @@ def compute_exec_plan_signature(normalized_json_plan):
      """
      if not normalized_json_plan:
          return None
-     with_sorted_keys = json.dumps(json.loads(normalized_json_plan), **sort_keys_kwargs)
+     with_sorted_keys = json.encode(json.decode(normalized_json_plan), sort_keys=True)
      return format(mmh3.hash64(with_sorted_keys, signed=False)[0], 'x')
datadog_checks/base/utils/db/utils.py
@@ -20,7 +20,7 @@ from datadog_checks.base import is_affirmative
  from datadog_checks.base.agent import datadog_agent
  from datadog_checks.base.log import get_check_logger
  from datadog_checks.base.utils.db.types import Transformer  # noqa: F401
- from datadog_checks.base.utils.serialization import json
+ from datadog_checks.base.utils.format import json
  from datadog_checks.base.utils.tracing import INTEGRATION_TRACING_SERVICE_NAME, tracing_enabled

  from ..common import to_native_string
@@ -193,7 +193,7 @@ def get_agent_host_tags():
      if not host_tags:
          return result
      try:
-         tags_dict = json.loads(host_tags) or {}
+         tags_dict = json.decode(host_tags) or {}
          for key, value in tags_dict.items():
              if isinstance(value, list):
                  result.extend(value)
@@ -250,7 +250,7 @@ def obfuscate_sql_with_metadata(query, options=None, replace_null_character=Fals
      if not statement.startswith('{'):
          return {'query': statement, 'metadata': {}}

-     statement_with_metadata = json.loads(statement)
+     statement_with_metadata = json.decode(statement)
      metadata = statement_with_metadata.get('metadata', {})
      tables = metadata.pop('tables_csv', None)
      tables = [table.strip() for table in tables.split(',') if table != ''] if tables else None
datadog_checks/base/utils/discovery/__init__.py
@@ -1,4 +1,6 @@
  # (C) Datadog, Inc. 2023-present
  # All rights reserved
  # Licensed under a 3-clause BSD style license (see LICENSE)
- from .discovery import Discovery
+ import lazy_loader
+
+ __getattr__, __dir__, __all__ = lazy_loader.attach_stub(__name__, __file__)
datadog_checks/base/utils/discovery/__init__.pyi
@@ -0,0 +1,6 @@
+ # (C) Datadog, Inc. 2025-present
+ # All rights reserved
+ # Licensed under a 3-clause BSD style license (see LICENSE)
+ from .discovery import Discovery
+
+ __all__ = ['Discovery']
datadog_checks/base/utils/format/__init__.py
@@ -0,0 +1,3 @@
+ # (C) Datadog, Inc. 2025-present
+ # All rights reserved
+ # Licensed under a 3-clause BSD style license (see LICENSE)
datadog_checks/base/utils/format/_json.py
@@ -0,0 +1,43 @@
+ # (C) Datadog, Inc. 2025-present
+ # All rights reserved
+ # Licensed under a 3-clause BSD style license (see LICENSE)
+ from __future__ import annotations
+
+ import logging
+ from typing import TYPE_CHECKING, Any
+
+ if TYPE_CHECKING:
+     from collections.abc import Callable
+
+ logger = logging.getLogger(__name__)
+
+ try:
+     import orjson
+
+     logger.debug('Using JSON implementation from orjson')
+
+     def decode(s: str | bytes) -> Any:
+         return orjson.loads(s)
+
+     def encode(obj: Any, *, sort_keys: bool = False, default: Callable[[Any], Any] | None = None) -> str:
+         return encode_bytes(obj, sort_keys=sort_keys, default=default).decode()
+
+     def encode_bytes(obj: Any, *, sort_keys: bool = False, default: Callable[[Any], Any] | None = None) -> bytes:
+         if sort_keys:
+             return orjson.dumps(obj, option=orjson.OPT_SORT_KEYS, default=default)
+
+         return orjson.dumps(obj, default=default)
+
+ except ImportError:
+     import json
+
+     logger.debug('Using JSON implementation from stdlib')
+
+     def decode(s: str | bytes) -> Any:
+         return json.loads(s)
+
+     def encode(obj: Any, *, sort_keys: bool = False, default: Callable[[Any], Any] | None = None) -> str:
+         return json.dumps(obj, sort_keys=sort_keys, separators=(',', ':'), default=default)
+
+     def encode_bytes(obj: Any, *, sort_keys: bool = False, default: Callable[[Any], Any] | None = None) -> bytes:
+         return encode(obj, sort_keys=sort_keys, default=default).encode()
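
Whichever backend is selected, `_json.py` exposes the same three callables, and the stdlib path uses compact separators so its output matches orjson's for simple payloads. The new `datadog_checks/base/utils/format/json.py` (its diff is not shown above) is the public entry point the other changed files import. A short usage sketch based on how `sql.py` and `db/utils.py` call it; the `encode_bytes` re-export is an assumption:

```python
from datadog_checks.base.utils.format import json

payload = {'b': 1, 'a': [2, 3]}

# sort_keys yields a canonical, compact form on both backends.
assert json.encode(payload, sort_keys=True) == '{"a":[2,3],"b":1}'

# decode() is the inverse used for agent-provided JSON strings.
assert json.decode(json.encode(payload)) == payload

# encode_bytes() skips the final str round trip when bytes are needed
# (assuming json.py re-exports it alongside encode/decode).
assert isinstance(json.encode_bytes(payload), bytes)
```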