datadog-checks-base 37.8.0__py2.py3-none-any.whl → 37.10.0__py2.py3-none-any.whl
This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
- datadog_checks/base/__about__.py +1 -1
- datadog_checks/base/checks/base.py +25 -16
- datadog_checks/base/checks/openmetrics/v2/scraper.py +17 -6
- datadog_checks/base/stubs/aggregator.py +2 -0
- datadog_checks/base/stubs/datadog_agent.py +1 -1
- datadog_checks/base/utils/db/statement_metrics.py +23 -3
- {datadog_checks_base-37.8.0.dist-info → datadog_checks_base-37.10.0.dist-info}/METADATA +10 -9
- {datadog_checks_base-37.8.0.dist-info → datadog_checks_base-37.10.0.dist-info}/RECORD +9 -10
- datadog_checks/base/ddyaml.py +0 -171
- {datadog_checks_base-37.8.0.dist-info → datadog_checks_base-37.10.0.dist-info}/WHEEL +0 -0
datadog_checks/base/__about__.py
CHANGED
(+1 -1: version bump from 37.8.0 to 37.10.0)

datadog_checks/base/checks/base.py
CHANGED
@@ -57,11 +57,6 @@ else:

     init_logging()

-    if datadog_agent.get_config('disable_unsafe_yaml'):
-        from ..ddyaml import monkey_patch_pyyaml
-
-        monkey_patch_pyyaml()
-
     if datadog_agent.get_config('integration_tracing'):
         from ddtrace import patch

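The block removed above was the sole consumer of the ddyaml module, which is deleted at the bottom of this diff. A minimal, hedged sketch of the practical effect (not package code): with the monkey patch gone, yaml.load is no longer forcibly redirected to the safe loader, so callers are expected to use yaml.safe_load themselves.

# Illustrative sketch only, not part of the package: parse untrusted YAML with the
# safe loader explicitly now that the global monkey patch has been removed.
import yaml

untrusted = "foo: [1, 2, 3]"
data = yaml.safe_load(untrusted)  # SafeLoader handles plain mappings, lists, scalars only
assert data == {"foo": [1, 2, 3]}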
@@ -386,17 +381,6 @@ class AgentCheck(object):

         return limit

-    @staticmethod
-    def load_config(yaml_str):
-        # type: (str) -> Any
-        """
-        Convenience wrapper to ease programmatic use of this class from the C API.
-        """
-        # See Performance Optimizations in this package's README.md.
-        import yaml
-
-        return yaml.safe_load(yaml_str)
-
     @property
     def http(self) -> RequestsWrapper:
         """
@@ -1485,3 +1469,28 @@ class AgentCheck(object):

         for m in metrics:
             self.gauge(m.name, m.value, tags=tags, raw=True)
+
+    @staticmethod
+    def load_config(yaml_str: str) -> Any:
+        """
+        Convenience wrapper to ease programmatic use of this class from the C API.
+        """
+        import subprocess
+        import sys
+
+        process = subprocess.Popen(
+            [sys.executable, '-c', 'import sys, yaml; print(yaml.safe_load(sys.stdin.read()))'],
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )
+        stdout, stderr = process.communicate(yaml_str.encode())
+        if process.returncode != 0:
+            raise ValueError(f'Failed to load config: {stderr.decode()}')
+
+        decoded = stdout.strip().decode()
+        try:
+            return eval(decoded)
+        # a single, literal unquoted string
+        except Exception:
+            return decoded
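For reference, a hedged usage sketch of the relocated load_config helper shown above; the YAML string and the printed result are illustrative, not taken from the package.

# Hypothetical usage of AgentCheck.load_config as declared in the hunk above:
# a YAML string goes in and a parsed Python object comes back.
from datadog_checks.base import AgentCheck

parsed = AgentCheck.load_config("min_collection_interval: 30\ntags: ['env:dev']\n")
print(parsed)  # e.g. {'min_collection_interval': 30, 'tags': ['env:dev']}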
datadog_checks/base/checks/openmetrics/v2/scraper.py
CHANGED
@@ -230,13 +230,13 @@ class OpenMetricsScraper:
         self.use_process_start_time = is_affirmative(config.get('use_process_start_time'))

         # Used for monotonic counts
-        self.flush_first_value =
+        self.flush_first_value = None

-    def
+    def _scrape(self):
         """
         Execute a scrape, and for each metric collected, transform the metric.
         """
-        runtime_data = {'flush_first_value': self.flush_first_value, 'static_tags': self.static_tags}
+        runtime_data = {'flush_first_value': bool(self.flush_first_value), 'static_tags': self.static_tags}

         # Determine which consume method to use based on target_info config
         if self.target_info:
@@ -251,7 +251,18 @@ class OpenMetricsScraper:

             transformer(metric, self.generate_sample_data(metric), runtime_data)

-
+    def scrape(self):
+        try:
+            self._scrape()
+            self.flush_first_value = True
+        except:
+            # Don't flush new monotonic counts on next scrape:
+            # 1. Previous value may have expired in the aggregator, causing a spike
+            # 2. New counter itself may be too old and large when we discover it next time.
+            # If we didn't have a successful scrape yet, keep the initial value (use process_start_time to decide).
+            if self.flush_first_value:
+                self.flush_first_value = False
+            raise

     def consume_metrics(self, runtime_data):
         """
@@ -260,7 +271,7 @@ class OpenMetricsScraper:

         metric_parser = self.parse_metrics()

-        if
+        if self.flush_first_value is None and self.use_process_start_time:
             metric_parser = first_scrape_handler(metric_parser, runtime_data, datadog_agent.get_process_start_time())
         if self.label_aggregator.configured:
             metric_parser = self.label_aggregator(metric_parser)
@@ -283,7 +294,7 @@ class OpenMetricsScraper:

         metric_parser = self.parse_metrics()

-        if
+        if self.flush_first_value is None and self.use_process_start_time:
             metric_parser = first_scrape_handler(metric_parser, runtime_data, datadog_agent.get_process_start_time())
         if self.label_aggregator.configured:
             metric_parser = self.label_aggregator(metric_parser)
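Taken together, the scraper hunks above implement a three-state flush_first_value flag. A standalone sketch of that state machine follows; the class and method names are illustrative, not package code.

# Illustrative state machine for flush_first_value as introduced above:
#   None  -> no scrape has run yet (use_process_start_time can decide the first flush)
#   True  -> the last scrape succeeded; monotonic counts flush normally
#   False -> a scrape failed after a prior success; skip flushing new counts next time
class ScrapeStateSketch:
    def __init__(self):
        self.flush_first_value = None

    def scrape(self, fetch):
        try:
            fetch()  # stand-in for the real _scrape()
            self.flush_first_value = True
        except Exception:
            if self.flush_first_value:
                self.flush_first_value = False
            raise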
datadog_checks/base/stubs/aggregator.py
CHANGED
@@ -341,6 +341,8 @@ class AggregatorStub(object):
             if expected_tags and expected_tags != sorted(metric.tags):
                 continue

+            # to assert hostname is None, pass in hostname as '':
+            # https://github.com/DataDog/integrations-core/blob/7.65.x/datadog_checks_base/datadog_checks/base/checks/base.py#L760
             if hostname is not None and hostname != metric.hostname:
                 continue

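A hedged test sketch of what the new comment documents; the check and aggregator objects are assumed to be the usual pytest fixtures built on the package's testing stubs.

# Illustrative test only: per the comment above, passing hostname='' asserts that the
# metric was submitted without an explicit hostname.
def test_metric_without_hostname(aggregator, check):  # hypothetical fixtures
    check.gauge('my.metric', 1, hostname='')
    aggregator.assert_metric('my.metric', hostname='')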
datadog_checks/base/stubs/datadog_agent.py
CHANGED
@@ -29,7 +29,7 @@ class DatadogAgentStub(object):
         self._sent_telemetry = defaultdict(list)

     def get_default_config(self):
-        return {'enable_metadata_collection': True
+        return {'enable_metadata_collection': True}

     def reset(self):
         self._sent_logs.clear()
datadog_checks/base/utils/db/statement_metrics.py
CHANGED
@@ -13,9 +13,7 @@ class StatementMetrics:

     - Postgres: pg_stat_statements
     - MySQL: performance_schema.events_statements_summary_by_digest
-    - Oracle: V$SQLAREA
     - SQL Server: sys.dm_exec_query_stats
-    - DB2: mon_db_summary

     These tables are monotonically increasing, so the metrics are computed from the difference
     in values between check runs.
@@ -24,7 +22,7 @@ class StatementMetrics:
     def __init__(self):
         self._previous_statements = {}

-    def compute_derivative_rows(self, rows, metrics, key):
+    def compute_derivative_rows(self, rows, metrics, key, execution_indicators=None):
         """
         Compute the first derivative of column-based metrics for a given set of rows. This function
         takes the difference of the previous check run's values and the current check run's values
@@ -41,10 +39,20 @@ class StatementMetrics:
         :params rows (_List[dict]_): rows from current check run
         :params metrics (_List[str]_): the metrics to compute for each row
         :params key (_callable_): function for an ID which uniquely identifies a row across runs
+        :params execution_indicators (_List[str]_): list of metrics that must change to consider a query as executed.
+            These are typically metrics that increment only when a query actually executes, such as:
+            - PostgreSQL: 'calls' from pg_stat_statements
+            - MySQL: 'exec_count' from performance_schema.events_statements_summary_by_digest
+            - SQL Server: 'execution_count' from sys.dm_exec_query_stats
+            This helps filter out cases where a normalized query was evicted then re-inserted with same call count
+            (usually 1) and slight duration change. In this case, the new normalized query entry should be treated
+            as the baseline for future diffs.
         :return (_List[dict]_): a list of rows with the first derivative of the metrics
         """
         result = []
         metrics = set(metrics)
+        if execution_indicators:
+            execution_indicators = set(execution_indicators)

         merged_rows, dropped_metrics = _merge_duplicate_rows(rows, metrics, key)
         if dropped_metrics:
@@ -69,6 +77,12 @@ class StatementMetrics:
             # 2. No changes since the previous run: There is no need to store metrics of 0, since that is implied by
             # the absence of metrics. On any given check run, most rows will have no difference so this optimization
             # avoids having to send a lot of unnecessary metrics.
+            #
+            # 3. Execution indicators: If execution_indicators is specified, only consider a query as changed if at
+            # least one of the execution indicator metrics has changed. This helps filter out cases where an old or
+            # less frequently executed normalized query was evicted due to the stats table being full, and then
+            # re-inserted to the stats table with a small call count and slight duration change. In this case,
+            # the new normalized query entry should be treated as the baseline for future diffs.

             diffed_row = {k: row[k] - prev[k] if k in metric_columns else row[k] for k in row.keys()}

@@ -79,6 +93,12 @@ class StatementMetrics:
                 # of potentially including truncated rows that exceed previous run counts.
                 continue

+            # If execution_indicators is specified, check if any of the execution indicator metrics have changed
+            if execution_indicators:
+                indicator_columns = execution_indicators & metric_columns
+                if not any(diffed_row[k] > 0 for k in indicator_columns):
+                    continue
+
             # No changes to the query; no metric needed
             if all(diffed_row[k] == 0 for k in metric_columns):
                 continue
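A hedged usage sketch of the new execution_indicators parameter described above; the row shape, metric names, and values are illustrative and not taken from the package.

# Illustrative only: rows whose execution indicator ('calls' here) did not advance
# between runs are re-baselined instead of being emitted as diffs.
from datadog_checks.base.utils.db.statement_metrics import StatementMetrics

sm = StatementMetrics()
metrics = ['calls', 'total_time']
row_key = lambda row: row['query_signature']

# First run seeds the previous-state cache.
baseline = [{'query_signature': 'abc123', 'calls': 10, 'total_time': 1.5}]
sm.compute_derivative_rows(baseline, metrics, row_key, execution_indicators=['calls'])

# Second run: 'calls' advanced, so a diffed row (calls delta of 2) would be produced.
current = [{'query_signature': 'abc123', 'calls': 12, 'total_time': 1.9}]
diffed = sm.compute_derivative_rows(current, metrics, row_key, execution_indicators=['calls'])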
{datadog_checks_base-37.8.0.dist-info → datadog_checks_base-37.10.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datadog-checks-base
-Version: 37.8.0
+Version: 37.10.0
 Summary: The Datadog Check Toolkit
 Project-URL: Source, https://github.com/DataDog/integrations-core
 Author-email: Datadog <packages@datadoghq.com>
@@ -17,33 +17,34 @@ Requires-Dist: mmh3==5.1.0; extra == 'db'
 Provides-Extra: deps
 Requires-Dist: binary==1.0.1; extra == 'deps'
 Requires-Dist: cachetools==5.5.2; extra == 'deps'
-Requires-Dist: cryptography==44.0.
-Requires-Dist: ddtrace==2.
+Requires-Dist: cryptography==44.0.2; extra == 'deps'
+Requires-Dist: ddtrace==2.21.4; extra == 'deps'
 Requires-Dist: jellyfish==1.1.3; extra == 'deps'
 Requires-Dist: lazy-loader==0.4; extra == 'deps'
 Requires-Dist: prometheus-client==0.21.1; extra == 'deps'
-Requires-Dist: protobuf==
-Requires-Dist: pydantic==2.
+Requires-Dist: protobuf==6.30.2; extra == 'deps'
+Requires-Dist: pydantic==2.11.1; extra == 'deps'
 Requires-Dist: python-dateutil==2.9.0.post0; extra == 'deps'
-Requires-Dist: pywin32==
+Requires-Dist: pywin32==310; (sys_platform == 'win32') and extra == 'deps'
 Requires-Dist: pyyaml==6.0.2; extra == 'deps'
 Requires-Dist: requests-toolbelt==1.0.0; extra == 'deps'
 Requires-Dist: requests-unixsocket2==0.4.2; extra == 'deps'
 Requires-Dist: requests==2.32.3; extra == 'deps'
 Requires-Dist: simplejson==3.20.1; extra == 'deps'
+Requires-Dist: urllib3==2.3.0; extra == 'deps'
 Requires-Dist: wrapt==1.17.2; extra == 'deps'
 Provides-Extra: http
 Requires-Dist: aws-requests-auth==0.4.3; extra == 'http'
-Requires-Dist: botocore==1.
+Requires-Dist: botocore==1.37.23; extra == 'http'
 Requires-Dist: oauthlib==3.2.2; extra == 'http'
 Requires-Dist: pyjwt==2.10.1; extra == 'http'
-Requires-Dist: pyopenssl==
+Requires-Dist: pyopenssl==25.0.0; extra == 'http'
 Requires-Dist: pysocks==1.7.1; extra == 'http'
 Requires-Dist: requests-kerberos==0.15.0; extra == 'http'
 Requires-Dist: requests-ntlm==1.3.0; extra == 'http'
 Requires-Dist: requests-oauthlib==2.0.0; extra == 'http'
 Provides-Extra: json
-Requires-Dist: orjson==3.10.
+Requires-Dist: orjson==3.10.16; extra == 'json'
 Provides-Extra: kube
 Requires-Dist: kubernetes==32.0.1; extra == 'kube'
 Requires-Dist: requests-oauthlib==2.0.0; extra == 'kube'
{datadog_checks_base-37.8.0.dist-info → datadog_checks_base-37.10.0.dist-info}/RECORD
CHANGED
@@ -3,19 +3,18 @@ datadog_checks/config.py,sha256=PrAXGdlLnoV2VMQff_noSaSJJ0wg4BAiGnw7jCQLSik,196
 datadog_checks/errors.py,sha256=eFwmnrX-batIgbu-iJyseqAPNO_4rk1UuaKK89evLhg,155
 datadog_checks/log.py,sha256=orvOgMKGNEsqSTLalCAQpWP-ouorpG1A7Gn-j2mRD80,301
 datadog_checks/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
-datadog_checks/base/__about__.py,sha256=
+datadog_checks/base/__about__.py,sha256=eaRgo1wpoLueTRCL1MT8Qbkyi7huHv28bFzrJ1RIWcU,139
 datadog_checks/base/__init__.py,sha256=yWegSLE-TZWIGSvAiJj9PSrUxzlOo_UVJLt2zORZ8Ek,363
 datadog_checks/base/__init__.pyi,sha256=eH8XhrtvnD6uE6FWfEyCmKwOaaLJxNolS08D6IRHZuU,995
 datadog_checks/base/agent.py,sha256=nX9x_BYYizRKGNYfXq5z7S0FZ9xcX_wd2tuxpGe3_8k,350
 datadog_checks/base/config.py,sha256=qcAA4X9sXQZRdwQe8DgiGd2980VBp1SQA0d695tX_tU,604
 datadog_checks/base/constants.py,sha256=cR19tAqVysMFbdBWqIGZoHIKk1kGTyqRsCn9FfFQOnw,197
-datadog_checks/base/ddyaml.py,sha256=ZYhEQ9RUA3PvyX4Z7PLVlRVLlY658CSEckI7n0vRZJg,4898
 datadog_checks/base/errors.py,sha256=fPcFaR3zHpSkMo8epvF3lRe9KrEnS8g4J0dXM_nvzsw,1896
 datadog_checks/base/log.py,sha256=gSfzYimmg0z3Dgmn8NpXuyMNjgXwmTbn0NcyUX8B1ls,6283
 datadog_checks/base/types.py,sha256=anajZS0W0TsxUHJQw-JHOP2NSeuC9BisXSy9mAStlxQ,1623
 datadog_checks/base/checks/__init__.py,sha256=q7V6v-FwQWkQC1QWaVzKaPjZMaxPJHJcLd71C0uM7bA,211
 datadog_checks/base/checks/__init__.pyi,sha256=LASfm-daLNQIYe6-w0NPqBw4cl83nYIX5_B-VhV6ARo,262
-datadog_checks/base/checks/base.py,sha256=
+datadog_checks/base/checks/base.py,sha256=zvGdvk45y7Ye3hYTCroulla4sTm78W0xPkYmN0w-nQk,59940
 datadog_checks/base/checks/network.py,sha256=UijP1OVBCpCNKMy6bVkNazHgy6Sdm8qCvCYGPaWuTDo,1968
 datadog_checks/base/checks/kube_leader/__init__.py,sha256=q7V6v-FwQWkQC1QWaVzKaPjZMaxPJHJcLd71C0uM7bA,211
 datadog_checks/base/checks/kube_leader/__init__.pyi,sha256=UGDywoRwmCIz3Zii1uHsp7jiFGWRdn5fFMZZxgGGlQs,398
@@ -45,7 +44,7 @@ datadog_checks/base/checks/openmetrics/v2/base.py,sha256=UXytG0ze00rXU13VB0BM_ZK
 datadog_checks/base/checks/openmetrics/v2/first_scrape_handler.py,sha256=In-tZXMzieSdL2OXJlIAFi78KbQVwuJnGP0iaLH9974,941
 datadog_checks/base/checks/openmetrics/v2/labels.py,sha256=JAIlxAtzQAFoEj1d9IYLZCGt-2M3-ClyyGjZ-VzKNQY,8907
 datadog_checks/base/checks/openmetrics/v2/metrics.py,sha256=uDVZOHHRWMW8YUQZBY9jCLzecN5DueRNjImxBm1K82k,2189
-datadog_checks/base/checks/openmetrics/v2/scraper.py,sha256=
+datadog_checks/base/checks/openmetrics/v2/scraper.py,sha256=17DV_gS0Lks7EnPYpE30cV4gzvAF1Koppc2_nuFPKEQ,25286
 datadog_checks/base/checks/openmetrics/v2/transform.py,sha256=qv7Y1yvqWm-269mGU7Oq0it8x3VkWUZU2m2d3q-VJZ0,8377
 datadog_checks/base/checks/openmetrics/v2/utils.py,sha256=tpk3htJAz_KwCRqFs2CTjajHkLCs_2TbGdBp514rWOQ,3565
 datadog_checks/base/checks/openmetrics/v2/transformers/__init__.py,sha256=bqHxA7SxvyJzSyLdNfISOPtJKyF132bHkzdrMaKbjoA,211
@@ -95,9 +94,9 @@ datadog_checks/base/checks/windows/perf_counters/transformers/temporal_percent.p
 datadog_checks/base/checks/windows/perf_counters/transformers/time_elapsed.py,sha256=hWM8CHYSqFnFYrNdr1NFY464nMAVFOiN8jhlhZTOb78,513
 datadog_checks/base/stubs/__init__.py,sha256=wpWAR9v7BiTbbmTO6pVpshqa_z-PWaYTr33wSTZLz3c,272
 datadog_checks/base/stubs/_util.py,sha256=ZDGtQa8F3cHf8-QvSVHMB4BGI3C9VgC7sGcGGvO0apI,1268
-datadog_checks/base/stubs/aggregator.py,sha256=
+datadog_checks/base/stubs/aggregator.py,sha256=iPEnNAsFeQ1_i6RwyjWKKGUGj2aoRccWJ9UEw28lsiU,24645
 datadog_checks/base/stubs/common.py,sha256=ZGzF2dXy1uVFQGdG_bvz_pMMiULaBcpzra_k9wo9f04,1088
-datadog_checks/base/stubs/datadog_agent.py,sha256=
+datadog_checks/base/stubs/datadog_agent.py,sha256=zhKqluf_0WPIAm_GxJyz6A9JFw9Aoe1OsrjIAcEWdrg,5942
 datadog_checks/base/stubs/log.py,sha256=A-eWnzY5gTpTW41Zch-W5CVclzldlmhMTW-vDNFdT1o,836
 datadog_checks/base/stubs/similar.py,sha256=3HO9htx9LV6pRaE1ZGKsPExavTbjPHPK6ZMM2BPYj3Y,8045
 datadog_checks/base/stubs/tagging.py,sha256=zxLdPC4EqHxGiS_HEhbaOsL_s5nZIhN8BDkx2BATOqs,1439
@@ -140,7 +139,7 @@ datadog_checks/base/utils/db/core.py,sha256=sVHdY5Z0dx6UM6IWZwB70HxiWDITNE26Rjff
 datadog_checks/base/utils/db/query.py,sha256=TjyVNjxjNIB21bcIft6f-5b3algDPtdttHcAS2BdUuc,14462
 datadog_checks/base/utils/db/sql.py,sha256=oiEzQa_vC_w3U65VFrFCoQHWj5GQLLRlSO0CfiSlp4A,2490
 datadog_checks/base/utils/db/sql_commenter.py,sha256=r_efK6TGRQxM_-Qj-ndEJdECk47J4nCFjkVyxu1XmvU,1522
-datadog_checks/base/utils/db/statement_metrics.py,sha256=
+datadog_checks/base/utils/db/statement_metrics.py,sha256=U7EtERkmFzfCtfyd3094fBaKQ-CuJxoRt-3AcDBCkIA,7087
 datadog_checks/base/utils/db/timed_cache.py,sha256=a9Ks5KKUvExB6GOATXTSCLamVtLD919Dn6HpweGKtFw,2114
 datadog_checks/base/utils/db/transform.py,sha256=kNdiBZVoZm1oNRNWIatrAfgORjbkRKhs1AgW_aQCj3I,23879
 datadog_checks/base/utils/db/types.py,sha256=OLX2Oq58JQPFBD4oqUpCLkAP7ovRGN_i1vFk1E0N8Lg,267
@@ -218,6 +217,6 @@ datadog_checks/utils/tracing.py,sha256=HQbQakKM-Lw75MDkItaYJYipS6YO24Z_ymDVxDsx5
 datadog_checks/utils/prometheus/__init__.py,sha256=8WwXnM9g1sfS5267QYCJX_hd8MZl5kRgBgQ_SzdNdXs,161
 datadog_checks/utils/prometheus/functions.py,sha256=4vWsTGLgujHwdYZo0tlAQkqDPHofqUJM3k9eItJqERQ,197
 datadog_checks/utils/prometheus/metrics_pb2.py,sha256=xg3UdUHe4TjeR4s13LUKZ2U1WVSt6U6zjsVRG6lX6dc,173
-datadog_checks_base-37.
-datadog_checks_base-37.
-datadog_checks_base-37.
+datadog_checks_base-37.10.0.dist-info/METADATA,sha256=X3o_eQIQVR5O5Seeb9jsmrHQU63xXTUxMbOiXkUhg6E,4246
+datadog_checks_base-37.10.0.dist-info/WHEEL,sha256=tkmg4JIqwd9H8mL30xA7crRmoStyCtGp0VWshokd1Jc,105
+datadog_checks_base-37.10.0.dist-info/RECORD,,
datadog_checks/base/ddyaml.py
DELETED
@@ -1,171 +0,0 @@
-# (C) Datadog, Inc. 2011-present
-# All rights reserved
-# Licensed under Simplified BSD License (see LICENSE)
-import logging
-from os.path import realpath
-
-import yaml
-
-try:
-    from yaml import CSafeDumper as yDumper
-    from yaml import CSafeLoader as yLoader
-except ImportError:
-    # On source install C Extensions might have not been built
-    from yaml import SafeDumper as yDumper  # noqa, imported from here elsewhere
-    from yaml import SafeLoader as yLoader  # noqa, imported from here elsewhere
-
-log = logging.getLogger(__name__)
-
-pyyaml_load = None
-pyyaml_load_all = None
-pyyaml_dump_all = None
-
-
-def safe_yaml_dump_all(
-    documents,
-    stream=None,
-    Dumper=yDumper,
-    default_style=None,
-    default_flow_style=None,
-    canonical=None,
-    indent=None,
-    width=None,
-    allow_unicode=None,
-    line_break=None,
-    encoding='utf-8',
-    explicit_start=None,
-    explicit_end=None,
-    version=None,
-    tags=None,
-):
-    if Dumper != yDumper:
-        stream_name = get_stream_name(stream)
-        log.debug("Unsafe dumping of YAML has been disabled - using safe dumper instead in %s", stream_name)
-
-    if pyyaml_dump_all:
-        return pyyaml_dump_all(
-            documents,
-            stream,
-            yDumper,
-            default_style,
-            default_flow_style,
-            canonical,
-            indent,
-            width,
-            allow_unicode,
-            line_break,
-            encoding,
-            explicit_start,
-            explicit_end,
-            version,
-            tags,
-        )
-
-    return yaml.dump_all(
-        documents,
-        stream,
-        yDumper,
-        default_style,
-        default_flow_style,
-        canonical,
-        indent,
-        width,
-        allow_unicode,
-        line_break,
-        encoding,
-        explicit_start,
-        explicit_end,
-        version,
-        tags,
-    )
-
-
-def safe_yaml_load(stream, Loader=yLoader):
-    if Loader != yLoader:
-        stream_name = get_stream_name(stream)
-        log.debug("Unsafe loading of YAML has been disabled - using safe loader instead in %s", stream_name)
-
-    if pyyaml_load:
-        return pyyaml_load(stream, Loader=yLoader)
-
-    return yaml.load(stream, Loader=yLoader)
-
-
-def safe_yaml_load_all(stream, Loader=yLoader):
-    if Loader != yLoader:
-        stream_name = get_stream_name(stream)
-        log.debug("Unsafe loading of YAML has been disabled - using safe loader instead in %s", stream_name)
-
-    if pyyaml_load_all:
-        return pyyaml_load_all(stream, Loader=yLoader)
-
-    return yaml.load_all(stream, Loader=yLoader)
-
-
-def get_stream_name(stream):
-    """Using the same logic as pyyaml to handle both string types and file types. All file objects do not necessarily
-    have a `name` attribute, in that case we can only say the stream is a file."""
-    if isinstance(stream, str):
-        return "<string>"
-    elif hasattr(stream, 'name'):
-        return realpath(stream.name)
-    else:
-        return "<file>"
-
-
-def yaml_load_force_loader(stream, Loader):
-    """Override the default monkey patch for this call"""
-    log.debug(
-        "`%s` YAML loader is used instead of the default one, please make sure it is safe to do so", Loader.__name__
-    )
-    if pyyaml_load is None:
-        return yaml.load(stream, Loader)
-    return pyyaml_load(stream, Loader)
-
-
-def yaml_load_all_force_loader(stream, Loader):
-    """Override the default monkey patch for this call"""
-    log.debug(
-        "`%s` YAML loader is used instead of the default one, please make sure it is safe to do so", Loader.__name__
-    )
-    if pyyaml_load_all is None:
-        return yaml.load_all(stream, Loader)
-    return pyyaml_load_all(stream, Loader)
-
-
-def monkey_patch_pyyaml():
-    global pyyaml_load
-    global pyyaml_load_all
-    global pyyaml_dump_all
-
-    if not pyyaml_load:
-        log.info("monkey patching yaml.load...")
-        pyyaml_load = yaml.load
-        yaml.load = safe_yaml_load
-    if not pyyaml_load_all:
-        log.info("monkey patching yaml.load_all...")
-        pyyaml_load_all = yaml.load_all
-        yaml.load_all = safe_yaml_load_all
-    if not pyyaml_dump_all:
-        log.info("monkey patching yaml.dump_all... (affects all yaml dump operations)")
-        pyyaml_dump_all = yaml.dump_all
-        yaml.dump_all = safe_yaml_dump_all
-
-
-def monkey_patch_pyyaml_reverse():
-    global pyyaml_load
-    global pyyaml_load_all
-    global pyyaml_dump_all
-
-    if pyyaml_load:
-        log.info("reversing monkey patch for yaml.load...")
-        yaml.load = pyyaml_load
-        pyyaml_load = None
-    if pyyaml_load_all:
-        log.info("reversing monkey patch for yaml.load_all...")
-        yaml.load_all = pyyaml_load_all
-        pyyaml_load_all = None
-    if pyyaml_dump_all:
-        log.info("reversing monkey patch for yaml.dump_all... (affects all yaml dump operations)")
-        yaml.dump_all = pyyaml_dump_all
-        pyyaml_dump_all = None
{datadog_checks_base-37.8.0.dist-info → datadog_checks_base-37.10.0.dist-info}/WHEEL
File without changes