datadog-checks-base 37.9.0__py2.py3-none-any.whl → 37.10.0__py2.py3-none-any.whl

This diff shows the changes between publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
@@ -1,4 +1,4 @@
  # (C) Datadog, Inc. 2018-present
  # All rights reserved
  # Licensed under a 3-clause BSD style license (see LICENSE)
- __version__ = "37.9.0"
+ __version__ = "37.10.0"
@@ -230,13 +230,13 @@ class OpenMetricsScraper:
          self.use_process_start_time = is_affirmative(config.get('use_process_start_time'))

          # Used for monotonic counts
-         self.flush_first_value = False
+         self.flush_first_value = None

-     def scrape(self):
+     def _scrape(self):
          """
          Execute a scrape, and for each metric collected, transform the metric.
          """
-         runtime_data = {'flush_first_value': self.flush_first_value, 'static_tags': self.static_tags}
+         runtime_data = {'flush_first_value': bool(self.flush_first_value), 'static_tags': self.static_tags}

          # Determine which consume method to use based on target_info config
          if self.target_info:
@@ -251,7 +251,18 @@ class OpenMetricsScraper:

              transformer(metric, self.generate_sample_data(metric), runtime_data)

-         self.flush_first_value = True
+     def scrape(self):
+         try:
+             self._scrape()
+             self.flush_first_value = True
+         except:
+             # Don't flush new monotonic counts on next scrape:
+             # 1. Previous value may have expired in the aggregator, causing a spike
+             # 2. New counter itself may be too old and large when we discover it next time.
+             # If we didn't have a successful scrape yet, keep the initial value (use process_start_time to decide).
+             if self.flush_first_value:
+                 self.flush_first_value = False
+             raise

      def consume_metrics(self, runtime_data):
          """
@@ -260,7 +271,7 @@ class OpenMetricsScraper:

          metric_parser = self.parse_metrics()

-         if not self.flush_first_value and self.use_process_start_time:
+         if self.flush_first_value is None and self.use_process_start_time:
              metric_parser = first_scrape_handler(metric_parser, runtime_data, datadog_agent.get_process_start_time())
          if self.label_aggregator.configured:
              metric_parser = self.label_aggregator(metric_parser)
@@ -283,7 +294,7 @@ class OpenMetricsScraper:

          metric_parser = self.parse_metrics()

-         if not self.flush_first_value and self.use_process_start_time:
+         if self.flush_first_value is None and self.use_process_start_time:
              metric_parser = first_scrape_handler(metric_parser, runtime_data, datadog_agent.get_process_start_time())
          if self.label_aggregator.configured:
              metric_parser = self.label_aggregator(metric_parser)
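
The scraper hunks above replace the boolean `flush_first_value` flag with a tri-state value: `None` until the first successful scrape (so the process-start-time heuristic still applies), `True` after a successful scrape, and `False` when a scrape fails after a prior success (so newly discovered monotonic counts are not flushed immediately on the next run). The following minimal sketch illustrates that state machine with a hypothetical `FakeScraper` stand-in; it is not the real `OpenMetricsScraper` API.

```python
# A minimal sketch (not the real OpenMetricsScraper) of the tri-state
# flush_first_value behavior introduced above. FakeScraper is hypothetical.

class FakeScraper:
    def __init__(self, use_process_start_time=True):
        # None: no successful scrape yet; True: last scrape succeeded;
        # False: a scrape failed after at least one success.
        self.flush_first_value = None
        self.use_process_start_time = use_process_start_time

    def _scrape(self):
        # The real method parses the payload and submits transformed metrics;
        # here we only show where the state is consulted.
        if self.flush_first_value is None and self.use_process_start_time:
            print("first scrape: use the process start time to decide flushing")
        runtime_data = {'flush_first_value': bool(self.flush_first_value)}
        print("flush monotonic counts immediately:", runtime_data['flush_first_value'])

    def scrape(self):
        try:
            self._scrape()
            self.flush_first_value = True
        except Exception:
            # Only downgrade True -> False; keep None so the process-start-time
            # heuristic still applies once a scrape finally succeeds.
            if self.flush_first_value:
                self.flush_first_value = False
            raise


scraper = FakeScraper()
scraper.scrape()  # first run: start-time heuristic applies, counts not flushed yet
scraper.scrape()  # subsequent runs: counts flushed normally
```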
@@ -13,9 +13,7 @@ class StatementMetrics:

      - Postgres: pg_stat_statements
      - MySQL: performance_schema.events_statements_summary_by_digest
-     - Oracle: V$SQLAREA
      - SQL Server: sys.dm_exec_query_stats
-     - DB2: mon_db_summary

      These tables are monotonically increasing, so the metrics are computed from the difference
      in values between check runs.
@@ -24,7 +22,7 @@ class StatementMetrics:
      def __init__(self):
          self._previous_statements = {}

-     def compute_derivative_rows(self, rows, metrics, key):
+     def compute_derivative_rows(self, rows, metrics, key, execution_indicators=None):
          """
          Compute the first derivative of column-based metrics for a given set of rows. This function
          takes the difference of the previous check run's values and the current check run's values
@@ -41,10 +39,20 @@ class StatementMetrics:
          :params rows (_List[dict]_): rows from current check run
          :params metrics (_List[str]_): the metrics to compute for each row
          :params key (_callable_): function for an ID which uniquely identifies a row across runs
+         :params execution_indicators (_List[str]_): list of metrics that must change to consider a query as executed.
+             These are typically metrics that increment only when a query actually executes, such as:
+             - PostgreSQL: 'calls' from pg_stat_statements
+             - MySQL: 'exec_count' from performance_schema.events_statements_summary_by_digest
+             - SQL Server: 'execution_count' from sys.dm_exec_query_stats
+             This helps filter out cases where a normalized query was evicted then re-inserted with same call count
+             (usually 1) and slight duration change. In this case, the new normalized query entry should be treated
+             as the baseline for future diffs.
          :return (_List[dict]_): a list of rows with the first derivative of the metrics
          """
          result = []
          metrics = set(metrics)
+         if execution_indicators:
+             execution_indicators = set(execution_indicators)

          merged_rows, dropped_metrics = _merge_duplicate_rows(rows, metrics, key)
          if dropped_metrics:
@@ -69,6 +77,12 @@ class StatementMetrics:
              # 2. No changes since the previous run: There is no need to store metrics of 0, since that is implied by
              # the absence of metrics. On any given check run, most rows will have no difference so this optimization
              # avoids having to send a lot of unnecessary metrics.
+             #
+             # 3. Execution indicators: If execution_indicators is specified, only consider a query as changed if at
+             # least one of the execution indicator metrics has changed. This helps filter out cases where an old or
+             # less frequently executed normalized query was evicted due to the stats table being full, and then
+             # re-inserted to the stats table with a small call count and slight duration change. In this case,
+             # the new normalized query entry should be treated as the baseline for future diffs.

              diffed_row = {k: row[k] - prev[k] if k in metric_columns else row[k] for k in row.keys()}

@@ -79,6 +93,12 @@ class StatementMetrics:
                  # of potentially including truncated rows that exceed previous run counts.
                  continue

+             # If execution_indicators is specified, check if any of the execution indicator metrics have changed
+             if execution_indicators:
+                 indicator_columns = execution_indicators & metric_columns
+                 if not any(diffed_row[k] > 0 for k in indicator_columns):
+                     continue
+
              # No changes to the query; no metric needed
              if all(diffed_row[k] == 0 for k in metric_columns):
                  continue
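
The `execution_indicators` addition above lets callers require that at least one "executed" counter (for example `calls` in pg_stat_statements) actually increased before a diffed row is emitted; an evicted-and-re-inserted query with an unchanged call count only resets the baseline. Below is an illustrative call under assumed inputs: the row shape, query signatures, and values are made up, and the expected-output comment follows the diff logic described above rather than verified output.

```python
# Illustrative use of the new execution_indicators parameter with made-up
# pg_stat_statements-style rows.
from datadog_checks.base.utils.db.statement_metrics import StatementMetrics

sm = StatementMetrics()
metrics = ['calls', 'total_time']
key = lambda row: row['query_signature']

first_run = [
    {'query_signature': 'abc', 'calls': 10, 'total_time': 120.0},
    {'query_signature': 'def', 'calls': 1, 'total_time': 5.0},
]
# First run establishes the baseline; nothing is emitted yet.
sm.compute_derivative_rows(first_run, metrics, key, execution_indicators=['calls'])

second_run = [
    # Executed again: 'calls' increased, so a diffed row is emitted.
    {'query_signature': 'abc', 'calls': 12, 'total_time': 130.0},
    # Evicted and re-inserted with the same call count: only the duration drifted,
    # so the row is skipped and simply becomes the new baseline.
    {'query_signature': 'def', 'calls': 1, 'total_time': 5.2},
]
rows = sm.compute_derivative_rows(second_run, metrics, key, execution_indicators=['calls'])
# Expected: [{'query_signature': 'abc', 'calls': 2, 'total_time': 10.0}]
```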
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datadog-checks-base
- Version: 37.9.0
+ Version: 37.10.0
  Summary: The Datadog Check Toolkit
  Project-URL: Source, https://github.com/DataDog/integrations-core
  Author-email: Datadog <packages@datadoghq.com>
@@ -17,33 +17,34 @@ Requires-Dist: mmh3==5.1.0; extra == 'db'
  Provides-Extra: deps
  Requires-Dist: binary==1.0.1; extra == 'deps'
  Requires-Dist: cachetools==5.5.2; extra == 'deps'
- Requires-Dist: cryptography==44.0.1; extra == 'deps'
- Requires-Dist: ddtrace==2.10.6; extra == 'deps'
+ Requires-Dist: cryptography==44.0.2; extra == 'deps'
+ Requires-Dist: ddtrace==2.21.4; extra == 'deps'
  Requires-Dist: jellyfish==1.1.3; extra == 'deps'
  Requires-Dist: lazy-loader==0.4; extra == 'deps'
  Requires-Dist: prometheus-client==0.21.1; extra == 'deps'
- Requires-Dist: protobuf==5.29.3; extra == 'deps'
- Requires-Dist: pydantic==2.10.6; extra == 'deps'
+ Requires-Dist: protobuf==6.30.2; extra == 'deps'
+ Requires-Dist: pydantic==2.11.1; extra == 'deps'
  Requires-Dist: python-dateutil==2.9.0.post0; extra == 'deps'
- Requires-Dist: pywin32==308; (sys_platform == 'win32') and extra == 'deps'
+ Requires-Dist: pywin32==310; (sys_platform == 'win32') and extra == 'deps'
  Requires-Dist: pyyaml==6.0.2; extra == 'deps'
  Requires-Dist: requests-toolbelt==1.0.0; extra == 'deps'
  Requires-Dist: requests-unixsocket2==0.4.2; extra == 'deps'
  Requires-Dist: requests==2.32.3; extra == 'deps'
  Requires-Dist: simplejson==3.20.1; extra == 'deps'
+ Requires-Dist: urllib3==2.3.0; extra == 'deps'
  Requires-Dist: wrapt==1.17.2; extra == 'deps'
  Provides-Extra: http
  Requires-Dist: aws-requests-auth==0.4.3; extra == 'http'
- Requires-Dist: botocore==1.36.26; extra == 'http'
+ Requires-Dist: botocore==1.37.23; extra == 'http'
  Requires-Dist: oauthlib==3.2.2; extra == 'http'
  Requires-Dist: pyjwt==2.10.1; extra == 'http'
- Requires-Dist: pyopenssl==24.3.0; extra == 'http'
+ Requires-Dist: pyopenssl==25.0.0; extra == 'http'
  Requires-Dist: pysocks==1.7.1; extra == 'http'
  Requires-Dist: requests-kerberos==0.15.0; extra == 'http'
  Requires-Dist: requests-ntlm==1.3.0; extra == 'http'
  Requires-Dist: requests-oauthlib==2.0.0; extra == 'http'
  Provides-Extra: json
- Requires-Dist: orjson==3.10.15; extra == 'json'
+ Requires-Dist: orjson==3.10.16; extra == 'json'
  Provides-Extra: kube
  Requires-Dist: kubernetes==32.0.1; extra == 'kube'
  Requires-Dist: requests-oauthlib==2.0.0; extra == 'kube'
@@ -3,7 +3,7 @@ datadog_checks/config.py,sha256=PrAXGdlLnoV2VMQff_noSaSJJ0wg4BAiGnw7jCQLSik,196
  datadog_checks/errors.py,sha256=eFwmnrX-batIgbu-iJyseqAPNO_4rk1UuaKK89evLhg,155
  datadog_checks/log.py,sha256=orvOgMKGNEsqSTLalCAQpWP-ouorpG1A7Gn-j2mRD80,301
  datadog_checks/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
- datadog_checks/base/__about__.py,sha256=iGqq-S-VDNzq-lXJgI6gEsOyXzbc8iNlsKUM6mQR6Do,138
+ datadog_checks/base/__about__.py,sha256=eaRgo1wpoLueTRCL1MT8Qbkyi7huHv28bFzrJ1RIWcU,139
  datadog_checks/base/__init__.py,sha256=yWegSLE-TZWIGSvAiJj9PSrUxzlOo_UVJLt2zORZ8Ek,363
  datadog_checks/base/__init__.pyi,sha256=eH8XhrtvnD6uE6FWfEyCmKwOaaLJxNolS08D6IRHZuU,995
  datadog_checks/base/agent.py,sha256=nX9x_BYYizRKGNYfXq5z7S0FZ9xcX_wd2tuxpGe3_8k,350
@@ -44,7 +44,7 @@ datadog_checks/base/checks/openmetrics/v2/base.py,sha256=UXytG0ze00rXU13VB0BM_ZK
  datadog_checks/base/checks/openmetrics/v2/first_scrape_handler.py,sha256=In-tZXMzieSdL2OXJlIAFi78KbQVwuJnGP0iaLH9974,941
  datadog_checks/base/checks/openmetrics/v2/labels.py,sha256=JAIlxAtzQAFoEj1d9IYLZCGt-2M3-ClyyGjZ-VzKNQY,8907
  datadog_checks/base/checks/openmetrics/v2/metrics.py,sha256=uDVZOHHRWMW8YUQZBY9jCLzecN5DueRNjImxBm1K82k,2189
- datadog_checks/base/checks/openmetrics/v2/scraper.py,sha256=PgZzohf1X1MdbzD3e1Q_tLYDAgK6l7I6bV7_4AFljtw,24731
+ datadog_checks/base/checks/openmetrics/v2/scraper.py,sha256=17DV_gS0Lks7EnPYpE30cV4gzvAF1Koppc2_nuFPKEQ,25286
  datadog_checks/base/checks/openmetrics/v2/transform.py,sha256=qv7Y1yvqWm-269mGU7Oq0it8x3VkWUZU2m2d3q-VJZ0,8377
  datadog_checks/base/checks/openmetrics/v2/utils.py,sha256=tpk3htJAz_KwCRqFs2CTjajHkLCs_2TbGdBp514rWOQ,3565
  datadog_checks/base/checks/openmetrics/v2/transformers/__init__.py,sha256=bqHxA7SxvyJzSyLdNfISOPtJKyF132bHkzdrMaKbjoA,211
@@ -139,7 +139,7 @@ datadog_checks/base/utils/db/core.py,sha256=sVHdY5Z0dx6UM6IWZwB70HxiWDITNE26Rjff
  datadog_checks/base/utils/db/query.py,sha256=TjyVNjxjNIB21bcIft6f-5b3algDPtdttHcAS2BdUuc,14462
  datadog_checks/base/utils/db/sql.py,sha256=oiEzQa_vC_w3U65VFrFCoQHWj5GQLLRlSO0CfiSlp4A,2490
  datadog_checks/base/utils/db/sql_commenter.py,sha256=r_efK6TGRQxM_-Qj-ndEJdECk47J4nCFjkVyxu1XmvU,1522
- datadog_checks/base/utils/db/statement_metrics.py,sha256=S0kHPoFqCZtRTZoTI6Pi-Zul6Iue8Y3j1_ksNH-zEo8,5398
+ datadog_checks/base/utils/db/statement_metrics.py,sha256=U7EtERkmFzfCtfyd3094fBaKQ-CuJxoRt-3AcDBCkIA,7087
  datadog_checks/base/utils/db/timed_cache.py,sha256=a9Ks5KKUvExB6GOATXTSCLamVtLD919Dn6HpweGKtFw,2114
  datadog_checks/base/utils/db/transform.py,sha256=kNdiBZVoZm1oNRNWIatrAfgORjbkRKhs1AgW_aQCj3I,23879
  datadog_checks/base/utils/db/types.py,sha256=OLX2Oq58JQPFBD4oqUpCLkAP7ovRGN_i1vFk1E0N8Lg,267
@@ -217,6 +217,6 @@ datadog_checks/utils/tracing.py,sha256=HQbQakKM-Lw75MDkItaYJYipS6YO24Z_ymDVxDsx5
  datadog_checks/utils/prometheus/__init__.py,sha256=8WwXnM9g1sfS5267QYCJX_hd8MZl5kRgBgQ_SzdNdXs,161
  datadog_checks/utils/prometheus/functions.py,sha256=4vWsTGLgujHwdYZo0tlAQkqDPHofqUJM3k9eItJqERQ,197
  datadog_checks/utils/prometheus/metrics_pb2.py,sha256=xg3UdUHe4TjeR4s13LUKZ2U1WVSt6U6zjsVRG6lX6dc,173
- datadog_checks_base-37.9.0.dist-info/METADATA,sha256=9XzjIX_1_haGzFBTjX5ZUNFvw36z_f2g2HaTvOErW5I,4198
- datadog_checks_base-37.9.0.dist-info/WHEEL,sha256=tkmg4JIqwd9H8mL30xA7crRmoStyCtGp0VWshokd1Jc,105
- datadog_checks_base-37.9.0.dist-info/RECORD,,
+ datadog_checks_base-37.10.0.dist-info/METADATA,sha256=X3o_eQIQVR5O5Seeb9jsmrHQU63xXTUxMbOiXkUhg6E,4246
+ datadog_checks_base-37.10.0.dist-info/WHEEL,sha256=tkmg4JIqwd9H8mL30xA7crRmoStyCtGp0VWshokd1Jc,105
+ datadog_checks_base-37.10.0.dist-info/RECORD,,