datadog-checks-base 37.13.0__py2.py3-none-any.whl → 37.16.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. datadog_checks/base/__about__.py +1 -1
  2. datadog_checks/base/checks/_config_ast.py +84 -0
  3. datadog_checks/base/checks/base.py +40 -44
  4. datadog_checks/base/checks/kube_leader/base_check.py +2 -1
  5. datadog_checks/base/checks/kube_leader/mixins.py +1 -1
  6. datadog_checks/base/checks/kubelet_base/base.py +2 -2
  7. datadog_checks/base/checks/network.py +1 -1
  8. datadog_checks/base/checks/openmetrics/base_check.py +4 -3
  9. datadog_checks/base/checks/openmetrics/mixins.py +6 -7
  10. datadog_checks/base/checks/openmetrics/v2/base.py +4 -3
  11. datadog_checks/base/checks/openmetrics/v2/labels.py +1 -1
  12. datadog_checks/base/checks/openmetrics/v2/scraper/__init__.py +8 -0
  13. datadog_checks/base/checks/openmetrics/v2/{scraper.py → scraper/base_scraper.py} +19 -16
  14. datadog_checks/base/checks/openmetrics/v2/scraper/decorators.py +48 -0
  15. datadog_checks/base/checks/openmetrics/v2/transform.py +2 -1
  16. datadog_checks/base/checks/openmetrics/v2/transformers/histogram.py +2 -2
  17. datadog_checks/base/checks/openmetrics/v2/transformers/service_check.py +1 -1
  18. datadog_checks/base/checks/openmetrics/v2/transformers/temporal_percent.py +2 -2
  19. datadog_checks/base/checks/openmetrics/v2/transformers/time_elapsed.py +1 -1
  20. datadog_checks/base/checks/prometheus/base_check.py +4 -3
  21. datadog_checks/base/checks/prometheus/mixins.py +7 -7
  22. datadog_checks/base/checks/prometheus/prometheus_base.py +3 -2
  23. datadog_checks/base/checks/win/winpdh_base.py +2 -2
  24. datadog_checks/base/checks/win/wmi/base.py +13 -12
  25. datadog_checks/base/checks/win/wmi/sampler.py +10 -10
  26. datadog_checks/base/checks/windows/perf_counters/base.py +5 -4
  27. datadog_checks/base/checks/windows/perf_counters/connection.py +2 -2
  28. datadog_checks/base/checks/windows/perf_counters/counter.py +2 -1
  29. datadog_checks/base/checks/windows/perf_counters/transformers/service_check.py +2 -2
  30. datadog_checks/base/checks/windows/perf_counters/transformers/temporal_percent.py +3 -3
  31. datadog_checks/base/checks/windows/perf_counters/transformers/time_elapsed.py +1 -1
  32. datadog_checks/base/stubs/aggregator.py +21 -4
  33. datadog_checks/base/stubs/datadog_agent.py +5 -5
  34. datadog_checks/base/stubs/log.py +1 -1
  35. datadog_checks/base/utils/db/core.py +2 -2
  36. datadog_checks/base/utils/db/query.py +1 -3
  37. datadog_checks/base/utils/db/transform.py +6 -8
  38. datadog_checks/base/utils/db/utils.py +1 -2
  39. datadog_checks/base/utils/http.py +170 -71
  40. datadog_checks/base/utils/metadata/core.py +1 -1
  41. datadog_checks/base/utils/metadata/version.py +1 -1
  42. datadog_checks/base/utils/prometheus/metrics_pb2.py +2 -1
  43. datadog_checks/base/utils/replay/execute.py +2 -2
  44. datadog_checks/base/utils/replay/redirect.py +5 -6
  45. datadog_checks/base/utils/subprocess_output.py +2 -2
  46. datadog_checks/base/utils/tagging.py +1 -1
  47. datadog_checks/base/utils/tailfile.py +0 -2
  48. datadog_checks/base/utils/tls.py +96 -54
  49. datadog_checks/base/utils/tracing.py +5 -6
  50. datadog_checks/checks/libs/wmi/sampler.py +1 -0
  51. {datadog_checks_base-37.13.0.dist-info → datadog_checks_base-37.16.0.dist-info}/METADATA +13 -13
  52. {datadog_checks_base-37.13.0.dist-info → datadog_checks_base-37.16.0.dist-info}/RECORD +53 -51
  53. datadog_checks/base/utils/network.py +0 -49
  54. {datadog_checks_base-37.13.0.dist-info → datadog_checks_base-37.16.0.dist-info}/WHEEL +0 -0
datadog_checks/base/utils/http.py
@@ -6,8 +6,9 @@ from __future__ import annotations
  import logging
  import os
  import re
- import ssl
+ import socket
  import warnings
+ from collections import ChainMap
  from contextlib import ExitStack, contextmanager
  from copy import deepcopy
  from urllib.parse import quote, urlparse, urlunparse
@@ -21,14 +22,14 @@ from urllib3.exceptions import InsecureRequestWarning
  from wrapt import ObjectProxy

  from datadog_checks.base.agent import datadog_agent
+ from datadog_checks.base.config import is_affirmative
+ from datadog_checks.base.errors import ConfigurationError
  from datadog_checks.base.utils import _http_utils

- from ..config import is_affirmative
- from ..errors import ConfigurationError
  from .common import ensure_bytes, ensure_unicode
  from .headers import get_default_headers, update_headers
- from .network import CertAdapter, create_socket_connection
  from .time import get_timestamp
+ from .tls import SUPPORTED_PROTOCOL_VERSIONS, TlsConfig, create_ssl_context

  # See Performance Optimizations in this package's README.md.
  requests_kerberos = lazy_loader.load('requests_kerberos')
@@ -54,10 +55,6 @@ DEFAULT_EXPIRATION = 300
  # https://www.bittorrent.org/beps/bep_0003.html
  DEFAULT_CHUNK_SIZE = 16

- # https://github.com/python/cpython/blob/ef516d11c1a0f885dba0aba8cf5366502077cdd4/Lib/ssl.py#L158-L165
- DEFAULT_PROTOCOL_VERSIONS = {'SSLv3', 'TLSv1.2', 'TLSv1.3'}
- SUPPORTED_PROTOCOL_VERSIONS = {'SSLv3', 'TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3'}
-
  STANDARD_FIELDS = {
      'allow_redirects': True,
      'auth_token': None,
@@ -83,17 +80,10 @@ STANDARD_FIELDS = {
      'read_timeout': None,
      'request_size': DEFAULT_CHUNK_SIZE,
      'skip_proxy': False,
-     'tls_ca_cert': None,
-     'tls_cert': None,
-     'tls_use_host_header': False,
-     'tls_ignore_warning': False,
-     'tls_private_key': None,
-     'tls_protocols_allowed': DEFAULT_PROTOCOL_VERSIONS,
-     'tls_verify': True,
-     'tls_ciphers': 'ALL',
      'timeout': DEFAULT_TIMEOUT,
      'use_legacy_auth_encoding': True,
      'username': None,
+     **TlsConfig().__dict__,  # This will include all TLS-related fields
  }
  # For any known legacy fields that may be widespread
  DEFAULT_REMAPPED_FIELDS = {
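A note on the **TlsConfig().__dict__ line above: the explicit tls_* defaults are now owned by the TlsConfig class imported from datadog_checks.base.utils.tls, and an instance's __dict__ is splatted into STANDARD_FIELDS. A minimal, hypothetical sketch of the mechanism (the real class defines more fields; a frozen, hashable dataclass is assumed here):

    from dataclasses import dataclass

    # Hypothetical stand-in for datadog_checks.base.utils.tls.TlsConfig;
    # only a few of the real fields are shown.
    @dataclass(frozen=True)
    class TlsConfig:
        tls_verify: bool = True
        tls_ca_cert: str = None
        tls_cert: str = None
        tls_private_key: str = None
        tls_ciphers: str = 'ALL'

    STANDARD_FIELDS = {
        'timeout': 10,
        'username': None,
        **TlsConfig().__dict__,  # expands to {'tls_verify': True, 'tls_ca_cert': None, ...}
    }
    assert STANDARD_FIELDS['tls_verify'] is True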
@@ -115,6 +105,98 @@ KERBEROS_STRATEGIES = {}
  UDS_SCHEME = 'unix'


+ def create_socket_connection(hostname, port=443, sock_type=socket.SOCK_STREAM, timeout=10):
+     """See: https://github.com/python/cpython/blob/40ee9a3640d702bce127e9877c82a99ce817f0d1/Lib/socket.py#L691"""
+     err = None
+     try:
+         for res in socket.getaddrinfo(hostname, port, 0, sock_type):
+             af, socktype, proto, canonname, sa = res
+             sock = None
+             try:
+                 sock = socket.socket(af, socktype, proto)
+                 sock.settimeout(timeout)
+                 sock.connect(sa)
+                 # Break explicitly a reference cycle
+                 err = None
+                 return sock
+
+             except socket.error as _:
+                 err = _
+                 if sock is not None:
+                     sock.close()
+
+         if err is not None:
+             raise err
+         else:
+             raise socket.error('No valid addresses found, try checking your IPv6 connectivity')  # noqa: G
+     except socket.gaierror as e:
+         err_code, message = e.args
+         if err_code == socket.EAI_NODATA or err_code == socket.EAI_NONAME:
+             raise socket.error('Unable to resolve host, check your DNS: {}'.format(message))  # noqa: G
+
+         raise
+
+
+ def get_tls_config_from_options(new_options):
+     '''Extract TLS configuration from request options.'''
+     tls_config = {}
+     verify = new_options.get('verify')
+     cert = new_options.get('cert')
+
+     if isinstance(verify, str):
+         tls_config["tls_verify"] = True
+         tls_config["tls_ca_cert"] = verify
+     elif isinstance(verify, bool):
+         tls_config["tls_verify"] = verify
+     elif verify is not None:
+         raise TypeError(
+             'Unexpected type for `verify` option. Expected bool or str, got {}.'.format(type(verify).__name__)
+         )
+
+     if isinstance(cert, str):
+         tls_config["tls_cert"] = cert
+     elif isinstance(cert, tuple) or isinstance(cert, list):
+         if len(cert) != 2:
+             raise TypeError(
+                 'Unexpected length for `cert` option. Expected a tuple of length 2, got {}.'.format(len(cert))
+             )
+         tls_config["tls_cert"] = cert[0]
+         tls_config["tls_private_key"] = cert[1]
+     elif cert is not None:
+         raise TypeError('Unexpected type for `cert` option. Expected str or tuple, got {}.'.format(type(cert).__name__))
+     return tls_config
+
+
+ class _SSLContextAdapter(requests.adapters.HTTPAdapter):
+     """
+     This adapter lets us hook into requests.Session and make it use the SSLContext that we manage.
+     """
+
+     def __init__(self, ssl_context, **kwargs):
+         self.ssl_context = ssl_context
+         super().__init__()
+
+     def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
+         pool_kwargs['ssl_context'] = self.ssl_context
+         return super().init_poolmanager(connections, maxsize, block=block, **pool_kwargs)
+
+     def cert_verify(self, conn, url, verify, cert):
+         """
+         This method is overridden to ensure that the SSL context
+         is configured on the integration side.
+         """
+         pass
+
+     def build_connection_pool_key_attributes(self, request, verify, cert=None):
+         """
+         This method is overridden according to the requests library's
+         expectations to ensure that the custom SSL context is passed to urllib3.
+         """
+         # See: https://github.com/psf/requests/blob/7341690e842a23cf18ded0abd9229765fa88c4e2/src/requests/adapters.py#L419-L423
+         host_params, _ = super().build_connection_pool_key_attributes(request, verify, cert)
+         return host_params, {"ssl_context": self.ssl_context}
+
+
  class ResponseWrapper(ObjectProxy):
      def __init__(self, response, default_chunk_size):
          super(ResponseWrapper, self).__init__(response)
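The get_tls_config_from_options helper added above translates per-request, requests-style options into the tls_* keys understood by the new TLS machinery. A quick illustration of the mapping (paths are placeholders):

    options = {'verify': '/path/to/ca.pem', 'cert': ('/path/to/client.crt', '/path/to/client.key')}
    print(get_tls_config_from_options(options))
    # {'tls_verify': True, 'tls_ca_cert': '/path/to/ca.pem',
    #  'tls_cert': '/path/to/client.crt', 'tls_private_key': '/path/to/client.key'}

    print(get_tls_config_from_options({'verify': False}))
    # {'tls_verify': False}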
@@ -141,6 +223,7 @@ class ResponseWrapper(ObjectProxy):
  class RequestsWrapper(object):
      __slots__ = (
          '_session',
+         '_https_adapters',
          'tls_use_host_header',
          'ignore_tls_warning',
          'log_requests',
@@ -152,7 +235,7 @@ class RequestsWrapper(object):
          'auth_token_handler',
          'request_size',
          'tls_protocols_allowed',
-         'tls_ciphers_allowed',
+         'tls_config',
      )

      def __init__(self, instance, init_config, remapper=None, logger=None, session=None):
@@ -254,7 +337,8 @@

          allow_redirects = is_affirmative(config['allow_redirects'])

-         # https://requests.readthedocs.io/en/latest/user/advanced/#ssl-cert-verification
+         # For TLS verification, we now rely on the TLS context wrapper
+         # but still need to set verify for requests compatibility
          verify = True
          if isinstance(config['tls_ca_cert'], str):
              verify = config['tls_ca_cert']
@@ -347,13 +431,8 @@
          if config['kerberos_cache']:
              self.request_hooks.append(lambda: handle_kerberos_cache(config['kerberos_cache']))

-         ciphers = config.get('tls_ciphers')
-         if ciphers:
-             if 'ALL' in ciphers:
-                 updated_ciphers = "ALL"
-             else:
-                 updated_ciphers = ":".join(ciphers)
-             self.tls_ciphers_allowed = updated_ciphers
+         self.tls_config = {key: value for key, value in config.items() if key.startswith('tls_')}
+         self._https_adapters = {}

      def get(self, url, **options):
          return self._request('get', url, options)
@@ -378,7 +457,7 @@

      def _request(self, method, url, options):
          if self.log_requests:
-             self.logger.debug(u'Sending %s request to %s', method.upper(), url)
+             self.logger.debug('Sending %s request to %s', method.upper(), url)

          if self.no_proxy_uris and should_bypass_proxy(url, self.no_proxy_uris):
              options.setdefault('proxies', PROXY_SETTINGS_DISABLED)
@@ -387,10 +466,10 @@ class RequestsWrapper(object):
387
466
  if persist is None:
388
467
  persist = self.persist_connections
389
468
 
390
- new_options = self.populate_options(options)
469
+ new_options = ChainMap(options, self.options)
391
470
 
392
471
  if url.startswith('https') and not self.ignore_tls_warning and not new_options['verify']:
393
- self.logger.debug(u'An unverified HTTPS request is being made to %s', url)
472
+ self.logger.debug('An unverified HTTPS request is being made to %s', url)
394
473
 
395
474
  extra_headers = options.pop('extra_headers', None)
396
475
  if extra_headers is not None:
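new_options is now a collections.ChainMap layered over the instance defaults, which is what allows the populate_options helper to be deleted further down: lookups hit the per-request options first and fall back to self.options, without copying or mutating either dict. A small stdlib-only sketch of that precedence:

    from collections import ChainMap

    defaults = {'timeout': 10, 'verify': True}   # plays the role of self.options
    per_request = {'verify': False}              # options passed to a single call
    new_options = ChainMap(per_request, defaults)

    print(new_options['verify'])   # False -- the per-request value wins
    print(new_options['timeout'])  # 10 -- everything else falls through to the defaults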
@@ -406,17 +485,18 @@
          with ExitStack() as stack:
              for hook in self.request_hooks:
                  stack.enter_context(hook())
-             if persist:
-                 request_method = getattr(self.session, method)
-             else:
-                 request_method = getattr(requests, method)
+
+             session = self.session if persist else self._create_session()
+             if url.startswith('https'):
+                 self._mount_https_adapter(session, ChainMap(get_tls_config_from_options(new_options), self.tls_config))
+             request_method = getattr(session, method)

              if self.auth_token_handler:
                  try:
                      response = self.make_request_aia_chasing(request_method, method, url, new_options, persist)
                      response.raise_for_status()
                  except Exception as e:
-                     self.logger.debug(u'Renewing auth token, as an error occurred: %s', e)
+                     self.logger.debug('Renewing auth token, as an error occurred: %s', e)
                      self.handle_auth_token(method=method, url=url, default_options=self.options, error=str(e))
                      response = self.make_request_aia_chasing(request_method, method, url, new_options, persist)
              else:
@@ -435,30 +515,13 @@
              certs = self.fetch_intermediate_certs(hostname, port)
              if not certs:
                  raise e
-             # retry the connection via session object
-             certadapter = CertAdapter(certs=certs)
-             if not persist:
-                 session = requests.Session()
-                 for option, value in self.options.items():
-                     setattr(session, option, value)
-             else:
-                 session = self.session
+             session = self.session if persist else self._create_session()
+             if parsed_url.scheme == "https":
+                 self._mount_https_adapter(session, ChainMap({'tls_intermediate_ca_certs': certs}, self.tls_config))
              request_method = getattr(session, method)
-             session.mount(url, certadapter)
              response = request_method(url, **new_options)
          return response

-     def populate_options(self, options):
-         # Avoid needless dictionary update if there are no options
-         if not options:
-             return self.options
-
-         for option, value in self.options.items():
-             # Make explicitly set options take precedence
-             options.setdefault(option, value)
-
-         return options
-
      def fetch_intermediate_certs(self, hostname, port=443):
          # TODO: prefer stdlib implementation when available, see https://bugs.python.org/issue18617
          certs = []
@@ -471,9 +534,7 @@

          with sock:
              try:
-                 context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS)
-                 context.verify_mode = ssl.CERT_NONE
-                 context.set_ciphers(self.tls_ciphers_allowed)
+                 context = create_ssl_context(ChainMap({'tls_verify': False}, self.tls_config))

                  with context.wrap_socket(sock, server_hostname=hostname) as secure_sock:
                      der_cert = secure_sock.getpeercert(binary_form=True)
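For this intermediate-certificate probe, the hand-built ssl.SSLContext is replaced by create_ssl_context() fed with the check's own TLS settings, with verification forced off for this single connection. A rough standalone sketch of what the code path does, using only the standard library (hostname is a placeholder; the in-tree code builds the context via create_ssl_context instead):

    import socket
    import ssl

    hostname = 'example.org'  # placeholder host
    context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT)
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE  # verification is intentionally off for this probe

    with socket.create_connection((hostname, 443), timeout=10) as sock:
        with context.wrap_socket(sock, server_hostname=hostname) as secure_sock:
            # DER-encoded leaf certificate; its AIA extension points at the intermediates.
            der_cert = secure_sock.getpeercert(binary_form=True)
    print(len(der_cert))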
@@ -521,7 +582,7 @@

              # Assume HTTP for now
              try:
-                 response = requests.get(uri)  # SKIP_HTTP_VALIDATION
+                 response = self.get(uri)  # SKIP_HTTP_VALIDATION
              except Exception as e:
                  self.logger.error('Error fetching intermediate certificate from `%s`: %s', uri, e)
                  continue
@@ -532,23 +593,29 @@
              self.load_intermediate_certs(intermediate_cert, certs)
          return certs

-     @property
-     def session(self):
-         if self._session is None:
-             self._session = requests.Session()
+     def _create_session(self):
+         """
+         Initializes requests.Session and configures it with a UDS Adapter and options coming from user's config.

-             # Enables HostHeaderSSLAdapter
-             # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter
-             if self.tls_use_host_header:
-                 self._session.mount('https://', _http_utils.HostHeaderSSLAdapter())
-             # Enable Unix Domain Socket (UDS) support.
-             # See: https://github.com/msabramo/requests-unixsocket
-             self._session.mount('{}://'.format(UDS_SCHEME), requests_unixsocket.UnixAdapter())
+         We leave it to callers to mount any HTTPS adapters if necessary.
+         """
+         session = requests.Session()
+         # Enable Unix Domain Socket (UDS) support.
+         # See: https://github.com/msabramo/requests-unixsocket
+         session.mount('{}://'.format(UDS_SCHEME), requests_unixsocket.UnixAdapter())

-         # Attributes can't be passed to the constructor
-         for option, value in self.options.items():
-             setattr(self._session, option, value)
+         # Options cannot be passed to the requests.Session init method
+         # but can be set as attributes on an initialized Session instance.
+         for option, value in self.options.items():
+             setattr(session, option, value)
+         return session

+     @property
+     def session(self):
+         if self._session is None:
+             # Create a new session if it doesn't exist and mount default HTTPS adapter.
+             self._session = self._create_session()
+             self._mount_https_adapter(self._session, self.tls_config)
          return self._session

      def handle_auth_token(self, **request):
@@ -563,6 +630,38 @@
              # before _session was ever defined (since __del__ executes even if __init__ fails).
              pass

+     def _mount_https_adapter(self, session, tls_config):
+         # Reuse existing adapter if it matches the TLS config
+         tls_config_key = TlsConfig(**tls_config)
+         if tls_config_key in self._https_adapters:
+             session.mount('https://', self._https_adapters[tls_config_key])
+             return
+
+         context = create_ssl_context(tls_config)
+         # Enables HostHeaderSSLAdapter if needed
+         # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter
+         if self.tls_use_host_header:
+             # Create a combined adapter that supports both TLS context and host headers
+             class SSLContextHostHeaderAdapter(_SSLContextAdapter, _http_utils.HostHeaderSSLAdapter):
+                 def __init__(self, ssl_context, **kwargs):
+                     _SSLContextAdapter.__init__(self, ssl_context, **kwargs)
+                     _http_utils.HostHeaderSSLAdapter.__init__(self, **kwargs)
+
+                 def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
+                     # Use TLS context from wrapper
+                     pool_kwargs['ssl_context'] = self.ssl_context
+                     return _http_utils.HostHeaderSSLAdapter.init_poolmanager(
+                         self, connections, maxsize, block=block, **pool_kwargs
+                     )
+
+             https_adapter = SSLContextHostHeaderAdapter(context)
+         else:
+             https_adapter = _SSLContextAdapter(context)
+
+         # Cache the adapter for reuse
+         self._https_adapters[tls_config_key] = https_adapter
+         session.mount('https://', https_adapter)
+

  @contextmanager
  def handle_kerberos_keytab(keytab_file):
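The mechanism behind _mount_https_adapter is the standard requests one: an HTTPAdapter subclass carries a ready-made SSLContext into urllib3's connection pool, is mounted on the 'https://' prefix, and is cached per distinct TLS configuration so identical configs share one adapter and its pool. A minimal, self-contained sketch of that pattern (example.org is a placeholder):

    import ssl
    import requests

    class SSLContextAdapter(requests.adapters.HTTPAdapter):
        """Minimal stand-in for the _SSLContextAdapter introduced above."""

        def __init__(self, ssl_context, **kwargs):
            self.ssl_context = ssl_context
            super().__init__(**kwargs)

        def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
            # urllib3 accepts a pre-built SSLContext for every connection in the pool.
            pool_kwargs['ssl_context'] = self.ssl_context
            return super().init_poolmanager(connections, maxsize, block=block, **pool_kwargs)

    context = ssl.create_default_context()
    session = requests.Session()
    session.mount('https://', SSLContextAdapter(context))  # longest matching prefix wins
    response = session.get('https://example.org')  # placeholder URL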
datadog_checks/base/utils/metadata/core.py
@@ -4,8 +4,8 @@
  import logging

  from datadog_checks.base.agent import datadog_agent
+ from datadog_checks.base.utils.common import to_native_string

- from ..common import to_native_string
  from .utils import is_primitive
  from .version import parse_version

datadog_checks/base/utils/metadata/version.py
@@ -3,7 +3,7 @@
  # Licensed under a 3-clause BSD style license (see LICENSE)
  import re

- from ..common import exclude_undefined_keys
+ from datadog_checks.base.utils.common import exclude_undefined_keys

  # https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
  SEMVER_PATTERN = re.compile(
datadog_checks/base/utils/prometheus/metrics_pb2.py
@@ -6,6 +6,7 @@
  # https://github.com/prometheus/client_model/blob/086fe7ca28bde6cec2acd5223423c1475a362858/metrics.proto#L76-%20%20L81

  """Generated protocol buffer code."""
+
  from google.protobuf import descriptor as _descriptor
  from google.protobuf import descriptor_pool as _descriptor_pool
  from google.protobuf import symbol_database as _symbol_database
@@ -16,7 +17,7 @@ from google.protobuf.internal import builder as _builder
  _sym_db = _symbol_database.Default()

  DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
- b'\n\rmetrics.proto\x12\x14io.prometheus.client\"(\n\tLabelPair\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\x16\n\x05Gauge\x12\r\n\x05value\x18\x01 \x01(\x01\"\x18\n\x07\x43ounter\x12\r\n\x05value\x18\x01 \x01(\x01\"+\n\x08Quantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01\"e\n\x07Summary\x12\x14\n\x0csample_count\x18\x01 \x01(\x04\x12\x12\n\nsample_sum\x18\x02 \x01(\x01\x12\x30\n\x08quantile\x18\x03 \x03(\x0b\x32\x1e.io.prometheus.client.Quantile\"\x18\n\x07Untyped\x12\r\n\x05value\x18\x01 \x01(\x01\"c\n\tHistogram\x12\x14\n\x0csample_count\x18\x01 \x01(\x04\x12\x12\n\nsample_sum\x18\x02 \x01(\x01\x12,\n\x06\x62ucket\x18\x03 \x03(\x0b\x32\x1c.io.prometheus.client.Bucket\"7\n\x06\x42ucket\x12\x18\n\x10\x63umulative_count\x18\x01 \x01(\x04\x12\x13\n\x0bupper_bound\x18\x02 \x01(\x01\"\xbe\x02\n\x06Metric\x12.\n\x05label\x18\x01 \x03(\x0b\x32\x1f.io.prometheus.client.LabelPair\x12*\n\x05gauge\x18\x02 \x01(\x0b\x32\x1b.io.prometheus.client.Gauge\x12.\n\x07\x63ounter\x18\x03 \x01(\x0b\x32\x1d.io.prometheus.client.Counter\x12.\n\x07summary\x18\x04 \x01(\x0b\x32\x1d.io.prometheus.client.Summary\x12.\n\x07untyped\x18\x05 \x01(\x0b\x32\x1d.io.prometheus.client.Untyped\x12\x32\n\thistogram\x18\x07 \x01(\x0b\x32\x1f.io.prometheus.client.Histogram\x12\x14\n\x0ctimestamp_ms\x18\x06 \x01(\x03\"\x88\x01\n\x0cMetricFamily\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04help\x18\x02 \x01(\t\x12.\n\x04type\x18\x03 \x01(\x0e\x32 .io.prometheus.client.MetricType\x12,\n\x06metric\x18\x04 \x03(\x0b\x32\x1c.io.prometheus.client.Metric*M\n\nMetricType\x12\x0b\n\x07\x43OUNTER\x10\x00\x12\t\n\x05GAUGE\x10\x01\x12\x0b\n\x07SUMMARY\x10\x02\x12\x0b\n\x07UNTYPED\x10\x03\x12\r\n\tHISTOGRAM\x10\x04\x42\x16\n\x14io.prometheus.client'
+ b'\n\rmetrics.proto\x12\x14io.prometheus.client"(\n\tLabelPair\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"\x16\n\x05Gauge\x12\r\n\x05value\x18\x01 \x01(\x01"\x18\n\x07\x43ounter\x12\r\n\x05value\x18\x01 \x01(\x01"+\n\x08Quantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01"e\n\x07Summary\x12\x14\n\x0csample_count\x18\x01 \x01(\x04\x12\x12\n\nsample_sum\x18\x02 \x01(\x01\x12\x30\n\x08quantile\x18\x03 \x03(\x0b\x32\x1e.io.prometheus.client.Quantile"\x18\n\x07Untyped\x12\r\n\x05value\x18\x01 \x01(\x01"c\n\tHistogram\x12\x14\n\x0csample_count\x18\x01 \x01(\x04\x12\x12\n\nsample_sum\x18\x02 \x01(\x01\x12,\n\x06\x62ucket\x18\x03 \x03(\x0b\x32\x1c.io.prometheus.client.Bucket"7\n\x06\x42ucket\x12\x18\n\x10\x63umulative_count\x18\x01 \x01(\x04\x12\x13\n\x0bupper_bound\x18\x02 \x01(\x01"\xbe\x02\n\x06Metric\x12.\n\x05label\x18\x01 \x03(\x0b\x32\x1f.io.prometheus.client.LabelPair\x12*\n\x05gauge\x18\x02 \x01(\x0b\x32\x1b.io.prometheus.client.Gauge\x12.\n\x07\x63ounter\x18\x03 \x01(\x0b\x32\x1d.io.prometheus.client.Counter\x12.\n\x07summary\x18\x04 \x01(\x0b\x32\x1d.io.prometheus.client.Summary\x12.\n\x07untyped\x18\x05 \x01(\x0b\x32\x1d.io.prometheus.client.Untyped\x12\x32\n\thistogram\x18\x07 \x01(\x0b\x32\x1f.io.prometheus.client.Histogram\x12\x14\n\x0ctimestamp_ms\x18\x06 \x01(\x03"\x88\x01\n\x0cMetricFamily\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04help\x18\x02 \x01(\t\x12.\n\x04type\x18\x03 \x01(\x0e\x32 .io.prometheus.client.MetricType\x12,\n\x06metric\x18\x04 \x03(\x0b\x32\x1c.io.prometheus.client.Metric*M\n\nMetricType\x12\x0b\n\x07\x43OUNTER\x10\x00\x12\t\n\x05GAUGE\x10\x01\x12\x0b\n\x07SUMMARY\x10\x02\x12\x0b\n\x07UNTYPED\x10\x03\x12\r\n\tHISTOGRAM\x10\x04\x42\x16\n\x14io.prometheus.client'
  )

  _globals = globals()
datadog_checks/base/utils/replay/execute.py
@@ -5,10 +5,10 @@ import os
  import subprocess
  import sys

+ from datadog_checks.base.utils.common import ensure_bytes, to_native_string
  from datadog_checks.base.utils.format import json
+ from datadog_checks.base.utils.platform import Platform

- from ..common import ensure_bytes, to_native_string
- from ..platform import Platform
  from .constants import KNOWN_DATADOG_AGENT_SETTER_METHODS, EnvVars


datadog_checks/base/utils/replay/redirect.py
@@ -5,13 +5,12 @@ import logging
  import os
  import sys

+ from datadog_checks.base.checks import base
+ from datadog_checks.base.log import LOG_LEVEL_MAP, TRACE_LEVEL, _get_py_loglevel
+ from datadog_checks.base.utils.common import to_native_string
  from datadog_checks.base.utils.format import json
-
- from ...checks import base
- from ...log import LOG_LEVEL_MAP, TRACE_LEVEL, _get_py_loglevel
- from ...utils.common import to_native_string
- from ...utils.metadata import core
- from ...utils.replay.constants import KNOWN_DATADOG_AGENT_SETTER_METHODS, EnvVars
+ from datadog_checks.base.utils.metadata import core
+ from datadog_checks.base.utils.replay.constants import KNOWN_DATADOG_AGENT_SETTER_METHODS, EnvVars

  MESSAGE_INDICATOR = os.environ[EnvVars.MESSAGE_INDICATOR]
  LOG_METHODS = {log_level: log_method.lower() for log_method, log_level in LOG_LEVEL_MAP.items()}
datadog_checks/base/utils/subprocess_output.py
@@ -3,7 +3,7 @@
  # Licensed under a 3-clause BSD style license (see LICENSE)
  import logging

- from .. import ensure_unicode
+ from datadog_checks.base import ensure_unicode

  try:
      from _util import SubprocessOutputEmptyError  # noqa
@@ -11,7 +11,7 @@ try:
  except ImportError:
      # No agent
      from ..stubs._util import SubprocessOutputEmptyError  # noqa
-     from ..stubs._util import subprocess_output
+     from datadog_checks.base.stubs._util import subprocess_output


  log = logging.getLogger(__name__)
datadog_checks/base/utils/tagging.py
@@ -5,7 +5,7 @@
  try:
      import tagger
  except ImportError:
-     from ..stubs import tagger  # noqa: F401
+     from datadog_checks.base.stubs import tagger  # noqa: F401


  GENERIC_TAGS = {
datadog_checks/base/utils/tailfile.py
@@ -9,7 +9,6 @@ from .common import ensure_bytes


  class TailFile(object):
-
      CRC_SIZE = 16

      def __init__(self, logger, path, callback):
@@ -22,7 +21,6 @@ class TailFile(object):
          self._callback = callback

      def _open_file(self, move_end=False, pos=False):
-
          already_open = False
          # close and reopen to handle logrotate
          if self._f is not None: