datadog-checks-base 36.5.0__py2.py3-none-any.whl → 37.4.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datadog_checks/base/__about__.py +1 -1
- datadog_checks/base/__init__.py +7 -5
- datadog_checks/base/agent.py +15 -0
- datadog_checks/base/checks/base.py +140 -64
- datadog_checks/base/checks/kube_leader/mixins.py +1 -5
- datadog_checks/base/checks/libs/prometheus.py +15 -1
- datadog_checks/base/checks/libs/thread_pool.py +1 -2
- datadog_checks/base/checks/logs/__init__.py +3 -0
- datadog_checks/base/checks/logs/crawler/__init__.py +3 -0
- datadog_checks/base/checks/logs/crawler/base.py +34 -0
- datadog_checks/base/checks/logs/crawler/stream.py +52 -0
- datadog_checks/base/checks/openmetrics/base_check.py +1 -3
- datadog_checks/base/checks/openmetrics/mixins.py +13 -20
- datadog_checks/base/checks/openmetrics/v2/base.py +1 -4
- datadog_checks/base/checks/openmetrics/v2/metrics.py +41 -0
- datadog_checks/base/checks/openmetrics/v2/scraper.py +2 -5
- datadog_checks/base/checks/openmetrics/v2/transform.py +1 -3
- datadog_checks/base/checks/openmetrics/v2/transformers/counter.py +7 -8
- datadog_checks/base/checks/openmetrics/v2/transformers/service_check.py +1 -3
- datadog_checks/base/checks/prometheus/base_check.py +1 -3
- datadog_checks/base/checks/prometheus/mixins.py +8 -12
- datadog_checks/base/checks/win/winpdh.py +4 -5
- datadog_checks/base/checks/win/winpdh_base.py +2 -3
- datadog_checks/base/checks/win/wmi/__init__.py +1 -3
- datadog_checks/base/checks/win/wmi/sampler.py +7 -11
- datadog_checks/base/checks/windows/perf_counters/base.py +6 -8
- datadog_checks/base/ddyaml.py +1 -2
- datadog_checks/base/log.py +2 -21
- datadog_checks/base/stubs/aggregator.py +6 -8
- datadog_checks/base/stubs/datadog_agent.py +43 -7
- datadog_checks/base/stubs/similar.py +65 -3
- datadog_checks/base/utils/__init__.py +0 -26
- datadog_checks/base/utils/common.py +4 -6
- datadog_checks/base/utils/containers.py +1 -2
- datadog_checks/base/utils/db/query.py +7 -6
- datadog_checks/base/utils/db/timed_cache.py +64 -0
- datadog_checks/base/utils/db/transform.py +82 -1
- datadog_checks/base/utils/db/utils.py +51 -14
- datadog_checks/base/utils/fips.py +32 -0
- datadog_checks/base/utils/headers.py +13 -28
- datadog_checks/base/utils/http.py +24 -24
- datadog_checks/base/utils/metadata/core.py +3 -8
- datadog_checks/base/utils/metadata/utils.py +1 -2
- datadog_checks/base/utils/network.py +0 -12
- datadog_checks/base/utils/prometheus/metrics_pb2.py +46 -877
- datadog_checks/base/utils/replay/execute.py +1 -1
- datadog_checks/base/utils/replay/redirect.py +1 -1
- datadog_checks/base/utils/serialization.py +15 -1
- datadog_checks/base/utils/subprocess_output.py +1 -3
- datadog_checks/base/utils/time.py +2 -18
- datadog_checks/base/utils/tls.py +13 -5
- datadog_checks/base/utils/tracing.py +2 -7
- datadog_checks_base-37.4.0.dist-info/METADATA +94 -0
- {datadog_checks_base-36.5.0.dist-info → datadog_checks_base-37.4.0.dist-info}/RECORD +55 -48
- {datadog_checks_base-36.5.0.dist-info → datadog_checks_base-37.4.0.dist-info}/WHEEL +1 -1
- datadog_checks/base/data/agent_requirements.in +0 -115
- datadog_checks_base-36.5.0.dist-info/METADATA +0 -116
datadog_checks/base/__about__.py
CHANGED
datadog_checks/base/__init__.py
CHANGED
@@ -1,18 +1,20 @@
 # (C) Datadog, Inc. 2018-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
+from datadog_checks.base.agent import datadog_agent
+
 from .__about__ import __version__
 from .checks import AgentCheck
 from .checks.openmetrics import OpenMetricsBaseCheck
+from .checks.openmetrics.v2.base import OpenMetricsBaseCheckV2
 from .config import is_affirmative
 from .errors import ConfigurationError
 from .utils.common import ensure_bytes, ensure_unicode, to_native_string, to_string

-
-
-
-
-    OpenMetricsBaseCheckV2 = None
+if datadog_agent.get_config('use_boringssl'):
+    import urllib3.contrib.pyopenssl
+
+    urllib3.contrib.pyopenssl.inject_into_urllib3()

 # Windows-only
 try:
datadog_checks/base/agent.py
ADDED

@@ -0,0 +1,15 @@
+# (C) Datadog, Inc. 2024-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+try:
+    import aggregator
+    import datadog_agent
+
+    AGENT_RUNNING = True
+except ImportError:
+    from .stubs import aggregator, datadog_agent
+
+    AGENT_RUNNING = False
+
+
+__all__ = ['AGENT_RUNNING', 'aggregator', 'datadog_agent']
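The new `agent.py` module centralizes the try/except import fallback that individual modules used to repeat: inside a running Agent the embedded `aggregator` and `datadog_agent` bindings are used, otherwise the test stubs are loaded and `AGENT_RUNNING` is False. A minimal sketch of how downstream code can rely on it (the check class and metric name below are hypothetical):

    # Hypothetical integration code built on the new central import point.
    from datadog_checks.base import AgentCheck
    from datadog_checks.base.agent import AGENT_RUNNING, datadog_agent


    class MyCheck(AgentCheck):
        def check(self, _):
            # Resolves to the real Go bindings inside the Agent, to the stubs under pytest.
            hostname = datadog_agent.get_hostname()
            self.gauge('mycheck.heartbeat', 1, tags=['agent_host:{}'.format(hostname)])

            if not AGENT_RUNNING:
                self.log.debug('Running against stubbed agent bindings')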
datadog_checks/base/checks/base.py
CHANGED

@@ -5,8 +5,8 @@ import copy
 import functools
 import importlib
 import inspect
-import json
 import logging
+import os
 import re
 import traceback
 import unicodedata
@@ -28,7 +28,9 @@ from typing import (  # noqa: F401
 )

 import yaml
-from
+from pydantic import BaseModel, ValidationError
+
+from datadog_checks.base.agent import AGENT_RUNNING, aggregator, datadog_agent

 from ..config import is_affirmative
 from ..constants import ServiceCheck
@@ -45,43 +47,44 @@ from ..types import (
 from ..utils.agent.utils import should_profile_memory
 from ..utils.common import ensure_bytes, to_native_string
 from ..utils.diagnose import Diagnosis
+from ..utils.fips import enable_fips
 from ..utils.http import RequestsWrapper
 from ..utils.limiter import Limiter
 from ..utils.metadata import MetadataManager
 from ..utils.secrets import SecretsSanitizer
+from ..utils.serialization import from_json, to_json
 from ..utils.tagging import GENERIC_TAGS
 from ..utils.tls import TlsContextWrapper
 from ..utils.tracing import traced_class

-
-    import datadog_agent
-
+if AGENT_RUNNING:
     from ..log import CheckLoggingAdapter, init_logging

-
-except ImportError:
-    from ..stubs import datadog_agent
+else:
     from ..stubs.log import CheckLoggingAdapter, init_logging

-
-
-try:
-    import aggregator
-
-    using_stub_aggregator = False
-except ImportError:
-    from ..stubs import aggregator
-
-    using_stub_aggregator = True
-
+init_logging()

 if datadog_agent.get_config('disable_unsafe_yaml'):
     from ..ddyaml import monkey_patch_pyyaml

     monkey_patch_pyyaml()

-if
-from
+if datadog_agent.get_config('integration_tracing'):
+    from ddtrace import patch
+
+    # handle thread monitoring as an additional option
+    # See: http://pypi.datadoghq.com/trace/docs/other_integrations.html#futures
+    if datadog_agent.get_config('integration_tracing_futures'):
+        patch(logging=True, requests=True, futures=True)
+    else:
+        patch(logging=True, requests=True)
+
+if is_affirmative(datadog_agent.get_config('integration_profiling')):
+    from ddtrace.profiling import Profiler
+
+    prof = Profiler(service='datadog-agent-integrations')
+    prof.start()

 if TYPE_CHECKING:
     import ssl  # noqa: F401
@@ -174,7 +177,6 @@ class AgentCheck(object):
         # type: (*Any, **Any) -> None
         """
         Parameters:
-
             name (str):
                 the name of the check
             init_config (dict):
@@ -302,8 +304,13 @@ class AgentCheck(object):
         # Functions that will be called exactly once (if successful) before the first check run
         self.check_initializations = deque()  # type: Deque[Callable[[], None]]

-
-
+        self.check_initializations.append(self.load_configuration_models)
+
+        self.__formatted_tags = None
+        self.__logs_enabled = None
+
+        if os.environ.get("GOFIPS", "0") == "1":
+            enable_fips()

     def _create_metrics_pattern(self, metric_patterns, option_name):
         all_patterns = metric_patterns.get(option_name, [])
@@ -394,6 +401,36 @@ class AgentCheck(object):

         return self._http

+    @property
+    def logs_enabled(self):
+        # type: () -> bool
+        """
+        Returns True if logs are enabled, False otherwise.
+        """
+        if self.__logs_enabled is None:
+            self.__logs_enabled = bool(datadog_agent.get_config('logs_enabled'))
+
+        return self.__logs_enabled
+
+    @property
+    def formatted_tags(self):
+        # type: () -> str
+        if self.__formatted_tags is None:
+            normalized_tags = set()
+            for tag in self.instance.get('tags', []):
+                key, _, value = tag.partition(':')
+                if not value:
+                    continue
+
+                if self.disable_generic_tags and key in GENERIC_TAGS:
+                    key = '{}_{}'.format(self.name, key)
+
+                normalized_tags.add('{}:{}'.format(key, value))
+
+            self.__formatted_tags = ','.join(sorted(normalized_tags))
+
+        return self.__formatted_tags
+
     @property
     def diagnosis(self):
         # type: () -> Diagnosis
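The new `formatted_tags` property condenses the instance-level `tags` option into a stable, comma-separated string suitable for the `ddtags` field of logs: tags without a value are dropped, generic keys are prefixed with the check name when `disable_generic_tags` is set, and the result is sorted and cached. A standalone sketch of that normalization, with example tag values and a reduced GENERIC_TAGS set for illustration:

    # Standalone sketch of the normalization performed by `formatted_tags`.
    GENERIC_TAGS = {'host', 'cluster'}  # illustrative subset of the real list

    def format_tags(check_name, instance_tags, disable_generic_tags=True):
        normalized = set()
        for tag in instance_tags:
            key, _, value = tag.partition(':')
            if not value:  # bare tags such as 'standalone' are skipped
                continue
            if disable_generic_tags and key in GENERIC_TAGS:
                key = '{}_{}'.format(check_name, key)  # avoid reserved tag keys
            normalized.add('{}:{}'.format(key, value))
        return ','.join(sorted(normalized))

    print(format_tags('postgres', ['env:prod', 'host:db-1', 'standalone']))
    # -> 'env:prod,postgres_host:db-1'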
@@ -430,7 +467,7 @@ class AgentCheck(object):
         Used for sending metadata via Go bindings.
         """
         if not hasattr(self, '_metadata_manager'):
-            if not self.check_id and
+            if not self.check_id and AGENT_RUNNING:
                 raise RuntimeError('Attribute `check_id` must be set')

             self._metadata_manager = MetadataManager(self.name, self.check_id, self.log, self.METADATA_TRANSFORMERS)
@@ -470,11 +507,9 @@ class AgentCheck(object):

         known_options = {k for k, _ in models_config}  # type: Set[str]

-        if
-
-
-            # Also add aliases, if any
-            known_options.update(set(models_config.model_dump(by_alias=True)))
+        if isinstance(models_config, BaseModel):
+            # Also add aliases, if any
+            known_options.update(set(models_config.model_dump(by_alias=True)))

         unknown_options = [option for option in user_configs.keys() if option not in known_options]  # type: List[str]

@@ -526,8 +561,7 @@ class AgentCheck(object):
     def load_configuration_model(import_path, model_name, config, context):
         try:
             package = importlib.import_module(import_path)
-
-        except ModuleNotFoundError as e:  # type: ignore
+        except ModuleNotFoundError as e:
             # Don't fail if there are no models
             if str(e).startswith('No module named '):
                 return
@@ -538,8 +572,7 @@ class AgentCheck(object):
         if model is not None:
             try:
                 config_model = model.model_validate(config, context=context)
-
-            except ValidationError as e:  # type: ignore
+            except ValidationError as e:
                 errors = e.errors()
                 num_errors = len(errors)
                 message_lines = [
@@ -558,7 +591,7 @@ class AgentCheck(object):
                     )
                     message_lines.append(' {}'.format(error['msg']))

-
+                raise ConfigurationError('\n'.join(message_lines)) from None
             else:
                 return config_model

@@ -608,7 +641,7 @@ class AgentCheck(object):
             err_msg = 'Histogram: {} has non integer value: {}. Only integer are valid bucket values (count).'.format(
                 repr(name), repr(value)
             )
-            if
+            if not AGENT_RUNNING:
                 raise ValueError(err_msg)
             self.warning(err_msg)
             return
@@ -658,6 +691,20 @@ class AgentCheck(object):

         aggregator.submit_event_platform_event(self, self.check_id, to_native_string(raw_event), "dbm-metadata")

+    def event_platform_event(self, raw_event, event_track_type):
+        # type: (str, str) -> None
+        """Send an event platform event.
+
+        Parameters:
+            raw_event (str):
+                JSON formatted string representing the event to send
+            event_track_type (str):
+                type of event ingested and processed by the event platform
+        """
+        if raw_event is None:
+            return
+        aggregator.submit_event_platform_event(self, self.check_id, to_native_string(raw_event), event_track_type)
+
     def should_send_metric(self, metric_name):
         return not self._metric_excluded(metric_name) and self._metric_included(metric_name)

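`event_platform_event` generalizes the existing dbm-metadata submission so a check can route a JSON payload to an arbitrary event platform track. A hedged usage sketch; the payload fields and track name below are placeholders rather than an established schema:

    from datadog_checks.base import AgentCheck
    from datadog_checks.base.utils.serialization import to_json


    class MyCheck(AgentCheck):
        def check(self, _):
            # Illustrative payload and track name.
            payload = {'timestamp': 1717000000, 'kind': 'example', 'details': 'connection pool saturated'}
            self.event_platform_event(to_json(payload), 'my-custom-track')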
@@ -706,7 +753,7 @@ class AgentCheck(object):
             err_msg = 'Metric: {} has non float value: {}. Only float values can be submitted as metrics.'.format(
                 repr(name), repr(value)
             )
-            if
+            if not AGENT_RUNNING:
                 raise ValueError(err_msg)
             self.warning(err_msg)
             return
@@ -718,7 +765,6 @@ class AgentCheck(object):
         """Sample a gauge metric.

         Parameters:
-
             name (str):
                 the name of the metric
             value (float):
@@ -741,7 +787,6 @@ class AgentCheck(object):
         """Sample a raw count metric.

         Parameters:
-
             name (str):
                 the name of the metric
             value (float):
@@ -766,7 +811,6 @@ class AgentCheck(object):
         """Sample an increasing counter metric.

         Parameters:
-
             name (str):
                 the name of the metric
             value (float):
@@ -798,7 +842,6 @@ class AgentCheck(object):
         """Sample a point, with the rate calculated at the end of the check.

         Parameters:
-
             name (str):
                 the name of the metric
             value (float):
@@ -821,7 +864,6 @@ class AgentCheck(object):
         """Sample a histogram metric.

         Parameters:
-
             name (str):
                 the name of the metric
             value (float):
@@ -844,7 +886,6 @@ class AgentCheck(object):
         """Sample a histogram based on rate metrics.

         Parameters:
-
             name (str):
                 the name of the metric
             value (float):
@@ -867,7 +908,6 @@ class AgentCheck(object):
         """Increment a counter metric.

         Parameters:
-
             name (str):
                 the name of the metric
             value (float):
@@ -891,7 +931,6 @@ class AgentCheck(object):
         """Decrement a counter metric.

         Parameters:
-
             name (str):
                 the name of the metric
             value (float):
@@ -915,7 +954,6 @@ class AgentCheck(object):
         """Send the status of a service.

         Parameters:
-
             name (str):
                 the name of the service check
             status (int):
@@ -941,6 +979,42 @@ class AgentCheck(object):
             self, self.check_id, self._format_namespace(name, raw), status, tags, hostname, message
         )

+    def send_log(self, data, cursor=None, stream='default'):
+        # type: (dict[str, str], dict[str, Any] | None, str) -> None
+        """Send a log for submission.
+
+        Parameters:
+            data (dict[str, str]):
+                The log data to send. The following keys are treated specially, if present:
+
+                - timestamp: should be an integer or float representing the number of seconds since the Unix epoch
+                - ddtags: if not defined, it will automatically be set based on the instance's `tags` option
+            cursor (dict[str, Any] or None):
+                Metadata associated with the log which will be saved to disk. The most recent value may be
+                retrieved with the `get_log_cursor` method.
+            stream (str):
+                The stream associated with this log, used for accurate cursor persistence.
+                Has no effect if `cursor` argument is `None`.
+        """
+        attributes = data.copy()
+        if 'ddtags' not in attributes and self.formatted_tags:
+            attributes['ddtags'] = self.formatted_tags
+
+        timestamp = attributes.get('timestamp')
+        if timestamp is not None:
+            # convert seconds to milliseconds
+            attributes['timestamp'] = int(timestamp * 1000)
+
+        datadog_agent.send_log(to_json(attributes), self.check_id)
+        if cursor is not None:
+            self.write_persistent_cache('log_cursor_{}'.format(stream), to_json(cursor))
+
+    def get_log_cursor(self, stream='default'):
+        # type: (str) -> dict[str, Any] | None
+        """Returns the most recent log cursor from disk."""
+        data = self.read_persistent_cache('log_cursor_{}'.format(stream))
+        return from_json(data) if data else None
+
     def _log_deprecation(self, deprecation_key, *args):
         # type: (str, *str) -> None
         """
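Together, `send_log` and `get_log_cursor` let a check submit logs through the Agent and resume where it left off on the next run. A minimal sketch of the intended flow; the check class, `fetch_events` helper, and cursor shape are hypothetical:

    from datadog_checks.base import AgentCheck


    class MyLogCheck(AgentCheck):
        def check(self, _):
            cursor = self.get_log_cursor(stream='audit') or {}
            # fetch_events stands in for whatever API the integration polls.
            for event in self.fetch_events(since=cursor.get('last_id')):
                self.send_log(
                    {'message': event['text'], 'timestamp': event['epoch_seconds']},
                    cursor={'last_id': event['id']},
                    stream='audit',
                )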
@@ -963,7 +1037,6 @@ class AgentCheck(object):
         """Updates the cached metadata `name` with `value`, which is then sent by the Agent at regular intervals.

         Parameters:
-
             name (str):
                 the name of the metadata
             value (Any):
@@ -1015,7 +1088,6 @@ class AgentCheck(object):
         """Returns the value previously stored with `write_persistent_cache` for the same `key`.

         Parameters:
-
             key (str):
                 the key to retrieve
         """
@@ -1030,7 +1102,6 @@ class AgentCheck(object):
        The cache is persistent between agent restarts but will be rebuilt if the check instance configuration changes.

         Parameters:
-
             key (str):
                 the key to retrieve
             value (str):
@@ -1049,7 +1120,7 @@ class AgentCheck(object):
             new_tags = []
             for hostname, source_map in external_tags:
                 new_tags.append((to_native_string(hostname), source_map))
-                for src_name, tags in
+                for src_name, tags in source_map.items():
                     source_map[src_name] = self._normalize_tags_type(tags)
             datadog_agent.set_external_tags(new_tags)
         except IndexError:
@@ -1076,7 +1147,6 @@ class AgentCheck(object):
         and make it compliant with flake8 logging format linter.

         Parameters:
-
             warning_message (str):
                 the warning message
             args (Any):
@@ -1114,7 +1184,7 @@ class AgentCheck(object):
        The agent calls this method to retrieve diagnostics from integrations. This method
        runs explicit diagnostics if available.
        """
-        return
+        return to_json([d._asdict() for d in (self.diagnosis.diagnoses + self.diagnosis.run_explicit())])

     def _get_requests_proxy(self):
         # type: () -> ProxySettings
@@ -1142,13 +1212,14 @@ class AgentCheck(object):
     def normalize(self, metric, prefix=None, fix_case=False):
         # type: (Union[str, bytes], Union[str, bytes], bool) -> str
         """
-        Turn a metric into a well-formed metric name
-
-        :
-
-
+        Turn a metric into a well-formed metric name prefix.b.c
+
+        Parameters:
+            metric: The metric name to normalize
+            prefix: A prefix to to add to the normalized name, default None
+            fix_case: A boolean, indicating whether to make sure that the metric name returned is in "snake_case"
         """
-        if isinstance(metric,
+        if isinstance(metric, str):
             metric = unicodedata.normalize('NFKD', metric).encode('ascii', 'ignore')

         if fix_case:
@@ -1173,7 +1244,7 @@ class AgentCheck(object):
        This happens for legacy reasons, when we cleaned up some characters (like '-')
        which are allowed in tags.
        """
-        if isinstance(tag,
+        if isinstance(tag, str):
            tag = tag.encode('utf-8', 'ignore')
        tag = self.TAG_REPLACEMENT.sub(br'_', tag)
        tag = self.MULTIPLE_UNDERSCORE_CLEANUP.sub(br'_', tag)
@@ -1219,7 +1290,13 @@ class AgentCheck(object):

                 enter_pdb(self.check, line=self.init_config['set_breakpoint'], args=(instance,))
             elif self.should_profile_memory():
-
+                # self.init_config['profile_memory'] could be `/tmp/datadog-agent-memory-profiler*`
+                # that is generated by Datadog Agent.
+                # If we use `--m-dir` for `agent check` command, a hidden flag, it should be same as a given value.
+                namespaces = [self.init_config['profile_memory']]
+                for id in self.check_id.split(":"):
+                    namespaces.append(id)
+                self.profile_memory(func=self.check, namespaces=namespaces, args=(instance,))
             else:
                 self.check(instance)

@@ -1227,7 +1304,7 @@ class AgentCheck(object):
         except Exception as e:
             message = self.sanitize(str(e))
             tb = self.sanitize(traceback.format_exc())
-            error_report =
+            error_report = to_json([{'message': message, 'traceback': tb}])
         finally:
             if self.metric_limiter:
                 if is_affirmative(self.debug_metrics.get('metric_contexts', False)):
@@ -1267,13 +1344,12 @@ class AgentCheck(object):
        ```

        Parameters:
-
            event (dict[str, Any]):
                the event to be sent
        """
        # Enforce types of some fields, considerably facilitates handling in go bindings downstream
-        for key, value in
-        if not isinstance(value, (
+        for key, value in event.items():
+            if not isinstance(value, (str, bytes)):
                continue

            try:
datadog_checks/base/checks/kube_leader/mixins.py
CHANGED

@@ -1,11 +1,7 @@
 # (C) Datadog, Inc. 2018-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-
-try:
-    import datadog_agent
-except ImportError:
-    from ...stubs import datadog_agent
+from datadog_checks.base.agent import datadog_agent

 from .. import AgentCheck
 from .record import ElectionRecordAnnotation, ElectionRecordLease
datadog_checks/base/checks/libs/prometheus.py
CHANGED

@@ -2,15 +2,29 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)

+from itertools import tee
+
 from prometheus_client.metrics_core import Metric
 from prometheus_client.parser import _parse_sample, _replace_help_escaping


+def text_fd_to_metric_families(fd):
+    raw_lines, input_lines = tee(fd, 2)
+    # It's important to start parsing outside of the for-loop.
+    # This way we treat crashes before we yield the first parsed line differently than crashes while yielding.
+    parsed_lines = _parse_payload(input_lines)
+    for raw_line, metric_family in zip(raw_lines, parsed_lines):
+        try:
+            yield metric_family
+        except Exception as e:
+            raise ValueError("Failed to parse the metric response '{}': {}".format(raw_line, e))
+
+
 # This copies most of the code from upstream at that version:
 # https://github.com/prometheus/client_python/blob/049744296d216e6be65dc8f3d44650310f39c384/prometheus_client/parser.py#L144
 # but reverting the behavior to a compatible version, which doesn't change counters to have a total suffix. See
 # https://github.com/prometheus/client_python/commit/a4dd93bcc6a0422e10cfa585048d1813909c6786#diff-0adf47ea7f99c66d4866ccb4e557a865L158
-def
+def _parse_payload(fd):
     """Parse Prometheus text format from a file descriptor.

     This is a laxer parser than the main Go parser, so successful parsing does
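The new `text_fd_to_metric_families` wrapper tees the input so a parse failure can be reported together with the raw payload line that caused it, while the renamed `_parse_payload` keeps the legacy counter naming (no forced `_total` suffix). A small usage sketch with a made-up payload:

    import io

    from datadog_checks.base.checks.libs.prometheus import text_fd_to_metric_families

    payload = io.StringIO(
        '# HELP app_requests_total Total requests\n'
        '# TYPE app_requests_total counter\n'
        'app_requests_total{path="/"} 42\n'
    )

    for family in text_fd_to_metric_families(payload):
        for sample in family.samples:
            print(family.name, sample)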
datadog_checks/base/checks/logs/crawler/base.py
ADDED

@@ -0,0 +1,34 @@
+# (C) Datadog, Inc. 2024-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Iterable
+
+from datadog_checks.base import AgentCheck
+
+if TYPE_CHECKING:
+    from datadog_checks.base.checks.logs.crawler.stream import LogStream
+
+
+class LogCrawlerCheck(AgentCheck, ABC):
+    @abstractmethod
+    def get_log_streams(self) -> Iterable[LogStream]:
+        """
+        Yields the log streams associated with this check.
+        """
+
+    def process_streams(self) -> None:
+        """
+        Process the log streams and send the collected logs.
+
+        Crawler checks that need more functionality can implement the `check` method and call this directly.
+        """
+        for stream in self.get_log_streams():
+            last_cursor = self.get_log_cursor(stream.name)
+            for record in stream.records(cursor=last_cursor):
+                self.send_log(record.data, cursor=record.cursor, stream=stream.name)
+
+    def check(self, _) -> None:
+        self.process_streams()
datadog_checks/base/checks/logs/crawler/stream.py
ADDED

@@ -0,0 +1,52 @@
+# (C) Datadog, Inc. 2024-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Any, Iterable
+
+if TYPE_CHECKING:
+    from datadog_checks.base import AgentCheck
+
+
+class LogRecord:
+    __slots__ = ('cursor', 'data')
+
+    def __init__(self, data: dict[str, str], *, cursor: dict[str, Any] | None):
+        self.data = data
+        self.cursor = cursor
+
+
+class LogStream(ABC):
+    def __init__(self, *, check: AgentCheck, name: str):
+        self.__check = check
+        self.__name = name
+
+    @property
+    def check(self) -> AgentCheck:
+        """
+        The AgentCheck instance associated with this LogStream.
+        """
+        return self.__check
+
+    @property
+    def name(self) -> str:
+        """
+        The name of this LogStream.
+        """
+        return self.__name
+
+    def construct_tags(self, tags: list[str]) -> list[str]:
+        """
+        Returns a formatted string of tags which may be used directly as the `ddtags` field of logs.
+        This will include the `tags` from the integration instance config.
+        """
+        formatted_tags = ','.join(tags)
+        return f'{self.check.formatted_tags},{formatted_tags}' if self.check.formatted_tags else formatted_tags
+
+    @abstractmethod
+    def records(self, *, cursor: dict[str, Any] | None = None) -> Iterable[LogRecord]:
+        """
+        Yields log records as they are received.
+        """
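The crawler framework splits responsibilities: a `LogStream` subclass knows how to fetch records from a source and exposes them as `LogRecord` objects carrying an optional cursor, while `LogCrawlerCheck` drives the streams and persists cursors through `send_log`. A minimal sketch of a concrete crawler; the stream name and file path are illustrative:

    from datadog_checks.base.checks.logs.crawler.base import LogCrawlerCheck
    from datadog_checks.base.checks.logs.crawler.stream import LogRecord, LogStream


    class FileStream(LogStream):
        """Hypothetical stream that tails a local file and remembers its offset."""

        def records(self, *, cursor=None):
            offset = (cursor or {}).get('offset', 0)
            with open('/var/log/example.log') as f:
                f.seek(offset)
                while True:
                    line = f.readline()
                    if not line:
                        break
                    yield LogRecord({'message': line.rstrip()}, cursor={'offset': f.tell()})


    class ExampleCrawlerCheck(LogCrawlerCheck):
        def get_log_streams(self):
            return [FileStream(check=self, name='example_file')]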
datadog_checks/base/checks/openmetrics/base_check.py
CHANGED

@@ -4,7 +4,6 @@
 from copy import deepcopy

 import requests
-from six import PY2

 from ...errors import CheckException
 from ...utils.tracing import traced_class
@@ -183,5 +182,4 @@ class StandardFields(object):
     pass


-
-StandardFields.__doc__ = '\n'.join('- `{}`'.format(field) for field in STANDARD_FIELDS)
+StandardFields.__doc__ = '\n'.join('- `{}`'.format(field) for field in STANDARD_FIELDS)