datadog_lambda 6.110.0__tar.gz → 7.112.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/PKG-INFO +2 -2
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/__init__.py +7 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/api.py +7 -8
- datadog_lambda-7.112.0/datadog_lambda/asm.py +184 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/cold_start.py +3 -9
- datadog_lambda-7.112.0/datadog_lambda/config.py +146 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/metric.py +5 -12
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/patch.py +4 -8
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/span_pointers.py +2 -7
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/tag_object.py +3 -4
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/tracing.py +65 -33
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/trigger.py +2 -2
- datadog_lambda-7.112.0/datadog_lambda/version.py +1 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/wrapper.py +56 -125
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/pyproject.toml +2 -2
- datadog_lambda-6.110.0/datadog_lambda/fips.py +0 -19
- datadog_lambda-6.110.0/datadog_lambda/version.py +0 -1
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/LICENSE +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/LICENSE-3rdparty.csv +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/NOTICE +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/README.md +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/constants.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/dogstatsd.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/extension.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/handler.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/logger.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/module_name.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/stats_writer.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/statsd_writer.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/tags.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/thread_stats_writer.py +0 -0
- {datadog_lambda-6.110.0 → datadog_lambda-7.112.0}/datadog_lambda/xray.py +0 -0
```diff
--- datadog_lambda-6.110.0/PKG-INFO
+++ datadog_lambda-7.112.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datadog_lambda
-Version: 6.110.0
+Version: 7.112.0
 Summary: The Datadog AWS Lambda Library
 Home-page: https://github.com/DataDog/datadog-lambda-python
 License: Apache-2.0
@@ -19,7 +19,7 @@ Classifier: Programming Language :: Python :: 3.13
 Provides-Extra: dev
 Requires-Dist: botocore (>=1.34.0,<2.0.0) ; extra == "dev"
 Requires-Dist: datadog (>=0.51.0,<1.0.0)
-Requires-Dist: ddtrace (>=
+Requires-Dist: ddtrace (>=3.10.2,<4)
 Requires-Dist: flake8 (>=5.0.4,<6.0.0) ; extra == "dev"
 Requires-Dist: pytest (>=8.0.0,<9.0.0) ; extra == "dev"
 Requires-Dist: pytest-benchmark (>=4.0,<5.0) ; extra == "dev"
```
```diff
--- datadog_lambda-6.110.0/datadog_lambda/__init__.py
+++ datadog_lambda-7.112.0/datadog_lambda/__init__.py
@@ -17,3 +17,10 @@ from datadog_lambda.logger import initialize_logging  # noqa: E402
 
 
 initialize_logging(__name__)
+
+
+from datadog_lambda.patch import patch_all  # noqa: E402
+
+# Patch third-party libraries for tracing, must be done before importing any
+# handler code.
+patch_all()
```
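With `patch_all()` now invoked at the bottom of `datadog_lambda/__init__.py`, instrumentation happens as a side effect of importing the package, before any handler code loads. A minimal sketch of what that implies for a handler module (the handler itself is hypothetical, and libraries are only traced when `DD_TRACE_ENABLED` is true):

```python
# Hypothetical handler module. Importing datadog_lambda runs patch_all() at
# import time, so supported third-party libraries are already instrumented
# (when DD_TRACE_ENABLED is true) before the code below executes.
import datadog_lambda  # noqa: F401

import requests


def handler(event, context):
    # No explicit patch step is needed here anymore.
    return {"status": requests.get("https://example.com").status_code}
```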
```diff
--- datadog_lambda-6.110.0/datadog_lambda/api.py
+++ datadog_lambda-7.112.0/datadog_lambda/api.py
@@ -1,7 +1,7 @@
 import logging
 import os
 
-from datadog_lambda.fips import fips_mode_enabled
+from datadog_lambda.config import config
 
 logger = logging.getLogger(__name__)
 KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName"
@@ -29,7 +29,6 @@ def decrypt_kms_api_key(kms_client, ciphertext):
     is added. We need to try decrypting the API key both with and without the encryption context.
     """
     # Try without encryption context, in case API key was encrypted using the AWS CLI
-    function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
     try:
         plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[
             "Plaintext"
@@ -43,7 +42,7 @@ def decrypt_kms_api_key(kms_client, ciphertext):
         plaintext = kms_client.decrypt(
             CiphertextBlob=decoded_bytes,
             EncryptionContext={
-                KMS_ENCRYPTION_CONTEXT_KEY: function_name,
+                KMS_ENCRYPTION_CONTEXT_KEY: config.function_name,
             },
         )["Plaintext"].decode("utf-8")
@@ -66,7 +65,7 @@ def get_api_key() -> str:
     DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", ""))
 
     LAMBDA_REGION = os.environ.get("AWS_REGION", "")
-    if fips_mode_enabled:
+    if config.fips_mode_enabled:
         logger.debug(
             "FIPS mode is enabled, using FIPS endpoints for secrets management."
         )
@@ -82,7 +81,7 @@ def get_api_key() -> str:
             return ""
         endpoint_url = (
             f"https://secretsmanager-fips.{secrets_region}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         secrets_manager_client = _boto3_client(
@@ -95,7 +94,7 @@ def get_api_key() -> str:
         # SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html
         fips_endpoint = (
             f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint)
@@ -106,7 +105,7 @@ def get_api_key() -> str:
         # KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html
         fips_endpoint = (
             f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
        )
         kms_client = _boto3_client("kms", endpoint_url=fips_endpoint)
@@ -118,7 +117,7 @@ def get_api_key() -> str:
 
 
 def init_api():
-    if not
+    if not config.flush_to_log:
         # Make sure that this package would always be lazy-loaded/outside from the critical path
         # since underlying packages are quite heavy to load
         # and useless with the extension unless sending metrics with timestamps
```
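`decrypt_kms_api_key` keeps its two-step behavior: a bare decrypt first (for keys encrypted with the AWS CLI, which adds no encryption context), then a retry with the `LambdaFunctionName` context, whose value now comes from `config.function_name` rather than a direct environment read. A usage sketch, with a placeholder ciphertext:

```python
# Sketch: decrypting a KMS-encrypted Datadog API key. The helper first tries
# a plain decrypt, then retries with the
# {"LambdaFunctionName": config.function_name} encryption context.
import boto3

from datadog_lambda.api import decrypt_kms_api_key

kms_client = boto3.client("kms")
ciphertext = "AQICAHh..."  # placeholder: base64-encoded KMS ciphertext
api_key = decrypt_kms_api_key(kms_client, ciphertext)
```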
```diff
--- /dev/null
+++ datadog_lambda-7.112.0/datadog_lambda/asm.py
@@ -0,0 +1,184 @@
+from copy import deepcopy
+import logging
+from typing import Any, Dict, List, Optional, Union
+
+from ddtrace.contrib.internal.trace_utils import _get_request_header_client_ip
+from ddtrace.internal import core
+from ddtrace.trace import Span
+
+from datadog_lambda.trigger import (
+    EventSubtypes,
+    EventTypes,
+    _EventSource,
+    _http_event_types,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def _to_single_value_headers(headers: Dict[str, List[str]]) -> Dict[str, str]:
+    """
+    Convert multi-value headers to single-value headers.
+    If a header has multiple values, join them with commas.
+    """
+    single_value_headers = {}
+    for key, values in headers.items():
+        single_value_headers[key] = ", ".join(values)
+    return single_value_headers
+
+
+def _merge_single_and_multi_value_headers(
+    single_value_headers: Dict[str, str],
+    multi_value_headers: Dict[str, List[str]],
+):
+    """
+    Merge single-value headers with multi-value headers.
+    If a header exists in both, we merge them removing duplicates
+    """
+    merged_headers = deepcopy(multi_value_headers)
+    for key, value in single_value_headers.items():
+        if key not in merged_headers:
+            merged_headers[key] = [value]
+        elif value not in merged_headers[key]:
+            merged_headers[key].append(value)
+    return _to_single_value_headers(merged_headers)
+
+
+def asm_set_context(event_source: _EventSource):
+    """Add asm specific items to the ExecutionContext.
+
+    This allows the AppSecSpanProcessor to know information about the event
+    at the moment the span is created and skip it when not relevant.
+    """
+    if event_source.event_type not in _http_event_types:
+        core.set_item("appsec_skip_next_lambda_event", True)
+
+
+def asm_start_request(
+    span: Span,
+    event: Dict[str, Any],
+    event_source: _EventSource,
+    trigger_tags: Dict[str, str],
+):
+    if event_source.event_type not in _http_event_types:
+        return
+
+    request_headers: Dict[str, str] = {}
+    peer_ip: Optional[str] = None
+    request_path_parameters: Optional[Dict[str, Any]] = None
+    route: Optional[str] = None
+
+    if event_source.event_type == EventTypes.ALB:
+        headers = event.get("headers")
+        multi_value_request_headers = event.get("multiValueHeaders")
+        if multi_value_request_headers:
+            request_headers = _to_single_value_headers(multi_value_request_headers)
+        else:
+            request_headers = headers or {}
+
+        raw_uri = event.get("path")
+        parsed_query = event.get("multiValueQueryStringParameters") or event.get(
+            "queryStringParameters"
+        )
+
+    elif event_source.event_type == EventTypes.LAMBDA_FUNCTION_URL:
+        request_headers = event.get("headers", {})
+        peer_ip = event.get("requestContext", {}).get("http", {}).get("sourceIp")
+        raw_uri = event.get("rawPath")
+        parsed_query = event.get("queryStringParameters")
+
+    elif event_source.event_type == EventTypes.API_GATEWAY:
+        request_context = event.get("requestContext", {})
+        request_path_parameters = event.get("pathParameters")
+        route = trigger_tags.get("http.route")
+
+        if event_source.subtype == EventSubtypes.API_GATEWAY:
+            request_headers = event.get("headers", {})
+            peer_ip = request_context.get("identity", {}).get("sourceIp")
+            raw_uri = event.get("path")
+            parsed_query = event.get("multiValueQueryStringParameters")
+
+        elif event_source.subtype == EventSubtypes.HTTP_API:
+            request_headers = event.get("headers", {})
+            peer_ip = request_context.get("http", {}).get("sourceIp")
+            raw_uri = event.get("rawPath")
+            parsed_query = event.get("queryStringParameters")
+
+        elif event_source.subtype == EventSubtypes.WEBSOCKET:
+            request_headers = _to_single_value_headers(
+                event.get("multiValueHeaders", {})
+            )
+            peer_ip = request_context.get("identity", {}).get("sourceIp")
+            raw_uri = event.get("path")
+            parsed_query = event.get("multiValueQueryStringParameters")
+
+        else:
+            return
+
+    else:
+        return
+
+    body = event.get("body")
+    is_base64_encoded = event.get("isBase64Encoded", False)
+
+    request_ip = _get_request_header_client_ip(request_headers, peer_ip, True)
+    if request_ip is not None:
+        span.set_tag_str("http.client_ip", request_ip)
+        span.set_tag_str("network.client.ip", request_ip)
+
+    core.dispatch(
+        # The matching listener is registered in ddtrace.appsec._handlers
+        "aws_lambda.start_request",
+        (
+            span,
+            request_headers,
+            request_ip,
+            body,
+            is_base64_encoded,
+            raw_uri,
+            route,
+            trigger_tags.get("http.method"),
+            parsed_query,
+            request_path_parameters,
+        ),
+    )
+
+
+def asm_start_response(
+    span: Span,
+    status_code: str,
+    event_source: _EventSource,
+    response: Union[Dict[str, Any], str, None],
+):
+    if event_source.event_type not in _http_event_types:
+        return
+
+    if isinstance(response, dict) and (
+        "headers" in response or "multiValueHeaders" in response
+    ):
+        headers = response.get("headers", {})
+        multi_value_request_headers = response.get("multiValueHeaders")
+        if isinstance(multi_value_request_headers, dict) and isinstance(headers, dict):
+            response_headers = _merge_single_and_multi_value_headers(
+                headers, multi_value_request_headers
+            )
+        elif isinstance(headers, dict):
+            response_headers = headers
+        else:
+            response_headers = {
+                "content-type": "application/json",
+            }
+    else:
+        response_headers = {
+            "content-type": "application/json",
+        }
+
+    core.dispatch(
+        # The matching listener is registered in ddtrace.appsec._handlers
+        "aws_lambda.start_response",
+        (
+            span,
+            status_code,
+            response_headers,
+        ),
+    )
```
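The two header helpers at the top of the new module are pure functions, so their behavior is easy to sanity-check. A small sketch (assuming the package and its `ddtrace` dependency are importable):

```python
from datadog_lambda.asm import (
    _merge_single_and_multi_value_headers,
    _to_single_value_headers,
)

# Multi-value headers are joined with ", ".
multi = {"Accept": ["text/html", "application/json"]}
assert _to_single_value_headers(multi) == {"Accept": "text/html, application/json"}

# Merging keeps multi-value entries and appends single values only when new.
single = {"Accept": "text/html", "X-Request-Id": "abc123"}
assert _merge_single_and_multi_value_headers(single, multi) == {
    "Accept": "text/html, application/json",
    "X-Request-Id": "abc123",
}
```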
```diff
--- datadog_lambda-6.110.0/datadog_lambda/cold_start.py
+++ datadog_lambda-7.112.0/datadog_lambda/cold_start.py
@@ -1,8 +1,9 @@
 import time
-import os
 from typing import List, Hashable
 import logging
 
+from datadog_lambda.config import config
+
 logger = logging.getLogger(__name__)
 
 _cold_start = True
@@ -86,14 +87,12 @@ def reset_node_stacks():
 
 def push_node(module_name, file_path):
     node = ImportNode(module_name, file_path, time.time_ns())
-    global import_stack
     if import_stack:
         import_stack[-1].children.append(node)
     import_stack.append(node)
 
 
 def pop_node(module_name):
-    global import_stack
     if not import_stack:
         return
     node = import_stack.pop()
@@ -102,7 +101,6 @@ def pop_node(module_name):
     end_time_ns = time.time_ns()
     node.end_time_ns = end_time_ns
     if not import_stack:  # import_stack empty, a root node has been found
-        global root_nodes
         root_nodes.append(node)
 
 
@@ -147,11 +145,7 @@ def wrap_find_spec(original_find_spec):
 
 
 def initialize_cold_start_tracing():
-    if (
-        is_new_sandbox()
-        and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"
-        and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true"
-    ):
+    if is_new_sandbox() and config.cold_start_tracing:
         from sys import meta_path
 
         for importer in meta_path:
```
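The old two-variable check is replaced by `config.cold_start_tracing`, which is declared with `depends_on_tracing=True`, so disabling tracing still disables cold start tracing. A sketch of that gating (using the `_reset()` helper from the new config module):

```python
import os

from datadog_lambda.config import config

os.environ["DD_TRACE_ENABLED"] = "false"
os.environ["DD_COLD_START_TRACING"] = "true"
config._reset()  # drop cached values so the env vars are re-read

assert config.trace_enabled is False
# Forced off by depends_on_tracing=True, despite DD_COLD_START_TRACING=true.
assert config.cold_start_tracing is False
```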
```diff
--- /dev/null
+++ datadog_lambda-7.112.0/datadog_lambda/config.py
@@ -0,0 +1,146 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+
+import logging
+import os
+
+logger = logging.getLogger(__name__)
+
+
+def _get_env(key, default=None, cast=None, depends_on_tracing=False):
+    @property
+    def _getter(self):
+        if not hasattr(self, prop_key):
+            val = self._resolve_env(key, default, cast, depends_on_tracing)
+            setattr(self, prop_key, val)
+        return getattr(self, prop_key)
+
+    prop_key = f"_config_{key}"
+    return _getter
+
+
+def as_bool(val):
+    return val.lower() == "true" or val == "1"
+
+
+def as_list(val):
+    return [val.strip() for val in val.split(",") if val.strip()]
+
+
+class Config:
+    def _resolve_env(self, key, default=None, cast=None, depends_on_tracing=False):
+        if depends_on_tracing and not self.trace_enabled:
+            return False
+        val = os.environ.get(key, default)
+        if cast is not None:
+            try:
+                val = cast(val)
+            except (ValueError, TypeError):
+                msg = (
+                    "Failed to cast environment variable '%s' with "
+                    "value '%s' to type %s. Using default value '%s'."
+                )
+                logger.warning(msg, key, val, cast.__name__, default)
+                val = default
+        return val
+
+    service = _get_env("DD_SERVICE")
+    env = _get_env("DD_ENV")
+
+    cold_start_tracing = _get_env(
+        "DD_COLD_START_TRACING", "true", as_bool, depends_on_tracing=True
+    )
+    min_cold_start_trace_duration = _get_env("DD_MIN_COLD_START_DURATION", 3, int)
+    cold_start_trace_skip_lib = _get_env(
+        "DD_COLD_START_TRACE_SKIP_LIB",
+        "ddtrace.internal.compat,ddtrace.filters",
+        as_list,
+    )
+
+    capture_payload_max_depth = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH", 10, int)
+    capture_payload_enabled = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD", "false", as_bool)
+
+    trace_enabled = _get_env("DD_TRACE_ENABLED", "true", as_bool)
+    make_inferred_span = _get_env(
+        "DD_TRACE_MANAGED_SERVICES", "true", as_bool, depends_on_tracing=True
+    )
+    encode_authorizer_context = _get_env(
+        "DD_ENCODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
+    )
+    decode_authorizer_context = _get_env(
+        "DD_DECODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
+    )
+    add_span_pointers = _get_env("DD_BOTOCORE_ADD_SPAN_POINTERS", "true", as_bool)
+    trace_extractor = _get_env("DD_TRACE_EXTRACTOR")
+
+    enhanced_metrics_enabled = _get_env("DD_ENHANCED_METRICS", "true", as_bool)
+
+    flush_in_thread = _get_env("DD_FLUSH_IN_THREAD", "false", as_bool)
+    flush_to_log = _get_env("DD_FLUSH_TO_LOG", "false", as_bool)
+    logs_injection = _get_env("DD_LOGS_INJECTION", "true", as_bool)
+    merge_xray_traces = _get_env("DD_MERGE_XRAY_TRACES", "false", as_bool)
+
+    telemetry_enabled = _get_env(
+        "DD_INSTRUMENTATION_TELEMETRY_ENABLED",
+        "false",
+        as_bool,
+        depends_on_tracing=True,
+    )
+    otel_enabled = _get_env("DD_TRACE_OTEL_ENABLED", "false", as_bool)
+    profiling_enabled = _get_env("DD_PROFILING_ENABLED", "false", as_bool)
+    llmobs_enabled = _get_env("DD_LLMOBS_ENABLED", "false", as_bool)
+    exception_replay_enabled = _get_env("DD_EXCEPTION_REPLAY_ENABLED", "false", as_bool)
+    data_streams_enabled = _get_env(
+        "DD_DATA_STREAMS_ENABLED", "false", as_bool, depends_on_tracing=True
+    )
+    appsec_enabled = _get_env("DD_APPSEC_ENABLED", "false", as_bool)
+
+    is_gov_region = _get_env("AWS_REGION", "", lambda x: x.startswith("us-gov-"))
+
+    local_test = _get_env("DD_LOCAL_TEST", "false", as_bool)
+    integration_test = _get_env("DD_INTEGRATION_TEST", "false", as_bool)
+
+    aws_lambda_function_name = _get_env("AWS_LAMBDA_FUNCTION_NAME")
+
+    @property
+    def function_name(self):
+        if not hasattr(self, "_config_function_name"):
+            if self.aws_lambda_function_name is None:
+                self._config_function_name = "function"
+            else:
+                self._config_function_name = self.aws_lambda_function_name
+        return self._config_function_name
+
+    @property
+    def is_lambda_context(self):
+        if not hasattr(self, "_config_is_lambda_context"):
+            self._config_is_lambda_context = bool(self.aws_lambda_function_name)
+        return self._config_is_lambda_context
+
+    @property
+    def fips_mode_enabled(self):
+        if not hasattr(self, "_config_fips_mode_enabled"):
+            self._config_fips_mode_enabled = (
+                os.environ.get(
+                    "DD_LAMBDA_FIPS_MODE",
+                    "true" if self.is_gov_region else "false",
+                ).lower()
+                == "true"
+            )
+        return self._config_fips_mode_enabled
+
+    def _reset(self):
+        for attr in dir(self):
+            if attr.startswith("_config_"):
+                delattr(self, attr)
+
+
+config = Config()
+
+if config.is_gov_region or config.fips_mode_enabled:
+    logger.debug(
+        "Python Lambda Layer FIPS mode is %s.",
+        "enabled" if config.fips_mode_enabled else "not enabled",
+    )
```
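A usage sketch for the lazy properties built by `_get_env`: each property resolves its environment variable on first access, applies the cast (falling back to the default when the cast fails), and caches the result under `_config_<KEY>` until `_reset()` clears it:

```python
import os

from datadog_lambda.config import config

# A failed int() cast logs a warning and falls back to the default of 10.
os.environ["DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH"] = "not-an-int"
config._reset()
assert config.capture_payload_max_depth == 10

# as_list splits on commas, strips whitespace, and drops empty entries.
os.environ["DD_COLD_START_TRACE_SKIP_LIB"] = "foo.bar, baz ,"
config._reset()
assert config.cold_start_trace_skip_lib == ["foo.bar", "baz"]
```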
```diff
--- datadog_lambda-6.110.0/datadog_lambda/metric.py
+++ datadog_lambda-7.112.0/datadog_lambda/metric.py
@@ -5,14 +5,13 @@
 
 import enum
 import logging
-import os
 import time
 from datetime import datetime, timedelta
 
 import ujson as json
 
+from datadog_lambda.config import config
 from datadog_lambda.extension import should_use_extension
-from datadog_lambda.fips import fips_mode_enabled
 from datadog_lambda.tags import dd_lambda_layer_tag, get_enhanced_metrics_tags
 
 logger = logging.getLogger(__name__)
@@ -28,10 +27,10 @@ class MetricsHandler(enum.Enum):
 def _select_metrics_handler():
     if should_use_extension:
         return MetricsHandler.EXTENSION
-    if
+    if config.flush_to_log:
         return MetricsHandler.FORWARDER
 
-    if fips_mode_enabled:
+    if config.fips_mode_enabled:
         logger.debug(
             "With FIPS mode enabled, the Datadog API metrics handler is unavailable."
         )
@@ -58,14 +57,8 @@ elif metrics_handler == MetricsHandler.DATADOG_API:
     from datadog_lambda.api import init_api
     from datadog_lambda.thread_stats_writer import ThreadStatsWriter
 
-    flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true"
     init_api()
-    lambda_stats = ThreadStatsWriter(flush_in_thread)
-
-
-enhanced_metrics_enabled = (
-    os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true"
-)
+    lambda_stats = ThreadStatsWriter(config.flush_in_thread)
 
 
 def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=False):
@@ -191,7 +184,7 @@ def submit_enhanced_metric(metric_name, lambda_context):
         metric_name (str): metric name w/o enhanced prefix i.e. "invocations" or "errors"
         lambda_context (object): Lambda context dict passed to the function by AWS
     """
-    if not enhanced_metrics_enabled:
+    if not config.enhanced_metrics_enabled:
         logger.debug(
             "Not submitting enhanced metric %s because enhanced metrics are disabled",
             metric_name,
```
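`_select_metrics_handler` now checks, in order: the extension, the forwarder (`DD_FLUSH_TO_LOG`), and a FIPS guard before falling back to the Datadog API. A standalone restatement of that precedence (not the module's own code; what the function returns after the FIPS branch is not shown in this hunk, so `"no_metrics"` below is a stand-in):

```python
def select_handler(extension_running, flush_to_log, fips_mode_enabled):
    # Same precedence as _select_metrics_handler above; "no_metrics" stands in
    # for whatever the module returns after logging the FIPS warning.
    if extension_running:
        return "extension"
    if flush_to_log:
        return "forwarder"
    if fips_mode_enabled:
        return "no_metrics"  # the Datadog API handler is unavailable
    return "datadog_api"


assert select_handler(True, True, True) == "extension"
assert select_handler(False, False, True) == "no_metrics"
assert select_handler(False, False, False) == "datadog_api"
```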
```diff
--- datadog_lambda-6.110.0/datadog_lambda/patch.py
+++ datadog_lambda-7.112.0/datadog_lambda/patch.py
@@ -3,7 +3,6 @@
 # This product includes software developed at Datadog (https://www.datadoghq.com/).
 # Copyright 2019 Datadog, Inc.
 
-import os
 import sys
 import logging
 import zlib
@@ -13,10 +12,8 @@ from wrapt import wrap_function_wrapper as wrap
 from wrapt.importer import when_imported
 from ddtrace import patch_all as patch_all_dd
 
-from datadog_lambda.tracing import (
-    get_dd_trace_context,
-    dd_tracing_enabled,
-)
+from datadog_lambda.config import config
+from datadog_lambda.tracing import get_dd_trace_context
 from collections.abc import MutableMapping
 
 logger = logging.getLogger(__name__)
@@ -32,7 +29,7 @@ def patch_all():
     """
     _patch_for_integration_tests()
 
-    if dd_tracing_enabled:
+    if config.trace_enabled:
         patch_all_dd()
     else:
         _patch_http()
@@ -44,8 +41,7 @@ def _patch_for_integration_tests():
     Patch `requests` to log the outgoing requests for integration tests.
     """
     global _integration_tests_patched
-
-    if not _integration_tests_patched and is_in_tests:
+    if not _integration_tests_patched and config.integration_test:
         wrap("requests", "Session.send", _log_request)
         _integration_tests_patched = True
 
```
```diff
--- datadog_lambda-6.110.0/datadog_lambda/span_pointers.py
+++ datadog_lambda-7.112.0/datadog_lambda/span_pointers.py
@@ -1,12 +1,12 @@
 from itertools import chain
 import logging
-import os
 from typing import List
 from typing import Optional
 
 from ddtrace._trace._span_pointer import _SpanPointerDirection
 from ddtrace._trace._span_pointer import _SpanPointerDescription
 
+from datadog_lambda.config import config
 from datadog_lambda.metric import submit_dynamodb_stream_type_metric
 from datadog_lambda.trigger import EventTypes
 
@@ -14,15 +14,10 @@ from datadog_lambda.trigger import EventTypes
 logger = logging.getLogger(__name__)
 
 
-dd_botocore_add_span_pointers = os.environ.get(
-    "DD_BOTOCORE_ADD_SPAN_POINTERS", "true"
-).lower() in ("true", "1")
-
-
 def calculate_span_pointers(
     event_source,
     event,
-    botocore_add_span_pointers=dd_botocore_add_span_pointers,
+    botocore_add_span_pointers=config.add_span_pointers,
 ) -> List[_SpanPointerDescription]:
     try:
         if botocore_add_span_pointers:
```
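The default for `botocore_add_span_pointers` now comes from `config.add_span_pointers` (`DD_BOTOCORE_ADD_SPAN_POINTERS`), but callers can still override it per call. A sketch with placeholder arguments (the disabled fall-through is not shown in this hunk, so the return value is not asserted here):

```python
from datadog_lambda.span_pointers import calculate_span_pointers

pointers = calculate_span_pointers(
    event_source=None,  # placeholder
    event={},  # placeholder
    botocore_add_span_pointers=False,  # force-disable regardless of env
)
```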
```diff
--- datadog_lambda-6.110.0/datadog_lambda/tag_object.py
+++ datadog_lambda-7.112.0/datadog_lambda/tag_object.py
@@ -4,18 +4,17 @@
 # Copyright 2021 Datadog, Inc.
 
 from decimal import Decimal
-import logging
 import ujson as json
 
+from datadog_lambda.config import config
+
 redactable_keys = ["authorization", "x-authorization", "password", "token"]
-max_depth = 10
-logger = logging.getLogger(__name__)
 
 
 def tag_object(span, key, obj, depth=0):
     if obj is None:
         return span.set_tag(key, obj)
-    if depth >= max_depth:
+    if depth >= config.capture_payload_max_depth:
         return span.set_tag(key, _redact_val(key, str(obj)[0:5000]))
     depth += 1
     if _should_try_string(obj):
```