datadog_lambda 6.111.0__tar.gz → 8.113.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/PKG-INFO +2 -2
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/__init__.py +7 -0
- datadog_lambda-8.113.0/datadog_lambda/asm.py +240 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/config.py +1 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/tracing.py +96 -21
- datadog_lambda-8.113.0/datadog_lambda/version.py +1 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/wrapper.py +39 -9
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/pyproject.toml +2 -2
- datadog_lambda-6.111.0/datadog_lambda/dsm.py +0 -38
- datadog_lambda-6.111.0/datadog_lambda/version.py +0 -1
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/LICENSE +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/LICENSE-3rdparty.csv +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/NOTICE +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/README.md +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/api.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/cold_start.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/constants.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/dogstatsd.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/extension.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/handler.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/logger.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/metric.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/module_name.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/patch.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/span_pointers.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/stats_writer.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/statsd_writer.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/tag_object.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/tags.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/thread_stats_writer.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/trigger.py +0 -0
- {datadog_lambda-6.111.0 → datadog_lambda-8.113.0}/datadog_lambda/xray.py +0 -0
--- datadog_lambda-6.111.0/PKG-INFO
+++ datadog_lambda-8.113.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datadog_lambda
-Version: 6.111.0
+Version: 8.113.0
 Summary: The Datadog AWS Lambda Library
 Home-page: https://github.com/DataDog/datadog-lambda-python
 License: Apache-2.0
@@ -19,7 +19,7 @@ Classifier: Programming Language :: Python :: 3.13
 Provides-Extra: dev
 Requires-Dist: botocore (>=1.34.0,<2.0.0) ; extra == "dev"
 Requires-Dist: datadog (>=0.51.0,<1.0.0)
-Requires-Dist: ddtrace (>=
+Requires-Dist: ddtrace (>=3.11.0,<4)
 Requires-Dist: flake8 (>=5.0.4,<6.0.0) ; extra == "dev"
 Requires-Dist: pytest (>=8.0.0,<9.0.0) ; extra == "dev"
 Requires-Dist: pytest-benchmark (>=4.0,<5.0) ; extra == "dev"
--- datadog_lambda-6.111.0/datadog_lambda/__init__.py
+++ datadog_lambda-8.113.0/datadog_lambda/__init__.py
@@ -17,3 +17,10 @@ from datadog_lambda.logger import initialize_logging  # noqa: E402
 
 
 initialize_logging(__name__)
+
+
+from datadog_lambda.patch import patch_all  # noqa: E402
+
+# Patch third-party libraries for tracing, must be done before importing any
+# handler code.
+patch_all()
--- /dev/null
+++ datadog_lambda-8.113.0/datadog_lambda/asm.py
@@ -0,0 +1,240 @@
+import logging
+import urllib.parse
+from copy import deepcopy
+from typing import Any, Dict, List, Optional, Union
+
+from ddtrace.contrib.internal.trace_utils import _get_request_header_client_ip
+from ddtrace.internal import core
+from ddtrace.internal.utils import get_blocked
+from ddtrace.internal.utils import http as http_utils
+from ddtrace.trace import Span
+
+from datadog_lambda.trigger import (
+    EventSubtypes,
+    EventTypes,
+    _EventSource,
+    _http_event_types,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def _to_single_value_headers(headers: Dict[str, List[str]]) -> Dict[str, str]:
+    """
+    Convert multi-value headers to single-value headers.
+    If a header has multiple values, join them with commas.
+    """
+    single_value_headers = {}
+    for key, values in headers.items():
+        single_value_headers[key] = ", ".join(values)
+    return single_value_headers
+
+
+def _merge_single_and_multi_value_headers(
+    single_value_headers: Dict[str, str],
+    multi_value_headers: Dict[str, List[str]],
+):
+    """
+    Merge single-value headers with multi-value headers.
+    If a header exists in both, we merge them removing duplicates.
+    """
+    merged_headers = deepcopy(multi_value_headers)
+    for key, value in single_value_headers.items():
+        if key not in merged_headers:
+            merged_headers[key] = [value]
+        elif value not in merged_headers[key]:
+            merged_headers[key].append(value)
+    return _to_single_value_headers(merged_headers)
+
+
+def asm_set_context(event_source: _EventSource):
+    """Add asm specific items to the ExecutionContext.
+
+    This allows the AppSecSpanProcessor to know information about the event
+    at the moment the span is created and skip it when not relevant.
+    """
+
+    if event_source.event_type not in _http_event_types:
+        core.set_item("appsec_skip_next_lambda_event", True)
+
+
+def asm_start_request(
+    span: Span,
+    event: Dict[str, Any],
+    event_source: _EventSource,
+    trigger_tags: Dict[str, str],
+):
+    if event_source.event_type not in _http_event_types:
+        return
+
+    request_headers: Dict[str, str] = {}
+    peer_ip: Optional[str] = None
+    request_path_parameters: Optional[Dict[str, Any]] = None
+    route: Optional[str] = None
+
+    if event_source.event_type == EventTypes.ALB:
+        headers = event.get("headers")
+        multi_value_request_headers = event.get("multiValueHeaders")
+        if multi_value_request_headers:
+            request_headers = _to_single_value_headers(multi_value_request_headers)
+        else:
+            request_headers = headers or {}
+
+        raw_uri = event.get("path")
+        parsed_query = event.get("multiValueQueryStringParameters") or event.get(
+            "queryStringParameters"
+        )
+
+    elif event_source.event_type == EventTypes.LAMBDA_FUNCTION_URL:
+        request_headers = event.get("headers", {})
+        peer_ip = event.get("requestContext", {}).get("http", {}).get("sourceIp")
+        raw_uri = event.get("rawPath")
+        parsed_query = event.get("queryStringParameters")
+
+    elif event_source.event_type == EventTypes.API_GATEWAY:
+        request_context = event.get("requestContext", {})
+        request_path_parameters = event.get("pathParameters")
+        route = trigger_tags.get("http.route")
+
+        if event_source.subtype == EventSubtypes.API_GATEWAY:
+            request_headers = event.get("headers", {})
+            peer_ip = request_context.get("identity", {}).get("sourceIp")
+            raw_uri = event.get("path")
+            parsed_query = event.get("multiValueQueryStringParameters")
+
+        elif event_source.subtype == EventSubtypes.HTTP_API:
+            request_headers = event.get("headers", {})
+            peer_ip = request_context.get("http", {}).get("sourceIp")
+            raw_uri = event.get("rawPath")
+            parsed_query = event.get("queryStringParameters")
+
+        elif event_source.subtype == EventSubtypes.WEBSOCKET:
+            request_headers = _to_single_value_headers(
+                event.get("multiValueHeaders", {})
+            )
+            peer_ip = request_context.get("identity", {}).get("sourceIp")
+            raw_uri = event.get("path")
+            parsed_query = event.get("multiValueQueryStringParameters")
+
+        else:
+            return
+
+    else:
+        return
+
+    body = event.get("body")
+    is_base64_encoded = event.get("isBase64Encoded", False)
+
+    request_ip = _get_request_header_client_ip(request_headers, peer_ip, True)
+    if request_ip is not None:
+        span.set_tag_str("http.client_ip", request_ip)
+        span.set_tag_str("network.client.ip", request_ip)
+
+    # Encode the parsed query and append it to reconstruct the original raw URI expected by AppSec.
+    if parsed_query:
+        try:
+            encoded_query = urllib.parse.urlencode(parsed_query, doseq=True)
+            raw_uri += "?" + encoded_query  # type: ignore
+        except Exception:
+            pass
+
+    core.dispatch(
+        # The matching listener is registered in ddtrace.appsec._handlers
+        "aws_lambda.start_request",
+        (
+            span,
+            request_headers,
+            request_ip,
+            body,
+            is_base64_encoded,
+            raw_uri,
+            route,
+            trigger_tags.get("http.method"),
+            parsed_query,
+            request_path_parameters,
+        ),
+    )
+
+
+def asm_start_response(
+    span: Span,
+    status_code: str,
+    event_source: _EventSource,
+    response: Union[Dict[str, Any], str, None],
+):
+    if event_source.event_type not in _http_event_types:
+        return
+
+    if isinstance(response, dict) and (
+        "headers" in response or "multiValueHeaders" in response
+    ):
+        headers = response.get("headers", {})
+        multi_value_request_headers = response.get("multiValueHeaders")
+        if isinstance(multi_value_request_headers, dict) and isinstance(headers, dict):
+            response_headers = _merge_single_and_multi_value_headers(
+                headers, multi_value_request_headers
+            )
+        elif isinstance(headers, dict):
+            response_headers = headers
+        else:
+            response_headers = {
+                "content-type": "application/json",
+            }
+    else:
+        response_headers = {
+            "content-type": "application/json",
+        }
+
+    core.dispatch(
+        # The matching listener is registered in ddtrace.appsec._handlers
+        "aws_lambda.start_response",
+        (
+            span,
+            status_code,
+            response_headers,
+        ),
+    )
+
+    if isinstance(response, dict) and "statusCode" in response:
+        body = response.get("body")
+    else:
+        body = response
+
+    core.dispatch(
+        # The matching listener is registered in ddtrace.appsec._handlers
+        "aws_lambda.parse_body",
+        (body,),
+    )
+
+
+def get_asm_blocked_response(
+    event_source: _EventSource,
+) -> Optional[Dict[str, Any]]:
+    """Get the blocked response for the given event source."""
+    if event_source.event_type not in _http_event_types:
+        return None
+
+    blocked = get_blocked()
+    if not blocked:
+        return None
+
+    desired_type = blocked.get("type", "auto")
+    if desired_type == "none":
+        content_type = "text/plain; charset=utf-8"
+        content = ""
+    else:
+        content_type = blocked.get("content-type", "application/json")
+        content = http_utils._get_blocked_template(content_type)
+
+    response_headers = {
+        "content-type": content_type,
+    }
+    if "location" in blocked:
+        response_headers["location"] = blocked["location"]
+
+    return {
+        "statusCode": blocked.get("status_code", 403),
+        "headers": response_headers,
+        "body": content,
+        "isBase64Encoded": False,
+    }
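
A note on the new asm.py: get_asm_blocked_response converts a blocking decision from ddtrace's get_blocked() into a Lambda proxy-style response. A minimal sketch of the dict it produces for a blocked HTTP invocation, assuming the default blocking template (values shown are the fallbacks visible in the code above, not captured output):

    {
        "statusCode": 403,                                # blocked.get("status_code", 403)
        "headers": {"content-type": "application/json"},  # plus "location" for redirect blocks
        "body": "<rendered blocked template>",            # "" when the blocked type is "none"
        "isBase64Encoded": False,
    }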
--- datadog_lambda-6.111.0/datadog_lambda/config.py
+++ datadog_lambda-8.113.0/datadog_lambda/config.py
@@ -95,6 +95,7 @@ class Config:
     data_streams_enabled = _get_env(
         "DD_DATA_STREAMS_ENABLED", "false", as_bool, depends_on_tracing=True
     )
+    appsec_enabled = _get_env("DD_APPSEC_ENABLED", "false", as_bool)
 
     is_gov_region = _get_env("AWS_REGION", "", lambda x: x.startswith("us-gov-"))
 
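
The new appsec_enabled entry follows the same _get_env pattern as the surrounding Config attributes. As an illustration (environment variable names taken from the diff), the AppSec and DSM code paths introduced in this release activate only when the function environment sets:

    DD_APPSEC_ENABLED=true         # gates the asm imports and hooks in wrapper.py
    DD_DATA_STREAMS_ENABLED=true   # gates _dsm_set_checkpoint in tracing.py (depends on tracing)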
--- datadog_lambda-6.111.0/datadog_lambda/tracing.py
+++ datadog_lambda-8.113.0/datadog_lambda/tracing.py
@@ -4,6 +4,7 @@
 # Copyright 2019 Datadog, Inc.
 import logging
 import os
+import re
 import traceback
 import ujson as json
 from datetime import datetime, timezone
@@ -67,6 +68,24 @@ HIGHER_64_BITS = "HIGHER_64_BITS"
 LOWER_64_BITS = "LOWER_64_BITS"
 
 
+def _dsm_set_checkpoint(context_json, event_type, arn):
+    if not config.data_streams_enabled:
+        return
+
+    if not arn:
+        return
+
+    try:
+        from ddtrace.data_streams import set_consume_checkpoint
+
+        carrier_get = lambda k: context_json and context_json.get(k)  # noqa: E731
+        set_consume_checkpoint(event_type, arn, carrier_get, manual_checkpoint=False)
+    except Exception as e:
+        logger.debug(
+            f"DSM:Failed to set consume checkpoint for {event_type} {arn}: {e}"
+        )
+
+
 def _convert_xray_trace_id(xray_trace_id):
     """
     Convert X-Ray trace id (hex)'s last 63 bits to a Datadog trace id (int).
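
A note on the helper added above (an illustration, not part of the diff): _dsm_set_checkpoint wraps the extracted context dict in the carrier-getter callable that ddtrace's set_consume_checkpoint expects, and swallows all errors so checkpointing can never break trace extraction. Roughly, with a hypothetical context dict and key:

    context_json = {"dd-pathway-ctx-base64": "<encoded pathway>"}
    carrier_get = lambda k: context_json and context_json.get(k)
    carrier_get("dd-pathway-ctx-base64")  # -> "<encoded pathway>"
    # With context_json=None the getter returns None for every key, which is why
    # the extractors below can still call _dsm_set_checkpoint(None, event_type, arn)
    # on fallback paths: a checkpoint is set with no inherited pathway context.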
@@ -202,7 +221,9 @@ def create_sns_event(message):
     }
 
 
-def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):
+def extract_context_from_sqs_or_sns_event_or_context(
+    event, lambda_context, event_source
+):
     """
     Extract Datadog trace context from an SQS event.
 
@@ -214,7 +235,10 @@ def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):
     Lambda Context.
 
     Falls back to lambda context if no trace data is found in the SQS message attributes.
+    Set a DSM checkpoint if DSM is enabled and the method for context propagation is supported.
     """
+    source_arn = ""
+    event_type = "sqs" if event_source.equals(EventTypes.SQS) else "sns"
 
     # EventBridge => SQS
     try:
@@ -226,6 +250,7 @@ def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):
 
     try:
         first_record = event.get("Records")[0]
+        source_arn = first_record.get("eventSourceARN", "")
 
         # logic to deal with SNS => SQS event
         if "body" in first_record:
@@ -241,6 +266,9 @@ def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):
         msg_attributes = first_record.get("messageAttributes")
         if msg_attributes is None:
             sns_record = first_record.get("Sns") or {}
+            # SNS->SQS event would extract SNS arn without this check
+            if event_source.equals(EventTypes.SNS):
+                source_arn = sns_record.get("TopicArn", "")
             msg_attributes = sns_record.get("MessageAttributes") or {}
         dd_payload = msg_attributes.get("_datadog")
         if dd_payload:
@@ -272,8 +300,9 @@ def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):
                 logger.debug(
                     "Failed to extract Step Functions context from SQS/SNS event."
                 )
-
-
+            context = propagator.extract(dd_data)
+            _dsm_set_checkpoint(dd_data, event_type, source_arn)
+            return context
         else:
             # Handle case where trace context is injected into attributes.AWSTraceHeader
             # example: Root=1-654321ab-000000001234567890abcdef;Parent=0123456789abcdef;Sampled=1
@@ -296,9 +325,13 @@ def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):
                     span_id=int(x_ray_context["parent_id"], 16),
                     sampling_priority=float(x_ray_context["sampled"]),
                 )
+        # Still want to set a DSM checkpoint even if DSM context not propagated
+        _dsm_set_checkpoint(None, event_type, source_arn)
         return extract_context_from_lambda_context(lambda_context)
     except Exception as e:
         logger.debug("The trace extractor returned with error %s", e)
+        # Still want to set a DSM checkpoint even if DSM context not propagated
+        _dsm_set_checkpoint(None, event_type, source_arn)
         return extract_context_from_lambda_context(lambda_context)
 
 
@@ -357,9 +390,12 @@ def extract_context_from_eventbridge_event(event, lambda_context):
 def extract_context_from_kinesis_event(event, lambda_context):
     """
     Extract datadog trace context from a Kinesis Stream's base64 encoded data string
+    Set a DSM checkpoint if DSM is enabled and the method for context propagation is supported.
     """
+    source_arn = ""
     try:
         record = get_first_record(event)
+        source_arn = record.get("eventSourceARN", "")
         kinesis = record.get("kinesis")
         if not kinesis:
             return extract_context_from_lambda_context(lambda_context)
@@ -373,10 +409,13 @@ def extract_context_from_kinesis_event(event, lambda_context):
         data_obj = json.loads(data_str)
         dd_ctx = data_obj.get("_datadog")
         if dd_ctx:
-
+            context = propagator.extract(dd_ctx)
+            _dsm_set_checkpoint(dd_ctx, "kinesis", source_arn)
+            return context
     except Exception as e:
         logger.debug("The trace extractor returned with error %s", e)
-
+        # Still want to set a DSM checkpoint even if DSM context not propagated
+        _dsm_set_checkpoint(None, "kinesis", source_arn)
     return extract_context_from_lambda_context(lambda_context)
 
 
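
For context on the Kinesis path (an illustrative payload, not taken from the diff): the extractor base64-decodes kinesis.data, parses it as JSON, and looks for a _datadog key; that same dict now also seeds the DSM checkpoint. A record written by an instrumented producer might decode to something like:

    {
        "_datadog": {
            "x-datadog-trace-id": "5678910",
            "x-datadog-parent-id": "1112131415",
            "x-datadog-sampling-priority": "1",
        },
        "payload": {...}
    }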
@@ -594,7 +633,7 @@ def extract_dd_trace_context(
         )
     elif event_source.equals(EventTypes.SNS) or event_source.equals(EventTypes.SQS):
         context = extract_context_from_sqs_or_sns_event_or_context(
-            event, lambda_context
+            event, lambda_context, event_source
         )
     elif event_source.equals(EventTypes.EVENTBRIDGE):
         context = extract_context_from_eventbridge_event(event, lambda_context)
@@ -818,15 +857,31 @@ def create_service_mapping(val):
     return new_service_mapping
 
 
-def determine_service_name(
-
-
-
-
+def determine_service_name(
+    service_mapping, specific_key, generic_key, extracted_key, fallback=None
+):
+    # Check for mapped service (specific key first, then generic key)
+    mapped_service = service_mapping.get(specific_key) or service_mapping.get(
+        generic_key
+    )
+    if mapped_service:
+        return mapped_service
+
+    # Check if AWS service representation is disabled
+    aws_service_representation = os.environ.get(
+        "DD_TRACE_AWS_SERVICE_REPRESENTATION_ENABLED", ""
+    ).lower()
+    if aws_service_representation in ("false", "0"):
+        return fallback
+
+    # Use extracted_key if it exists and is not empty, otherwise use fallback
+    return (
+        extracted_key.strip() if extracted_key and extracted_key.strip() else fallback
+    )
 
 
 # Initialization code
-service_mapping_str = os.
+service_mapping_str = os.environ.get("DD_SERVICE_MAPPING", "")
 service_mapping = create_service_mapping(service_mapping_str)
 
 _dd_origin = {"_dd.origin": "lambda"}
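
To make the new resolution order concrete, a sketch (mapping values are invented; the behavior mirrors determine_service_name as rewritten above):

    service_mapping = {"my-queue": "orders-service"}

    # 1. A mapped service wins, specific key before generic key.
    determine_service_name(service_mapping, "my-queue", "lambda_sqs", "my-queue", "sqs")
    # -> "orders-service"

    # 2. With no mapping, the extracted name (here the queue name) is used...
    determine_service_name({}, "my-queue", "lambda_sqs", "my-queue", "sqs")
    # -> "my-queue"

    # 3. ...unless DD_TRACE_AWS_SERVICE_REPRESENTATION_ENABLED is "false" or "0",
    #    in which case the fallback ("sqs") is returned.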
@@ -859,7 +914,7 @@ def create_inferred_span_from_lambda_function_url_event(event, context):
     InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
     if span:
         span.set_tags(tags)
-        span.start_ns = int(request_time_epoch
+        span.start_ns = int(request_time_epoch * 1e6)
     return span
 
 
@@ -950,6 +1005,7 @@ def create_inferred_span_from_api_gateway_websocket_event(
         "http.url": http_url,
         "endpoint": endpoint,
         "resource_names": endpoint,
+        "span.kind": "server",
         "apiid": api_id,
         "apiname": api_id,
         "stage": request_context.get("stage"),
@@ -1008,6 +1064,7 @@ def create_inferred_span_from_api_gateway_event(
         "endpoint": path,
         "http.method": method,
         "resource_names": resource,
+        "span.kind": "server",
         "apiid": api_id,
         "apiname": api_id,
         "stage": request_context.get("stage"),
@@ -1112,12 +1169,13 @@ def create_inferred_span_from_sqs_event(event, context):
     event_source_arn = event_record.get("eventSourceARN")
     queue_name = event_source_arn.split(":")[-1]
     service_name = determine_service_name(
-        service_mapping, queue_name, "lambda_sqs", "sqs"
+        service_mapping, queue_name, "lambda_sqs", queue_name, "sqs"
     )
     attrs = event_record.get("attributes") or {}
     tags = {
         "operation_name": "aws.sqs",
         "resource_names": queue_name,
+        "span.kind": "server",
         "queuename": queue_name,
         "event_source_arn": event_source_arn,
         "receipt_handle": event_record.get("receiptHandle"),
@@ -1179,11 +1237,12 @@ def create_inferred_span_from_sns_event(event, context):
     topic_arn = sns_message.get("TopicArn")
     topic_name = topic_arn.split(":")[-1]
     service_name = determine_service_name(
-        service_mapping, topic_name, "lambda_sns", "sns"
+        service_mapping, topic_name, "lambda_sns", topic_name, "sns"
     )
     tags = {
         "operation_name": "aws.sns",
         "resource_names": topic_name,
+        "span.kind": "server",
         "topicname": topic_name,
         "topic_arn": topic_arn,
         "message_id": sns_message.get("MessageId"),
@@ -1214,15 +1273,16 @@ def create_inferred_span_from_kinesis_event(event, context):
     event_record = get_first_record(event)
     event_source_arn = event_record.get("eventSourceARN")
     event_id = event_record.get("eventID")
-    stream_name = event_source_arn.split(":")[-1]
+    stream_name = re.sub(r"^stream/", "", (event_source_arn or "").split(":")[-1])
     shard_id = event_id.split(":")[0]
     service_name = determine_service_name(
-        service_mapping, stream_name, "lambda_kinesis", "kinesis"
+        service_mapping, stream_name, "lambda_kinesis", stream_name, "kinesis"
    )
     kinesis = event_record.get("kinesis") or {}
     tags = {
         "operation_name": "aws.kinesis",
         "resource_names": stream_name,
+        "span.kind": "server",
         "streamname": stream_name,
         "shardid": shard_id,
         "event_source_arn": event_source_arn,
@@ -1249,12 +1309,13 @@ def create_inferred_span_from_dynamodb_event(event, context):
     event_source_arn = event_record.get("eventSourceARN")
     table_name = event_source_arn.split("/")[1]
     service_name = determine_service_name(
-        service_mapping, table_name, "lambda_dynamodb", "dynamodb"
+        service_mapping, table_name, "lambda_dynamodb", table_name, "dynamodb"
     )
     dynamodb_message = event_record.get("dynamodb") or {}
     tags = {
         "operation_name": "aws.dynamodb",
         "resource_names": table_name,
+        "span.kind": "server",
         "tablename": table_name,
         "event_source_arn": event_source_arn,
         "event_id": event_record.get("eventID"),
@@ -1283,11 +1344,12 @@ def create_inferred_span_from_s3_event(event, context):
     obj = s3.get("object") or {}
     bucket_name = bucket.get("name")
     service_name = determine_service_name(
-        service_mapping, bucket_name, "lambda_s3", "s3"
+        service_mapping, bucket_name, "lambda_s3", bucket_name, "s3"
     )
     tags = {
         "operation_name": "aws.s3",
         "resource_names": bucket_name,
+        "span.kind": "server",
         "event_name": event_record.get("eventName"),
         "bucketname": bucket_name,
         "bucket_arn": bucket.get("arn"),
@@ -1313,11 +1375,12 @@ def create_inferred_span_from_s3_event(event, context):
 def create_inferred_span_from_eventbridge_event(event, context):
     source = event.get("source")
     service_name = determine_service_name(
-        service_mapping, source, "lambda_eventbridge", "eventbridge"
+        service_mapping, source, "lambda_eventbridge", source, "eventbridge"
     )
     tags = {
         "operation_name": "aws.eventbridge",
         "resource_names": source,
+        "span.kind": "server",
         "detail_type": event.get("detail-type"),
     }
     InferredSpanInfo.set_tags(
@@ -1391,9 +1454,21 @@ def create_function_execution_span(
         tags["_dd.parent_source"] = trace_context_source
     tags.update(trigger_tags)
     tracer.set_tags(_dd_origin)
+    # Determine service name based on config and env var
+    if config.service:
+        service_name = config.service
+    else:
+        aws_service_representation = os.environ.get(
+            "DD_TRACE_AWS_SERVICE_REPRESENTATION_ENABLED", ""
+        ).lower()
+        if aws_service_representation in ("false", "0"):
+            service_name = "aws.lambda"
+        else:
+            service_name = function_name if function_name else "aws.lambda"
+
     span = tracer.trace(
         "aws.lambda",
-        service=
+        service=service_name,
         resource=function_name,
         span_type="serverless",
     )
--- /dev/null
+++ datadog_lambda-8.113.0/datadog_lambda/version.py
@@ -0,0 +1 @@
+__version__ = "8.113.0"
--- datadog_lambda-6.111.0/datadog_lambda/wrapper.py
+++ datadog_lambda-8.113.0/datadog_lambda/wrapper.py
@@ -9,7 +9,7 @@ import ujson as json
 from importlib import import_module
 from time import time_ns
 
-from
+from ddtrace.internal._exceptions import BlockingException
 from datadog_lambda.extension import should_use_extension, flush_extension
 from datadog_lambda.cold_start import (
     set_cold_start,
@@ -25,7 +25,6 @@ from datadog_lambda.constants import (
     Headers,
 )
 from datadog_lambda.module_name import modify_module_name
-from datadog_lambda.patch import patch_all
 from datadog_lambda.span_pointers import calculate_span_pointers
 from datadog_lambda.tag_object import tag_object
 from datadog_lambda.tracing import (
@@ -47,6 +46,14 @@ from datadog_lambda.trigger import (
     extract_http_status_code_tag,
 )
 
+if config.appsec_enabled:
+    from datadog_lambda.asm import (
+        asm_set_context,
+        asm_start_response,
+        asm_start_request,
+        get_asm_blocked_response,
+    )
+
 if config.profiling_enabled:
     from ddtrace.profiling import profiler
 
@@ -121,6 +128,7 @@ class _LambdaDecorator(object):
         self.span = None
         self.inferred_span = None
         self.response = None
+        self.blocking_response = None
 
         if config.profiling_enabled:
             self.prof = profiler.Profiler(env=config.env, service=config.service)
@@ -142,8 +150,6 @@ class _LambdaDecorator(object):
             os.environ[DD_REQUESTS_SERVICE_NAME] = os.environ.get(
                 DD_SERVICE, "aws.lambda"
             )
-        # Patch third-party libraries for tracing
-        patch_all()
 
         # Enable LLM Observability
         if config.llmobs_enabled:
@@ -162,19 +168,24 @@ class _LambdaDecorator(object):
         """Executes when the wrapped function gets called"""
         self._before(event, context)
         try:
+            if self.blocking_response:
+                return self.blocking_response
             self.response = self.func(event, context, **kwargs)
             return self.response
+        except BlockingException:
+            self.blocking_response = get_asm_blocked_response(self.event_source)
         except Exception:
-
-            from datadog_lambda.metric import submit_errors_metric
+            from datadog_lambda.metric import submit_errors_metric
 
-
+            submit_errors_metric(context)
 
             if self.span:
                 self.span.set_traceback()
             raise
         finally:
             self._after(event, context)
+            if self.blocking_response:
+                return self.blocking_response
 
     def _inject_authorizer_span_headers(self, request_id):
         reference_span = self.inferred_span if self.inferred_span else self.span
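
Taken together, the wrapper changes give __call__ three blocking checkpoints (a sketch of the control flow, not code from the diff):

    @datadog_lambda_wrapper
    def handler(event, context):
        ...

    # 1. _before: asm_start_request may block before the handler runs;
    #    __call__ then returns self.blocking_response without invoking handler.
    # 2. During the handler: ddtrace raises BlockingException, which __call__
    #    converts into a blocked response via get_asm_blocked_response.
    # 3. _after: asm_start_response inspects the actual response; if AppSec
    #    blocks there, the finally clause swaps in the blocking response.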
@@ -207,6 +218,7 @@ class _LambdaDecorator(object):
     def _before(self, event, context):
         try:
             self.response = None
+            self.blocking_response = None
             set_cold_start(init_timestamp_ns)
 
             if not should_use_extension:
@@ -240,8 +252,10 @@ class _LambdaDecorator(object):
                 self.inferred_span = create_inferred_span(
                     event, context, event_source, config.decode_authorizer_context
                 )
-
-
+
+                if config.appsec_enabled:
+                    asm_set_context(event_source)
+
                 self.span = create_function_execution_span(
                     context=context,
                     function_name=config.function_name,
@@ -253,6 +267,9 @@ class _LambdaDecorator(object):
                     parent_span=self.inferred_span,
                     span_pointers=calculate_span_pointers(event_source, event),
                 )
+                if config.appsec_enabled:
+                    asm_start_request(self.span, event, event_source, self.trigger_tags)
+                    self.blocking_response = get_asm_blocked_response(self.event_source)
             else:
                 set_correlation_ids()
             if config.profiling_enabled and is_new_sandbox():
@@ -285,12 +302,25 @@ class _LambdaDecorator(object):
 
             if status_code:
                 self.span.set_tag("http.status_code", status_code)
+
+                if config.appsec_enabled and not self.blocking_response:
+                    asm_start_response(
+                        self.span,
+                        status_code,
+                        self.event_source,
+                        response=self.response,
+                    )
+                    self.blocking_response = get_asm_blocked_response(self.event_source)
+
             self.span.finish()
 
         if self.inferred_span:
             if status_code:
                 self.inferred_span.set_tag("http.status_code", status_code)
 
+            if self.trigger_tags and (route := self.trigger_tags.get("http.route")):
+                self.inferred_span.set_tag("http.route", route)
+
             if config.service:
                 self.inferred_span.set_tag("peer.service", config.service)
--- datadog_lambda-6.111.0/pyproject.toml
+++ datadog_lambda-8.113.0/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "datadog_lambda"
-version = "6.111.0"
+version = "8.113.0"
 description = "The Datadog AWS Lambda Library"
 authors = ["Datadog, Inc. <dev@datadoghq.com>"]
 license = "Apache-2.0"
@@ -28,7 +28,7 @@ classifiers = [
 python = ">=3.8.0,<4"
 datadog = ">=0.51.0,<1.0.0"
 wrapt = "^1.11.2"
-ddtrace = ">=
+ddtrace = ">=3.11.0,<4"
 ujson = ">=5.9.0"
 botocore = { version = "^1.34.0", optional = true }
 requests = { version ="^2.22.0", optional = true }
--- datadog_lambda-6.111.0/datadog_lambda/dsm.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from datadog_lambda import logger
-from datadog_lambda.trigger import EventTypes
-
-
-def set_dsm_context(event, event_source):
-
-    if event_source.equals(EventTypes.SQS):
-        _dsm_set_sqs_context(event)
-
-
-def _dsm_set_sqs_context(event):
-    from datadog_lambda.wrapper import format_err_with_traceback
-    from ddtrace.internal.datastreams import data_streams_processor
-    from ddtrace.internal.datastreams.processor import DsmPathwayCodec
-    from ddtrace.internal.datastreams.botocore import (
-        get_datastreams_context,
-        calculate_sqs_payload_size,
-    )
-
-    records = event.get("Records")
-    if records is None:
-        return
-    processor = data_streams_processor()
-
-    for record in records:
-        try:
-            queue_arn = record.get("eventSourceARN", "")
-
-            contextjson = get_datastreams_context(record)
-            payload_size = calculate_sqs_payload_size(record)
-
-            ctx = DsmPathwayCodec.decode(contextjson, processor)
-            ctx.set_checkpoint(
-                ["direction:in", f"topic:{queue_arn}", "type:sqs"],
-                payload_size=payload_size,
-            )
-        except Exception as e:
-            logger.error(format_err_with_traceback(e))
--- datadog_lambda-6.111.0/datadog_lambda/version.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "6.111.0"