datadog_lambda 5.91.0__py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their public registries.
datadog_lambda/wrapper.py ADDED
@@ -0,0 +1,395 @@
+ # Unless explicitly stated otherwise all files in this repository are licensed
+ # under the Apache License Version 2.0.
+ # This product includes software developed at Datadog (https://www.datadoghq.com/).
+ # Copyright 2019 Datadog, Inc.
+ import base64
+ import os
+ import logging
+ import traceback
+ from importlib import import_module
+ import json
+ from time import time_ns
+
+ from datadog_lambda.extension import should_use_extension, flush_extension
+ from datadog_lambda.cold_start import (
+     set_cold_start,
+     is_cold_start,
+     is_proactive_init,
+     is_new_sandbox,
+     ColdStartTracer,
+ )
+ from datadog_lambda.constants import (
+     TraceContextSource,
+     XraySubsegment,
+     Headers,
+     TraceHeader,
+ )
+ from datadog_lambda.metric import (
+     flush_stats,
+     submit_invocations_metric,
+     submit_errors_metric,
+ )
+ from datadog_lambda.module_name import modify_module_name
+ from datadog_lambda.patch import patch_all
+ from datadog_lambda.tracing import (
+     extract_dd_trace_context,
+     create_dd_dummy_metadata_subsegment,
+     inject_correlation_ids,
+     dd_tracing_enabled,
+     mark_trace_as_error_for_5xx_responses,
+     set_correlation_ids,
+     set_dd_trace_py_root,
+     create_function_execution_span,
+     create_inferred_span,
+     InferredSpanInfo,
+     is_authorizer_response,
+     tracer,
+ )
+ from datadog_lambda.trigger import (
+     extract_trigger_tags,
+     extract_http_status_code_tag,
+ )
+
+ profiling_env_var = os.environ.get("DD_PROFILING_ENABLED", "false").lower() == "true"
+ if profiling_env_var:
+     from ddtrace.profiling import profiler
+
+ logger = logging.getLogger(__name__)
+
+ DD_FLUSH_TO_LOG = "DD_FLUSH_TO_LOG"
+ DD_LOGS_INJECTION = "DD_LOGS_INJECTION"
+ DD_MERGE_XRAY_TRACES = "DD_MERGE_XRAY_TRACES"
+ AWS_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"
+ DD_LOCAL_TEST = "DD_LOCAL_TEST"
+ DD_TRACE_EXTRACTOR = "DD_TRACE_EXTRACTOR"
+ DD_TRACE_MANAGED_SERVICES = "DD_TRACE_MANAGED_SERVICES"
+ DD_ENCODE_AUTHORIZER_CONTEXT = "DD_ENCODE_AUTHORIZER_CONTEXT"
+ DD_DECODE_AUTHORIZER_CONTEXT = "DD_DECODE_AUTHORIZER_CONTEXT"
+ DD_COLD_START_TRACING = "DD_COLD_START_TRACING"
+ DD_MIN_COLD_START_DURATION = "DD_MIN_COLD_START_DURATION"
+ DD_COLD_START_TRACE_SKIP_LIB = "DD_COLD_START_TRACE_SKIP_LIB"
+ DD_CAPTURE_LAMBDA_PAYLOAD = "DD_CAPTURE_LAMBDA_PAYLOAD"
+ DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH = "DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH"
+ DD_REQUESTS_SERVICE_NAME = "DD_REQUESTS_SERVICE_NAME"
+ DD_SERVICE = "DD_SERVICE"
+ DD_ENV = "DD_ENV"
+
+
+ def get_env_as_int(env_key, default_value: int) -> int:
+     try:
+         return int(os.environ.get(env_key, default_value))
+     except Exception as e:
+         logger.warning(
+             f"Failed to parse {env_key} as int. Using default value: {default_value}. Error: {e}"
+         )
+         return default_value
+
+
+ dd_capture_lambda_payload_enabled = (
+     os.environ.get(DD_CAPTURE_LAMBDA_PAYLOAD, "false").lower() == "true"
+ )
+
+ if dd_capture_lambda_payload_enabled:
+     import datadog_lambda.tag_object as tag_object
+
+     tag_object.max_depth = get_env_as_int(
+         DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH, tag_object.max_depth
+     )
+
+ env_env_var = os.environ.get(DD_ENV, None)
+
+ init_timestamp_ns = time_ns()
+
+ """
+ Usage:
+
+ import requests
+ from datadog_lambda.wrapper import datadog_lambda_wrapper
+ from datadog_lambda.metric import lambda_metric
+
+ @datadog_lambda_wrapper
+ def my_lambda_handler(event, context):
+     lambda_metric("my_metric", 10)
+     requests.get("https://www.datadoghq.com")
+ """
+
+
+ class _NoopDecorator(object):
+     def __init__(self, func):
+         self.func = func
+
+     def __call__(self, *args, **kwargs):
+         return self.func(*args, **kwargs)
+
+
+ class _LambdaDecorator(object):
+     """
+     Decorator to automatically initialize the Datadog API client, flush metrics,
+     and extract/inject trace context.
+     """
+
+     _force_wrap = False
+
+     def __new__(cls, func):
+         """
+         If the decorator is accidentally applied to the same function multiple times,
+         wrap only once.
+
+         If _force_wrap, always return a real decorator, useful for unit tests.
+         """
+         try:
+             if cls._force_wrap or not isinstance(func, _LambdaDecorator):
+                 wrapped = super(_LambdaDecorator, cls).__new__(cls)
+                 logger.debug("datadog_lambda_wrapper wrapped")
+                 return wrapped
+             else:
+                 logger.debug("datadog_lambda_wrapper already wrapped")
+                 return _NoopDecorator(func)
+         except Exception as e:
+             logger.error(format_err_with_traceback(e))
+             return func
+
+     def __init__(self, func):
+         """Executes when the wrapped function gets wrapped"""
+         try:
+             self.func = func
+             self.flush_to_log = os.environ.get(DD_FLUSH_TO_LOG, "").lower() == "true"
+             self.logs_injection = (
+                 os.environ.get(DD_LOGS_INJECTION, "true").lower() == "true"
+             )
+             self.merge_xray_traces = (
+                 os.environ.get(DD_MERGE_XRAY_TRACES, "false").lower() == "true"
+             )
+             self.function_name = os.environ.get(AWS_LAMBDA_FUNCTION_NAME, "function")
+             self.service = os.environ.get(DD_SERVICE, None)
+             self.extractor_env = os.environ.get(DD_TRACE_EXTRACTOR, None)
+             self.trace_extractor = None
+             self.span = None
+             self.inferred_span = None
+             depends_on_dd_tracing_enabled = (
+                 lambda original_boolean: dd_tracing_enabled and original_boolean
+             )
+             self.make_inferred_span = depends_on_dd_tracing_enabled(
+                 os.environ.get(DD_TRACE_MANAGED_SERVICES, "true").lower() == "true"
+             )
+             self.encode_authorizer_context = depends_on_dd_tracing_enabled(
+                 os.environ.get(DD_ENCODE_AUTHORIZER_CONTEXT, "true").lower() == "true"
+             )
+             self.decode_authorizer_context = depends_on_dd_tracing_enabled(
+                 os.environ.get(DD_DECODE_AUTHORIZER_CONTEXT, "true").lower() == "true"
+             )
+             self.cold_start_tracing = depends_on_dd_tracing_enabled(
+                 os.environ.get(DD_COLD_START_TRACING, "true").lower() == "true"
+             )
+             self.min_cold_start_trace_duration = get_env_as_int(
+                 DD_MIN_COLD_START_DURATION, 3
+             )
+             self.local_testing_mode = os.environ.get(
+                 DD_LOCAL_TEST, "false"
+             ).lower() in ("true", "1")
+             self.cold_start_trace_skip_lib = [
+                 "ddtrace.internal.compat",
+                 "ddtrace.filters",
+             ]
+             if DD_COLD_START_TRACE_SKIP_LIB in os.environ:
+                 try:
+                     self.cold_start_trace_skip_lib = os.environ[
+                         DD_COLD_START_TRACE_SKIP_LIB
+                     ].split(",")
+                 except Exception:
+                     logger.debug(f"Malformed value for env var {DD_COLD_START_TRACE_SKIP_LIB}")
+             self.response = None
+             if profiling_env_var:
+                 self.prof = profiler.Profiler(env=env_env_var, service=self.service)
+             if self.extractor_env:
+                 extractor_parts = self.extractor_env.rsplit(".", 1)
+                 if len(extractor_parts) == 2:
+                     (mod_name, extractor_name) = extractor_parts
+                     modified_extractor_name = modify_module_name(mod_name)
+                     extractor_module = import_module(modified_extractor_name)
+                     self.trace_extractor = getattr(extractor_module, extractor_name)
+
+             # Inject trace correlation ids to logs
+             if self.logs_injection:
+                 inject_correlation_ids()
+
+             # This prevents a breaking change in ddtrace v0.49 regarding the service name
+             # in requests-related spans
+             os.environ[DD_REQUESTS_SERVICE_NAME] = os.environ.get(
+                 DD_SERVICE, "aws.lambda"
+             )
+             # Patch third-party libraries for tracing
+             patch_all()
+
+             logger.debug("datadog_lambda_wrapper initialized")
+         except Exception as e:
+             logger.error(format_err_with_traceback(e))
+
+     def __call__(self, event, context, **kwargs):
+         """Executes when the wrapped function gets called"""
+         self._before(event, context)
+         try:
+             self.response = self.func(event, context, **kwargs)
+             return self.response
+         except Exception:
+             submit_errors_metric(context)
+             if self.span:
+                 self.span.set_traceback()
+             raise
+         finally:
+             self._after(event, context)
+
+     def _inject_authorizer_span_headers(self, request_id):
+         reference_span = self.inferred_span if self.inferred_span else self.span
+         assert reference_span.finished
+         # finish_time_ns should be set to the end of the inferred span if it exists,
+         # or to the end of the current span
+         finish_time_ns = (
+             reference_span.start_ns + reference_span.duration_ns
+             if reference_span is not None
+             and hasattr(reference_span, "start_ns")
+             and hasattr(reference_span, "duration_ns")
+             else time_ns()
+         )
+         injected_headers = {}
+         source_span = self.inferred_span if self.inferred_span else self.span
+         span_context = source_span.context
+         injected_headers[TraceHeader.TRACE_ID] = str(span_context.trace_id)
+         injected_headers[TraceHeader.PARENT_ID] = str(span_context.span_id)
+         sampling_priority = span_context.sampling_priority
+         if sampling_priority is not None:
+             injected_headers[TraceHeader.SAMPLING_PRIORITY] = str(
+                 span_context.sampling_priority
+             )
+         injected_headers[Headers.Parent_Span_Finish_Time] = finish_time_ns
+         if request_id is not None:
+             injected_headers[Headers.Authorizing_Request_Id] = request_id
+         datadog_data = base64.b64encode(json.dumps(injected_headers).encode()).decode()
+         self.response.setdefault("context", {})
+         self.response["context"]["_datadog"] = datadog_data
+
+     def _before(self, event, context):
+         try:
+             self.response = None
+             set_cold_start(init_timestamp_ns)
+             submit_invocations_metric(context)
+             self.trigger_tags = extract_trigger_tags(event, context)
+             # Extract Datadog trace context and source from incoming requests
+             dd_context, trace_context_source, event_source = extract_dd_trace_context(
+                 event,
+                 context,
+                 extractor=self.trace_extractor,
+                 decode_authorizer_context=self.decode_authorizer_context,
+             )
+             self.event_source = event_source
+             # Create a Datadog X-Ray subsegment with the trace context
+             if dd_context and trace_context_source == TraceContextSource.EVENT:
+                 create_dd_dummy_metadata_subsegment(
+                     {
+                         "trace-id": str(dd_context.trace_id),
+                         "parent-id": str(dd_context.span_id),
+                         "sampling-priority": str(dd_context.sampling_priority),
+                     },
+                     XraySubsegment.TRACE_KEY,
+                 )
+
+             if dd_tracing_enabled:
+                 set_dd_trace_py_root(trace_context_source, self.merge_xray_traces)
+                 if self.make_inferred_span:
+                     self.inferred_span = create_inferred_span(
+                         event, context, event_source, self.decode_authorizer_context
+                     )
+                 self.span = create_function_execution_span(
+                     context,
+                     self.function_name,
+                     is_cold_start(),
+                     is_proactive_init(),
+                     trace_context_source,
+                     self.merge_xray_traces,
+                     self.trigger_tags,
+                     parent_span=self.inferred_span,
+                 )
+             else:
+                 set_correlation_ids()
+             if profiling_env_var and is_new_sandbox():
+                 self.prof.start(stop_on_exit=False, profile_children=True)
+             logger.debug("datadog_lambda_wrapper _before() done")
+         except Exception as e:
+             logger.error(format_err_with_traceback(e))
+
+     def _after(self, event, context):
+         try:
+             status_code = extract_http_status_code_tag(self.trigger_tags, self.response)
+             if status_code:
+                 self.trigger_tags["http.status_code"] = status_code
+                 mark_trace_as_error_for_5xx_responses(context, status_code, self.span)
+
+             # Create a new dummy Datadog subsegment for function trigger tags so we
+             # can attach them to X-Ray spans when hybrid tracing is used
+             if self.trigger_tags:
+                 create_dd_dummy_metadata_subsegment(
+                     self.trigger_tags, XraySubsegment.LAMBDA_FUNCTION_TAGS_KEY
+                 )
+             should_trace_cold_start = self.cold_start_tracing and is_new_sandbox()
+             if should_trace_cold_start:
+                 trace_ctx = tracer.current_trace_context()
+
+             if self.span:
+                 if dd_capture_lambda_payload_enabled:
+                     tag_object.tag_object(self.span, "function.request", event)
+                     tag_object.tag_object(self.span, "function.response", self.response)
+
+                 if status_code:
+                     self.span.set_tag("http.status_code", status_code)
+                 self.span.finish()
+
+             if self.inferred_span:
+                 if status_code:
+                     self.inferred_span.set_tag("http.status_code", status_code)
+
+                 if self.service:
+                     self.inferred_span.set_tag("peer.service", self.service)
+
+                 if InferredSpanInfo.is_async(self.inferred_span) and self.span:
+                     self.inferred_span.finish(finish_time=self.span.start)
+                 else:
+                     self.inferred_span.finish()
+
+             if should_trace_cold_start:
+                 try:
+                     following_span = self.span or self.inferred_span
+                     ColdStartTracer(
+                         tracer,
+                         self.function_name,
+                         following_span.start_ns,
+                         trace_ctx,
+                         self.min_cold_start_trace_duration,
+                         self.cold_start_trace_skip_lib,
+                     ).trace()
+                 except Exception as e:
+                     logger.debug("Failed to create cold start spans. %s", e)
+
+             if not self.flush_to_log or should_use_extension:
+                 flush_stats()
+             if should_use_extension and self.local_testing_mode:
+                 # when testing locally, the extension does not know when an
+                 # invocation completes because it does not have access to the
+                 # logs api
+                 flush_extension()
+
+             if self.encode_authorizer_context and is_authorizer_response(self.response):
+                 self._inject_authorizer_span_headers(
+                     event.get("requestContext", {}).get("requestId")
+                 )
+             logger.debug("datadog_lambda_wrapper _after() done")
+         except Exception as e:
+             logger.error(format_err_with_traceback(e))
+
+
+ def format_err_with_traceback(e):
+     return "Error {}. Traceback: {}".format(
+         e, traceback.format_exc().replace("\n", "\r")
+     )
+
+
+ datadog_lambda_wrapper = _LambdaDecorator
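
For reference, here is a minimal usage sketch of the wrapper defined above, based on the module's usage docstring and the environment variables read in _LambdaDecorator.__init__. The handler name, metric name, and return value are illustrative assumptions, not part of the package.

# Illustrative sketch only (not part of the package).
# DD_FLUSH_TO_LOG, DD_MERGE_XRAY_TRACES, etc. are read once at wrap time,
# so they are assumed to be set on the Lambda function's environment.
from datadog_lambda.wrapper import datadog_lambda_wrapper
from datadog_lambda.metric import lambda_metric


@datadog_lambda_wrapper  # wrapping twice is safe: __new__ returns a _NoopDecorator
def handler(event, context):
    # Metrics are flushed by _after() unless DD_FLUSH_TO_LOG is "true"
    # and the extension is not in use.
    lambda_metric("my_metric", 10)
    return {"statusCode": 200}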
datadog_lambda/xray.py ADDED
@@ -0,0 +1,118 @@
+ import os
+ import logging
+ import json
+ import binascii
+ import time
+ import socket
+
+ from datadog_lambda.constants import XrayDaemon, XraySubsegment, TraceContextSource
+
+ logger = logging.getLogger(__name__)
+
+
+ def get_xray_host_port(address):
+     if address == "":
+         logger.debug("X-Ray daemon env var not set, not sending sub-segment")
+         return None
+     parts = address.split(":")
+     if len(parts) <= 1:
+         logger.debug("X-Ray daemon env var not set, not sending sub-segment")
+         return None
+     port = int(parts[1])
+     host = parts[0]
+     return (host, port)
+
+
+ def send(host_port_tuple, payload):
+     sock = None
+     try:
+         sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+         sock.setblocking(0)
+         sock.connect(host_port_tuple)
+         sock.send(payload.encode("utf-8"))
+     except Exception as e_send:
+         logger.error("Error occurred submitting to xray daemon: %s", str(e_send))
+     try:
+         sock.close()
+     except Exception as e_close:
+         logger.error("Error while closing the socket: %s", str(e_close))
+
+
+ def build_segment_payload(payload):
+     if payload is None:
+         return None
+     return '{"format": "json", "version": 1}' + "\n" + payload
+
+
+ def parse_xray_header(raw_trace_id):
+     # Example:
+     # Root=1-5e272390-8c398be037738dc042009320;Parent=94ae789b969f1cc5;Sampled=1;Lineage=c6c5b1b9:0
+     logger.debug("Reading trace context from env var %s", raw_trace_id)
+     if len(raw_trace_id) == 0:
+         return None
+     parts = raw_trace_id.split(";")
+     if len(parts) < 3:
+         return None
+     root = parts[0].replace("Root=", "")
+     parent = parts[1].replace("Parent=", "")
+     sampled = parts[2].replace("Sampled=", "")
+     if (
+         len(root) == len(parts[0])
+         or len(parent) == len(parts[1])
+         or len(sampled) == len(parts[2])
+     ):
+         return None
+     return {
+         "parent_id": parent,
+         "trace_id": root,
+         "sampled": sampled,
+         "source": TraceContextSource.XRAY,
+     }
+
+
+ def generate_random_id():
+     return binascii.b2a_hex(os.urandom(8)).decode("utf-8")
+
+
+ def build_segment(context, key, metadata):
+     segment = json.dumps(
+         {
+             "id": generate_random_id(),
+             "trace_id": context["trace_id"],
+             "parent_id": context["parent_id"],
+             "name": XraySubsegment.NAME,
+             "start_time": time.time(),
+             "end_time": time.time(),
+             "type": "subsegment",
+             "metadata": {
+                 XraySubsegment.NAMESPACE: {
+                     key: metadata,
+                 }
+             },
+         }
+     )
+     return segment
+
+
+ def send_segment(key, metadata):
+     host_port_tuple = get_xray_host_port(
+         os.environ.get(XrayDaemon.XRAY_DAEMON_ADDRESS, "")
+     )
+     if host_port_tuple is None:
+         return None
+     context = parse_xray_header(
+         os.environ.get(XrayDaemon.XRAY_TRACE_ID_HEADER_NAME, "")
+     )
+     if context is None:
+         logger.debug(
+             "Failed to create segment since it was not possible to get trace context from header"
+         )
+         return None
+
+     # Skip adding the segment if the X-Ray trace is going to be sampled away.
+     if context["sampled"] == "0":
+         logger.debug("Skipping sending metadata, x-ray trace was sampled out")
+         return None
+     segment = build_segment(context, key, metadata)
+     segment_payload = build_segment_payload(segment)
+     send(host_port_tuple, segment_payload)
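
To illustrate how the helpers above fit together: send_segment parses the X-Ray trace header, builds a subsegment, prepends the daemon's framing header, and writes the result to the daemon over UDP. Below is a minimal sketch with example values; the raw header, daemon address, and metadata key are assumptions for illustration only.

# Illustrative sketch only; the values are examples, not defaults from the package.
from datadog_lambda.xray import (
    build_segment,
    build_segment_payload,
    get_xray_host_port,
    parse_xray_header,
)

raw = "Root=1-5e272390-8c398be037738dc042009320;Parent=94ae789b969f1cc5;Sampled=1"
context = parse_xray_header(raw)
# -> {"parent_id": "94ae789b969f1cc5",
#     "trace_id": "1-5e272390-8c398be037738dc042009320",
#     "sampled": "1", "source": TraceContextSource.XRAY}

host_port = get_xray_host_port("127.0.0.1:2000")  # -> ("127.0.0.1", 2000)

payload = build_segment_payload(build_segment(context, "trace", {"trace-id": "123"}))
# payload is the framing line followed by the subsegment JSON:
# {"format": "json", "version": 1}\n{"id": ..., "trace_id": ..., ...}
# send(host_port, payload) would then write it to the daemon's UDP port.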