datadog_lambda 5.91.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1309 @@
1
+ # Unless explicitly stated otherwise all files in this repository are licensed
2
+ # under the Apache License Version 2.0.
3
+ # This product includes software developed at Datadog (https://www.datadoghq.com/).
4
+ # Copyright 2019 Datadog, Inc.
5
+ import hashlib
6
+ import logging
7
+ import os
8
+ import json
9
+ import base64
10
+ from datetime import datetime, timezone
11
+ from typing import Optional, Dict
12
+
13
+ from datadog_lambda.metric import submit_errors_metric
14
+
15
+ try:
16
+ from typing import Literal
17
+ except ImportError:
18
+ # Literal was added to typing in python 3.8
19
+ from typing_extensions import Literal
20
+
21
+ from datadog_lambda.constants import (
22
+ SamplingPriority,
23
+ TraceContextSource,
24
+ XrayDaemon,
25
+ Headers,
26
+ )
27
+ from datadog_lambda.xray import (
28
+ send_segment,
29
+ parse_xray_header,
30
+ )
31
+
32
+ from ddtrace import tracer, patch, Span
33
+ from ddtrace import __version__ as ddtrace_version
34
+ from ddtrace.propagation.http import HTTPPropagator
35
+ from ddtrace.context import Context
36
+ from datadog_lambda import __version__ as datadog_lambda_version
37
+ from datadog_lambda.trigger import (
38
+ _EventSource,
39
+ parse_event_source,
40
+ get_first_record,
41
+ EventTypes,
42
+ EventSubtypes,
43
+ )
44
+
45
# Opt-in flag that routes OpenTelemetry API usage through the Datadog tracer.
dd_trace_otel_enabled = (
    os.environ.get("DD_TRACE_OTEL_ENABLED", "false").lower() == "true"
)
if dd_trace_otel_enabled:
    from opentelemetry.trace import set_tracer_provider
    from ddtrace.opentelemetry import TracerProvider

    # Install ddtrace's provider as the global OpenTelemetry tracer provider.
    set_tracer_provider(TracerProvider())
53
+
54
+
55
logger = logging.getLogger(__name__)

# Trace context extracted for the current invocation. Cached at module level
# because the Lambda runtime reuses the process between invocations; it is
# reset by extract_dd_trace_context() on each invocation.
dd_trace_context = None
dd_tracing_enabled = os.environ.get("DD_TRACE_ENABLED", "false").lower() == "true"
if dd_tracing_enabled:
    # Enable the telemetry client if the user has opted in
    if (
        os.environ.get("DD_INSTRUMENTATION_TELEMETRY_ENABLED", "false").lower()
        == "true"
    ):
        from ddtrace.internal.telemetry import telemetry_writer

        telemetry_writer.enable()

# Shared propagator used for all header-carrier extraction/injection below.
propagator = HTTPPropagator()
70
+
71
+
72
+ def _convert_xray_trace_id(xray_trace_id):
73
+ """
74
+ Convert X-Ray trace id (hex)'s last 63 bits to a Datadog trace id (int).
75
+ """
76
+ return 0x7FFFFFFFFFFFFFFF & int(xray_trace_id[-16:], 16)
77
+
78
+
79
+ def _convert_xray_entity_id(xray_entity_id):
80
+ """
81
+ Convert X-Ray (sub)segement id (hex) to a Datadog span id (int).
82
+ """
83
+ return int(xray_entity_id, 16)
84
+
85
+
86
def _convert_xray_sampling(xray_sampled):
    """Map the X-Ray sampled flag (truthy/falsy) to a Datadog sampling priority."""
    if xray_sampled:
        return SamplingPriority.USER_KEEP
    return SamplingPriority.USER_REJECT
91
+
92
+
93
def _get_xray_trace_context():
    # Build a ddtrace Context from the active X-Ray trace entity. Returns
    # None when not running inside a real Lambda environment, or when the
    # X-Ray trace header env var is absent or unparsable.
    if not is_lambda_context():
        return None

    xray_trace_entity = parse_xray_header(
        os.environ.get(XrayDaemon.XRAY_TRACE_ID_HEADER_NAME, "")
    )
    if xray_trace_entity is None:
        return None
    trace_context = Context(
        trace_id=_convert_xray_trace_id(xray_trace_entity.get("trace_id")),
        span_id=_convert_xray_entity_id(xray_trace_entity.get("parent_id")),
        sampling_priority=_convert_xray_sampling(xray_trace_entity.get("sampled")),
    )
    logger.debug(
        "Converted trace context %s from X-Ray segment %s",
        trace_context,
        (
            xray_trace_entity["trace_id"],
            xray_trace_entity["parent_id"],
            xray_trace_entity["sampled"],
        ),
    )
    return trace_context
117
+
118
+
119
def _get_dd_trace_py_context():
    """Return the context of the currently active dd-trace-py span, or None."""
    active_span = tracer.current_span()
    if not active_span:
        return None

    ctx = active_span.context
    logger.debug("found dd trace context: %s", (ctx.trace_id, ctx.span_id))
    return ctx
128
+
129
+
130
+ def _is_context_complete(context):
131
+ return (
132
+ context
133
+ and context.trace_id
134
+ and context.span_id
135
+ and context.sampling_priority is not None
136
+ )
137
+
138
+
139
def create_dd_dummy_metadata_subsegment(
    subsegment_metadata_value, subsegment_metadata_key
):
    """Send a dummy X-Ray subsegment carrying Datadog data in its metadata.

    The Datadog backend reads this metadata (trace context or Lambda
    function tags) when converting the X-Ray trace into a Datadog trace,
    so the converted trace ends up with the correct context.
    """
    send_segment(subsegment_metadata_key, subsegment_metadata_value)
148
+
149
+
150
def extract_context_from_lambda_context(lambda_context):
    """Extract Datadog trace context from the Lambda context's client_context.

    dd-trace libraries inject the trace context into `client_context.custom`
    on synchronous invocations — either directly, or (legacy format) nested
    under the "_datadog" key.
    """
    carrier = None
    client_context = lambda_context.client_context
    if client_context and client_context.custom:
        custom = client_context.custom
        # Legacy propagation style nests the carrier under "_datadog".
        carrier = custom.get("_datadog") if "_datadog" in custom else custom
    return propagator.extract(carrier)
165
+
166
+
167
def extract_context_from_http_event_or_context(
    event,
    lambda_context,
    event_source: _EventSource,
    decode_authorizer_context: bool = True,
):
    """
    Extract Datadog trace context from the `headers` key of the Lambda
    `event` object.

    Falls back to the Lambda context if no trace data is found in `headers`.
    """
    if decode_authorizer_context:
        # A Lambda authorizer in front of this function may have injected a
        # trace context into the authorizer data; prefer it when complete.
        is_http_api = event_source.equals(
            EventTypes.API_GATEWAY, subtype=EventSubtypes.HTTP_API
        )
        injected_authorizer_data = get_injected_authorizer_data(event, is_http_api)
        context = propagator.extract(injected_authorizer_data)
        if _is_context_complete(context):
            return context

    headers = event.get("headers")
    context = propagator.extract(headers)

    if not _is_context_complete(context):
        return extract_context_from_lambda_context(lambda_context)

    return context
195
+
196
+
197
def create_sns_event(message):
    """Wrap an SNS message payload in a minimal Records-style SNS event."""
    record = {
        "EventSource": "aws:sns",
        "EventVersion": "1.0",
        "Sns": message,
    }
    return {"Records": [record]}
207
+
208
+
209
def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):
    """
    Extract Datadog trace context from an SQS event.

    The extraction chain goes as follows:
    EB => SQS (First records body contains EB context), or
    SNS => SQS (First records body contains SNS context), or
    SQS or SNS (`messageAttributes` for SQS context,
    `MessageAttributes` for SNS context), else
    Lambda Context.

    Falls back to lambda context if no trace data is found in the SQS message attributes.
    """

    # EventBridge => SQS
    try:
        context = _extract_context_from_eventbridge_sqs_event(event)
        if _is_context_complete(context):
            return context
    except Exception:
        logger.debug("Failed extracting context as EventBridge to SQS.")

    try:
        first_record = event.get("Records")[0]

        # logic to deal with SNS => SQS event
        if "body" in first_record:
            body_str = first_record.get("body", {})
            try:
                body = json.loads(body_str)
                if body.get("Type", "") == "Notification" and "TopicArn" in body:
                    logger.debug("Found SNS message inside SQS event")
                    # Re-wrap the embedded SNS notification so the attribute
                    # lookup below sees the SNS record shape.
                    first_record = get_first_record(create_sns_event(body))
            except Exception:
                # body was not JSON; treat the record as a plain SQS message.
                first_record = event.get("Records")[0]
                pass

        msg_attributes = first_record.get(
            "messageAttributes",
            first_record.get("Sns", {}).get("MessageAttributes", {}),
        )
        dd_payload = msg_attributes.get("_datadog", {})
        # SQS uses dataType and binaryValue/stringValue
        # SNS uses Type and Value
        dd_json_data_type = dd_payload.get("Type", dd_payload.get("dataType", ""))
        if dd_json_data_type == "Binary":
            dd_json_data = dd_payload.get(
                "binaryValue",
                dd_payload.get("Value", r"{}"),
            )
            dd_json_data = base64.b64decode(dd_json_data)
        elif dd_json_data_type == "String":
            dd_json_data = dd_payload.get(
                "stringValue",
                dd_payload.get("Value", r"{}"),
            )
        else:
            logger.debug(
                "Datadog Lambda Python only supports extracting trace"
                "context from String or Binary SQS/SNS message attributes"
            )
            return extract_context_from_lambda_context(lambda_context)
        dd_data = json.loads(dd_json_data)
        return propagator.extract(dd_data)
    except Exception as e:
        logger.debug("The trace extractor returned with error %s", e)
        return extract_context_from_lambda_context(lambda_context)
276
+
277
+
278
def _extract_context_from_eventbridge_sqs_event(event):
    """Extract Datadog trace context from an SQS event fed by EventBridge.

    This only works when the first record's `body` is a JSON string whose
    EventBridge `detail` contains an injected `_datadog` carrier. Any
    malformed input raises; the caller is expected to catch.
    """
    body = json.loads(event.get("Records")[0].get("body"))
    carrier = body.get("detail").get("_datadog")
    return propagator.extract(carrier)
292
+
293
+
294
def extract_context_from_eventbridge_event(event, lambda_context):
    """Extract Datadog trace context from an EventBridge message's detail.

    Only possible when `detail` carries a `_datadog` entry; falls back to
    the Lambda context otherwise or on any failure.
    """
    try:
        carrier = event.get("detail").get("_datadog")
        if carrier:
            return propagator.extract(carrier)
        return extract_context_from_lambda_context(lambda_context)
    except Exception as e:
        logger.debug("The trace extractor returned with error %s", e)
        return extract_context_from_lambda_context(lambda_context)
308
+
309
+
310
def extract_context_from_kinesis_event(event, lambda_context):
    """
    Extract Datadog trace context from a Kinesis record's base64-encoded
    data string.

    Falls back to the Lambda context when the data is missing, malformed,
    or carries no `_datadog` payload. (Previously a record without a
    `kinesis.data` field fell off the end of the function and returned
    None instead of falling back, unlike the sibling extractors.)
    """
    try:
        record = get_first_record(event)
        data = record.get("kinesis", {}).get("data", None)
        if data:
            b64_bytes = data.encode("ascii")
            str_bytes = base64.b64decode(b64_bytes)
            data_str = str_bytes.decode("ascii")
            data_obj = json.loads(data_str)
            dd_ctx = data_obj.get("_datadog")
            if dd_ctx:
                return propagator.extract(dd_ctx)
        # No data or no injected context: fall back to the Lambda context.
        return extract_context_from_lambda_context(lambda_context)
    except Exception as e:
        logger.debug("The trace extractor returned with error %s", e)
        return extract_context_from_lambda_context(lambda_context)
331
+
332
+
333
+ def _deterministic_md5_hash(s: str) -> int:
334
+ """MD5 here is to generate trace_id, not for any encryption."""
335
+ hex_number = hashlib.md5(s.encode("ascii")).hexdigest()
336
+ binary = bin(int(hex_number, 16))
337
+ binary_str = str(binary)
338
+ binary_str_remove_0b = binary_str[2:].rjust(128, "0")
339
+ most_significant_64_bits_without_leading_1 = "0" + binary_str_remove_0b[1:-64]
340
+ result = int(most_significant_64_bits_without_leading_1, 2)
341
+ if result == 0:
342
+ return 1
343
+ return result
344
+
345
+
346
def extract_context_from_step_functions(event, lambda_context):
    """
    Only extract datadog trace context when Step Functions Context Object is injected
    into lambda's event dict.

    Falls back to the Lambda context when the expected keys are missing.
    """
    try:
        execution_id = event.get("Execution").get("Id")
        state_name = event.get("State").get("Name")
        state_entered_time = event.get("State").get("EnteredTime")
        # Derive deterministic ids so the same state entry of the same
        # execution always maps onto the same trace/parent span ids.
        trace_id = _deterministic_md5_hash(execution_id)
        parent_id = _deterministic_md5_hash(
            execution_id + "#" + state_name + "#" + state_entered_time
        )
        sampling_priority = SamplingPriority.AUTO_KEEP
        return Context(
            trace_id=trace_id, span_id=parent_id, sampling_priority=sampling_priority
        )
    except Exception as e:
        logger.debug("The Step Functions trace extractor returned with error %s", e)
        return extract_context_from_lambda_context(lambda_context)
366
+
367
+
368
def extract_context_custom_extractor(extractor, event, lambda_context):
    """Build a trace context via a user-supplied extractor callable.

    The extractor must return a (trace_id, parent_id, sampling_priority)
    triple of int-convertible values. Returns None when the extractor
    raises or yields unusable values.
    """
    try:
        trace_id, parent_id, sampling_priority = extractor(event, lambda_context)
        return Context(
            trace_id=int(trace_id),
            span_id=int(parent_id),
            sampling_priority=int(sampling_priority),
        )
    except Exception as e:
        logger.debug("The trace extractor returned with error %s", e)
385
+
386
+
387
def is_authorizer_response(response) -> bool:
    """Return True when `response` looks like a Lambda authorizer result.

    A genuine authorizer response carries truthy `principalId` and
    `policyDocument` entries. The result is normalized with bool(): the
    previous truthy-chain could return the policyDocument object itself
    despite the `-> bool` annotation.
    """
    try:
        return bool(
            response is not None
            and response["principalId"]
            and response["policyDocument"]
        )
    except (KeyError, AttributeError):
        # Missing keys / non-mapping input simply mean "not an authorizer
        # response".
        pass
    except Exception as e:
        logger.debug("unknown error while checking is_authorizer_response %s", e)
    return False
399
+
400
+
401
def get_injected_authorizer_data(event, is_http_api) -> dict:
    """Return the Datadog data injected by a Lambda authorizer, if any.

    The payload is a base64-encoded JSON dict stored under
    requestContext.authorizer (nested under "lambda" for HTTP APIs).
    Returns None when absent, unparsable, or judged to come from a cached
    authorizer result.
    """
    try:
        authorizer_headers = event.get("requestContext", {}).get("authorizer")
        if not authorizer_headers:
            return None

        # HTTP APIs nest the injected payload one level deeper.
        dd_data_raw = (
            authorizer_headers.get("lambda", {}).get("_datadog")
            if is_http_api
            else authorizer_headers.get("_datadog")
        )

        if not dd_data_raw:
            return None

        injected_data = json.loads(base64.b64decode(dd_data_raw))

        # Lambda authorizer's results can be cached. But the payload will still have the injected
        # data in cached requests. How to distinguish cached case and ignore the injected data ?
        # APIGateway automatically injects a integrationLatency data in some cases. If it's >0 we
        # know that it's not cached. But integrationLatency is not available for Http API case. In
        # that case, we use the injected Authorizing_Request_Id to tell if it's cached. But token
        # authorizers don't pass on the requestId. The Authorizing_Request_Id can't work for all
        # cases neither. As a result, we combine both methods as shown below.
        if authorizer_headers.get("integrationLatency", 0) > 0 or event.get(
            "requestContext", {}
        ).get("requestId") == injected_data.get(Headers.Authorizing_Request_Id):
            return injected_data
        else:
            return None

    except Exception as e:
        logger.debug("Failed to check if invocated by an authorizer. error %s", e)
        return None
435
+
436
+
437
def extract_dd_trace_context(
    event, lambda_context, extractor=None, decode_authorizer_context: bool = True
):
    """
    Extract Datadog trace context from the Lambda `event` object.

    Write the context to a global `dd_trace_context`, so the trace
    can be continued on the outgoing requests with the context injected.

    Returns a (context, trace_context_source, event_source) tuple.
    """
    global dd_trace_context
    trace_context_source = None
    event_source = parse_event_source(event)

    # Dispatch on the event shape: explicit user extractor first, then the
    # recognized AWS event sources, finally the Lambda context itself.
    if extractor is not None:
        context = extract_context_custom_extractor(extractor, event, lambda_context)
    elif isinstance(event, (set, dict)) and "headers" in event:
        context = extract_context_from_http_event_or_context(
            event, lambda_context, event_source, decode_authorizer_context
        )
    elif event_source.equals(EventTypes.SNS) or event_source.equals(EventTypes.SQS):
        context = extract_context_from_sqs_or_sns_event_or_context(
            event, lambda_context
        )
    elif event_source.equals(EventTypes.EVENTBRIDGE):
        context = extract_context_from_eventbridge_event(event, lambda_context)
    elif event_source.equals(EventTypes.KINESIS):
        context = extract_context_from_kinesis_event(event, lambda_context)
    elif event_source.equals(EventTypes.STEPFUNCTIONS):
        context = extract_context_from_step_functions(event, lambda_context)
    else:
        context = extract_context_from_lambda_context(lambda_context)

    if _is_context_complete(context):
        logger.debug("Extracted Datadog trace context from event or context")
        dd_trace_context = context
        trace_context_source = TraceContextSource.EVENT
    else:
        # AWS Lambda runtime caches global variables between invocations,
        # reset to avoid using the context from the last invocation.
        dd_trace_context = _get_xray_trace_context()
        if dd_trace_context:
            trace_context_source = TraceContextSource.XRAY
    logger.debug("extracted dd trace context %s", dd_trace_context)
    return dd_trace_context, trace_context_source, event_source
481
+
482
+
483
def get_dd_trace_context_obj():
    """
    Return the Datadog trace context to be propagated on the outgoing requests.

    If the Lambda function is invoked by a Datadog-traced service, a Datadog
    trace context may already exist, and it should be used. Otherwise, use the
    current X-Ray trace entity, or the dd-trace-py context if DD_TRACE_ENABLED
    is true.

    Most of widely-used HTTP clients are patched to inject the context
    automatically, but this function can be used to manually inject the trace
    context to an outgoing request.
    """
    if dd_tracing_enabled:
        dd_trace_py_context = _get_dd_trace_py_context()
        if _is_context_complete(dd_trace_py_context):
            return dd_trace_py_context

    global dd_trace_context

    # Initialize before the try block: previously, a failure inside
    # _get_xray_trace_context() left `xray_context` unbound and the
    # `if not xray_context` check below raised UnboundLocalError.
    xray_context = None
    try:
        xray_context = _get_xray_trace_context()  # xray (sub)segment
    except Exception as e:
        # Lazy %-args instead of eager string interpolation.
        logger.debug(
            "get_dd_trace_context couldn't read from segment from x-ray, with error %s",
            e,
        )
    if not xray_context:
        return None

    if not _is_context_complete(dd_trace_context):
        return xray_context

    # Merge: keep the upstream Datadog trace id and sampling decision, but
    # parent outgoing requests to the current X-Ray (sub)segment.
    logger.debug("Set parent id from xray trace context: %s", xray_context.span_id)
    return Context(
        trace_id=dd_trace_context.trace_id,
        span_id=xray_context.span_id,
        sampling_priority=dd_trace_context.sampling_priority,
        meta=dd_trace_context._meta.copy(),
        metrics=dd_trace_context._metrics.copy(),
    )
523
+
524
+
525
def get_dd_trace_context():
    """Return the current Datadog trace context as a dict of HTTP headers.

    An empty dict is returned when no complete context is available.
    """
    carrier = {}
    context = get_dd_trace_context_obj()
    if _is_context_complete(context):
        propagator.inject(context, carrier)
    return carrier
536
+
537
+
538
def set_correlation_ids():
    """
    Create a dummy span, and overrides its trace_id and span_id, to make
    ddtrace.helpers.get_log_correlation_context() return a dict containing the correct ids for both
    auto and manual log correlations.

    TODO: Remove me when Datadog tracer is natively supported in Lambda.
    """
    if not is_lambda_context():
        logger.debug("set_correlation_ids is only supported in LambdaContext")
        return
    if dd_tracing_enabled:
        # dd-trace-py manages the active span itself; nothing to fake here.
        logger.debug("using ddtrace implementation for spans")
        return

    context = get_dd_trace_context_obj()
    if not _is_context_complete(context):
        return

    # Activate the extracted context so the dummy span adopts its ids.
    tracer.context_provider.activate(context)
    tracer.trace("dummy.span")
    logger.debug("correlation ids set")
560
+
561
+
562
def inject_correlation_ids():
    """
    Override the formatter of LambdaLoggerHandler to inject datadog trace and
    span id for log correlation.

    For manual injections to custom log handlers, use `ddtrace.helpers.get_log_correlation_context`
    to retrieve a dict containing correlation ids (trace_id, span_id).
    """
    # Override the log format of the AWS provided LambdaLoggerHandler
    root_logger = logging.getLogger()
    for handler in root_logger.handlers:
        if handler.__class__.__name__ == "LambdaLoggerHandler" and isinstance(
            handler.formatter, logging.Formatter
        ):
            handler.setFormatter(
                logging.Formatter(
                    "[%(levelname)s]\t%(asctime)s.%(msecs)dZ\t%(aws_request_id)s\t"
                    "[dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s]\t%(message)s\n",
                    "%Y-%m-%dT%H:%M:%S",
                )
            )

    # Patch `logging.Logger.makeRecord` to actually inject correlation ids
    patch(logging=True)

    logger.debug("logs injection configured")
588
+
589
+
590
def is_lambda_context():
    """
    Return True if the X-Ray context is `LambdaContext`, rather than the
    regular `Context` (e.g., when testing lambda functions locally).

    The function-name env var is used as the marker of a real Lambda runtime.
    """
    function_name = os.environ.get(XrayDaemon.FUNCTION_NAME_HEADER_NAME, "")
    return function_name != ""
596
+
597
+
598
def set_dd_trace_py_root(trace_context_source, merge_xray_traces):
    """Activate the extracted trace context as the dd-trace-py root context.

    When `merge_xray_traces` is enabled, the span id from the active X-Ray
    (sub)segment replaces the extracted parent span id.
    """
    if trace_context_source == TraceContextSource.EVENT or merge_xray_traces:
        # Build a minimal context from the module-global dd_trace_context
        # (trace id, span id, sampling priority only).
        context = Context(
            trace_id=dd_trace_context.trace_id,
            span_id=dd_trace_context.span_id,
            sampling_priority=dd_trace_context.sampling_priority,
        )
        if merge_xray_traces:
            xray_context = _get_xray_trace_context()
            if xray_context and xray_context.span_id:
                context.span_id = xray_context.span_id

        tracer.context_provider.activate(context)
        logger.debug(
            "Set dd trace root context to: %s",
            (context.trace_id, context.span_id),
        )
615
+
616
+
617
def create_inferred_span(
    event,
    context,
    event_source: _EventSource = None,
    decode_authorizer_context: bool = True,
):
    """Create an inferred span for the service that triggered this Lambda.

    Dispatches on the detected event source. Returns the new span, or None
    for unrecognized event types or when span creation raises.
    """
    if event_source is None:
        event_source = parse_event_source(event)
    try:
        if event_source.equals(
            EventTypes.API_GATEWAY, subtype=EventSubtypes.API_GATEWAY
        ):
            logger.debug("API Gateway event detected. Inferring a span")
            return create_inferred_span_from_api_gateway_event(
                event, context, decode_authorizer_context
            )
        elif event_source.equals(EventTypes.LAMBDA_FUNCTION_URL):
            logger.debug("Function URL event detected. Inferring a span")
            return create_inferred_span_from_lambda_function_url_event(event, context)
        elif event_source.equals(
            EventTypes.API_GATEWAY, subtype=EventSubtypes.HTTP_API
        ):
            logger.debug("HTTP API event detected. Inferring a span")
            return create_inferred_span_from_http_api_event(
                event, context, decode_authorizer_context
            )
        elif event_source.equals(
            EventTypes.API_GATEWAY, subtype=EventSubtypes.WEBSOCKET
        ):
            logger.debug("API Gateway Websocket event detected. Inferring a span")
            return create_inferred_span_from_api_gateway_websocket_event(
                event, context, decode_authorizer_context
            )
        elif event_source.equals(EventTypes.SQS):
            logger.debug("SQS event detected. Inferring a span")
            return create_inferred_span_from_sqs_event(event, context)
        elif event_source.equals(EventTypes.SNS):
            logger.debug("SNS event detected. Inferring a span")
            return create_inferred_span_from_sns_event(event, context)
        elif event_source.equals(EventTypes.KINESIS):
            logger.debug("Kinesis event detected. Inferring a span")
            return create_inferred_span_from_kinesis_event(event, context)
        elif event_source.equals(EventTypes.DYNAMODB):
            logger.debug("Dynamodb event detected. Inferring a span")
            return create_inferred_span_from_dynamodb_event(event, context)
        elif event_source.equals(EventTypes.S3):
            logger.debug("S3 event detected. Inferring a span")
            return create_inferred_span_from_s3_event(event, context)
        elif event_source.equals(EventTypes.EVENTBRIDGE):
            logger.debug("Eventbridge event detected. Inferring a span")
            return create_inferred_span_from_eventbridge_event(event, context)
    except Exception as e:
        logger.debug(
            "Unable to infer span. Detected type: %s. Reason: %s",
            event_source.to_string(),
            e,
        )
        return None
    logger.debug("Unable to infer a span: unknown event type")
    return None
677
+
678
+
679
def create_service_mapping(val):
    """Parse a "from1:to1,from2:to2" string into a service-mapping dict.

    Entries with a missing/empty half, extra colons, or identical key and
    value are dropped; keys and values are whitespace-stripped.
    """
    mapping = {}
    for entry in val.split(","):
        parts = entry.split(":")
        if len(parts) != 2:
            continue
        key, value = (part.strip() for part in parts)
        if key and value and key != value:
            mapping[key] = value
    return mapping
689
+
690
+
691
def determine_service_name(service_mapping, specific_key, generic_key, default_value):
    """Resolve a service name, preferring the specific mapping entry over
    the generic one, then over the supplied default."""
    specific = service_mapping.get(specific_key)
    if specific is not None:
        return specific
    return service_mapping.get(generic_key, default_value)
696
+
697
+
698
# Service-name remapping, configured once at cold start from the
# DD_SERVICE_MAPPING env var ("from1:to1,from2:to2").
service_mapping = {}
# Initialization code
service_mapping_str = os.getenv("DD_SERVICE_MAPPING", "")
service_mapping = create_service_mapping(service_mapping_str)
702
+
703
+
704
def create_inferred_span_from_lambda_function_url_event(event, context):
    """Create an inferred span representing the Lambda Function URL request."""
    request_context = event.get("requestContext")
    api_id = request_context.get("apiId")
    domain = request_context.get("domainName")
    service_name = determine_service_name(service_mapping, api_id, "lambda_url", domain)
    method = request_context.get("http", {}).get("method")
    path = request_context.get("http", {}).get("path")
    resource = "{0} {1}".format(method, path)
    tags = {
        "operation_name": "aws.lambda.url",
        "http.url": domain + path,
        "endpoint": path,
        "http.method": method,
        "resource_names": domain + path,
        "request_id": context.aws_request_id,
    }
    request_time_epoch = request_context.get("timeEpoch")
    args = {
        "service": service_name,
        "resource": resource,
        "span_type": "http",
    }
    tracer.set_tags(
        {"_dd.origin": "lambda"}
    )  # function urls don't count as lambda_inferred,
    # because they're in the same service as the inferring lambda function
    span = tracer.trace("aws.lambda.url", **args)
    InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
    if span:
        span.set_tags(tags)
        # timeEpoch is in milliseconds; span.start expects seconds.
        span.start = request_time_epoch / 1000
    return span
736
+
737
+
738
def is_api_gateway_invocation_async(event):
    """Return True when the API Gateway invocation is asynchronous.

    API Gateway marks async invocations with the X-Amz-Invocation-Type
    header set to "Event". The `headers` key may be present but None in
    some payloads, so it is normalized to a dict before the lookup
    (previously `event.get("headers", {})` raised AttributeError for
    `{"headers": None}`).
    """
    headers = event.get("headers") or {}
    return headers.get("X-Amz-Invocation-Type") == "Event"
740
+
741
+
742
def insert_upstream_authorizer_span(
    kwargs_to_start_span, other_tags_for_span, start_time_ns, finish_time_ns
):
    """Insert the authorizer span.
    Without this: parent span --child-> inferred span
    With this insertion: parent span --child-> upstreamAuthorizerSpan --child-> inferred span

    Args:
        kwargs_to_start_span (Dict): the same keyword arguments used for the inferred span
        other_tags_for_span (Dict): the same tag keyword arguments used for the inferred span
        start_time_ns (int): the start time of the span in nanoseconds
        finish_time_ns (int): the finish time of the span in nanoseconds
    """
    trace_ctx = tracer.current_trace_context()
    upstream_authorizer_span = tracer.trace(
        "aws.apigateway.authorizer", **kwargs_to_start_span
    )
    upstream_authorizer_span.set_tags(other_tags_for_span)
    upstream_authorizer_span.set_tag("operation_name", "aws.apigateway.authorizer")
    # always sync for the authorizer invocation
    InferredSpanInfo.set_tags_to_span(upstream_authorizer_span, synchronicity="sync")
    upstream_authorizer_span.start_ns = int(start_time_ns)
    # finish() takes seconds, hence the division by 1e9.
    upstream_authorizer_span.finish(finish_time_ns / 1e9)
    # trace context needs to be set again as it is reset by finish()
    tracer.context_provider.activate(trace_ctx)
    return upstream_authorizer_span
768
+
769
+
770
def process_injected_data(event, request_time_epoch_ms, args, tags):
    """
    This covers the ApiGateway RestAPI and Websocket cases. It doesn't cover Http API cases.

    Returns an (upstream_authorizer_span, finish_time_ns) pair; both are
    None when no authorizer data is present or span insertion fails.
    """
    injected_authorizer_data = get_injected_authorizer_data(event, False)
    if injected_authorizer_data:
        try:
            start_time_ns = int(
                injected_authorizer_data.get(Headers.Parent_Span_Finish_Time)
            )
            # Authorizer finish time = request arrival (ms) plus the
            # reported integrationLatency (ms), converted to nanoseconds.
            finish_time_ns = (
                request_time_epoch_ms
                + (
                    int(
                        event["requestContext"]["authorizer"].get(
                            "integrationLatency", 0
                        )
                    )
                )
            ) * 1e6
            upstream_authorizer_span = insert_upstream_authorizer_span(
                args, tags, start_time_ns, finish_time_ns
            )
            return upstream_authorizer_span, finish_time_ns
        except Exception as e:
            logger.debug(
                "Unable to insert authorizer span. Continue to generate the main span.\
                    Reason: %s",
                e,
            )
            return None, None
    else:
        return None, None
803
+
804
+
805
def create_inferred_span_from_api_gateway_websocket_event(
    event, context, decode_authorizer_context: bool = True
):
    """Create an inferred span for an API Gateway Websocket invocation."""
    request_context = event.get("requestContext")
    domain = request_context.get("domainName")
    endpoint = request_context.get("routeKey")
    api_id = request_context.get("apiId")

    service_name = determine_service_name(
        service_mapping, api_id, "lambda_api_gateway", domain
    )
    tags = {
        "operation_name": "aws.apigateway.websocket",
        "http.url": domain + endpoint,
        "endpoint": endpoint,
        "resource_names": endpoint,
        "apiid": api_id,
        "apiname": api_id,
        "stage": request_context.get("stage"),
        "request_id": context.aws_request_id,
        "connection_id": request_context.get("connectionId"),
        "event_type": request_context.get("eventType"),
        "message_direction": request_context.get("messageDirection"),
    }
    request_time_epoch_ms = int(request_context.get("requestTimeEpoch"))
    if is_api_gateway_invocation_async(event):
        InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
    else:
        InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
    args = {
        "service": service_name,
        "resource": endpoint,
        "span_type": "web",
    }
    tracer.set_tags({"_dd.origin": "lambda"})
    upstream_authorizer_span = None
    finish_time_ns = None
    if decode_authorizer_context:
        # May insert an upstream authorizer span; when it does, the inferred
        # span starts at the authorizer's finish time instead of request time.
        upstream_authorizer_span, finish_time_ns = process_injected_data(
            event, request_time_epoch_ms, args, tags
        )
    span = tracer.trace("aws.apigateway.websocket", **args)
    if span:
        span.set_tags(tags)
        span.start_ns = int(
            finish_time_ns
            if finish_time_ns is not None
            else request_time_epoch_ms * 1e6
        )
        if upstream_authorizer_span:
            span.parent_id = upstream_authorizer_span.span_id
    return span
857
+
858
+
859
def create_inferred_span_from_api_gateway_event(
    event, context, decode_authorizer_context: bool = True
):
    """Create an inferred span for an API Gateway REST API invocation."""
    request_context = event.get("requestContext")
    domain = request_context.get("domainName", "")
    api_id = request_context.get("apiId")
    service_name = determine_service_name(
        service_mapping, api_id, "lambda_api_gateway", domain
    )
    method = event.get("httpMethod")
    path = event.get("path")
    resource = "{0} {1}".format(method, path)
    tags = {
        "operation_name": "aws.apigateway.rest",
        "http.url": domain + path,
        "endpoint": path,
        "http.method": method,
        "resource_names": resource,
        "apiid": api_id,
        "apiname": api_id,
        "stage": request_context.get("stage"),
        "request_id": context.aws_request_id,
    }
    request_time_epoch_ms = int(request_context.get("requestTimeEpoch"))
    if is_api_gateway_invocation_async(event):
        InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
    else:
        InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
    args = {
        "service": service_name,
        "resource": resource,
        "span_type": "http",
    }
    tracer.set_tags({"_dd.origin": "lambda"})
    upstream_authorizer_span = None
    finish_time_ns = None
    if decode_authorizer_context:
        # May insert an upstream authorizer span; when it does, the inferred
        # span starts at the authorizer's finish time instead of request time.
        upstream_authorizer_span, finish_time_ns = process_injected_data(
            event, request_time_epoch_ms, args, tags
        )
    span = tracer.trace("aws.apigateway", **args)
    if span:
        span.set_tags(tags)
        # start time pushed by the inserted authorizer span
        span.start_ns = int(
            finish_time_ns
            if finish_time_ns is not None
            else request_time_epoch_ms * 1e6
        )
        if upstream_authorizer_span:
            span.parent_id = upstream_authorizer_span.span_id
    return span
911
+
912
+
913
def create_inferred_span_from_http_api_event(
    event, context, decode_authorizer_context: bool = True
):
    """Create an inferred span for the HTTP API (API Gateway v2) request
    that triggered this Lambda invocation.

    The span starts at the request's ``timeEpoch``; when injected authorizer
    data is present and carries a parent-span finish time, the span starts
    there instead.
    """
    request_context = event.get("requestContext")
    domain = request_context.get("domainName")
    api_id = request_context.get("apiId")
    service_name = determine_service_name(
        service_mapping, api_id, "lambda_api_gateway", domain
    )
    # Hoist the nested "http" dict instead of re-fetching it per tag.
    http = request_context.get("http", {})
    method = http.get("method")
    path = event.get("rawPath")
    resource = "{0} {1}".format(method, path)
    tags = {
        "operation_name": "aws.httpapi",
        "endpoint": path,
        "http.url": domain + path,
        "http.method": http.get("method"),
        "http.protocol": http.get("protocol"),
        "http.source_ip": http.get("sourceIp"),
        "http.user_agent": http.get("userAgent"),
        "resource_names": resource,
        "request_id": context.aws_request_id,
        "apiid": api_id,
        "apiname": api_id,
        "stage": request_context.get("stage"),
    }
    request_time_epoch_ms = int(request_context.get("timeEpoch"))
    if is_api_gateway_invocation_async(event):
        InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
    else:
        InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
    args = {
        "service": service_name,
        "resource": resource,
        "span_type": "http",
    }
    tracer.set_tags({"_dd.origin": "lambda"})
    inferred_span_start_ns = request_time_epoch_ms * 1e6
    if decode_authorizer_context:
        injected_authorizer_data = get_injected_authorizer_data(event, True)
        if injected_authorizer_data:
            # Fix: fall back to the request time when the injected data lacks
            # a finish time — previously a missing key made `.get` return
            # None and int(None) below raised TypeError.
            finish_time_ns = injected_authorizer_data.get(
                Headers.Parent_Span_Finish_Time
            )
            if finish_time_ns is not None:
                inferred_span_start_ns = finish_time_ns
    span = tracer.trace("aws.httpapi", **args)
    if span:
        span.set_tags(tags)
        span.start_ns = int(inferred_span_start_ns)
    return span
962
+
963
+
964
def create_inferred_span_from_sqs_event(event, context):
    """Create an inferred span for the SQS message that triggered this Lambda.

    When the SQS body wraps an SNS notification or an EventBridge event, an
    additional upstream inferred span is created (and finished at the SQS
    sent-time) and the SQS span is parented under it.
    """
    trace_ctx = tracer.current_trace_context()

    event_record = get_first_record(event)
    event_source_arn = event_record.get("eventSourceARN")
    queue_name = event_source_arn.split(":")[-1]
    service_name = determine_service_name(
        service_mapping, queue_name, "lambda_sqs", "sqs"
    )
    tags = {
        "operation_name": "aws.sqs",
        "resource_names": queue_name,
        "queuename": queue_name,
        "event_source_arn": event_source_arn,
        "receipt_handle": event_record.get("receiptHandle"),
        "sender_id": event_record.get("attributes", {}).get("SenderId"),
    }
    InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
    request_time_epoch = event_record.get("attributes", {}).get("SentTimestamp")
    args = {
        "service": service_name,
        "resource": queue_name,
        "span_type": "web",
    }
    # SentTimestamp is in milliseconds; span.start expects seconds.
    start_time = int(request_time_epoch) / 1000

    upstream_span = None
    if "body" in event_record:
        body_str = event_record.get("body", {})
        try:
            body = json.loads(body_str)

            # SNS => SQS: the SQS body is a full SNS notification envelope.
            if body.get("Type", "") == "Notification" and "TopicArn" in body:
                logger.debug("Found SNS message inside SQS event")
                upstream_span = create_inferred_span_from_sns_event(
                    create_sns_event(body), context
                )
                upstream_span.finish(finish_time=start_time)

            # EventBridge => SQS: the SQS body is an EventBridge event.
            elif body.get("detail"):
                detail = body.get("detail")
                if detail.get("_datadog"):
                    logger.debug("Found an EventBridge message inside SQS event")
                    upstream_span = create_inferred_span_from_eventbridge_event(
                        body, context
                    )
                    upstream_span.finish(finish_time=start_time)

        except Exception as e:
            # Best-effort: a malformed body simply means no upstream span.
            # Fix: pass the error as a lazy logging arg instead of eagerly
            # %-formatting the message; drop the redundant `pass`.
            logger.debug(
                "Unable to create upstream span from SQS message, with error %s", e
            )

    # trace context needs to be set again as it is reset
    # when sns_span.finish executes
    tracer.context_provider.activate(trace_ctx)
    tracer.set_tags({"_dd.origin": "lambda"})
    span = tracer.trace("aws.sqs", **args)
    if span:
        span.set_tags(tags)
        span.start = start_time
        if upstream_span:
            span.parent_id = upstream_span.span_id

    return span
1032
+
1033
+
1034
def create_inferred_span_from_sns_event(event, context):
    """Create an inferred span for the SNS notification that triggered this
    Lambda invocation, starting at the notification's publish timestamp.
    """
    event_record = get_first_record(event)
    sns_message = event_record.get("Sns")
    topic_arn = event_record.get("Sns", {}).get("TopicArn")
    topic_name = topic_arn.split(":")[-1]
    service_name = determine_service_name(
        service_mapping, topic_name, "lambda_sns", "sns"
    )
    tags = {
        "operation_name": "aws.sns",
        "resource_names": topic_name,
        "topicname": topic_name,
        "topic_arn": topic_arn,
        "message_id": sns_message.get("MessageId"),
        "type": sns_message.get("Type"),
    }

    # Subject not available in SNS => SQS scenario; only tag a truthy value.
    # Idiom fix: a single .get replaces the membership-plus-truthiness check.
    subject = sns_message.get("Subject")
    if subject:
        tags["subject"] = subject

    InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
    sns_dt_format = "%Y-%m-%dT%H:%M:%S.%fZ"
    # Reuse the already-fetched Sns dict rather than re-traversing the record.
    timestamp = sns_message.get("Timestamp")
    dt = datetime.strptime(timestamp, sns_dt_format)

    args = {
        "service": service_name,
        "resource": topic_name,
        "span_type": "web",
    }
    tracer.set_tags({"_dd.origin": "lambda"})
    span = tracer.trace("aws.sns", **args)
    if span:
        span.set_tags(tags)
        # SNS timestamps are UTC; convert to an epoch value for span.start.
        span.start = dt.replace(tzinfo=timezone.utc).timestamp()
    return span
1071
+
1072
+
1073
def create_inferred_span_from_kinesis_event(event, context):
    """Create an inferred span for the Kinesis record that triggered this
    Lambda invocation, starting at the record's approximate arrival time.
    """
    record = get_first_record(event)
    source_arn = record.get("eventSourceARN")
    record_event_id = record.get("eventID")
    stream = source_arn.split(":")[-1]
    shard = record_event_id.split(":")[0]
    service_name = determine_service_name(
        service_mapping, stream, "lambda_kinesis", "kinesis"
    )
    kinesis_data = record.get("kinesis", {})
    tags = {
        "operation_name": "aws.kinesis",
        "resource_names": stream,
        "streamname": stream,
        "shardid": shard,
        "event_source_arn": source_arn,
        "event_id": record_event_id,
        "event_name": record.get("eventName"),
        "event_version": record.get("eventVersion"),
        "partition_key": kinesis_data.get("partitionKey"),
    }
    InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
    arrival_timestamp = kinesis_data.get("approximateArrivalTimestamp")

    tracer.set_tags({"_dd.origin": "lambda"})
    span = tracer.trace(
        "aws.kinesis",
        service=service_name,
        resource=stream,
        span_type="web",
    )
    if span:
        span.set_tags(tags)
        span.start = arrival_timestamp
    return span
1109
+
1110
+
1111
def create_inferred_span_from_dynamodb_event(event, context):
    """Create an inferred span for the DynamoDB stream record that triggered
    this Lambda invocation, starting at the record's creation time.
    """
    record = get_first_record(event)
    source_arn = record.get("eventSourceARN")
    # Table name is the second segment of the stream ARN path.
    table = source_arn.split("/")[1]
    service_name = determine_service_name(
        service_mapping, table, "lambda_dynamodb", "dynamodb"
    )
    stream_record = record.get("dynamodb")
    tags = {
        "operation_name": "aws.dynamodb",
        "resource_names": table,
        "tablename": table,
        "event_source_arn": source_arn,
        "event_id": record.get("eventID"),
        "event_name": record.get("eventName"),
        "event_version": record.get("eventVersion"),
        "stream_view_type": stream_record.get("StreamViewType"),
        "size_bytes": str(stream_record.get("SizeBytes")),
    }
    InferredSpanInfo.set_tags(tags, synchronicity="async", tag_source="self")
    creation_time = record.get("dynamodb", {}).get("ApproximateCreationDateTime")
    tracer.set_tags({"_dd.origin": "lambda"})
    span = tracer.trace(
        "aws.dynamodb",
        service=service_name,
        resource=table,
        span_type="web",
    )
    if span:
        span.set_tags(tags)

        span.start = int(creation_time)
    return span
1146
+
1147
+
1148
def create_inferred_span_from_s3_event(event, context):
    """Create an inferred span for the S3 notification that triggered this
    Lambda invocation, starting at the notification's event time.
    """
    record = get_first_record(event)
    s3_info = record.get("s3", {})
    bucket = s3_info.get("bucket", {}).get("name")
    service_name = determine_service_name(
        service_mapping, bucket, "lambda_s3", "s3"
    )
    s3_object = s3_info.get("object", {})
    tags = {
        "operation_name": "aws.s3",
        "resource_names": bucket,
        "event_name": record.get("eventName"),
        "bucketname": bucket,
        "bucket_arn": s3_info.get("bucket", {}).get("arn"),
        "object_key": s3_object.get("key"),
        "object_size": str(s3_object.get("size")),
        "object_etag": s3_object.get("eTag"),
    }
    InferredSpanInfo.set_tags(tags, synchronicity="async", tag_source="self")
    event_time = datetime.strptime(
        record.get("eventTime"), "%Y-%m-%dT%H:%M:%S.%fZ"
    )

    tracer.set_tags({"_dd.origin": "lambda"})
    span = tracer.trace(
        "aws.s3",
        service=service_name,
        resource=bucket,
        span_type="web",
    )
    if span:
        span.set_tags(tags)
        # S3 event times are UTC; convert to an epoch value for span.start.
        span.start = event_time.replace(tzinfo=timezone.utc).timestamp()
    return span
1180
+
1181
+
1182
def create_inferred_span_from_eventbridge_event(event, context):
    """Create an inferred span for the EventBridge event that triggered this
    Lambda invocation, starting at the event's publish time.
    """
    source = event.get("source")
    service_name = determine_service_name(
        service_mapping, source, "lambda_eventbridge", "eventbridge"
    )
    tags = {
        "operation_name": "aws.eventbridge",
        "resource_names": source,
        "detail_type": event.get("detail-type"),
    }
    InferredSpanInfo.set_tags(tags, synchronicity="async", tag_source="self")
    # EventBridge "time" has whole-second precision (no fractional part).
    event_time = datetime.strptime(event.get("time"), "%Y-%m-%dT%H:%M:%SZ")

    tracer.set_tags({"_dd.origin": "lambda"})
    span = tracer.trace(
        "aws.eventbridge",
        service=service_name,
        resource=source,
        span_type="web",
    )
    if span:
        span.set_tags(tags)
        # Times are UTC; convert to an epoch value for span.start.
        span.start = event_time.replace(tzinfo=timezone.utc).timestamp()
    return span
1212
+
1213
+
1214
def create_function_execution_span(
    context,
    function_name,
    is_cold_start,
    is_proactive_init,
    trace_context_source,
    merge_xray_traces,
    trigger_tags,
    parent_span=None,
):
    """Create the top-level aws.lambda execution span for this invocation.

    Tags are derived from the Lambda context (ARN, version, request id),
    library versions, cold-start/proactive-init state, and the trigger tags;
    the span is optionally parented under *parent_span*.
    """
    tags = {}
    if context:
        function_arn = (context.invoked_function_arn or "").lower()
        arn_parts = function_arn.split(":")
        # An 8th ARN segment is a version/alias qualifier: strip it from the
        # ARN tag and surface it as the function version.
        if len(arn_parts) > 7:
            function_arn = ":".join(arn_parts[:7])
            function_version = arn_parts[7]
        else:
            function_version = "$LATEST"
        tags = {
            "cold_start": str(is_cold_start).lower(),
            "function_arn": function_arn,
            "function_version": function_version,
            "request_id": context.aws_request_id,
            "resource_names": context.function_name,
            "functionname": (
                context.function_name.lower() if context.function_name else None
            ),
            "datadog_lambda": datadog_lambda_version,
            "dd_trace": ddtrace_version,
            "span.name": "aws.lambda",
        }
    if is_proactive_init:
        tags["proactive_initialization"] = str(is_proactive_init).lower()
    if trace_context_source == TraceContextSource.XRAY and merge_xray_traces:
        tags["_dd.parent_source"] = trace_context_source
    tags.update(trigger_tags)
    tracer.set_tags({"_dd.origin": "lambda"})
    span = tracer.trace(
        "aws.lambda",
        service="aws.lambda",
        resource=function_name,
        span_type="serverless",
    )
    if span:
        span.set_tags(tags)
    if parent_span:
        span.parent_id = parent_span.span_id
    return span
1260
+
1261
+
1262
def mark_trace_as_error_for_5xx_responses(context, status_code, span):
    """Flag the invocation as errored when the handler returned a 5xx status.

    For any three-character status code beginning with "5", submits the
    Lambda errors metric and marks *span* (when present) as errored.
    """
    is_5xx = len(status_code) == 3 and status_code.startswith("5")
    if not is_5xx:
        return
    submit_errors_metric(context)
    if span:
        span.error = 1
1267
+
1268
+
1269
class InferredSpanInfo(object):
    """Helpers for tagging inferred spans with metadata about how they were
    created: whether the trigger was synchronous or asynchronous, and which
    side (the Lambda library itself, or the span creator) set the tags."""

    BASE_NAME = "_inferred_span"
    SYNCHRONICITY = f"{BASE_NAME}.synchronicity"
    TAG_SOURCE = f"{BASE_NAME}.tag_source"

    @staticmethod
    def set_tags(
        tags: Dict[str, str],
        synchronicity: Optional[Literal["sync", "async"]] = None,
        # Fix: Literal value was misspelled "labmda".
        tag_source: Optional[Literal["lambda", "self"]] = None,
    ):
        """Add the inferred-span metadata keys to a plain tag dict, in place."""
        if synchronicity is not None:
            tags[InferredSpanInfo.SYNCHRONICITY] = str(synchronicity)
        if tag_source is not None:
            tags[InferredSpanInfo.TAG_SOURCE] = str(tag_source)

    @staticmethod
    def set_tags_to_span(
        span: "Span",
        synchronicity: Optional[Literal["sync", "async"]] = None,
        tag_source: Optional[Literal["lambda", "self"]] = None,
    ):
        """Set the inferred-span metadata tags directly on *span*."""
        if synchronicity is not None:
            span.set_tags({InferredSpanInfo.SYNCHRONICITY: synchronicity})
        if tag_source is not None:
            span.set_tags({InferredSpanInfo.TAG_SOURCE: str(tag_source)})

    @staticmethod
    def is_async(span: "Span") -> bool:
        """Return True when *span* carries the "async" synchronicity tag."""
        if not span:
            return False
        try:
            return span.get_tag(InferredSpanInfo.SYNCHRONICITY) == "async"
        except Exception as e:
            # Fix: "Unabled" typo and a backslash-continued string literal
            # that embedded stray indentation whitespace in the log message.
            logger.debug(
                "Unable to read the %s tag, returning False. Reason: %s.",
                InferredSpanInfo.SYNCHRONICITY,
                e,
            )
            return False