datadog_lambda 5.92.0__py3-none-any.whl → 5.93.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
datadog_lambda/tracing.py CHANGED
@@ -5,8 +5,8 @@
  import hashlib
  import logging
  import os
- import json
  import base64
+ import ujson as json
  from datetime import datetime, timezone
  from typing import Optional, Dict

@@ -66,6 +66,8 @@ if dd_tracing_enabled:

  telemetry_writer.enable()

+ is_lambda_context = os.environ.get(XrayDaemon.FUNCTION_NAME_HEADER_NAME) != ""
+
  propagator = HTTPPropagator()

  DD_TRACE_JAVA_TRACE_ID_PADDING = "00000000"
@@ -93,7 +95,7 @@ def _convert_xray_sampling(xray_sampled):


  def _get_xray_trace_context():
- if not is_lambda_context():
+ if not is_lambda_context:
  return None

  xray_trace_entity = parse_xray_header(
@@ -109,11 +111,7 @@ def _get_xray_trace_context():
  logger.debug(
  "Converted trace context %s from X-Ray segment %s",
  trace_context,
- (
- xray_trace_entity["trace_id"],
- xray_trace_entity["parent_id"],
- xray_trace_entity["sampled"],
- ),
+ xray_trace_entity,
  )
  return trace_context

@@ -124,7 +122,9 @@ def _get_dd_trace_py_context():
  return None

  logger.debug(
- "found dd trace context: %s", (span.context.trace_id, span.context.span_id)
+ "found dd trace context: trace_id=%s span_id=%s",
+ span.context.trace_id,
+ span.context.span_id,
  )
  return span.context

@@ -235,37 +235,31 @@ def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):

  # logic to deal with SNS => SQS event
  if "body" in first_record:
- body_str = first_record.get("body", {})
+ body_str = first_record.get("body")
  try:
  body = json.loads(body_str)
  if body.get("Type", "") == "Notification" and "TopicArn" in body:
  logger.debug("Found SNS message inside SQS event")
  first_record = get_first_record(create_sns_event(body))
  except Exception:
- first_record = event.get("Records")[0]
  pass

- msg_attributes = first_record.get(
- "messageAttributes",
- first_record.get("Sns", {}).get("MessageAttributes", {}),
- )
- dd_payload = msg_attributes.get("_datadog", {})
+ msg_attributes = first_record.get("messageAttributes")
+ if msg_attributes is None:
+ sns_record = first_record.get("Sns") or {}
+ msg_attributes = sns_record.get("MessageAttributes") or {}
+ dd_payload = msg_attributes.get("_datadog")
  if dd_payload:
  # SQS uses dataType and binaryValue/stringValue
  # SNS uses Type and Value
  dd_json_data = None
- dd_json_data_type = dd_payload.get("Type", dd_payload.get("dataType", ""))
+ dd_json_data_type = dd_payload.get("Type") or dd_payload.get("dataType")
  if dd_json_data_type == "Binary":
- dd_json_data = dd_payload.get(
- "binaryValue",
- dd_payload.get("Value", r"{}"),
- )
- dd_json_data = base64.b64decode(dd_json_data)
+ dd_json_data = dd_payload.get("binaryValue") or dd_payload.get("Value")
+ if dd_json_data:
+ dd_json_data = base64.b64decode(dd_json_data)
  elif dd_json_data_type == "String":
- dd_json_data = dd_payload.get(
- "stringValue",
- dd_payload.get("Value", r"{}"),
- )
+ dd_json_data = dd_payload.get("stringValue") or dd_payload.get("Value")
  else:
  logger.debug(
  "Datadog Lambda Python only supports extracting trace"
@@ -278,23 +272,25 @@ def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):
  else:
  # Handle case where trace context is injected into attributes.AWSTraceHeader
  # example: Root=1-654321ab-000000001234567890abcdef;Parent=0123456789abcdef;Sampled=1
- x_ray_header = first_record.get("attributes", {}).get("AWSTraceHeader")
- if x_ray_header:
- x_ray_context = parse_xray_header(x_ray_header)
- trace_id_parts = x_ray_context.get("trace_id", "").split("-")
- if len(trace_id_parts) > 2 and trace_id_parts[2].startswith(
- DD_TRACE_JAVA_TRACE_ID_PADDING
- ):
- # If it starts with eight 0's padding,
- # then this AWSTraceHeader contains Datadog injected trace context
- logger.debug(
- "Found dd-trace injected trace context from AWSTraceHeader"
- )
- return Context(
- trace_id=int(trace_id_parts[2][8:], 16),
- span_id=int(int(x_ray_context["parent_id"], 16)),
- sampling_priority=float(x_ray_context["sampled"]),
- )
+ attrs = first_record.get("attributes")
+ if attrs:
+ x_ray_header = attrs.get("AWSTraceHeader")
+ if x_ray_header:
+ x_ray_context = parse_xray_header(x_ray_header)
+ trace_id_parts = x_ray_context.get("trace_id", "").split("-")
+ if len(trace_id_parts) > 2 and trace_id_parts[2].startswith(
+ DD_TRACE_JAVA_TRACE_ID_PADDING
+ ):
+ # If it starts with eight 0's padding,
+ # then this AWSTraceHeader contains Datadog injected trace context
+ logger.debug(
+ "Found dd-trace injected trace context from AWSTraceHeader"
+ )
+ return Context(
+ trace_id=int(trace_id_parts[2][8:], 16),
+ span_id=int(x_ray_context["parent_id"], 16),
+ sampling_priority=float(x_ray_context["sampled"]),
+ )
  return extract_context_from_lambda_context(lambda_context)
  except Exception as e:
  logger.debug("The trace extractor returned with error %s", e)
@@ -339,21 +335,22 @@ def extract_context_from_kinesis_event(event, lambda_context):
  """
  try:
  record = get_first_record(event)
- data = record.get("kinesis", {}).get("data", None)
+ kinesis = record.get("kinesis")
+ if not kinesis:
+ return extract_context_from_lambda_context(lambda_context)
+ data = kinesis.get("data")
  if data:
  b64_bytes = data.encode("ascii")
  str_bytes = base64.b64decode(b64_bytes)
  data_str = str_bytes.decode("ascii")
  data_obj = json.loads(data_str)
  dd_ctx = data_obj.get("_datadog")
-
- if not dd_ctx:
- return extract_context_from_lambda_context(lambda_context)
-
- return propagator.extract(dd_ctx)
+ if dd_ctx:
+ return propagator.extract(dd_ctx)
  except Exception as e:
  logger.debug("The trace extractor returned with error %s", e)
- return extract_context_from_lambda_context(lambda_context)
+
+ return extract_context_from_lambda_context(lambda_context)


  def _deterministic_md5_hash(s: str) -> int:
@@ -380,7 +377,7 @@ def extract_context_from_step_functions(event, lambda_context):
  state_entered_time = event.get("State").get("EnteredTime")
  trace_id = _deterministic_md5_hash(execution_id)
  parent_id = _deterministic_md5_hash(
- execution_id + "#" + state_name + "#" + state_entered_time
+ f"{execution_id}#{state_name}#{state_entered_time}"
  )
  sampling_priority = SamplingPriority.AUTO_KEEP
  return Context(
@@ -396,11 +393,7 @@ def extract_context_custom_extractor(extractor, event, lambda_context):
  Extract Datadog trace context using a custom trace extractor function
  """
  try:
- (
- trace_id,
- parent_id,
- sampling_priority,
- ) = extractor(event, lambda_context)
+ trace_id, parent_id, sampling_priority = extractor(event, lambda_context)
  return Context(
  trace_id=int(trace_id),
  span_id=int(parent_id),
@@ -426,15 +419,20 @@ def is_authorizer_response(response) -> bool:

  def get_injected_authorizer_data(event, is_http_api) -> dict:
  try:
- authorizer_headers = event.get("requestContext", {}).get("authorizer")
+ req_ctx = event.get("requestContext")
+ if not req_ctx:
+ return None
+ authorizer_headers = req_ctx.get("authorizer")
  if not authorizer_headers:
  return None

- dd_data_raw = (
- authorizer_headers.get("lambda", {}).get("_datadog")
- if is_http_api
- else authorizer_headers.get("_datadog")
- )
+ if is_http_api:
+ lambda_hdr = authorizer_headers.get("lambda")
+ if not lambda_hdr:
+ return None
+ dd_data_raw = lambda_hdr.get("_datadog")
+ else:
+ dd_data_raw = authorizer_headers.get("_datadog")

  if not dd_data_raw:
  return None
@@ -448,16 +446,19 @@ def get_injected_authorizer_data(event, is_http_api) -> dict:
  # that case, we use the injected Authorizing_Request_Id to tell if it's cached. But token
  # authorizers don't pass on the requestId. The Authorizing_Request_Id can't work for all
  # cases neither. As a result, we combine both methods as shown below.
- if authorizer_headers.get("integrationLatency", 0) > 0 or event.get(
- "requestContext", {}
- ).get("requestId") == injected_data.get(Headers.Authorizing_Request_Id):
+ if authorizer_headers.get("integrationLatency", 0) > 0:
  return injected_data
- else:
+ req_ctx = event.get("requestContext")
+ if not req_ctx:
  return None
+ if req_ctx.get("requestId") == injected_data.get(
+ Headers.Authorizing_Request_Id
+ ):
+ return injected_data
+ return None

  except Exception as e:
  logger.debug("Failed to check if invocated by an authorizer. error %s", e)
- return None


  def extract_dd_trace_context(
@@ -529,8 +530,8 @@ def get_dd_trace_context_obj():
  xray_context = _get_xray_trace_context() # xray (sub)segment
  except Exception as e:
  logger.debug(
- "get_dd_trace_context couldn't read from segment from x-ray, with error %s"
- % e
+ "get_dd_trace_context couldn't read from segment from x-ray, with error %s",
+ e,
  )
  if not xray_context:
  return None
@@ -569,7 +570,7 @@ def set_correlation_ids():

  TODO: Remove me when Datadog tracer is natively supported in Lambda.
  """
- if not is_lambda_context():
+ if not is_lambda_context:
  logger.debug("set_correlation_ids is only supported in LambdaContext")
  return
  if dd_tracing_enabled:
@@ -613,14 +614,6 @@ def inject_correlation_ids():
  logger.debug("logs injection configured")


- def is_lambda_context():
- """
- Return True if the X-Ray context is `LambdaContext`, rather than the
- regular `Context` (e.g., when testing lambda functions locally).
- """
- return os.environ.get(XrayDaemon.FUNCTION_NAME_HEADER_NAME, "") != ""
-
-
  def set_dd_trace_py_root(trace_context_source, merge_xray_traces):
  if trace_context_source == TraceContextSource.EVENT or merge_xray_traces:
  context = Context(
@@ -635,8 +628,9 @@ def set_dd_trace_py_root(trace_context_source, merge_xray_traces):

  tracer.context_provider.activate(context)
  logger.debug(
- "Set dd trace root context to: %s",
- (context.trace_id, context.span_id),
+ "Set dd trace root context to: trace_id=%s span_id=%s",
+ context.trace_id,
+ context.span_id,
  )

@@ -697,9 +691,7 @@ def create_inferred_span(
  event_source.to_string(),
  e,
  )
- return None
  logger.debug("Unable to infer a span: unknown event type")
- return None


  def create_service_mapping(val):
@@ -721,20 +713,22 @@ def determine_service_name(service_mapping, specific_key, generic_key, default_v
  return service_name


- service_mapping = {}
  # Initialization code
  service_mapping_str = os.getenv("DD_SERVICE_MAPPING", "")
  service_mapping = create_service_mapping(service_mapping_str)

+ _dd_origin = {"_dd.origin": "lambda"}
+

  def create_inferred_span_from_lambda_function_url_event(event, context):
  request_context = event.get("requestContext")
  api_id = request_context.get("apiId")
  domain = request_context.get("domainName")
  service_name = determine_service_name(service_mapping, api_id, "lambda_url", domain)
- method = request_context.get("http", {}).get("method")
- path = request_context.get("http", {}).get("path")
- resource = "{0} {1}".format(method, path)
+ http = request_context.get("http")
+ method = http.get("method") if http else None
+ path = http.get("path") if http else None
+ resource = f"{method} {path}"
  tags = {
  "operation_name": "aws.lambda.url",
  "http.url": domain + path,
@@ -744,25 +738,23 @@ def create_inferred_span_from_lambda_function_url_event(event, context):
  "request_id": context.aws_request_id,
  }
  request_time_epoch = request_context.get("timeEpoch")
- args = {
- "service": service_name,
- "resource": resource,
- "span_type": "http",
- }
- tracer.set_tags(
- {"_dd.origin": "lambda"}
- ) # function urls don't count as lambda_inferred,
+ tracer.set_tags(_dd_origin) # function urls don't count as lambda_inferred,
  # because they're in the same service as the inferring lambda function
- span = tracer.trace("aws.lambda.url", **args)
+ span = tracer.trace(
+ "aws.lambda.url", service=service_name, resource=resource, span_type="http"
+ )
  InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
  if span:
  span.set_tags(tags)
- span.start = request_time_epoch / 1000
+ span.start_ns = int(request_time_epoch) * 1e6
  return span


  def is_api_gateway_invocation_async(event):
- return event.get("headers", {}).get("X-Amz-Invocation-Type") == "Event"
+ hdrs = event.get("headers")
+ if not hdrs:
+ return False
+ return hdrs.get("X-Amz-Invocation-Type") == "Event"


  def insert_upstream_authorizer_span(
@@ -862,7 +854,7 @@ def create_inferred_span_from_api_gateway_websocket_event(
  "resource": endpoint,
  "span_type": "web",
  }
- tracer.set_tags({"_dd.origin": "lambda"})
+ tracer.set_tags(_dd_origin)
  upstream_authorizer_span = None
  finish_time_ns = None
  if decode_authorizer_context:
@@ -893,7 +885,8 @@ def create_inferred_span_from_api_gateway_event(
  )
  method = event.get("httpMethod")
  path = event.get("path")
- resource = "{0} {1}".format(method, path)
+ resource_path = _get_resource_path(event, request_context)
+ resource = f"{method} {resource_path}"
  tags = {
  "operation_name": "aws.apigateway.rest",
  "http.url": domain + path,
@@ -915,7 +908,7 @@ def create_inferred_span_from_api_gateway_event(
  "resource": resource,
  "span_type": "http",
  }
- tracer.set_tags({"_dd.origin": "lambda"})
+ tracer.set_tags(_dd_origin)
  upstream_authorizer_span = None
  finish_time_ns = None
  if decode_authorizer_context:
@@ -936,6 +929,16 @@ def create_inferred_span_from_api_gateway_event(
  return span


+ def _get_resource_path(event, request_context):
+ route_key = request_context.get("routeKey") or ""
+ if "{" in route_key:
+ try:
+ return route_key.split(" ")[1]
+ except Exception as e:
+ logger.debug("Error parsing routeKey: %s", e)
+ return event.get("rawPath") or request_context.get("resourcePath") or route_key
+
+
  def create_inferred_span_from_http_api_event(
  event, context, decode_authorizer_context: bool = True
  ):
@@ -945,17 +948,19 @@ def create_inferred_span_from_http_api_event(
  service_name = determine_service_name(
  service_mapping, api_id, "lambda_api_gateway", domain
  )
- method = request_context.get("http", {}).get("method")
+ http = request_context.get("http") or {}
+ method = http.get("method")
  path = event.get("rawPath")
- resource = "{0} {1}".format(method, path)
+ resource_path = _get_resource_path(event, request_context)
+ resource = f"{method} {resource_path}"
  tags = {
  "operation_name": "aws.httpapi",
  "endpoint": path,
  "http.url": domain + path,
- "http.method": request_context.get("http", {}).get("method"),
- "http.protocol": request_context.get("http", {}).get("protocol"),
- "http.source_ip": request_context.get("http", {}).get("sourceIp"),
- "http.user_agent": request_context.get("http", {}).get("userAgent"),
+ "http.method": http.get("method"),
+ "http.protocol": http.get("protocol"),
+ "http.source_ip": http.get("sourceIp"),
+ "http.user_agent": http.get("userAgent"),
  "resource_names": resource,
  "request_id": context.aws_request_id,
  "apiid": api_id,
@@ -967,12 +972,7 @@ def create_inferred_span_from_http_api_event(
  InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
  else:
  InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
- args = {
- "service": service_name,
- "resource": resource,
- "span_type": "http",
- }
- tracer.set_tags({"_dd.origin": "lambda"})
+ tracer.set_tags(_dd_origin)
  inferred_span_start_ns = request_time_epoch_ms * 1e6
  if decode_authorizer_context:
  injected_authorizer_data = get_injected_authorizer_data(event, True)
@@ -980,7 +980,9 @@ def create_inferred_span_from_http_api_event(
  inferred_span_start_ns = injected_authorizer_data.get(
  Headers.Parent_Span_Finish_Time
  )
- span = tracer.trace("aws.httpapi", **args)
+ span = tracer.trace(
+ "aws.httpapi", service=service_name, resource=resource, span_type="http"
+ )
  if span:
  span.set_tags(tags)
  span.start_ns = int(inferred_span_start_ns)
@@ -996,21 +998,17 @@ def create_inferred_span_from_sqs_event(event, context):
  service_name = determine_service_name(
  service_mapping, queue_name, "lambda_sqs", "sqs"
  )
+ attrs = event_record.get("attributes") or {}
  tags = {
  "operation_name": "aws.sqs",
  "resource_names": queue_name,
  "queuename": queue_name,
  "event_source_arn": event_source_arn,
  "receipt_handle": event_record.get("receiptHandle"),
- "sender_id": event_record.get("attributes", {}).get("SenderId"),
+ "sender_id": attrs.get("SenderId"),
  }
  InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
- request_time_epoch = event_record.get("attributes", {}).get("SentTimestamp")
- args = {
- "service": service_name,
- "resource": queue_name,
- "span_type": "web",
- }
+ request_time_epoch = attrs.get("SentTimestamp")
  start_time = int(request_time_epoch) / 1000

  upstream_span = None
@@ -1039,15 +1037,17 @@ def create_inferred_span_from_sqs_event(event, context):

  except Exception as e:
  logger.debug(
- "Unable to create upstream span from SQS message, with error %s" % e
+ "Unable to create upstream span from SQS message, with error %s", e
  )
  pass

  # trace context needs to be set again as it is reset
  # when sns_span.finish executes
  tracer.context_provider.activate(trace_ctx)
- tracer.set_tags({"_dd.origin": "lambda"})
- span = tracer.trace("aws.sqs", **args)
+ tracer.set_tags(_dd_origin)
+ span = tracer.trace(
+ "aws.sqs", service=service_name, resource=queue_name, span_type="web"
+ )
  if span:
  span.set_tags(tags)
  span.start = start_time
@@ -1059,8 +1059,8 @@ def create_inferred_span_from_sqs_event(event, context):

  def create_inferred_span_from_sns_event(event, context):
  event_record = get_first_record(event)
- sns_message = event_record.get("Sns")
- topic_arn = event_record.get("Sns", {}).get("TopicArn")
+ sns_message = event_record.get("Sns") or {}
+ topic_arn = sns_message.get("TopicArn")
  topic_name = topic_arn.split(":")[-1]
  service_name = determine_service_name(
  service_mapping, topic_name, "lambda_sns", "sns"
@@ -1075,21 +1075,19 @@ def create_inferred_span_from_sns_event(event, context):
  }

  # Subject not available in SNS => SQS scenario
- if "Subject" in sns_message and sns_message["Subject"]:
- tags["subject"] = sns_message.get("Subject")
+ subject = sns_message.get("Subject")
+ if subject:
+ tags["subject"] = subject

  InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
  sns_dt_format = "%Y-%m-%dT%H:%M:%S.%fZ"
- timestamp = event_record.get("Sns", {}).get("Timestamp")
+ timestamp = sns_message.get("Timestamp")
  dt = datetime.strptime(timestamp, sns_dt_format)

- args = {
- "service": service_name,
- "resource": topic_name,
- "span_type": "web",
- }
- tracer.set_tags({"_dd.origin": "lambda"})
- span = tracer.trace("aws.sns", **args)
+ tracer.set_tags(_dd_origin)
+ span = tracer.trace(
+ "aws.sns", service=service_name, resource=topic_name, span_type="web"
+ )
  if span:
  span.set_tags(tags)
  span.start = dt.replace(tzinfo=timezone.utc).timestamp()
@@ -1105,6 +1103,7 @@ def create_inferred_span_from_kinesis_event(event, context):
  service_name = determine_service_name(
  service_mapping, stream_name, "lambda_kinesis", "kinesis"
  )
+ kinesis = event_record.get("kinesis") or {}
  tags = {
  "operation_name": "aws.kinesis",
  "resource_names": stream_name,
@@ -1114,20 +1113,15 @@ def create_inferred_span_from_kinesis_event(event, context):
  "event_id": event_id,
  "event_name": event_record.get("eventName"),
  "event_version": event_record.get("eventVersion"),
- "partition_key": event_record.get("kinesis", {}).get("partitionKey"),
+ "partition_key": kinesis.get("partitionKey"),
  }
  InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
- request_time_epoch = event_record.get("kinesis", {}).get(
- "approximateArrivalTimestamp"
- )
+ request_time_epoch = kinesis.get("approximateArrivalTimestamp")

- args = {
- "service": service_name,
- "resource": stream_name,
- "span_type": "web",
- }
- tracer.set_tags({"_dd.origin": "lambda"})
- span = tracer.trace("aws.kinesis", **args)
+ tracer.set_tags(_dd_origin)
+ span = tracer.trace(
+ "aws.kinesis", service=service_name, resource=stream_name, span_type="web"
+ )
  if span:
  span.set_tags(tags)
  span.start = request_time_epoch
@@ -1141,7 +1135,7 @@ def create_inferred_span_from_dynamodb_event(event, context):
  service_name = determine_service_name(
  service_mapping, table_name, "lambda_dynamodb", "dynamodb"
  )
- dynamodb_message = event_record.get("dynamodb")
+ dynamodb_message = event_record.get("dynamodb") or {}
  tags = {
  "operation_name": "aws.dynamodb",
  "resource_names": table_name,
@@ -1154,16 +1148,11 @@ def create_inferred_span_from_dynamodb_event(event, context):
  "size_bytes": str(dynamodb_message.get("SizeBytes")),
  }
  InferredSpanInfo.set_tags(tags, synchronicity="async", tag_source="self")
- request_time_epoch = event_record.get("dynamodb", {}).get(
- "ApproximateCreationDateTime"
+ request_time_epoch = dynamodb_message.get("ApproximateCreationDateTime")
+ tracer.set_tags(_dd_origin)
+ span = tracer.trace(
+ "aws.dynamodb", service=service_name, resource=table_name, span_type="web"
  )
- args = {
- "service": service_name,
- "resource": table_name,
- "span_type": "web",
- }
- tracer.set_tags({"_dd.origin": "lambda"})
- span = tracer.trace("aws.dynamodb", **args)
  if span:
  span.set_tags(tags)

@@ -1173,7 +1162,10 @@ def create_inferred_span_from_dynamodb_event(event, context):

  def create_inferred_span_from_s3_event(event, context):
  event_record = get_first_record(event)
- bucket_name = event_record.get("s3", {}).get("bucket", {}).get("name")
+ s3 = event_record.get("s3") or {}
+ bucket = s3.get("bucket") or {}
+ obj = s3.get("object") or {}
+ bucket_name = bucket.get("name")
  service_name = determine_service_name(
  service_mapping, bucket_name, "lambda_s3", "s3"
  )
@@ -1182,23 +1174,20 @@ def create_inferred_span_from_s3_event(event, context):
  "resource_names": bucket_name,
  "event_name": event_record.get("eventName"),
  "bucketname": bucket_name,
- "bucket_arn": event_record.get("s3", {}).get("bucket", {}).get("arn"),
- "object_key": event_record.get("s3", {}).get("object", {}).get("key"),
- "object_size": str(event_record.get("s3", {}).get("object", {}).get("size")),
- "object_etag": event_record.get("s3", {}).get("object", {}).get("eTag"),
+ "bucket_arn": bucket.get("arn"),
+ "object_key": obj.get("key"),
+ "object_size": str(obj.get("size")),
+ "object_etag": obj.get("eTag"),
  }
  InferredSpanInfo.set_tags(tags, synchronicity="async", tag_source="self")
  dt_format = "%Y-%m-%dT%H:%M:%S.%fZ"
  timestamp = event_record.get("eventTime")
  dt = datetime.strptime(timestamp, dt_format)

- args = {
- "service": service_name,
- "resource": bucket_name,
- "span_type": "web",
- }
- tracer.set_tags({"_dd.origin": "lambda"})
- span = tracer.trace("aws.s3", **args)
+ tracer.set_tags(_dd_origin)
+ span = tracer.trace(
+ "aws.s3", service=service_name, resource=bucket_name, span_type="web"
+ )
  if span:
  span.set_tags(tags)
  span.start = dt.replace(tzinfo=timezone.utc).timestamp()
@@ -1224,13 +1213,10 @@ def create_inferred_span_from_eventbridge_event(event, context):
  timestamp = event.get("time")
  dt = datetime.strptime(timestamp, dt_format)

- args = {
- "service": service_name,
- "resource": source,
- "span_type": "web",
- }
- tracer.set_tags({"_dd.origin": "lambda"})
- span = tracer.trace("aws.eventbridge", **args)
+ tracer.set_tags(_dd_origin)
+ span = tracer.trace(
+ "aws.eventbridge", service=service_name, resource=source, span_type="web"
+ )
  if span:
  span.set_tags(tags)
  span.start = dt.replace(tzinfo=timezone.utc).timestamp()
@@ -1247,7 +1233,7 @@ def create_function_execution_span(
  trigger_tags,
  parent_span=None,
  ):
- tags = {}
+ tags = None
  if context:
  function_arn = (context.invoked_function_arn or "").lower()
  tk = function_arn.split(":")
@@ -1266,18 +1252,19 @@ def create_function_execution_span(
  "dd_trace": ddtrace_version,
  "span.name": "aws.lambda",
  }
+ tags = tags or {}
  if is_proactive_init:
  tags["proactive_initialization"] = str(is_proactive_init).lower()
  if trace_context_source == TraceContextSource.XRAY and merge_xray_traces:
  tags["_dd.parent_source"] = trace_context_source
  tags.update(trigger_tags)
- args = {
- "service": "aws.lambda",
- "resource": function_name,
- "span_type": "serverless",
- }
- tracer.set_tags({"_dd.origin": "lambda"})
- span = tracer.trace("aws.lambda", **args)
+ tracer.set_tags(_dd_origin)
+ span = tracer.trace(
+ "aws.lambda",
+ service="aws.lambda",
+ resource=function_name,
+ span_type="serverless",
+ )
  if span:
  span.set_tags(tags)
  if parent_span: