datadog_lambda 6.101.0__py3-none-any.whl → 6.103.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
datadog_lambda/constants.py CHANGED
@@ -3,9 +3,8 @@
 # This product includes software developed at Datadog (https://www.datadoghq.com/).
 # Copyright 2019 Datadog, Inc.
 
-# Datadog trace sampling priority
-
 
+# Datadog trace sampling priority
 class SamplingPriority(object):
     USER_REJECT = -1
     AUTO_REJECT = 0
@@ -18,6 +17,7 @@ class TraceHeader(object):
     TRACE_ID = "x-datadog-trace-id"
     PARENT_ID = "x-datadog-parent-id"
     SAMPLING_PRIORITY = "x-datadog-sampling-priority"
+    TAGS = "x-datadog-tags"
 
 
 # X-Ray subsegment to save Datadog trace metadata
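The new TraceHeader.TAGS constant names the x-datadog-tags propagation header, whose value is a comma-separated list of _dd.p.* tags. A minimal sketch of how such a header value can be split apart (illustrative only; parse_datadog_tags_header and the sample value are not part of the package):

# Minimal sketch (not part of the package): split an "x-datadog-tags" header
# value, a comma-separated list of _dd.p.* tags, into a dict.
def parse_datadog_tags_header(value: str) -> dict:
    tags = {}
    for item in value.split(","):
        if "=" in item:
            key, _, val = item.partition("=")
            tags[key] = val
    return tags


if __name__ == "__main__":
    sample = "_dd.p.tid=66bcb5eb00000000,_dd.p.dm=-0"  # illustrative value
    print(parse_datadog_tags_header(sample))  # {'_dd.p.tid': '66bcb5eb00000000', '_dd.p.dm': '-0'}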
datadog_lambda/span_pointers.py CHANGED
@@ -2,6 +2,7 @@ from itertools import chain
 import logging
 import os
 from typing import List
+from typing import Optional
 
 from ddtrace._trace._span_pointer import _SpanPointerDirection
 from ddtrace._trace._span_pointer import _SpanPointerDescription
@@ -30,7 +31,7 @@ def calculate_span_pointers(
                 return _calculate_dynamodb_span_pointers_for_event(event)
 
     except Exception as e:
-        logger.warning(
+        logger.debug(
            "failed to calculate span pointers for event: %s",
            e,
        )
@@ -75,7 +76,7 @@ def _calculate_s3_span_pointers_for_object_created_s3_information(
         etag = s3_information["object"]["eTag"]
 
     except KeyError as e:
-        logger.warning(
+        logger.debug(
             "missing s3 information required to make a span pointer: %s",
             e,
         )
@@ -86,17 +87,31 @@ def _calculate_s3_span_pointers_for_object_created_s3_information(
             _aws_s3_object_span_pointer_description,
         )
 
-        return [
-            _aws_s3_object_span_pointer_description(
+        try:
+            span_pointer_description = _aws_s3_object_span_pointer_description(
+                operation="S3.LambdaEvent",
                 pointer_direction=_SpanPointerDirection.UPSTREAM,
                 bucket=bucket,
                 key=key,
                 etag=etag,
             )
-        ]
+        except TypeError:
+            # The older version of this function did not have an operation
+            # parameter.
+            span_pointer_description = _aws_s3_object_span_pointer_description(
+                pointer_direction=_SpanPointerDirection.UPSTREAM,
+                bucket=bucket,
+                key=key,
+                etag=etag,
+            )
+
+        if span_pointer_description is None:
+            return []
+
+        return [span_pointer_description]
 
     except Exception as e:
-        logger.warning(
+        logger.debug(
             "failed to generate S3 span pointer: %s",
             e,
         )
@@ -120,10 +135,13 @@ def _calculate_dynamodb_span_pointers_for_event_record(
 ) -> List[_SpanPointerDescription]:
     try:
         table_name = _extract_table_name_from_dynamodb_stream_record(record)
+        if table_name is None:
+            return []
+
         primary_key = record["dynamodb"]["Keys"]
 
     except Exception as e:
-        logger.warning(
+        logger.debug(
             "missing DynamoDB information required to make a span pointer: %s",
             e,
         )
@@ -134,23 +152,36 @@ def _calculate_dynamodb_span_pointers_for_event_record(
             _aws_dynamodb_item_span_pointer_description,
         )
 
-        return [
-            _aws_dynamodb_item_span_pointer_description(
+        try:
+            span_pointer_description = _aws_dynamodb_item_span_pointer_description(
+                operation="DynamoDB.LambdaEvent",
                 pointer_direction=_SpanPointerDirection.UPSTREAM,
                 table_name=table_name,
                 primary_key=primary_key,
             )
-        ]
+        except TypeError:
+            # The older version of this function did not have an operation
+            # parameter.
+            span_pointer_description = _aws_dynamodb_item_span_pointer_description(
+                pointer_direction=_SpanPointerDirection.UPSTREAM,
+                table_name=table_name,
+                primary_key=primary_key,
+            )
+
+        if span_pointer_description is None:
+            return []
+
+        return [span_pointer_description]
 
     except Exception as e:
-        logger.warning(
+        logger.debug(
             "failed to generate DynamoDB span pointer: %s",
             e,
         )
         return []
 
 
-def _extract_table_name_from_dynamodb_stream_record(record) -> str:
+def _extract_table_name_from_dynamodb_stream_record(record) -> Optional[str]:
     # Example eventSourceARN:
     # arn:aws:dynamodb:us-east-2:123456789012:table/my-table/stream/2024-06-10T19:26:16.525
     event_source_arn = record["eventSourceARN"]
@@ -159,10 +190,12 @@ def _extract_table_name_from_dynamodb_stream_record(record) -> str:
        ":", maxsplit=5
    )
    if _arn != "arn" or _aws != "aws" or _dynamodb != "dynamodb":
-        raise ValueError(f"unexpected eventSourceARN format: {event_source_arn}")
+        logger.debug("unexpected eventSourceARN format: %s", event_source_arn)
+        return None
 
    [_table, table_name, _stream, _timestamp] = dynamodb_info.split("/")
    if _table != "table" or _stream != "stream":
-        raise ValueError(f"unexpected eventSourceARN format: {event_source_arn}")
+        logger.debug("unexpected eventSourceARN format: %s", event_source_arn)
+        return None
 
    return table_name
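The span_pointers changes above call the ddtrace helpers with a new operation keyword and fall back when an older ddtrace raises TypeError because it does not accept that argument. A generic sketch of this feature-detection pattern, with a hypothetical _describe helper standing in for the ddtrace function:

# Sketch of the try/except TypeError fallback used above; _describe is a
# hypothetical stand-in for the ddtrace helper, not a real ddtrace API.
def _describe(pointer_direction, bucket, key, etag, operation=None):
    # Pretend this is the newer signature; an older release would lack `operation`.
    return {"dir": pointer_direction, "bucket": bucket, "key": key, "etag": etag}


def build_description(**kwargs):
    try:
        # Prefer the newer signature that accepts `operation`.
        return _describe(operation="S3.LambdaEvent", **kwargs)
    except TypeError:
        # Older signature: retry without the extra keyword.
        return _describe(**kwargs)


print(build_description(pointer_direction="UPSTREAM", bucket="b", key="k", etag="e"))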
datadog_lambda/tracing.py CHANGED
@@ -356,9 +356,8 @@ def extract_context_from_kinesis_event(event, lambda_context):
         return extract_context_from_lambda_context(lambda_context)
 
 
-def _deterministic_sha256_hash(s: str, part: str) -> (int, int):
+def _deterministic_sha256_hash(s: str, part: str) -> int:
     sha256_hash = hashlib.sha256(s.encode()).hexdigest()
-
     # First two chars is '0b'. zfill to ensure 256 bits, but we only care about the first 128 bits
     binary_hash = bin(int(sha256_hash, 16))[2:].zfill(256)
     if part == HIGHER_64_BITS:
@@ -371,36 +370,88 @@ def _deterministic_sha256_hash(s: str, part: str) -> (int, int):
     return result
 
 
+def _parse_high_64_bits(trace_tags: str) -> str:
+    """
+    Parse a list of trace tags such as [_dd.p.tid=66bcb5eb00000000,_dd.p.dm=-0] and return the
+    value of the _dd.p.tid tag or an empty string if not found.
+    """
+    if trace_tags:
+        for tag in trace_tags.split(","):
+            if "_dd.p.tid=" in tag:
+                return tag.split("=")[1]
+
+    return ""
+
+
+def _generate_sfn_parent_id(context: dict) -> int:
+    execution_id = context.get("Execution").get("Id")
+    state_name = context.get("State").get("Name")
+    state_entered_time = context.get("State").get("EnteredTime")
+
+    return _deterministic_sha256_hash(
+        f"{execution_id}#{state_name}#{state_entered_time}", HIGHER_64_BITS
+    )
+
+
+def _generate_sfn_trace_id(execution_id: str, part: str):
+    """
+    Take the SHA-256 hash of the execution_id to calculate the trace ID. If the high 64 bits are
+    specified, we take those bits and use hex to encode it. We also remove the first two characters
+    as they will be '0x in the hex string.
+
+    We care about full 128 bits because they will break up into traditional traceID and
+    _dd.p.tid tag.
+    """
+    if part == HIGHER_64_BITS:
+        return hex(_deterministic_sha256_hash(execution_id, part))[2:]
+    return _deterministic_sha256_hash(execution_id, part)
+
+
 def extract_context_from_step_functions(event, lambda_context):
     """
     Only extract datadog trace context when Step Functions Context Object is injected
     into lambda's event dict.
+
+    If '_datadog' header is present, we have two cases:
+      1. Root is a Lambda and we use its traceID
+      2. Root is a SFN, and we use its executionARN to calculate the traceID
+    We calculate the parentID the same in both cases by using the parent SFN's context object.
+
+    Otherwise, we're dealing with the legacy case where we only have the parent SFN's context
+    object.
     """
     try:
-        execution_id = event.get("Execution").get("Id")
-        state_name = event.get("State").get("Name")
-        state_entered_time = event.get("State").get("EnteredTime")
-        # returning 128 bits since 128bit traceId will be break up into
-        # traditional traceId and _dd.p.tid tag
-        # https://github.com/DataDog/dd-trace-py/blob/3e34d21cb9b5e1916e549047158cb119317b96ab/ddtrace/propagation/http.py#L232-L240
-        trace_id = _deterministic_sha256_hash(execution_id, LOWER_64_BITS)
-
-        parent_id = _deterministic_sha256_hash(
-            f"{execution_id}#{state_name}#{state_entered_time}", HIGHER_64_BITS
-        )
+        meta = {}
+        dd_data = event.get("_datadog")
+
+        if dd_data and dd_data.get("serverless-version") == "v1":
+            if "x-datadog-trace-id" in dd_data:  # lambda root
+                trace_id = int(dd_data.get("x-datadog-trace-id"))
+                high_64_bit_trace_id = _parse_high_64_bits(
+                    dd_data.get("x-datadog-tags")
+                )
+                if high_64_bit_trace_id:
+                    meta["_dd.p.tid"] = high_64_bit_trace_id
+            else:  # sfn root
+                root_execution_id = dd_data.get("RootExecutionId")
+                trace_id = _generate_sfn_trace_id(root_execution_id, LOWER_64_BITS)
+                meta["_dd.p.tid"] = _generate_sfn_trace_id(
+                    root_execution_id, HIGHER_64_BITS
+                )
+
+            parent_id = _generate_sfn_parent_id(dd_data)
+        else:
+            execution_id = event.get("Execution").get("Id")
+            trace_id = _generate_sfn_trace_id(execution_id, LOWER_64_BITS)
+            meta["_dd.p.tid"] = _generate_sfn_trace_id(execution_id, HIGHER_64_BITS)
+            parent_id = _generate_sfn_parent_id(event)
 
         sampling_priority = SamplingPriority.AUTO_KEEP
         return Context(
             trace_id=trace_id,
             span_id=parent_id,
             sampling_priority=sampling_priority,
-            # take the higher 64 bits as _dd.p.tid tag and use hex to encode
-            # [2:] to remove '0x' in the hex str
-            meta={
-                "_dd.p.tid": hex(
-                    _deterministic_sha256_hash(execution_id, HIGHER_64_BITS)
-                )[2:]
-            },
+            meta=meta,
         )
     except Exception as e:
         logger.debug("The Step Functions trace extractor returned with error %s", e)
@@ -415,7 +466,10 @@ def is_legacy_lambda_step_function(event):
         return False
 
     event = event.get("Payload")
-    return "Execution" in event and "StateMachine" in event and "State" in event
+    return isinstance(event, dict) and (
+        "_datadog" in event
+        or ("Execution" in event and "StateMachine" in event and "State" in event)
+    )
 
 
 def extract_context_custom_extractor(extractor, event, lambda_context):
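In tracing.py, _generate_sfn_trace_id derives a deterministic 128-bit identifier from the Step Functions execution ARN: the lower 64 bits become the span context's trace_id and the upper 64 bits travel as the hex-encoded _dd.p.tid tag. A simplified sketch of that split using hashlib directly (the exact bit selection in the package may differ; the ARN below is illustrative):

import hashlib

# Sketch (not the package's code): hash an execution ARN to 128 bits and split
# it into an integer trace_id (low 64 bits) and a hex _dd.p.tid (high 64 bits).
def split_trace_id(execution_arn: str) -> tuple:
    digest = hashlib.sha256(execution_arn.encode()).digest()
    top_128 = int.from_bytes(digest[:16], "big")  # keep only the first 128 bits
    low_64 = top_128 & ((1 << 64) - 1)
    high_64 = top_128 >> 64
    return low_64, format(high_64, "x")


trace_id, dd_p_tid = split_trace_id(
    "arn:aws:states:us-east-1:123456789012:execution:my-machine:example-run"
)
print(trace_id, dd_p_tid)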
datadog_lambda/trigger.py CHANGED
@@ -146,7 +146,9 @@ def parse_event_source(event: dict) -> _EventSource:
     if event.get("source") == "aws.events" or has_event_categories:
         event_source = _EventSource(EventTypes.CLOUDWATCH_EVENTS)
 
-    if "Execution" in event and "StateMachine" in event and "State" in event:
+    if (
+        "_datadog" in event and event.get("_datadog").get("serverless-version") == "v1"
+    ) or ("Execution" in event and "StateMachine" in event and "State" in event):
         event_source = _EventSource(EventTypes.STEPFUNCTIONS)
 
     event_record = get_first_record(event)
@@ -254,6 +256,13 @@ def parse_event_source_arn(source: _EventSource, event: dict, context: Any) -> s
     if source.event_type == EventTypes.CLOUDWATCH_EVENTS and event.get("resources"):
         return event.get("resources")[0]
 
+    # Returning state machine arn as event source arn.
+    if source.event_type == EventTypes.STEPFUNCTIONS:
+        context = event
+        if "_datadog" in event:
+            context = event.get("_datadog")
+        return context.get("StateMachine").get("Id")
+
 
 def get_event_source_arn(source: _EventSource, event: dict, context: Any) -> str:
     event_source_arn = event.get("eventSourceARN") or event.get("eventSourceArn")
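With the trigger.py changes, an event counts as a Step Functions invocation either when it carries the legacy context-object keys (Execution, StateMachine, State) at the top level or when it nests them under a _datadog object tagged serverless-version "v1", and the state machine ARN is then read from whichever shape is present. A standalone sketch with illustrative payloads (the helpers and ARNs below are not the package's own):

# Standalone sketch: the two Step Functions event shapes handled above and how
# the state machine ARN is looked up from whichever shape is present.
LEGACY_EVENT = {
    "Execution": {"Id": "arn:aws:states:us-east-1:123456789012:execution:sm:run"},
    "StateMachine": {"Id": "arn:aws:states:us-east-1:123456789012:stateMachine:sm"},
    "State": {"Name": "step-one", "EnteredTime": "2024-06-10T19:26:16.525Z"},
}

NESTED_EVENT = {
    "_datadog": {
        "serverless-version": "v1",
        "Execution": {"Id": "arn:aws:states:us-east-1:123456789012:execution:sm:run"},
        "StateMachine": {"Id": "arn:aws:states:us-east-1:123456789012:stateMachine:sm"},
        "State": {"Name": "step-one", "EnteredTime": "2024-06-10T19:26:16.525Z"},
    },
}


def is_step_functions_event(event: dict) -> bool:
    nested = event.get("_datadog")
    if isinstance(nested, dict) and nested.get("serverless-version") == "v1":
        return True
    return "Execution" in event and "StateMachine" in event and "State" in event


def state_machine_arn(event: dict) -> str:
    # Prefer the nested _datadog object when present, otherwise the event root.
    context = event.get("_datadog", event)
    return context["StateMachine"]["Id"]


for evt in (LEGACY_EVENT, NESTED_EVENT):
    print(is_step_functions_event(evt), state_machine_arn(evt))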
datadog_lambda/version.py CHANGED
@@ -1 +1 @@
-__version__ = "6.101.0"
+__version__ = "6.103.0"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datadog_lambda
-Version: 6.101.0
+Version: 6.103.0
 Summary: The Datadog AWS Lambda Library
 Home-page: https://github.com/DataDog/datadog-lambda-python
 License: Apache-2.0
@@ -1,7 +1,7 @@
 datadog_lambda/__init__.py,sha256=F_KG5XbNq9QEzbioec7DXzyTv3oYJHxeEgXE88dxXRQ,637
 datadog_lambda/api.py,sha256=0yxijMKG6a6drq4ZRkm_mu5Q0tAE2DkwRoIOHIZaZnU,3570
 datadog_lambda/cold_start.py,sha256=x-wj0F9QDoxzmYWhZRTajxmSCYM1ZNM6XXTs_61BP-w,8205
-datadog_lambda/constants.py,sha256=DeujbnguBT9nDioiaYlgQQdZ6Ps53sWXmYhruLVoCHE,1669
+datadog_lambda/constants.py,sha256=0y6O9s_8RLflYR507SDMQjKmYY16tr1yi2KQuuF1GaY,1696
 datadog_lambda/dogstatsd.py,sha256=HCyl72oQUSF3E4y1ivrHaGTHL9WG1asGjB1Xo2D_Abc,4769
 datadog_lambda/extension.py,sha256=ZU64QpA2K9K9C0jfqusBgpiWQe0QA2dcJCNk7UgjVfw,621
 datadog_lambda/handler.py,sha256=YuReCUXLyJCNTeRP_VgWzjCqUl6K6IRs_PU6RHm0VeE,1351
@@ -9,20 +9,20 @@ datadog_lambda/logger.py,sha256=nGxNMouF7wcjmoPsgivzzjNLvSy3WbGtKElxOvITZDg,766
 datadog_lambda/metric.py,sha256=hxA5nmKkUKpoo13f0Z5Ug87F9A-fv1hP-l2fWYfq6ao,6276
 datadog_lambda/module_name.py,sha256=5FmOCjjgjq78b6a83QePZZFmqahAoy9XHdUNWdq2D1Q,139
 datadog_lambda/patch.py,sha256=6a-BqovSRKsU5hTQpzxgY-_bducT-UEVCLvd3fdxeWc,4710
-datadog_lambda/span_pointers.py,sha256=fjugvYFLld641uHyq5WLV7FkmUW0lnQ4jUoQWs8ObTY,4945
+datadog_lambda/span_pointers.py,sha256=P0SBWcKnc6GNWHZ5VPuUgGojFC_pIkOkKt6SfEXRpUw,6142
 datadog_lambda/stats_writer.py,sha256=SIac96wu45AxDOZ4GraCbK3r1RKr4AFgXcEPHg1VX0A,243
 datadog_lambda/statsd_writer.py,sha256=F4SCJ6-J6YfvQNh0uQfAkP6QYiAtV3-MCsxz4QnaBBI,403
 datadog_lambda/tag_object.py,sha256=cZ7W9Ae5k3YxLOZzN5Hu8UqvOKtq5AWARele0L18Gjs,2091
 datadog_lambda/tags.py,sha256=wy6uH8eAGMn7cfZEdHpL9uEGoM85bVyyXhYwSQtfHHc,2532
 datadog_lambda/thread_stats_writer.py,sha256=zOHdzdkQFCHnLU09CSYWsGq1optbTKzHVLcDXDjtiHU,2891
-datadog_lambda/tracing.py,sha256=_H-nilG6bUmiDobO3A5V_8byjlTVaa8-KKJWMN6EmSU,50421
-datadog_lambda/trigger.py,sha256=3S7u-LBHxX-o2Ds8BAtXEQMNPXz-Av8OdWBQuozLl54,12445
-datadog_lambda/version.py,sha256=y1mDHBp0_G49g2v4-4tRNZoCtIQZtnbKTHBqCSQ7h28,24
+datadog_lambda/tracing.py,sha256=jEipyPfV1d9TXKH92gjH14ynZvVJ612QVSOfaUY-eUM,52414
+datadog_lambda/trigger.py,sha256=MGfEsxRmxdm4pD7Hh4VPyxHT9fuh0Tl_ysRL-XQSfxE,12809
+datadog_lambda/version.py,sha256=E1PTTMXKd-RwiMXLLCxh-Y9cCRMK8QJT9aNcg0e0soU,24
 datadog_lambda/wrapper.py,sha256=xTOO-NhrndReagcPJ0Ab9Xd8NRMCqsgZH4bJ7mpQXnI,15972
 datadog_lambda/xray.py,sha256=jvA4Fk76PLMgsjUoUZ7gp2otv53hFt39Nvso1ZNaivg,3749
-datadog_lambda-6.101.0.dist-info/LICENSE,sha256=4yQmjpKp1MKL7DdRDPVHkKYc2W0aezm5SIDske8oAdM,11379
-datadog_lambda-6.101.0.dist-info/LICENSE-3rdparty.csv,sha256=9CDAR1GKawwTbZkqt1RP0uwEcaRM3RhOeTB5tWXr8Ts,1381
-datadog_lambda-6.101.0.dist-info/METADATA,sha256=TB-BFsE4c23dMEXWHZXQUnuSz8haVDkeHWhr-cwSR9Y,7469
-datadog_lambda-6.101.0.dist-info/NOTICE,sha256=Jue-d8mQ1ENIHDZdYc2-X8mVYtScXb8pzF1pTLN-kRc,141
-datadog_lambda-6.101.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-datadog_lambda-6.101.0.dist-info/RECORD,,
+datadog_lambda-6.103.0.dist-info/LICENSE,sha256=4yQmjpKp1MKL7DdRDPVHkKYc2W0aezm5SIDske8oAdM,11379
+datadog_lambda-6.103.0.dist-info/LICENSE-3rdparty.csv,sha256=9CDAR1GKawwTbZkqt1RP0uwEcaRM3RhOeTB5tWXr8Ts,1381
+datadog_lambda-6.103.0.dist-info/METADATA,sha256=DHs5ty-wHML5ucs8Zm0aZmhQ-PTEpWhcWP11ZNe2UkI,7469
+datadog_lambda-6.103.0.dist-info/NOTICE,sha256=Jue-d8mQ1ENIHDZdYc2-X8mVYtScXb8pzF1pTLN-kRc,141
+datadog_lambda-6.103.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+datadog_lambda-6.103.0.dist-info/RECORD,,