datadog_lambda 6.109.0__tar.gz → 6.111.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/PKG-INFO +2 -1
  2. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/README.md +1 -0
  3. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/api.py +7 -8
  4. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/cold_start.py +3 -9
  5. datadog_lambda-6.111.0/datadog_lambda/config.py +145 -0
  6. datadog_lambda-6.111.0/datadog_lambda/dsm.py +38 -0
  7. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/metric.py +5 -12
  8. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/patch.py +4 -8
  9. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/span_pointers.py +2 -7
  10. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/tag_object.py +3 -4
  11. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/tracing.py +20 -26
  12. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/trigger.py +34 -9
  13. datadog_lambda-6.111.0/datadog_lambda/version.py +1 -0
  14. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/wrapper.py +51 -113
  15. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/pyproject.toml +1 -1
  16. datadog_lambda-6.109.0/datadog_lambda/fips.py +0 -19
  17. datadog_lambda-6.109.0/datadog_lambda/version.py +0 -1
  18. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/LICENSE +0 -0
  19. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/LICENSE-3rdparty.csv +0 -0
  20. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/NOTICE +0 -0
  21. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/__init__.py +0 -0
  22. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/constants.py +0 -0
  23. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/dogstatsd.py +0 -0
  24. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/extension.py +0 -0
  25. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/handler.py +0 -0
  26. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/logger.py +0 -0
  27. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/module_name.py +0 -0
  28. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/stats_writer.py +0 -0
  29. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/statsd_writer.py +0 -0
  30. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/tags.py +0 -0
  31. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/thread_stats_writer.py +0 -0
  32. {datadog_lambda-6.109.0 → datadog_lambda-6.111.0}/datadog_lambda/xray.py +0 -0

PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datadog_lambda
-Version: 6.109.0
+Version: 6.111.0
 Summary: The Datadog AWS Lambda Library
 Home-page: https://github.com/DataDog/datadog-lambda-python
 License: Apache-2.0
@@ -60,6 +60,7 @@ Besides the environment variables supported by dd-trace-py, the datadog-lambda-p
 | DD_COLD_START_TRACE_SKIP_LIB | optionally skip creating Cold Start Spans for a comma-separated list of libraries. Useful to limit depth or skip known libraries. | `ddtrace.internal.compat,ddtrace.filters` |
 | DD_CAPTURE_LAMBDA_PAYLOAD | [Captures incoming and outgoing AWS Lambda payloads][1] in the Datadog APM spans for Lambda invocations. | `false` |
 | DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH | Determines the level of detail captured from AWS Lambda payloads, which are then assigned as tags for the `aws.lambda` span. It specifies the nesting depth of the JSON payload structure to process. Once the specified maximum depth is reached, the tag's value is set to the stringified value of any nested elements beyond this level. <br> For example, given the input payload: <pre>{<br> "lv1" : {<br> "lv2": {<br> "lv3": "val"<br> }<br> }<br>}</pre> If the depth is set to `2`, the resulting tag's key is set to `function.request.lv1.lv2` and the value is `{\"lv3\": \"val\"}`. <br> If the depth is set to `0`, the resulting tag's key is set to `function.request` and value is `{\"lv1\":{\"lv2\":{\"lv3\": \"val\"}}}` | `10` |
+| DD_EXCEPTION_REPLAY_ENABLED | When set to `true`, the Lambda will run with Error Tracking Exception Replay enabled, capturing local variables. | `false` |


 ## Opening Issues

README.md

@@ -29,6 +29,7 @@ Besides the environment variables supported by dd-trace-py, the datadog-lambda-p
 | DD_COLD_START_TRACE_SKIP_LIB | optionally skip creating Cold Start Spans for a comma-separated list of libraries. Useful to limit depth or skip known libraries. | `ddtrace.internal.compat,ddtrace.filters` |
 | DD_CAPTURE_LAMBDA_PAYLOAD | [Captures incoming and outgoing AWS Lambda payloads][1] in the Datadog APM spans for Lambda invocations. | `false` |
 | DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH | Determines the level of detail captured from AWS Lambda payloads, which are then assigned as tags for the `aws.lambda` span. It specifies the nesting depth of the JSON payload structure to process. Once the specified maximum depth is reached, the tag's value is set to the stringified value of any nested elements beyond this level. <br> For example, given the input payload: <pre>{<br> "lv1" : {<br> "lv2": {<br> "lv3": "val"<br> }<br> }<br>}</pre> If the depth is set to `2`, the resulting tag's key is set to `function.request.lv1.lv2` and the value is `{\"lv3\": \"val\"}`. <br> If the depth is set to `0`, the resulting tag's key is set to `function.request` and value is `{\"lv1\":{\"lv2\":{\"lv3\": \"val\"}}}` | `10` |
+| DD_EXCEPTION_REPLAY_ENABLED | When set to `true`, the Lambda will run with Error Tracking Exception Replay enabled, capturing local variables. | `false` |


 ## Opening Issues

datadog_lambda/api.py

@@ -1,7 +1,7 @@
 import logging
 import os

-from datadog_lambda.fips import fips_mode_enabled
+from datadog_lambda.config import config

 logger = logging.getLogger(__name__)
 KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName"
@@ -29,7 +29,6 @@ def decrypt_kms_api_key(kms_client, ciphertext):
     is added. We need to try decrypting the API key both with and without the encryption context.
     """
     # Try without encryption context, in case API key was encrypted using the AWS CLI
-    function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
     try:
         plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[
             "Plaintext"
@@ -43,7 +42,7 @@ def decrypt_kms_api_key(kms_client, ciphertext):
         plaintext = kms_client.decrypt(
             CiphertextBlob=decoded_bytes,
             EncryptionContext={
-                KMS_ENCRYPTION_CONTEXT_KEY: function_name,
+                KMS_ENCRYPTION_CONTEXT_KEY: config.function_name,
             },
         )["Plaintext"].decode("utf-8")

@@ -66,7 +65,7 @@ def get_api_key() -> str:
     DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", ""))

     LAMBDA_REGION = os.environ.get("AWS_REGION", "")
-    if fips_mode_enabled:
+    if config.fips_mode_enabled:
         logger.debug(
             "FIPS mode is enabled, using FIPS endpoints for secrets management."
         )
@@ -82,7 +81,7 @@ def get_api_key() -> str:
             return ""
         endpoint_url = (
             f"https://secretsmanager-fips.{secrets_region}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         secrets_manager_client = _boto3_client(
@@ -95,7 +94,7 @@ def get_api_key() -> str:
         # SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html
         fips_endpoint = (
             f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint)
@@ -106,7 +105,7 @@ def get_api_key() -> str:
         # KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html
         fips_endpoint = (
             f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         kms_client = _boto3_client("kms", endpoint_url=fips_endpoint)
@@ -118,7 +117,7 @@ def get_api_key() -> str:


 def init_api():
-    if not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
+    if not config.flush_to_log:
         # Make sure that this package would always be lazy-loaded/outside from the critical path
         # since underlying packages are quite heavy to load
         # and useless with the extension unless sending metrics with timestamps

datadog_lambda/cold_start.py

@@ -1,8 +1,9 @@
 import time
-import os
 from typing import List, Hashable
 import logging

+from datadog_lambda.config import config
+
 logger = logging.getLogger(__name__)

 _cold_start = True
@@ -86,14 +87,12 @@ def reset_node_stacks():

 def push_node(module_name, file_path):
     node = ImportNode(module_name, file_path, time.time_ns())
-    global import_stack
     if import_stack:
         import_stack[-1].children.append(node)
     import_stack.append(node)


 def pop_node(module_name):
-    global import_stack
     if not import_stack:
         return
     node = import_stack.pop()
@@ -102,7 +101,6 @@ def pop_node(module_name):
     end_time_ns = time.time_ns()
     node.end_time_ns = end_time_ns
     if not import_stack:  # import_stack empty, a root node has been found
-        global root_nodes
         root_nodes.append(node)


@@ -147,11 +145,7 @@ def wrap_find_spec(original_find_spec):


 def initialize_cold_start_tracing():
-    if (
-        is_new_sandbox()
-        and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"
-        and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true"
-    ):
+    if is_new_sandbox() and config.cold_start_tracing:
         from sys import meta_path

         for importer in meta_path:

datadog_lambda/config.py (new file)

@@ -0,0 +1,145 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+
+import logging
+import os
+
+logger = logging.getLogger(__name__)
+
+
+def _get_env(key, default=None, cast=None, depends_on_tracing=False):
+    @property
+    def _getter(self):
+        if not hasattr(self, prop_key):
+            val = self._resolve_env(key, default, cast, depends_on_tracing)
+            setattr(self, prop_key, val)
+        return getattr(self, prop_key)
+
+    prop_key = f"_config_{key}"
+    return _getter
+
+
+def as_bool(val):
+    return val.lower() == "true" or val == "1"
+
+
+def as_list(val):
+    return [val.strip() for val in val.split(",") if val.strip()]
+
+
+class Config:
+    def _resolve_env(self, key, default=None, cast=None, depends_on_tracing=False):
+        if depends_on_tracing and not self.trace_enabled:
+            return False
+        val = os.environ.get(key, default)
+        if cast is not None:
+            try:
+                val = cast(val)
+            except (ValueError, TypeError):
+                msg = (
+                    "Failed to cast environment variable '%s' with "
+                    "value '%s' to type %s. Using default value '%s'."
+                )
+                logger.warning(msg, key, val, cast.__name__, default)
+                val = default
+        return val
+
+    service = _get_env("DD_SERVICE")
+    env = _get_env("DD_ENV")
+
+    cold_start_tracing = _get_env(
+        "DD_COLD_START_TRACING", "true", as_bool, depends_on_tracing=True
+    )
+    min_cold_start_trace_duration = _get_env("DD_MIN_COLD_START_DURATION", 3, int)
+    cold_start_trace_skip_lib = _get_env(
+        "DD_COLD_START_TRACE_SKIP_LIB",
+        "ddtrace.internal.compat,ddtrace.filters",
+        as_list,
+    )
+
+    capture_payload_max_depth = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH", 10, int)
+    capture_payload_enabled = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD", "false", as_bool)
+
+    trace_enabled = _get_env("DD_TRACE_ENABLED", "true", as_bool)
+    make_inferred_span = _get_env(
+        "DD_TRACE_MANAGED_SERVICES", "true", as_bool, depends_on_tracing=True
+    )
+    encode_authorizer_context = _get_env(
+        "DD_ENCODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
+    )
+    decode_authorizer_context = _get_env(
+        "DD_DECODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
+    )
+    add_span_pointers = _get_env("DD_BOTOCORE_ADD_SPAN_POINTERS", "true", as_bool)
+    trace_extractor = _get_env("DD_TRACE_EXTRACTOR")
+
+    enhanced_metrics_enabled = _get_env("DD_ENHANCED_METRICS", "true", as_bool)
+
+    flush_in_thread = _get_env("DD_FLUSH_IN_THREAD", "false", as_bool)
+    flush_to_log = _get_env("DD_FLUSH_TO_LOG", "false", as_bool)
+    logs_injection = _get_env("DD_LOGS_INJECTION", "true", as_bool)
+    merge_xray_traces = _get_env("DD_MERGE_XRAY_TRACES", "false", as_bool)
+
+    telemetry_enabled = _get_env(
+        "DD_INSTRUMENTATION_TELEMETRY_ENABLED",
+        "false",
+        as_bool,
+        depends_on_tracing=True,
+    )
+    otel_enabled = _get_env("DD_TRACE_OTEL_ENABLED", "false", as_bool)
+    profiling_enabled = _get_env("DD_PROFILING_ENABLED", "false", as_bool)
+    llmobs_enabled = _get_env("DD_LLMOBS_ENABLED", "false", as_bool)
+    exception_replay_enabled = _get_env("DD_EXCEPTION_REPLAY_ENABLED", "false", as_bool)
+    data_streams_enabled = _get_env(
+        "DD_DATA_STREAMS_ENABLED", "false", as_bool, depends_on_tracing=True
+    )
+
+    is_gov_region = _get_env("AWS_REGION", "", lambda x: x.startswith("us-gov-"))
+
+    local_test = _get_env("DD_LOCAL_TEST", "false", as_bool)
+    integration_test = _get_env("DD_INTEGRATION_TEST", "false", as_bool)
+
+    aws_lambda_function_name = _get_env("AWS_LAMBDA_FUNCTION_NAME")
+
+    @property
+    def function_name(self):
+        if not hasattr(self, "_config_function_name"):
+            if self.aws_lambda_function_name is None:
+                self._config_function_name = "function"
+            else:
+                self._config_function_name = self.aws_lambda_function_name
+        return self._config_function_name
+
+    @property
+    def is_lambda_context(self):
+        if not hasattr(self, "_config_is_lambda_context"):
+            self._config_is_lambda_context = bool(self.aws_lambda_function_name)
+        return self._config_is_lambda_context
+
+    @property
+    def fips_mode_enabled(self):
+        if not hasattr(self, "_config_fips_mode_enabled"):
+            self._config_fips_mode_enabled = (
+                os.environ.get(
+                    "DD_LAMBDA_FIPS_MODE",
+                    "true" if self.is_gov_region else "false",
+                ).lower()
+                == "true"
+            )
+        return self._config_fips_mode_enabled
+
+    def _reset(self):
+        for attr in dir(self):
+            if attr.startswith("_config_"):
+                delattr(self, attr)
+
+
+config = Config()
+
+if config.is_gov_region or config.fips_mode_enabled:
+    logger.debug(
+        "Python Lambda Layer FIPS mode is %s.",
+        "enabled" if config.fips_mode_enabled else "not enabled",
+    )
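
A minimal usage sketch of the new lazily cached config object, assuming only what config.py above defines; the environment values are hypothetical:

    import os

    # Hypothetical environment for the sketch.
    os.environ["DD_COLD_START_TRACING"] = "false"

    from datadog_lambda.config import config

    # First access resolves the env var (honoring depends_on_tracing) and
    # caches the result on the instance under a _config_* attribute.
    assert config.cold_start_tracing is False

    # Later environment changes are not observed until the cache is cleared;
    # the private _reset() helper exists for that (mainly useful in tests).
    os.environ["DD_COLD_START_TRACING"] = "true"
    assert config.cold_start_tracing is False
    config._reset()
    assert config.cold_start_tracing is True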

datadog_lambda/dsm.py (new file)

@@ -0,0 +1,38 @@
+from datadog_lambda import logger
+from datadog_lambda.trigger import EventTypes
+
+
+def set_dsm_context(event, event_source):
+
+    if event_source.equals(EventTypes.SQS):
+        _dsm_set_sqs_context(event)
+
+
+def _dsm_set_sqs_context(event):
+    from datadog_lambda.wrapper import format_err_with_traceback
+    from ddtrace.internal.datastreams import data_streams_processor
+    from ddtrace.internal.datastreams.processor import DsmPathwayCodec
+    from ddtrace.internal.datastreams.botocore import (
+        get_datastreams_context,
+        calculate_sqs_payload_size,
+    )
+
+    records = event.get("Records")
+    if records is None:
+        return
+    processor = data_streams_processor()
+
+    for record in records:
+        try:
+            queue_arn = record.get("eventSourceARN", "")
+
+            contextjson = get_datastreams_context(record)
+            payload_size = calculate_sqs_payload_size(record)
+
+            ctx = DsmPathwayCodec.decode(contextjson, processor)
+            ctx.set_checkpoint(
+                ["direction:in", f"topic:{queue_arn}", "type:sqs"],
+                payload_size=payload_size,
+            )
+        except Exception as e:
+            logger.error(format_err_with_traceback(e))
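
For orientation, a sketch of when this module is exercised. The SQS event below is a hypothetical minimal payload, and the gating mirrors the wrapper change further down (set_dsm_context is only called when config.data_streams_enabled is true):

    from datadog_lambda.config import config
    from datadog_lambda.dsm import set_dsm_context
    from datadog_lambda.trigger import parse_event_source

    # Hypothetical SQS trigger event; ARN and body are made-up example values.
    event = {
        "Records": [
            {
                "eventSource": "aws:sqs",
                "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:my-queue",
                "body": "hello",
                "messageAttributes": {},  # upstream DSM context would travel here
            }
        ]
    }

    if config.data_streams_enabled:  # DD_DATA_STREAMS_ENABLED=true and tracing on
        set_dsm_context(event, parse_event_source(event))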

datadog_lambda/metric.py

@@ -5,14 +5,13 @@

 import enum
 import logging
-import os
 import time
 from datetime import datetime, timedelta

 import ujson as json

+from datadog_lambda.config import config
 from datadog_lambda.extension import should_use_extension
-from datadog_lambda.fips import fips_mode_enabled
 from datadog_lambda.tags import dd_lambda_layer_tag, get_enhanced_metrics_tags

 logger = logging.getLogger(__name__)
@@ -28,10 +27,10 @@ class MetricsHandler(enum.Enum):
 def _select_metrics_handler():
     if should_use_extension:
         return MetricsHandler.EXTENSION
-    if os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
+    if config.flush_to_log:
         return MetricsHandler.FORWARDER

-    if fips_mode_enabled:
+    if config.fips_mode_enabled:
         logger.debug(
             "With FIPS mode enabled, the Datadog API metrics handler is unavailable."
         )
@@ -58,14 +57,8 @@ elif metrics_handler == MetricsHandler.DATADOG_API:
     from datadog_lambda.api import init_api
     from datadog_lambda.thread_stats_writer import ThreadStatsWriter

-    flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true"
     init_api()
-    lambda_stats = ThreadStatsWriter(flush_in_thread)
-
-
-enhanced_metrics_enabled = (
-    os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true"
-)
+    lambda_stats = ThreadStatsWriter(config.flush_in_thread)


 def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=False):
@@ -191,7 +184,7 @@ def submit_enhanced_metric(metric_name, lambda_context):
         metric_name (str): metric name w/o enhanced prefix i.e. "invocations" or "errors"
         lambda_context (object): Lambda context dict passed to the function by AWS
     """
-    if not enhanced_metrics_enabled:
+    if not config.enhanced_metrics_enabled:
         logger.debug(
             "Not submitting enhanced metric %s because enhanced metrics are disabled",
             metric_name,

datadog_lambda/patch.py

@@ -3,7 +3,6 @@
 # This product includes software developed at Datadog (https://www.datadoghq.com/).
 # Copyright 2019 Datadog, Inc.

-import os
 import sys
 import logging
 import zlib
@@ -13,10 +12,8 @@ from wrapt import wrap_function_wrapper as wrap
 from wrapt.importer import when_imported
 from ddtrace import patch_all as patch_all_dd

-from datadog_lambda.tracing import (
-    get_dd_trace_context,
-    dd_tracing_enabled,
-)
+from datadog_lambda.config import config
+from datadog_lambda.tracing import get_dd_trace_context
 from collections.abc import MutableMapping

 logger = logging.getLogger(__name__)
@@ -32,7 +29,7 @@ def patch_all():
     """
     _patch_for_integration_tests()

-    if dd_tracing_enabled:
+    if config.trace_enabled:
         patch_all_dd()
     else:
         _patch_http()
@@ -44,8 +41,7 @@ def _patch_for_integration_tests():
     Patch `requests` to log the outgoing requests for integration tests.
     """
     global _integration_tests_patched
-    is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true"
-    if not _integration_tests_patched and is_in_tests:
+    if not _integration_tests_patched and config.integration_test:
         wrap("requests", "Session.send", _log_request)
         _integration_tests_patched = True


datadog_lambda/span_pointers.py

@@ -1,12 +1,12 @@
 from itertools import chain
 import logging
-import os
 from typing import List
 from typing import Optional

 from ddtrace._trace._span_pointer import _SpanPointerDirection
 from ddtrace._trace._span_pointer import _SpanPointerDescription

+from datadog_lambda.config import config
 from datadog_lambda.metric import submit_dynamodb_stream_type_metric
 from datadog_lambda.trigger import EventTypes

@@ -14,15 +14,10 @@ from datadog_lambda.trigger import EventTypes
 logger = logging.getLogger(__name__)


-dd_botocore_add_span_pointers = os.environ.get(
-    "DD_BOTOCORE_ADD_SPAN_POINTERS", "true"
-).lower() in ("true", "1")
-
-
 def calculate_span_pointers(
     event_source,
     event,
-    botocore_add_span_pointers=dd_botocore_add_span_pointers,
+    botocore_add_span_pointers=config.add_span_pointers,
 ) -> List[_SpanPointerDescription]:
     try:
         if botocore_add_span_pointers:

datadog_lambda/tag_object.py

@@ -4,18 +4,17 @@
 # Copyright 2021 Datadog, Inc.

 from decimal import Decimal
-import logging
 import ujson as json

+from datadog_lambda.config import config
+
 redactable_keys = ["authorization", "x-authorization", "password", "token"]
-max_depth = 10
-logger = logging.getLogger(__name__)


 def tag_object(span, key, obj, depth=0):
     if obj is None:
         return span.set_tag(key, obj)
-    if depth >= max_depth:
+    if depth >= config.capture_payload_max_depth:
         return span.set_tag(key, _redact_val(key, str(obj)[0:5000]))
     depth += 1
     if _should_try_string(obj):
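
This ties the depth cut-off to the DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH variable documented in the README rows above; a small hypothetical illustration:

    # With DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH=2 (config.capture_payload_max_depth == 2),
    # tagging the README example payload stops at the second level and stringifies the rest.
    payload = {"lv1": {"lv2": {"lv3": "val"}}}
    # tag_object(span, "function.request", payload) would emit roughly:
    #   function.request.lv1.lv2 = '{"lv3": "val"}'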

datadog_lambda/tracing.py

@@ -32,6 +32,8 @@ from ddtrace import patch
 from ddtrace import __version__ as ddtrace_version
 from ddtrace.propagation.http import HTTPPropagator
 from ddtrace.trace import Context, Span, tracer
+
+from datadog_lambda.config import config
 from datadog_lambda import __version__ as datadog_lambda_version
 from datadog_lambda.trigger import (
     _EventSource,
@@ -42,10 +44,7 @@ from datadog_lambda.trigger import (
     EventSubtypes,
 )

-dd_trace_otel_enabled = (
-    os.environ.get("DD_TRACE_OTEL_ENABLED", "false").lower() == "true"
-)
-if dd_trace_otel_enabled:
+if config.otel_enabled:
     from opentelemetry.trace import set_tracer_provider
     from ddtrace.opentelemetry import TracerProvider

@@ -55,18 +54,11 @@ if dd_trace_otel_enabled:
 logger = logging.getLogger(__name__)

 dd_trace_context = None
-dd_tracing_enabled = os.environ.get("DD_TRACE_ENABLED", "false").lower() == "true"
-if dd_tracing_enabled:
+if config.telemetry_enabled:
     # Enable the telemetry client if the user has opted in
-    if (
-        os.environ.get("DD_INSTRUMENTATION_TELEMETRY_ENABLED", "false").lower()
-        == "true"
-    ):
-        from ddtrace.internal.telemetry import telemetry_writer
-
-        telemetry_writer.enable()
+    from ddtrace.internal.telemetry import telemetry_writer

-is_lambda_context = os.environ.get(XrayDaemon.FUNCTION_NAME_HEADER_NAME) != ""
+    telemetry_writer.enable()

 propagator = HTTPPropagator()

@@ -97,7 +89,7 @@ def _convert_xray_sampling(xray_sampled):


 def _get_xray_trace_context():
-    if not is_lambda_context:
+    if not config.is_lambda_context:
         return None

     xray_trace_entity = parse_xray_header(
@@ -639,13 +631,11 @@ def get_dd_trace_context_obj():
     automatically, but this function can be used to manually inject the trace
     context to an outgoing request.
     """
-    if dd_tracing_enabled:
+    if config.trace_enabled:
         dd_trace_py_context = _get_dd_trace_py_context()
         if _is_context_complete(dd_trace_py_context):
             return dd_trace_py_context

-    global dd_trace_context
-
     try:
         xray_context = _get_xray_trace_context()  # xray (sub)segment
     except Exception as e:
@@ -690,10 +680,10 @@ def set_correlation_ids():

     TODO: Remove me when Datadog tracer is natively supported in Lambda.
     """
-    if not is_lambda_context:
+    if not config.is_lambda_context:
         logger.debug("set_correlation_ids is only supported in LambdaContext")
         return
-    if dd_tracing_enabled:
+    if config.trace_enabled:
         logger.debug("using ddtrace implementation for spans")
         return

@@ -850,13 +840,14 @@ def create_inferred_span_from_lambda_function_url_event(event, context):
     http = request_context.get("http")
     method = http.get("method") if http else None
     path = http.get("path") if http else None
+    http_url = f"https://{domain}{path}"
     resource = f"{method} {path}"
     tags = {
         "operation_name": "aws.lambda.url",
-        "http.url": domain + path,
+        "http.url": http_url,
         "endpoint": path,
         "http.method": method,
-        "resource_names": domain + path,
+        "resource_names": resource,
         "request_id": context.aws_request_id,
     }
     request_time_epoch = request_context.get("timeEpoch")
@@ -948,6 +939,7 @@ def create_inferred_span_from_api_gateway_websocket_event(
     request_context = event.get("requestContext")
     domain = request_context.get("domainName")
     endpoint = request_context.get("routeKey")
+    http_url = f"https://{domain}{endpoint}"
     api_id = request_context.get("apiId")

     service_name = determine_service_name(
@@ -955,7 +947,7 @@
     )
     tags = {
         "operation_name": "aws.apigateway.websocket",
-        "http.url": domain + endpoint,
+        "http.url": http_url,
         "endpoint": endpoint,
         "resource_names": endpoint,
         "apiid": api_id,
@@ -1007,11 +999,12 @@ def create_inferred_span_from_api_gateway_event(
     )
     method = event.get("httpMethod")
     path = event.get("path")
+    http_url = f"https://{domain}{path}"
     resource_path = _get_resource_path(event, request_context)
     resource = f"{method} {resource_path}"
     tags = {
         "operation_name": "aws.apigateway.rest",
-        "http.url": domain + path,
+        "http.url": http_url,
         "endpoint": path,
         "http.method": method,
         "resource_names": resource,
@@ -1073,12 +1066,13 @@ def create_inferred_span_from_http_api_event(
     http = request_context.get("http") or {}
     method = http.get("method")
     path = event.get("rawPath")
+    http_url = f"https://{domain}{path}"
     resource_path = _get_resource_path(event, request_context)
     resource = f"{method} {resource_path}"
     tags = {
         "operation_name": "aws.httpapi",
         "endpoint": path,
-        "http.url": domain + path,
+        "http.url": http_url,
         "http.method": http.get("method"),
         "http.protocol": http.get("protocol"),
         "http.source_ip": http.get("sourceIp"),
@@ -1476,7 +1470,7 @@ def emit_telemetry_on_exception_outside_of_handler(
     Emit an enhanced error metric and create a span for exceptions occurring outside the handler
     """
     submit_errors_metric(None)
-    if dd_tracing_enabled:
+    if config.trace_enabled:
         span = tracer.trace(
             "aws.lambda",
             service="aws.lambda",
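
The effect of the http.url changes above, with hypothetical API Gateway values:

    domain = "70ixmpl4fl.execute-api.us-east-1.amazonaws.com"  # hypothetical
    path = "/prod/path"

    old_tag = domain + path              # "70ixmpl4fl.execute-api.us-east-1.amazonaws.com/prod/path"
    new_tag = f"https://{domain}{path}"  # "https://70ixmpl4fl.execute-api.us-east-1.amazonaws.com/prod/path"

For the Lambda Function URL span, resource_names likewise switches from the bare domain-plus-path string to the "{method} {path}" resource.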

datadog_lambda/trigger.py

@@ -114,10 +114,14 @@ def parse_event_source(event: dict) -> _EventSource:

     event_source = None

+    # Get requestContext safely and ensure it's a dictionary
     request_context = event.get("requestContext")
+    if not isinstance(request_context, dict):
+        request_context = None
+
     if request_context and request_context.get("stage"):
         if "domainName" in request_context and detect_lambda_function_url_domain(
-            request_context.get("domainName")
+            request_context.get("domainName", "")
         ):
             return _EventSource(EventTypes.LAMBDA_FUNCTION_URL)
         event_source = _EventSource(EventTypes.API_GATEWAY)
@@ -149,7 +153,7 @@ def parse_event_source(event: dict) -> _EventSource:
         event_source = _EventSource(EventTypes.STEPFUNCTIONS)

     event_record = get_first_record(event)
-    if event_record:
+    if event_record and isinstance(event_record, dict):
         aws_event_source = event_record.get("eventSource") or event_record.get(
             "EventSource"
         )
@@ -171,6 +175,8 @@ def parse_event_source(event: dict) -> _EventSource:

 def detect_lambda_function_url_domain(domain: str) -> bool:
     # e.g. "etsn5fibjr.lambda-url.eu-south-1.amazonaws.com"
+    if not isinstance(domain, str):
+        return False
     domain_parts = domain.split(".")
     if len(domain_parts) < 2:
         return False
@@ -283,17 +289,28 @@ def extract_http_tags(event):
     Extracts HTTP facet tags from the triggering event
     """
     http_tags = {}
+
+    # Safely get request_context and ensure it's a dictionary
     request_context = event.get("requestContext")
+    if not isinstance(request_context, dict):
+        request_context = None
+
     path = event.get("path")
     method = event.get("httpMethod")
+
     if request_context and request_context.get("stage"):
-        if request_context.get("domainName"):
-            http_tags["http.url"] = request_context.get("domainName")
+        domain_name = request_context.get("domainName")
+        if domain_name:
+            http_tags["http.url"] = f"https://{domain_name}"

         path = request_context.get("path")
         method = request_context.get("httpMethod")
+
         # Version 2.0 HTTP API Gateway
-        apigateway_v2_http = request_context.get("http")
+        apigateway_v2_http = request_context.get("http", {})
+        if not isinstance(apigateway_v2_http, dict):
+            apigateway_v2_http = {}
+
         if event.get("version") == "2.0" and apigateway_v2_http:
             path = apigateway_v2_http.get("path")
             method = apigateway_v2_http.get("method")
@@ -303,15 +320,23 @@
     if method:
         http_tags["http.method"] = method

-    headers = event.get("headers")
+    # Safely get headers
+    headers = event.get("headers", {})
+    if not isinstance(headers, dict):
+        headers = {}
+
     if headers and headers.get("Referer"):
         http_tags["http.referer"] = headers.get("Referer")

     # Try to get `routeKey` from API GW v2; otherwise try to get `resource` from API GW v1
     route = event.get("routeKey") or event.get("resource")
-    if route:
-        # "GET /my/endpoint" = > "/my/endpoint"
-        http_tags["http.route"] = route.split(" ")[-1]
+    if route and isinstance(route, str):
+        try:
+            # "GET /my/endpoint" = > "/my/endpoint"
+            http_tags["http.route"] = route.split(" ")[-1]
+        except Exception:
+            # If splitting fails, use the route as is
+            http_tags["http.route"] = route

     return http_tags

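
A sketch of what the added isinstance guards tolerate; the event below is a deliberately malformed, hypothetical payload:

    from datadog_lambda.trigger import extract_http_tags, parse_event_source

    event = {"requestContext": "not-a-dict", "headers": None, "routeKey": 123}

    # With the guards above, both calls degrade gracefully instead of raising
    # AttributeError on .get() / .split():
    extract_http_tags(event)   # returns {} (no usable HTTP facets)
    parse_event_source(event)  # the non-dict requestContext no longer trips it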

datadog_lambda/version.py (new file)

@@ -0,0 +1 @@
+__version__ = "6.111.0"

datadog_lambda/wrapper.py

@@ -9,6 +9,7 @@ import ujson as json
 from importlib import import_module
 from time import time_ns

+from datadog_lambda.dsm import set_dsm_context
 from datadog_lambda.extension import should_use_extension, flush_extension
 from datadog_lambda.cold_start import (
     set_cold_start,
@@ -17,6 +18,7 @@ from datadog_lambda.cold_start import (
     is_new_sandbox,
     ColdStartTracer,
 )
+from datadog_lambda.config import config
 from datadog_lambda.constants import (
     TraceContextSource,
     XraySubsegment,
@@ -25,11 +27,11 @@ from datadog_lambda.constants import (
 from datadog_lambda.module_name import modify_module_name
 from datadog_lambda.patch import patch_all
 from datadog_lambda.span_pointers import calculate_span_pointers
+from datadog_lambda.tag_object import tag_object
 from datadog_lambda.tracing import (
     extract_dd_trace_context,
     create_dd_dummy_metadata_subsegment,
     inject_correlation_ids,
-    dd_tracing_enabled,
     mark_trace_as_error_for_5xx_responses,
     set_correlation_ids,
     set_dd_trace_py_root,
@@ -45,57 +47,20 @@ from datadog_lambda.trigger import (
     extract_http_status_code_tag,
 )

-profiling_env_var = os.environ.get("DD_PROFILING_ENABLED", "false").lower() == "true"
-if profiling_env_var:
+if config.profiling_enabled:
     from ddtrace.profiling import profiler

-llmobs_env_var = os.environ.get("DD_LLMOBS_ENABLED", "false").lower() in ("true", "1")
-if llmobs_env_var:
+if config.llmobs_enabled:
     from ddtrace.llmobs import LLMObs

+if config.exception_replay_enabled:
+    from ddtrace.debugging._exception.replay import SpanExceptionHandler
+    from ddtrace.debugging._uploader import LogsIntakeUploaderV1
+
 logger = logging.getLogger(__name__)

-DD_FLUSH_TO_LOG = "DD_FLUSH_TO_LOG"
-DD_LOGS_INJECTION = "DD_LOGS_INJECTION"
-DD_MERGE_XRAY_TRACES = "DD_MERGE_XRAY_TRACES"
-AWS_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"
-DD_LOCAL_TEST = "DD_LOCAL_TEST"
-DD_TRACE_EXTRACTOR = "DD_TRACE_EXTRACTOR"
-DD_TRACE_MANAGED_SERVICES = "DD_TRACE_MANAGED_SERVICES"
-DD_ENCODE_AUTHORIZER_CONTEXT = "DD_ENCODE_AUTHORIZER_CONTEXT"
-DD_DECODE_AUTHORIZER_CONTEXT = "DD_DECODE_AUTHORIZER_CONTEXT"
-DD_COLD_START_TRACING = "DD_COLD_START_TRACING"
-DD_MIN_COLD_START_DURATION = "DD_MIN_COLD_START_DURATION"
-DD_COLD_START_TRACE_SKIP_LIB = "DD_COLD_START_TRACE_SKIP_LIB"
-DD_CAPTURE_LAMBDA_PAYLOAD = "DD_CAPTURE_LAMBDA_PAYLOAD"
-DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH = "DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH"
 DD_REQUESTS_SERVICE_NAME = "DD_REQUESTS_SERVICE_NAME"
 DD_SERVICE = "DD_SERVICE"
-DD_ENV = "DD_ENV"
-
-
-def get_env_as_int(env_key, default_value: int) -> int:
-    try:
-        return int(os.environ.get(env_key, default_value))
-    except Exception as e:
-        logger.warn(
-            f"Failed to parse {env_key} as int. Using default value: {default_value}. Error: {e}"
-        )
-        return default_value
-
-
-dd_capture_lambda_payload_enabled = (
-    os.environ.get(DD_CAPTURE_LAMBDA_PAYLOAD, "false").lower() == "true"
-)
-
-if dd_capture_lambda_payload_enabled:
-    import datadog_lambda.tag_object as tag_object
-
-    tag_object.max_depth = get_env_as_int(
-        DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH, tag_object.max_depth
-    )
-
-env_env_var = os.environ.get(DD_ENV, None)


 init_timestamp_ns = time_ns()

@@ -152,56 +117,16 @@ class _LambdaDecorator(object):
         """Executes when the wrapped function gets wrapped"""
         try:
             self.func = func
-            self.flush_to_log = os.environ.get(DD_FLUSH_TO_LOG, "").lower() == "true"
-            self.logs_injection = (
-                os.environ.get(DD_LOGS_INJECTION, "true").lower() == "true"
-            )
-            self.merge_xray_traces = (
-                os.environ.get(DD_MERGE_XRAY_TRACES, "false").lower() == "true"
-            )
-            self.function_name = os.environ.get(AWS_LAMBDA_FUNCTION_NAME, "function")
-            self.service = os.environ.get(DD_SERVICE, None)
-            self.extractor_env = os.environ.get(DD_TRACE_EXTRACTOR, None)
             self.trace_extractor = None
             self.span = None
             self.inferred_span = None
-            depends_on_dd_tracing_enabled = (
-                lambda original_boolean: dd_tracing_enabled and original_boolean
-            )
-            self.make_inferred_span = depends_on_dd_tracing_enabled(
-                os.environ.get(DD_TRACE_MANAGED_SERVICES, "true").lower() == "true"
-            )
-            self.encode_authorizer_context = depends_on_dd_tracing_enabled(
-                os.environ.get(DD_ENCODE_AUTHORIZER_CONTEXT, "true").lower() == "true"
-            )
-            self.decode_authorizer_context = depends_on_dd_tracing_enabled(
-                os.environ.get(DD_DECODE_AUTHORIZER_CONTEXT, "true").lower() == "true"
-            )
-            self.cold_start_tracing = depends_on_dd_tracing_enabled(
-                os.environ.get(DD_COLD_START_TRACING, "true").lower() == "true"
-            )
-            self.min_cold_start_trace_duration = get_env_as_int(
-                DD_MIN_COLD_START_DURATION, 3
-            )
-            self.local_testing_mode = os.environ.get(
-                DD_LOCAL_TEST, "false"
-            ).lower() in ("true", "1")
-            self.cold_start_trace_skip_lib = [
-                "ddtrace.internal.compat",
-                "ddtrace.filters",
-            ]
-            if DD_COLD_START_TRACE_SKIP_LIB in os.environ:
-                try:
-                    self.cold_start_trace_skip_lib = os.environ[
-                        DD_COLD_START_TRACE_SKIP_LIB
-                    ].split(",")
-                except Exception:
-                    logger.debug(f"Malformatted for env {DD_COLD_START_TRACE_SKIP_LIB}")
             self.response = None
-            if profiling_env_var:
-                self.prof = profiler.Profiler(env=env_env_var, service=self.service)
-            if self.extractor_env:
-                extractor_parts = self.extractor_env.rsplit(".", 1)
+
+            if config.profiling_enabled:
+                self.prof = profiler.Profiler(env=config.env, service=config.service)
+
+            if config.trace_extractor:
+                extractor_parts = config.trace_extractor.rsplit(".", 1)
                 if len(extractor_parts) == 2:
                     (mod_name, extractor_name) = extractor_parts
                     modified_extractor_name = modify_module_name(mod_name)
@@ -209,7 +134,7 @@ class _LambdaDecorator(object):
                     self.trace_extractor = getattr(extractor_module, extractor_name)

             # Inject trace correlation ids to logs
-            if self.logs_injection:
+            if config.logs_injection:
                 inject_correlation_ids()

             # This prevents a breaking change in ddtrace v0.49 regarding the service name
@@ -221,9 +146,14 @@ class _LambdaDecorator(object):
             patch_all()

             # Enable LLM Observability
-            if llmobs_env_var:
+            if config.llmobs_enabled:
                 LLMObs.enable()

+            # Enable Exception Replay
+            if config.exception_replay_enabled:
+                logger.debug("Enabling exception replay")
+                SpanExceptionHandler.enable()
+
             logger.debug("datadog_lambda_wrapper initialized")
         except Exception as e:
             logger.error(format_err_with_traceback(e))
@@ -290,7 +220,7 @@ class _LambdaDecorator(object):
                 event,
                 context,
                 extractor=self.trace_extractor,
-                decode_authorizer_context=self.decode_authorizer_context,
+                decode_authorizer_context=config.decode_authorizer_context,
             )
             self.event_source = event_source
             # Create a Datadog X-Ray subsegment with the trace context
@@ -304,26 +234,28 @@ class _LambdaDecorator(object):
                 XraySubsegment.TRACE_KEY,
             )

-            if dd_tracing_enabled:
-                set_dd_trace_py_root(trace_context_source, self.merge_xray_traces)
-                if self.make_inferred_span:
+            if config.trace_enabled:
+                set_dd_trace_py_root(trace_context_source, config.merge_xray_traces)
+                if config.make_inferred_span:
                     self.inferred_span = create_inferred_span(
-                        event, context, event_source, self.decode_authorizer_context
+                        event, context, event_source, config.decode_authorizer_context
                     )
+                if config.data_streams_enabled:
+                    set_dsm_context(event, event_source)
                 self.span = create_function_execution_span(
                     context=context,
-                    function_name=self.function_name,
+                    function_name=config.function_name,
                     is_cold_start=is_cold_start(),
                     is_proactive_init=is_proactive_init(),
                     trace_context_source=trace_context_source,
-                    merge_xray_traces=self.merge_xray_traces,
+                    merge_xray_traces=config.merge_xray_traces,
                     trigger_tags=self.trigger_tags,
                     parent_span=self.inferred_span,
                     span_pointers=calculate_span_pointers(event_source, event),
                 )
             else:
                 set_correlation_ids()
-            if profiling_env_var and is_new_sandbox():
+            if config.profiling_enabled and is_new_sandbox():
                 self.prof.start(stop_on_exit=False, profile_children=True)
             logger.debug("datadog_lambda_wrapper _before() done")
         except Exception as e:
@@ -342,14 +274,14 @@ class _LambdaDecorator(object):
                 create_dd_dummy_metadata_subsegment(
                     self.trigger_tags, XraySubsegment.LAMBDA_FUNCTION_TAGS_KEY
                 )
-            should_trace_cold_start = self.cold_start_tracing and is_new_sandbox()
+            should_trace_cold_start = config.cold_start_tracing and is_new_sandbox()
             if should_trace_cold_start:
                 trace_ctx = tracer.current_trace_context()

             if self.span:
-                if dd_capture_lambda_payload_enabled:
-                    tag_object.tag_object(self.span, "function.request", event)
-                    tag_object.tag_object(self.span, "function.response", self.response)
+                if config.capture_payload_enabled:
+                    tag_object(self.span, "function.request", event)
+                    tag_object(self.span, "function.response", self.response)

                 if status_code:
                     self.span.set_tag("http.status_code", status_code)
@@ -359,8 +291,8 @@ class _LambdaDecorator(object):
                 if status_code:
                     self.inferred_span.set_tag("http.status_code", status_code)

-                if self.service:
-                    self.inferred_span.set_tag("peer.service", self.service)
+                if config.service:
+                    self.inferred_span.set_tag("peer.service", config.service)

                 if InferredSpanInfo.is_async(self.inferred_span) and self.span:
                     self.inferred_span.finish(finish_time=self.span.start)
@@ -372,29 +304,35 @@ class _LambdaDecorator(object):
                     following_span = self.span or self.inferred_span
                     ColdStartTracer(
                         tracer,
-                        self.function_name,
+                        config.function_name,
                         following_span.start_ns,
                         trace_ctx,
-                        self.min_cold_start_trace_duration,
-                        self.cold_start_trace_skip_lib,
+                        config.min_cold_start_trace_duration,
+                        config.cold_start_trace_skip_lib,
                     ).trace()
                 except Exception as e:
                     logger.debug("Failed to create cold start spans. %s", e)

-            if not self.flush_to_log or should_use_extension:
+            if not config.flush_to_log or should_use_extension:
                 from datadog_lambda.metric import flush_stats

                 flush_stats(context)
-                if should_use_extension and self.local_testing_mode:
+                if should_use_extension and config.local_test:
                     # when testing locally, the extension does not know when an
                     # invocation completes because it does not have access to the
                     # logs api
                     flush_extension()

-            if llmobs_env_var:
+            if config.llmobs_enabled:
                 LLMObs.flush()

-            if self.encode_authorizer_context and is_authorizer_response(self.response):
+            # Flush exception replay
+            if config.exception_replay_enabled:
+                LogsIntakeUploaderV1._instance.periodic()
+
+            if config.encode_authorizer_context and is_authorizer_response(
+                self.response
+            ):
                 self._inject_authorizer_span_headers(
                     event.get("requestContext", {}).get("requestId")
                 )

pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "datadog_lambda"
-version = "6.109.0"
+version = "6.111.0"
 description = "The Datadog AWS Lambda Library"
 authors = ["Datadog, Inc. <dev@datadoghq.com>"]
 license = "Apache-2.0"

datadog_lambda/fips.py (removed)

@@ -1,19 +0,0 @@
-import logging
-import os
-
-is_gov_region = os.environ.get("AWS_REGION", "").startswith("us-gov-")
-
-fips_mode_enabled = (
-    os.environ.get(
-        "DD_LAMBDA_FIPS_MODE",
-        "true" if is_gov_region else "false",
-    ).lower()
-    == "true"
-)
-
-if is_gov_region or fips_mode_enabled:
-    logger = logging.getLogger(__name__)
-    logger.debug(
-        "Python Lambda Layer FIPS mode is %s.",
-        "enabled" if fips_mode_enabled else "not enabled",
-    )

datadog_lambda/version.py (removed from 6.109.0)

@@ -1 +0,0 @@
-__version__ = "6.109.0"