peak-sdk 1.5.0__py3-none-any.whl → 1.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. peak/_metadata.py +80 -5
  2. peak/_version.py +1 -1
  3. peak/cli/args.py +1 -0
  4. peak/cli/cli.py +3 -2
  5. peak/cli/helpers.py +1 -0
  6. peak/cli/press/apps/specs.py +2 -0
  7. peak/cli/press/blocks/specs.py +68 -24
  8. peak/cli/press/deployments.py +41 -0
  9. peak/cli/press/specs.py +4 -2
  10. peak/cli/resources/alerts/__init__.py +35 -0
  11. peak/cli/resources/alerts/emails.py +360 -0
  12. peak/cli/resources/images.py +19 -7
  13. peak/cli/resources/services.py +23 -0
  14. peak/cli/resources/tenants.py +4 -1
  15. peak/cli/resources/workflows.py +81 -19
  16. peak/cli/ruff.toml +5 -3
  17. peak/compression.py +2 -2
  18. peak/exceptions.py +4 -6
  19. peak/handler.py +3 -5
  20. peak/helpers.py +8 -9
  21. peak/output.py +2 -2
  22. peak/press/apps.py +18 -19
  23. peak/press/blocks.py +396 -155
  24. peak/press/deployments.py +30 -4
  25. peak/press/specs.py +12 -14
  26. peak/resources/__init__.py +3 -2
  27. peak/resources/alerts.py +309 -0
  28. peak/resources/artifacts.py +2 -4
  29. peak/resources/images.py +317 -100
  30. peak/resources/services.py +7 -6
  31. peak/resources/webapps.py +3 -5
  32. peak/resources/workflows.py +103 -13
  33. peak/sample_yaml/press/apps/specs/create_app_spec.yaml +2 -0
  34. peak/sample_yaml/press/apps/specs/create_app_spec_release.yaml +2 -0
  35. peak/sample_yaml/press/blocks/specs/service/api/create_block_spec.yaml +102 -0
  36. peak/sample_yaml/press/blocks/specs/service/api/create_block_spec_release.yaml +88 -0
  37. peak/sample_yaml/press/blocks/specs/service/webapp/create_block_spec.yaml +103 -0
  38. peak/sample_yaml/press/blocks/specs/service/webapp/create_block_spec_release.yaml +89 -0
  39. peak/sample_yaml/press/blocks/specs/{create_block_spec.yaml → workflow/create_block_spec.yaml} +20 -1
  40. peak/sample_yaml/press/blocks/specs/{create_block_spec_release.yaml → workflow/create_block_spec_release.yaml} +20 -1
  41. peak/sample_yaml/resources/emails/send_email.yaml +15 -0
  42. peak/sample_yaml/resources/images/dockerfile/create_image.yaml +3 -0
  43. peak/sample_yaml/resources/images/dockerfile/create_image_version.yaml +3 -0
  44. peak/sample_yaml/resources/images/dockerfile/update_version.yaml +3 -0
  45. peak/sample_yaml/resources/images/github/create_image.yaml +3 -0
  46. peak/sample_yaml/resources/images/github/create_image_version.yaml +3 -0
  47. peak/sample_yaml/resources/images/github/update_version.yaml +3 -0
  48. peak/sample_yaml/resources/images/upload/create_image.yaml +3 -0
  49. peak/sample_yaml/resources/images/upload/create_image_version.yaml +3 -0
  50. peak/sample_yaml/resources/images/upload/create_or_update_image.yaml +3 -0
  51. peak/sample_yaml/resources/images/upload/update_version.yaml +3 -0
  52. peak/sample_yaml/resources/services/create_or_update_service.yaml +1 -0
  53. peak/sample_yaml/resources/services/create_service.yaml +1 -0
  54. peak/sample_yaml/resources/services/update_service.yaml +1 -0
  55. peak/sample_yaml/resources/workflows/create_or_update_workflow.yaml +36 -0
  56. peak/sample_yaml/resources/workflows/create_workflow.yaml +19 -1
  57. peak/sample_yaml/resources/workflows/patch_workflow.yaml +36 -0
  58. peak/sample_yaml/resources/workflows/update_workflow.yaml +36 -0
  59. peak/session.py +1 -1
  60. peak/telemetry.py +1 -1
  61. peak/template.py +6 -4
  62. peak/tools/logging/__init__.py +26 -268
  63. peak/tools/logging/log_level.py +35 -3
  64. peak/tools/logging/logger.py +389 -0
  65. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/METADATA +19 -20
  66. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/RECORD +69 -60
  67. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/WHEEL +1 -1
  68. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/LICENSE +0 -0
  69. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,15 @@
1
+ # send_email.yaml
2
+
3
+ body:
4
+ recipients:
5
+ - person1@peak.ai
6
+ - person2@peak.ai
7
+ cc:
8
+ - person3@peak.ai
9
+ bcc:
10
+ - person4@peak.ai
11
+ subject: Hello from Peak
12
+ templateName: my-email-template
13
+ templateParameters:
14
+ name: John
15
+ company: Peak
@@ -13,4 +13,7 @@ body:
13
13
  value: param1
14
14
  - name: PARAM2
15
15
  value: param2
16
+ secrets:
17
+ - SECRET1
18
+ - SECRET2
16
19
  dockerfile: "FROM nginx"
@@ -11,4 +11,7 @@ body:
11
11
  value: param1
12
12
  - name: PARAM2
13
13
  value: param2
14
+ secrets:
15
+ - SECRET1
16
+ - SECRET2
14
17
  dockerfile: "FROM nginx"
@@ -10,4 +10,7 @@ body:
10
10
  value: param1
11
11
  - name: PARAM2
12
12
  value: param2
13
+ secrets:
14
+ - SECRET1
15
+ - SECRET2
13
16
  dockerfile: "FROM nginx"
@@ -16,4 +16,7 @@ body:
16
16
  value: param1
17
17
  - name: PARAM2
18
18
  value: param2
19
+ secrets:
20
+ - SECRET1
21
+ - SECRET2
19
22
  useCache: true
@@ -14,4 +14,7 @@ body:
14
14
  value: param1
15
15
  - name: PARAM2
16
16
  value: param2
17
+ secrets:
18
+ - SECRET1
19
+ - SECRET2
17
20
  useCache: true
@@ -13,4 +13,7 @@ body:
13
13
  value: param1
14
14
  - name: PARAM2
15
15
  value: param2
16
+ secrets:
17
+ - SECRET1
18
+ - SECRET2
16
19
  useCache: true
@@ -13,6 +13,9 @@ body:
13
13
  value: param1
14
14
  - name: PARAM2
15
15
  value: param2
16
+ secrets:
17
+ - SECRET1
18
+ - SECRET2
16
19
  context: .
17
20
  artifact:
18
21
  path: "."
@@ -12,6 +12,9 @@ body:
12
12
  value: param1
13
13
  - name: PARAM2
14
14
  value: param2
15
+ secrets:
16
+ - SECRET1
17
+ - SECRET2
15
18
  useCache: true
16
19
  artifact:
17
20
  path: "."
@@ -13,6 +13,9 @@ body:
13
13
  value: param1
14
14
  - name: PARAM2
15
15
  value: param2
16
+ secrets:
17
+ - SECRET1
18
+ - SECRET2
16
19
  useCache: true
17
20
  artifact:
18
21
  path: "."
@@ -9,6 +9,9 @@ body:
9
9
  buildArguments:
10
10
  - name: PARAM1
11
11
  value: param1
12
+ secrets:
13
+ - SECRET1
14
+ - SECRET2
12
15
  useCache: true
13
16
  artifact:
14
17
  path: "."
@@ -22,3 +22,4 @@ body:
22
22
  python
23
23
  app.py
24
24
  healthCheckURL: /health
25
+ minInstances: 1
@@ -22,3 +22,4 @@ body:
22
22
  python
23
23
  app.py
24
24
  healthCheckURL: /health
25
+ minInstances: 1
@@ -20,3 +20,4 @@ body:
20
20
  python
21
21
  app.py
22
22
  healthCheckURL: /health
23
+ minInstances: 1
@@ -4,6 +4,42 @@ body:
4
4
  name: my-new-workflow
5
5
  triggers:
6
6
  - webhook: true
7
+ watchers:
8
+ - user: abc@peak.ai
9
+ events:
10
+ success: false
11
+ fail: true
12
+ - webhook:
13
+ name: info
14
+ url: "https://abc.com/post"
15
+ payload: |
16
+ {
17
+ "system": "external_system",
18
+ "action": "update",
19
+ "data": {
20
+ "field": "value",
21
+ "timestamp": "2024-05-20T12:00:00Z"
22
+ }
23
+ }
24
+ events:
25
+ success: false
26
+ fail: true
27
+ runtimeExceeded: 10
28
+ - email:
29
+ name: "email-watcher-1"
30
+ recipients:
31
+ to:
32
+ - user1@peak.ai
33
+ - user2@peak.ai
34
+ events:
35
+ success: false
36
+ fail: true
37
+ runtimeExceeded: 10
38
+ retryOptions:
39
+ duration: 5
40
+ exitCodes: [1, 2]
41
+ exponentialBackoff: true
42
+ numberOfRetries: 3
7
43
  tags:
8
44
  - name: CLI
9
45
  steps:
@@ -12,7 +12,25 @@ body:
12
12
  - webhook:
13
13
  name: info
14
14
  url: "https://abc.com/post"
15
- payload: '{ "pingback-url": "https:/workflow/123" }'
15
+ payload: |
16
+ {
17
+ "system": "external_system",
18
+ "action": "update",
19
+ "data": {
20
+ "field": "value",
21
+ "timestamp": "2024-05-20T12:00:00Z"
22
+ }
23
+ }
24
+ events:
25
+ success: false
26
+ fail: true
27
+ runtimeExceeded: 10
28
+ - email:
29
+ name: "email-watcher-1"
30
+ recipients:
31
+ to:
32
+ - user1@peak.ai
33
+ - user2@peak.ai
16
34
  events:
17
35
  success: false
18
36
  fail: true
@@ -4,6 +4,42 @@ body:
4
4
  name: "updated-workflow"
5
5
  triggers:
6
6
  - {}
7
+ watchers:
8
+ - user: abc@peak.ai
9
+ events:
10
+ success: false
11
+ fail: true
12
+ - webhook:
13
+ name: info
14
+ url: "https://abc.com/post"
15
+ payload: |
16
+ {
17
+ "system": "external_system",
18
+ "action": "update",
19
+ "data": {
20
+ "field": "value",
21
+ "timestamp": "2024-05-20T12:00:00Z"
22
+ }
23
+ }
24
+ events:
25
+ success: false
26
+ fail: true
27
+ runtimeExceeded: 10
28
+ - email:
29
+ name: "email-watcher-1"
30
+ recipients:
31
+ to:
32
+ - user1@peak.ai
33
+ - user2@peak.ai
34
+ events:
35
+ success: false
36
+ fail: true
37
+ runtimeExceeded: 10
38
+ retryOptions:
39
+ duration: 5
40
+ exitCodes: [1, 2]
41
+ exponentialBackoff: true
42
+ numberOfRetries: 3
7
43
  steps:
8
44
  step1:
9
45
  imageId: 100
@@ -4,6 +4,42 @@ body:
4
4
  name: updated-workflow
5
5
  triggers:
6
6
  - webhook: true
7
+ watchers:
8
+ - user: abc@peak.ai
9
+ events:
10
+ success: false
11
+ fail: true
12
+ - webhook:
13
+ name: info
14
+ url: "https://abc.com/post"
15
+ payload: |
16
+ {
17
+ "system": "external_system",
18
+ "action": "update",
19
+ "data": {
20
+ "field": "value",
21
+ "timestamp": "2024-05-20T12:00:00Z"
22
+ }
23
+ }
24
+ events:
25
+ success: false
26
+ fail: true
27
+ runtimeExceeded: 10
28
+ - email:
29
+ name: "email-watcher-1"
30
+ recipients:
31
+ to:
32
+ - user1@peak.ai
33
+ - user2@peak.ai
34
+ events:
35
+ success: false
36
+ fail: true
37
+ runtimeExceeded: 10
38
+ retryOptions:
39
+ duration: 5
40
+ exitCodes: [1, 2]
41
+ exponentialBackoff: true
42
+ numberOfRetries: 3
7
43
  tags:
8
44
  - name: CLI
9
45
  steps:
peak/session.py CHANGED
@@ -179,7 +179,7 @@ class Session:
179
179
  page_count = response["pageCount"]
180
180
  yield from response[response_key]
181
181
  page_number += 1
182
- return f"No more {response_key} to list"
182
+ return f"No more {response_key} to list" # type: ignore[return-value]
183
183
 
184
184
  def create_download_request(
185
185
  self,
peak/telemetry.py CHANGED
@@ -80,7 +80,7 @@ def telemetry(make_request: F) -> F:
80
80
  """
81
81
  stage = Stage.PROD
82
82
  if session_meta:
83
- stage = session_meta["stage"] if "stage" in session_meta else Stage.PROD
83
+ stage = session_meta.get("stage", Stage.PROD)
84
84
  base_domain = get_base_domain(stage.value, "service")
85
85
  return f"{base_domain}/resource-usage/api/v1/telemetry"
86
86
 
peak/template.py CHANGED
@@ -38,10 +38,12 @@ from peak.helpers import remove_none_values
38
38
  def _parse_jinja_template(template_path: Path, params: Dict[str, Any]) -> str:
39
39
  """Read, parse and render the Jinja template text."""
40
40
  jinja_loader = _CustomJinjaLoader()
41
- jinja_env = jinja2.Environment( # TODO: show warning if variable not found in params # noqa: TD002, TD003, RUF100
42
- loader=jinja_loader,
43
- autoescape=False, # noqa: S701
44
- extensions=[_IncludeWithIndentation],
41
+ jinja_env = (
42
+ jinja2.Environment( # TODO: show warning if variable not found in params # noqa: TD002, TD003, RUF100, FIX002
43
+ loader=jinja_loader,
44
+ autoescape=False, # noqa: S701
45
+ extensions=[_IncludeWithIndentation],
46
+ )
45
47
  )
46
48
  jinja_template: jinja2.Template = jinja_env.get_template(str(template_path))
47
49
  return jinja_template.render(params, os_env=os.environ)
@@ -18,277 +18,35 @@
18
18
  # # You should have received a copy of the APACHE LICENSE, VERSION 2.0
19
19
  # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
20
20
  #
21
- """Logging module, a wrapper around `structlog <https://www.structlog.org/en/stable/>`_ library."""
22
21
 
23
- from __future__ import annotations
24
-
25
- import logging
26
- import os
27
- from typing import Any, List, MutableMapping, Optional, Union
28
-
29
- import structlog
30
-
31
- from .log_handler import LogHandler
32
- from .log_level import LogLevel
33
- from .utils import mask_nested_pii_data
22
+ """Logging module for Peak SDK."""
34
23
 
35
- __title__ = "logging"
36
- __author__ = "PEAK AI"
37
- __license__ = "Apache License, Version 2.0"
38
- __copyright__ = "2023, Peak AI"
39
- __status__ = "production"
40
- __date__ = "28 August 2023"
24
+ from __future__ import annotations
41
25
 
42
- __all__: list[str] = [
43
- "get_logger",
26
+ from typing import List
27
+
28
+ from peak.tools.logging.logger import (
29
+ DEFAULT_SHARED_PROCESSORS,
30
+ LOG_LEVEL_NAMES_TO_LOG_LEVEL,
31
+ LogHandler,
32
+ LogLevel,
33
+ LogLevelNames,
34
+ PeakLogger,
35
+ default_processors_factory,
36
+ get_logger,
37
+ peak_contexts_processor,
38
+ pii_masking_processor,
39
+ )
40
+
41
+ __all__: List[str] = [
42
+ "DEFAULT_SHARED_PROCESSORS",
43
+ "LOG_LEVEL_NAMES_TO_LOG_LEVEL",
44
+ "LogHandler",
45
+ "LogLevelNames",
44
46
  "LogLevel",
45
47
  "PeakLogger",
46
- "LogHandler",
48
+ "default_processors_factory",
49
+ "get_logger",
50
+ "pii_masking_processor",
51
+ "peak_contexts_processor",
47
52
  ]
48
-
49
-
50
- # ---------------------------------------------------------------------------
51
- # Utility private functions
52
- # ---------------------------------------------------------------------------
53
-
54
-
55
- def _pii_masking_processor(
56
- _: str,
57
- __: str,
58
- event_dict: MutableMapping[str, Any],
59
- ) -> MutableMapping[str, Any]:
60
- """Masks sensitive PII data present in event_dict."""
61
- return mask_nested_pii_data(event_dict)
62
-
63
-
64
- def _default_context_processor(
65
- _: str,
66
- __: str,
67
- event_dict: MutableMapping[str, Any],
68
- ) -> MutableMapping[str, Any]:
69
- """Add the standard attribute to the event_dict."""
70
- attributes_to_add: dict[str, Any] = {
71
- "source": "peak-sdk",
72
- "runtime": os.getenv("PEAK_RUNTIME"),
73
- "press_deployment_id": os.getenv("PRESS_DEPLOYMENT_ID"),
74
- "run_id": os.getenv("PEAK_RUN_ID"),
75
- "exec_id": os.getenv("PEAK_EXEC_ID"),
76
- "stage": os.getenv("STAGE"),
77
- "tenant_name": os.getenv("TENANT_NAME", os.getenv("TENANT")),
78
- "tenant_id": os.getenv("TENANT_ID"),
79
- "api_name": os.getenv("PEAK_API_NAME"),
80
- "api_id": os.getenv("PEAK_API_ID"),
81
- "step_name": os.getenv("PEAK_STEP_NAME"),
82
- "step_id": os.getenv("PEAK_STEP_ID"),
83
- "webapp_name": os.getenv("PEAK_WEBAPP_NAME"),
84
- "webapp_id": os.getenv("PEAK_WEBAPP_ID"),
85
- "workflow_name": os.getenv("PEAK_WORKFLOW_NAME"),
86
- "workflow_id": os.getenv("PEAK_WORKFLOW_ID"),
87
- "workspace_name": os.getenv("PEAK_WORKSPACE_NAME"),
88
- "workspace_id": os.getenv("PEAK_WORKSPACE_ID"),
89
- "image_name": os.getenv("PEAK_IMAGE_NAME"),
90
- "image_id": os.getenv("PEAK_IMAGE_ID"),
91
- }
92
-
93
- for attr, value in attributes_to_add.items():
94
- if value:
95
- event_dict[attr] = value
96
-
97
- return event_dict
98
-
99
-
100
- # ---------------------------------------------------------------------------
101
- # Utility functions at module level.
102
- # Basically delegate everything to the structlog.
103
- # ---------------------------------------------------------------------------
104
-
105
-
106
- def get_logger(
107
- name: Optional[str] = None,
108
- level: LogLevel = LogLevel.INFO,
109
- pretty_print: Optional[bool] = None,
110
- disable_masking: Optional[bool] = None,
111
- handlers: Optional[List[LogHandler]] = None,
112
- file_name: Optional[str] = None,
113
- ) -> PeakLogger:
114
- """Return a logger with the specified settings.
115
-
116
- Args:
117
- name (Optional[str], optional): Name of the logger. Defaults to None.
118
- level (LogLevel): Log level. Defaults to LogLevel.INFO.
119
- pretty_print (Optional[bool], optional): Whether to enable pretty printing for JSON format. Defaults to False.
120
- disable_masking (Optional[bool], optional): Whether to disable masking of sensitive data. Defaults to False.
121
- handlers (Optional[List[Handlers]], optional): List of log handlers (CONSOLE, FILE). Defaults to CONSOLE.
122
- file_name (Optional[str], optional): Filename for FILE handler. Required if FILE handler is used. Defaults to None.
123
-
124
- Returns:
125
- PeakLogger: A logger instance configured with the specified settings.
126
-
127
- Raises:
128
- ValueError: If the `file_name` is not provided for FILE handler.
129
-
130
- """
131
- _log_level: int = logging.DEBUG if os.getenv("DEBUG", "false").lower() == "true" else level.value
132
- _processors: list[Any] = [
133
- structlog.contextvars.merge_contextvars,
134
- _pii_masking_processor,
135
- _default_context_processor,
136
- structlog.stdlib.filter_by_level,
137
- structlog.stdlib.add_logger_name,
138
- structlog.stdlib.add_log_level,
139
- structlog.stdlib.PositionalArgumentsFormatter(),
140
- structlog.processors.TimeStamper(fmt="iso"),
141
- structlog.processors.StackInfoRenderer(),
142
- structlog.processors.format_exc_info,
143
- structlog.processors.UnicodeDecoder(),
144
- structlog.processors.EventRenamer("message"),
145
- ]
146
- if disable_masking:
147
- _processors.remove(_pii_masking_processor)
148
- if pretty_print:
149
- _processors.append(structlog.processors.JSONRenderer(indent=2, sort_keys=True))
150
- else:
151
- _processors.append(structlog.processors.JSONRenderer(indent=None, sort_keys=True))
152
- handlers_list: list[Any] = []
153
- if not handlers or LogHandler.CONSOLE in handlers:
154
- handlers_list.append(logging.StreamHandler()) # Console handler
155
- if handlers and LogHandler.FILE in handlers:
156
- if file_name:
157
- handlers_list.append(logging.FileHandler(file_name)) # File handler
158
- else:
159
- msg = "filename must be provided for FILE handler."
160
- raise ValueError(msg)
161
- logging.basicConfig(level=_log_level, handlers=handlers_list, format="")
162
- structlog.configure(
163
- processors=_processors,
164
- context_class=dict,
165
- logger_factory=structlog.stdlib.LoggerFactory(),
166
- wrapper_class=structlog.make_filtering_bound_logger(_log_level),
167
- cache_logger_on_first_use=True,
168
- )
169
-
170
- return PeakLogger(structlog.get_logger(name))
171
-
172
-
173
- # ---------------------------------------------------------------------------
174
- # Wrapper Logger class
175
- # ---------------------------------------------------------------------------
176
-
177
-
178
- class PeakLogger:
179
- """Wrapper class for logging with various log levels."""
180
-
181
- def __init__(self, logger: Any) -> None:
182
- """Initialize with a logger object.
183
-
184
- Args:
185
- logger (Any): Logger object to wrap.
186
- """
187
- self._logger = logger
188
-
189
- def debug(self, message: str, *args: Any, **kwargs: Any) -> None:
190
- """Log a DEBUG level message.
191
-
192
- Args:
193
- message (str): The log message.
194
- *args: Additional positional arguments to be passed to the logger.
195
- **kwargs: Additional keyword arguments to be passed to the logger.
196
- """
197
- self._logger.debug(message, *args, **kwargs)
198
-
199
- def info(self, message: str, *args: Any, **kwargs: Any) -> None:
200
- """Log an INFO level message.
201
-
202
- Args:
203
- message (str): The log message.
204
- *args: Additional positional arguments to be passed to the logger.
205
- **kwargs: Additional keyword arguments to be passed to the logger.
206
- """
207
- self._logger.info(message, *args, **kwargs)
208
-
209
- def warn(self, message: str, *args: Any, **kwargs: Any) -> None:
210
- """Log a WARNING level message.
211
-
212
- Args:
213
- message (str): The log message.
214
- *args: Additional positional arguments to be passed to the logger.
215
- **kwargs: Additional keyword arguments to be passed to the logger.
216
- """
217
- self._logger.warning(message, *args, **kwargs)
218
-
219
- def error(self, message: str, *args: Any, **kwargs: Any) -> None:
220
- """Log an ERROR level message.
221
-
222
- Args:
223
- message (str): The log message.
224
- *args: Additional positional arguments to be passed to the logger.
225
- **kwargs: Additional keyword arguments to be passed to the logger.
226
- """
227
- self._logger.error(message, *args, **kwargs)
228
-
229
- def critical(self, message: str, *args: Any, **kwargs: Any) -> None:
230
- """Log a CRITICAL level message.
231
-
232
- Args:
233
- message (str): The log message.
234
- *args: Additional positional arguments to be passed to the logger.
235
- **kwargs: Additional keyword arguments to be passed to the logger.
236
- """
237
- self._logger.critical(message, *args, **kwargs)
238
-
239
- def exception(self, message: str, *args: Any, **kwargs: Any) -> None:
240
- """Log an ERROR level message with exception info.
241
-
242
- Args:
243
- message (str): The log message.
244
- *args: Additional positional arguments to be passed to the logger.
245
- **kwargs: Additional keyword arguments to be passed to the logger.
246
- """
247
- self._logger.exception(message, *args, **kwargs)
248
-
249
- def bind(self, context: Union[dict[str, Any], None] = None, **kwargs: Any) -> None:
250
- """Bind contextual information to the logger, enriching log messages.
251
-
252
- This method allows attaching context data to the logger, such as additional information
253
- or system details, to provide more context in log messages.
254
-
255
- Args:
256
- context (Union[dict[str, Any], None]): A dictionary or None for contextual information.
257
- **kwargs: Additional key-value pairs to enhance context.
258
- """
259
- if context is None:
260
- context = {}
261
-
262
- if kwargs:
263
- # file deepcode ignore AttributeLoadOnNone: false positive
264
- context.update(kwargs)
265
-
266
- self._logger = self._logger.bind(**context)
267
-
268
- def unbind(self, keys: list[str]) -> None:
269
- """Unbind specified keys from the logger's context.
270
-
271
- Args:
272
- keys (list[str]): List of keys to unbind.
273
- """
274
- context: dict[str, Any] | dict[Any, Any] = structlog.get_context(self._logger)
275
-
276
- for key in keys:
277
- if key in context:
278
- del context[key]
279
-
280
- # Rebind the modified context to the logger
281
- self._logger = self._logger.bind(**context)
282
-
283
- def set_log_level(self, level: LogLevel) -> None:
284
- """Set the log level of the root logger.
285
-
286
- Args:
287
- level (LogLevel): Log level to set.
288
- """
289
- if self._is_valid_log_level(level):
290
- logging.getLogger().setLevel(level.value)
291
-
292
- def _is_valid_log_level(self, level: LogLevel) -> bool:
293
- """Check if a given log level is valid."""
294
- return level in LogLevel