peak-sdk 1.5.0__py3-none-any.whl → 1.6.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
Files changed (46)
  1. peak/_metadata.py +58 -3
  2. peak/_version.py +1 -1
  3. peak/cli/cli.py +3 -2
  4. peak/cli/helpers.py +1 -0
  5. peak/cli/press/blocks/specs.py +2 -2
  6. peak/cli/press/specs.py +4 -2
  7. peak/cli/resources/alerts/__init__.py +35 -0
  8. peak/cli/resources/alerts/emails.py +360 -0
  9. peak/cli/resources/images.py +1 -1
  10. peak/cli/resources/services.py +23 -0
  11. peak/cli/resources/workflows.py +77 -15
  12. peak/cli/ruff.toml +5 -3
  13. peak/compression.py +2 -2
  14. peak/exceptions.py +4 -6
  15. peak/handler.py +3 -5
  16. peak/helpers.py +8 -9
  17. peak/output.py +2 -2
  18. peak/press/apps.py +8 -16
  19. peak/press/blocks.py +8 -16
  20. peak/press/deployments.py +2 -4
  21. peak/press/specs.py +12 -14
  22. peak/resources/__init__.py +3 -2
  23. peak/resources/alerts.py +309 -0
  24. peak/resources/artifacts.py +2 -4
  25. peak/resources/images.py +8 -14
  26. peak/resources/services.py +7 -6
  27. peak/resources/webapps.py +3 -5
  28. peak/resources/workflows.py +103 -13
  29. peak/sample_yaml/resources/emails/send_email.yaml +15 -0
  30. peak/sample_yaml/resources/services/create_or_update_service.yaml +1 -0
  31. peak/sample_yaml/resources/services/create_service.yaml +1 -0
  32. peak/sample_yaml/resources/services/update_service.yaml +1 -0
  33. peak/sample_yaml/resources/workflows/create_or_update_workflow.yaml +28 -0
  34. peak/sample_yaml/resources/workflows/create_workflow.yaml +10 -0
  35. peak/sample_yaml/resources/workflows/patch_workflow.yaml +28 -0
  36. peak/sample_yaml/resources/workflows/update_workflow.yaml +28 -0
  37. peak/telemetry.py +1 -1
  38. peak/template.py +6 -4
  39. peak/tools/logging/__init__.py +26 -268
  40. peak/tools/logging/log_level.py +35 -3
  41. peak/tools/logging/logger.py +389 -0
  42. {peak_sdk-1.5.0.dist-info → peak_sdk-1.6.0.dist-info}/METADATA +6 -7
  43. {peak_sdk-1.5.0.dist-info → peak_sdk-1.6.0.dist-info}/RECORD +46 -41
  44. {peak_sdk-1.5.0.dist-info → peak_sdk-1.6.0.dist-info}/WHEEL +1 -1
  45. {peak_sdk-1.5.0.dist-info → peak_sdk-1.6.0.dist-info}/LICENSE +0 -0
  46. {peak_sdk-1.5.0.dist-info → peak_sdk-1.6.0.dist-info}/entry_points.txt +0 -0
peak/tools/logging/__init__.py
@@ -18,277 +18,35 @@
 # # You should have received a copy of the APACHE LICENSE, VERSION 2.0
 # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
 #
-"""Logging module, a wrapper around `structlog <https://www.structlog.org/en/stable/>`_ library."""
 
-from __future__ import annotations
-
-import logging
-import os
-from typing import Any, List, MutableMapping, Optional, Union
-
-import structlog
-
-from .log_handler import LogHandler
-from .log_level import LogLevel
-from .utils import mask_nested_pii_data
+"""Logging module for Peak SDK."""
 
-__title__ = "logging"
-__author__ = "PEAK AI"
-__license__ = "Apache License, Version 2.0"
-__copyright__ = "2023, Peak AI"
-__status__ = "production"
-__date__ = "28 August 2023"
+from __future__ import annotations
 
-__all__: list[str] = [
-    "get_logger",
+from typing import List
+
+from peak.tools.logging.logger import (
+    DEFAULT_SHARED_PROCESSORS,
+    LOG_LEVEL_NAMES_TO_LOG_LEVEL,
+    LogHandler,
+    LogLevel,
+    LogLevelNames,
+    PeakLogger,
+    default_processors_factory,
+    get_logger,
+    peak_contexts_processor,
+    pii_masking_processor,
+)
+
+__all__: List[str] = [
+    "DEFAULT_SHARED_PROCESSORS",
+    "LOG_LEVEL_NAMES_TO_LOG_LEVEL",
+    "LogHandler",
+    "LogLevelNames",
     "LogLevel",
     "PeakLogger",
-    "LogHandler",
+    "default_processors_factory",
+    "get_logger",
+    "pii_masking_processor",
+    "peak_contexts_processor",
 ]
-
-
-# ---------------------------------------------------------------------------
-# Utility private functions
-# ---------------------------------------------------------------------------
-
-
-def _pii_masking_processor(
-    _: str,
-    __: str,
-    event_dict: MutableMapping[str, Any],
-) -> MutableMapping[str, Any]:
-    """Masks sensitive PII data present in event_dict."""
-    return mask_nested_pii_data(event_dict)
-
-
-def _default_context_processor(
-    _: str,
-    __: str,
-    event_dict: MutableMapping[str, Any],
-) -> MutableMapping[str, Any]:
-    """Add the standard attribute to the event_dict."""
-    attributes_to_add: dict[str, Any] = {
-        "source": "peak-sdk",
-        "runtime": os.getenv("PEAK_RUNTIME"),
-        "press_deployment_id": os.getenv("PRESS_DEPLOYMENT_ID"),
-        "run_id": os.getenv("PEAK_RUN_ID"),
-        "exec_id": os.getenv("PEAK_EXEC_ID"),
-        "stage": os.getenv("STAGE"),
-        "tenant_name": os.getenv("TENANT_NAME", os.getenv("TENANT")),
-        "tenant_id": os.getenv("TENANT_ID"),
-        "api_name": os.getenv("PEAK_API_NAME"),
-        "api_id": os.getenv("PEAK_API_ID"),
-        "step_name": os.getenv("PEAK_STEP_NAME"),
-        "step_id": os.getenv("PEAK_STEP_ID"),
-        "webapp_name": os.getenv("PEAK_WEBAPP_NAME"),
-        "webapp_id": os.getenv("PEAK_WEBAPP_ID"),
-        "workflow_name": os.getenv("PEAK_WORKFLOW_NAME"),
-        "workflow_id": os.getenv("PEAK_WORKFLOW_ID"),
-        "workspace_name": os.getenv("PEAK_WORKSPACE_NAME"),
-        "workspace_id": os.getenv("PEAK_WORKSPACE_ID"),
-        "image_name": os.getenv("PEAK_IMAGE_NAME"),
-        "image_id": os.getenv("PEAK_IMAGE_ID"),
-    }
-
-    for attr, value in attributes_to_add.items():
-        if value:
-            event_dict[attr] = value
-
-    return event_dict
-
-
-# ---------------------------------------------------------------------------
-# Utility functions at module level.
-# Basically delegate everything to the structlog.
-# ---------------------------------------------------------------------------
-
-
-def get_logger(
-    name: Optional[str] = None,
-    level: LogLevel = LogLevel.INFO,
-    pretty_print: Optional[bool] = None,
-    disable_masking: Optional[bool] = None,
-    handlers: Optional[List[LogHandler]] = None,
-    file_name: Optional[str] = None,
-) -> PeakLogger:
-    """Return a logger with the specified settings.
-
-    Args:
-        name (Optional[str], optional): Name of the logger. Defaults to None.
-        level (LogLevel): Log level. Defaults to LogLevel.INFO.
-        pretty_print (Optional[bool], optional): Whether to enable pretty printing for JSON format. Defaults to False.
-        disable_masking (Optional[bool], optional): Whether to disable masking of sensetive data. Defaults to False.
-        handlers (Optional[List[Handlers]], optional): List of log handlers (CONSOLE, FILE). Defaults to CONSOLE.
-        file_name (Optional[str], optional): Filename for FILE handler. Required if FILE handler is used. Defaults to None.
-
-    Returns:
-        PeakLogger: A logger instance configured with the specified settings.
-
-    Raises:
-        ValueError: If the `file_name` is not provided for FILE handler.
-
-    """
-    _log_level: int = logging.DEBUG if os.getenv("DEBUG", "false").lower() == "true" else level.value
-    _processors: list[Any] = [
-        structlog.contextvars.merge_contextvars,
-        _pii_masking_processor,
-        _default_context_processor,
-        structlog.stdlib.filter_by_level,
-        structlog.stdlib.add_logger_name,
-        structlog.stdlib.add_log_level,
-        structlog.stdlib.PositionalArgumentsFormatter(),
-        structlog.processors.TimeStamper(fmt="iso"),
-        structlog.processors.StackInfoRenderer(),
-        structlog.processors.format_exc_info,
-        structlog.processors.UnicodeDecoder(),
-        structlog.processors.EventRenamer("message"),
-    ]
-    if disable_masking:
-        _processors.remove(_pii_masking_processor)
-    if pretty_print:
-        _processors.append(structlog.processors.JSONRenderer(indent=2, sort_keys=True))
-    else:
-        _processors.append(structlog.processors.JSONRenderer(indent=None, sort_keys=True))
-    handlers_list: list[Any] = []
-    if not handlers or LogHandler.CONSOLE in handlers:
-        handlers_list.append(logging.StreamHandler())  # Console handler
-    if handlers and LogHandler.FILE in handlers:
-        if file_name:
-            handlers_list.append(logging.FileHandler(file_name))  # File handler
-        else:
-            msg = "filename must be provided for FILE handler."
-            raise ValueError(msg)
-    logging.basicConfig(level=_log_level, handlers=handlers_list, format="")
-    structlog.configure(
-        processors=_processors,
-        context_class=dict,
-        logger_factory=structlog.stdlib.LoggerFactory(),
-        wrapper_class=structlog.make_filtering_bound_logger(_log_level),
-        cache_logger_on_first_use=True,
-    )
-
-    return PeakLogger(structlog.get_logger(name))
-
-
-# ---------------------------------------------------------------------------
-# Wrapper Logger class
-# ---------------------------------------------------------------------------
-
-
-class PeakLogger:
-    """Wrapper class for logging with various log levels."""
-
-    def __init__(self, logger: Any) -> None:
-        """Initialize with a logger object.
-
-        Args:
-            logger (Any): Logger object to wrap.
-        """
-        self._logger = logger
-
-    def debug(self, message: str, *args: Any, **kwargs: Any) -> None:
-        """Log a DEBUG level message.
-
-        Args:
-            message (str): The log message.
-            *args: Additional positional arguments to be passed to the logger.
-            **kwargs: Additional keyword arguments to be passed to the logger.
-        """
-        self._logger.debug(message, *args, **kwargs)
-
-    def info(self, message: str, *args: Any, **kwargs: Any) -> None:
-        """Log an INFO level message.
-
-        Args:
-            message (str): The log message.
-            *args: Additional positional arguments to be passed to the logger.
-            **kwargs: Additional keyword arguments to be passed to the logger.
-        """
-        self._logger.info(message, *args, **kwargs)
-
-    def warn(self, message: str, *args: Any, **kwargs: Any) -> None:
-        """Log a WARNING level message.
-
-        Args:
-            message (str): The log message.
-            *args: Additional positional arguments to be passed to the logger.
-            **kwargs: Additional keyword arguments to be passed to the logger.
-        """
-        self._logger.warning(message, *args, **kwargs)
-
-    def error(self, message: str, *args: Any, **kwargs: Any) -> None:
-        """Log an ERROR level message.
-
-        Args:
-            message (str): The log message.
-            *args: Additional positional arguments to be passed to the logger.
-            **kwargs: Additional keyword arguments to be passed to the logger.
-        """
-        self._logger.error(message, *args, **kwargs)
-
-    def critical(self, message: str, *args: Any, **kwargs: Any) -> None:
-        """Log a CRITICAL level message.
-
-        Args:
-            message (str): The log message.
-            *args: Additional positional arguments to be passed to the logger.
-            **kwargs: Additional keyword arguments to be passed to the logger.
-        """
-        self._logger.critical(message, *args, **kwargs)
-
-    def exception(self, message: str, *args: Any, **kwargs: Any) -> None:
-        """Log an ERROR level message with exception info.
-
-        Args:
-            message (str): The log message.
-            *args: Additional positional arguments to be passed to the logger.
-            **kwargs: Additional keyword arguments to be passed to the logger.
-        """
-        self._logger.exception(message, *args, **kwargs)
-
-    def bind(self, context: Union[dict[str, Any], None] = None, **kwargs: Any) -> None:
-        """Bind contextual information to the logger, enriching log messages.
-
-        This method allows attaching context data to the logger, such as additional information
-        or system details, to provide more context in log messages.
-
-        Args:
-            context (Union[dict[str, Any], None]): A dictionary or None for contextual information.
-            **kwargs: Additional key-value pairs to enhance context.
-        """
-        if context is None:
-            context = {}
-
-        if kwargs:
-            # file deepcode ignore AttributeLoadOnNone: false positive
-            context.update(kwargs)
-
-        self._logger = self._logger.bind(**context)
-
-    def unbind(self, keys: list[str]) -> None:
-        """Unbind specified keys from the logger's context.
-
-        Args:
-            keys (list[str]): List of keys to unbind.
-        """
-        context: dict[str, Any] | dict[Any, Any] = structlog.get_context(self._logger)
-
-        for key in keys:
-            if key in context:
-                del context[key]
-
-        # Rebind the modified context to the logger
-        self._logger = self._logger.bind(**context)
-
-    def set_log_level(self, level: LogLevel) -> None:
-        """Set the log level of the root logger.
-
-        Args:
-            level (LogLevel): Log level to set.
-        """
-        if self._is_valid_log_level(level):
-            logging.getLogger().setLevel(level.value)
-
-    def _is_valid_log_level(self, level: LogLevel) -> bool:
-        """Check if a given log level is valid."""
-        return level in LogLevel
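
In 1.6.0 the implementation moves to peak/tools/logging/logger.py and this __init__.py becomes a thin re-export, so imports that worked against 1.5.0 should keep resolving. A minimal usage sketch of the re-exported surface, assuming the relocated get_logger keeps a signature compatible with the one removed above (name, level, handlers, file_name); the event and key-value names are illustrative:

# Sketch only: assumes the relocated get_logger accepts the same parameters
# as the 1.5.0 implementation shown in the removed block above.
from peak.tools.logging import LogHandler, LogLevel, get_logger

# A console logger at INFO; add LogHandler.FILE plus file_name to also write to a file.
logger = get_logger(
    name=__name__,
    level=LogLevel.INFO,
    handlers=[LogHandler.CONSOLE],
)

# structlog-style call: keyword arguments become fields on the log event.
logger.info("workflow step finished", step_name="train", rows=1024)

Check peak/tools/logging/logger.py in the 1.6.0 wheel for the authoritative signatures of get_logger and PeakLogger.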
peak/tools/logging/log_level.py
@@ -19,12 +19,14 @@
 # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
 #
 """Supported Log levels."""
+from __future__ import annotations
 
 import logging
-from enum import Enum
+from enum import IntEnum
+from typing import Final, Literal
 
 
-class LogLevel(Enum):
+class LogLevel(IntEnum):
     """Enumeration of log levels to be used in logging.
 
     Each enum member corresponds to a specific log level defined in the logging module.
@@ -34,12 +36,42 @@ class LogLevel(Enum):
         DEBUG: Debug log level. Intended for detailed debugging information.
         INFO: Info log level. Used for general information about program execution.
         WARN: Warning log level. Indicates potential issues or unexpected behavior.
+        WARNING: Warning log level. Indicates potential issues or unexpected behavior.
         ERROR: Error log level. Indicates errors that do not prevent program execution.
+        EXCEPTION: Error log level. Indicates errors that do not prevent program execution.
        CRITICAL: Critical log level. Indicates severe errors that might lead to program failure.
+        FATAL: Critical log level. Indicates severe errors that might lead to program failure.
     """
 
     DEBUG = logging.DEBUG
     INFO = logging.INFO
-    WARN = logging.WARN
+    WARN = logging.WARNING
+    WARNING = logging.WARNING
     ERROR = logging.ERROR
+    EXCEPTION = logging.ERROR
     CRITICAL = logging.CRITICAL
+    FATAL = logging.FATAL
+
+
+LogLevelNames = Literal[
+    "DEBUG",
+    "INFO",
+    "WARN",
+    "WARNING",
+    "ERROR",
+    "EXCEPTION",
+    "CRITICAL",
+    "FATAL",
+]
+
+
+LOG_LEVEL_NAMES_TO_LOG_LEVEL: Final[dict[LogLevelNames, LogLevel]] = {
+    "DEBUG": LogLevel.DEBUG,
+    "INFO": LogLevel.INFO,
+    "WARNING": LogLevel.WARN,
+    "WARN": LogLevel.WARN,
+    "ERROR": LogLevel.ERROR,
+    "EXCEPTION": LogLevel.ERROR,
+    "FATAL": LogLevel.CRITICAL,
+    "CRITICAL": LogLevel.CRITICAL,
+}
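
With LogLevel now an IntEnum and the new LOG_LEVEL_NAMES_TO_LOG_LEVEL mapping, a level can be resolved from a string name and treated as a plain integer. A short sketch using only the names added above; the environment-variable lookup is illustrative:

# Sketch: resolve a level name (e.g. from an env var) to a LogLevel member.
import logging
import os

from peak.tools.logging.log_level import LOG_LEVEL_NAMES_TO_LOG_LEVEL, LogLevel

raw = os.getenv("LOG_LEVEL", "INFO").upper()
level = LOG_LEVEL_NAMES_TO_LOG_LEVEL.get(raw, LogLevel.INFO)

# IntEnum members behave like ints, so they order and compare naturally.
assert LogLevel.WARN == LogLevel.WARNING   # both are logging.WARNING (30)
assert LogLevel.DEBUG < LogLevel.ERROR     # numeric ordering works

# And they are accepted anywhere the stdlib expects an int level.
logging.getLogger("example").setLevel(level)

Note that WARNING, EXCEPTION, and FATAL are value aliases of WARN, ERROR, and CRITICAL respectively, since they share the same numeric levels from the logging module.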