peak-sdk 1.5.0__py3-none-any.whl → 1.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. peak/_metadata.py +80 -5
  2. peak/_version.py +1 -1
  3. peak/cli/args.py +1 -0
  4. peak/cli/cli.py +3 -2
  5. peak/cli/helpers.py +1 -0
  6. peak/cli/press/apps/specs.py +2 -0
  7. peak/cli/press/blocks/specs.py +68 -24
  8. peak/cli/press/deployments.py +41 -0
  9. peak/cli/press/specs.py +4 -2
  10. peak/cli/resources/alerts/__init__.py +35 -0
  11. peak/cli/resources/alerts/emails.py +360 -0
  12. peak/cli/resources/images.py +19 -7
  13. peak/cli/resources/services.py +23 -0
  14. peak/cli/resources/tenants.py +4 -1
  15. peak/cli/resources/workflows.py +81 -19
  16. peak/cli/ruff.toml +5 -3
  17. peak/compression.py +2 -2
  18. peak/exceptions.py +4 -6
  19. peak/handler.py +3 -5
  20. peak/helpers.py +8 -9
  21. peak/output.py +2 -2
  22. peak/press/apps.py +18 -19
  23. peak/press/blocks.py +396 -155
  24. peak/press/deployments.py +30 -4
  25. peak/press/specs.py +12 -14
  26. peak/resources/__init__.py +3 -2
  27. peak/resources/alerts.py +309 -0
  28. peak/resources/artifacts.py +2 -4
  29. peak/resources/images.py +317 -100
  30. peak/resources/services.py +7 -6
  31. peak/resources/webapps.py +3 -5
  32. peak/resources/workflows.py +103 -13
  33. peak/sample_yaml/press/apps/specs/create_app_spec.yaml +2 -0
  34. peak/sample_yaml/press/apps/specs/create_app_spec_release.yaml +2 -0
  35. peak/sample_yaml/press/blocks/specs/service/api/create_block_spec.yaml +102 -0
  36. peak/sample_yaml/press/blocks/specs/service/api/create_block_spec_release.yaml +88 -0
  37. peak/sample_yaml/press/blocks/specs/service/webapp/create_block_spec.yaml +103 -0
  38. peak/sample_yaml/press/blocks/specs/service/webapp/create_block_spec_release.yaml +89 -0
  39. peak/sample_yaml/press/blocks/specs/{create_block_spec.yaml → workflow/create_block_spec.yaml} +20 -1
  40. peak/sample_yaml/press/blocks/specs/{create_block_spec_release.yaml → workflow/create_block_spec_release.yaml} +20 -1
  41. peak/sample_yaml/resources/emails/send_email.yaml +15 -0
  42. peak/sample_yaml/resources/images/dockerfile/create_image.yaml +3 -0
  43. peak/sample_yaml/resources/images/dockerfile/create_image_version.yaml +3 -0
  44. peak/sample_yaml/resources/images/dockerfile/update_version.yaml +3 -0
  45. peak/sample_yaml/resources/images/github/create_image.yaml +3 -0
  46. peak/sample_yaml/resources/images/github/create_image_version.yaml +3 -0
  47. peak/sample_yaml/resources/images/github/update_version.yaml +3 -0
  48. peak/sample_yaml/resources/images/upload/create_image.yaml +3 -0
  49. peak/sample_yaml/resources/images/upload/create_image_version.yaml +3 -0
  50. peak/sample_yaml/resources/images/upload/create_or_update_image.yaml +3 -0
  51. peak/sample_yaml/resources/images/upload/update_version.yaml +3 -0
  52. peak/sample_yaml/resources/services/create_or_update_service.yaml +1 -0
  53. peak/sample_yaml/resources/services/create_service.yaml +1 -0
  54. peak/sample_yaml/resources/services/update_service.yaml +1 -0
  55. peak/sample_yaml/resources/workflows/create_or_update_workflow.yaml +36 -0
  56. peak/sample_yaml/resources/workflows/create_workflow.yaml +19 -1
  57. peak/sample_yaml/resources/workflows/patch_workflow.yaml +36 -0
  58. peak/sample_yaml/resources/workflows/update_workflow.yaml +36 -0
  59. peak/session.py +1 -1
  60. peak/telemetry.py +1 -1
  61. peak/template.py +6 -4
  62. peak/tools/logging/__init__.py +26 -268
  63. peak/tools/logging/log_level.py +35 -3
  64. peak/tools/logging/logger.py +389 -0
  65. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/METADATA +19 -20
  66. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/RECORD +69 -60
  67. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/WHEEL +1 -1
  68. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/LICENSE +0 -0
  69. {peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/entry_points.txt +0 -0
peak/tools/logging/log_level.py
@@ -19,12 +19,14 @@
 # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
 #
 """Supported Log levels."""
+from __future__ import annotations
 
 import logging
-from enum import Enum
+from enum import IntEnum
+from typing import Final, Literal
 
 
-class LogLevel(Enum):
+class LogLevel(IntEnum):
     """Enumeration of log levels to be used in logging.
 
     Each enum member corresponds to a specific log level defined in the logging module.
@@ -34,12 +36,42 @@ class LogLevel(Enum):
         DEBUG: Debug log level. Intended for detailed debugging information.
        INFO: Info log level. Used for general information about program execution.
        WARN: Warning log level. Indicates potential issues or unexpected behavior.
+        WARNING: Warning log level. Indicates potential issues or unexpected behavior.
        ERROR: Error log level. Indicates errors that do not prevent program execution.
+        EXCEPTION: Error log level. Indicates errors that do not prevent program execution.
        CRITICAL: Critical log level. Indicates severe errors that might lead to program failure.
+        FATAL: Critical log level. Indicates severe errors that might lead to program failure.
     """
 
     DEBUG = logging.DEBUG
     INFO = logging.INFO
-    WARN = logging.WARN
+    WARN = logging.WARNING
+    WARNING = logging.WARNING
     ERROR = logging.ERROR
+    EXCEPTION = logging.ERROR
     CRITICAL = logging.CRITICAL
+    FATAL = logging.FATAL
+
+
+LogLevelNames = Literal[
+    "DEBUG",
+    "INFO",
+    "WARN",
+    "WARNING",
+    "ERROR",
+    "EXCEPTION",
+    "CRITICAL",
+    "FATAL",
+]
+
+
+LOG_LEVEL_NAMES_TO_LOG_LEVEL: Final[dict[LogLevelNames, LogLevel]] = {
+    "DEBUG": LogLevel.DEBUG,
+    "INFO": LogLevel.INFO,
+    "WARNING": LogLevel.WARN,
+    "WARN": LogLevel.WARN,
+    "ERROR": LogLevel.ERROR,
+    "EXCEPTION": LogLevel.ERROR,
+    "FATAL": LogLevel.CRITICAL,
+    "CRITICAL": LogLevel.CRITICAL,
+}
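
The switch to `IntEnum` plus the new `LOG_LEVEL_NAMES_TO_LOG_LEVEL` mapping make level names, `LogLevel` members, and the stdlib integer constants interchangeable. A minimal sketch (not part of the diff; the import path is inferred from the file layout above):

    import logging

    from peak.tools.logging.log_level import LOG_LEVEL_NAMES_TO_LOG_LEVEL, LogLevel

    # Alias names such as "EXCEPTION" resolve to the canonical members.
    level = LOG_LEVEL_NAMES_TO_LOG_LEVEL["EXCEPTION"]
    assert level is LogLevel.ERROR

    # Because LogLevel is now an IntEnum, members compare equal to the stdlib
    # integer constants and can be passed straight to the logging module.
    assert LogLevel.WARN == logging.WARNING
    logging.getLogger("example").setLevel(LogLevel.DEBUG)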
peak/tools/logging/logger.py (new file)
@@ -0,0 +1,389 @@
+#
+# # Copyright © 2024 Peak AI Limited. or its affiliates. All Rights Reserved.
+# #
+# # Licensed under the Apache License, Version 2.0 (the "License"). You
+# # may not use this file except in compliance with the License. A copy of
+# # the License is located at:
+# #
+# #     https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
+# #
+# # or in the "license" file accompanying this file. This file is
+# # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# # ANY KIND, either express or implied. See the License for the specific
+# # language governing permissions and limitations under the License.
+# #
+# # This file is part of the peak-sdk.
+# # see (https://github.com/PeakBI/peak-sdk)
+# #
+# # You should have received a copy of the APACHE LICENSE, VERSION 2.0
+# # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
+#
+"""Logging module, a wrapper around `structlog <https://www.structlog.org/en/stable/>`_ library."""
+
+from __future__ import annotations
+
+import functools
+import inspect
+import logging
+import os
+import sys
+from types import MappingProxyType
+from typing import Any, Callable, Final, Hashable, List, MutableMapping, Optional, Tuple, Union
+
+import orjson
+import structlog
+
+from .log_handler import LogHandler
+from .log_level import LOG_LEVEL_NAMES_TO_LOG_LEVEL, LogLevel, LogLevelNames
+from .utils import mask_nested_pii_data
+
+__title__ = "logging"
+__author__ = "PEAK AI"
+__license__ = "Apache License, Version 2.0"
+__copyright__ = "2024, Peak AI"
+__status__ = "production"
+__date__ = "14 March 2024"
+
+__all__: List[str] = [
+    "DEFAULT_SHARED_PROCESSORS",
+    "LOG_LEVEL_NAMES_TO_LOG_LEVEL",
+    "LogHandler",
+    "LogLevelNames",
+    "LogLevel",
+    "PeakLogger",
+    "default_processors_factory",
+    "get_logger",
+    "pii_masking_processor",
+    "peak_contexts_processor",
+]
+
+
+# ---------------------------------------------------------------------------
+# Utility private functions
+# ---------------------------------------------------------------------------
+
+
+def pii_masking_processor(
+    _: str,
+    __: str,
+    event_dict: MutableMapping[str, Any],
+) -> MutableMapping[str, Any]:
+    """Masks sensitive PII data present in event_dict."""
+    return mask_nested_pii_data(event_dict)
+
+
+def peak_contexts_processor(
+    _: str,
+    __: str,
+    event_dict: MutableMapping[str, Any],
+) -> MutableMapping[str, Any]:
+    """Add the standard attribute to the event_dict."""
+    attributes_to_add: dict[str, Any] = {
+        "source": "peak-sdk",
+        "runtime": os.getenv("PEAK_RUNTIME"),
+        "press_deployment_id": os.getenv("PRESS_DEPLOYMENT_ID"),
+        "run_id": os.getenv("PEAK_RUN_ID"),
+        "exec_id": os.getenv("PEAK_EXEC_ID"),
+        "stage": os.getenv("STAGE"),
+        "tenant_name": os.getenv("TENANT_NAME", os.getenv("TENANT")),
+        "tenant_id": os.getenv("TENANT_ID"),
+        "api_name": os.getenv("PEAK_API_NAME"),
+        "api_id": os.getenv("PEAK_API_ID"),
+        "step_name": os.getenv("PEAK_STEP_NAME"),
+        "step_id": os.getenv("PEAK_STEP_ID"),
+        "webapp_name": os.getenv("PEAK_WEBAPP_NAME"),
+        "webapp_id": os.getenv("PEAK_WEBAPP_ID"),
+        "workflow_name": os.getenv("PEAK_WORKFLOW_NAME"),
+        "workflow_id": os.getenv("PEAK_WORKFLOW_ID"),
+        "workspace_name": os.getenv("PEAK_WORKSPACE_NAME"),
+        "workspace_id": os.getenv("PEAK_WORKSPACE_ID"),
+        "image_name": os.getenv("PEAK_IMAGE_NAME"),
+        "image_id": os.getenv("PEAK_IMAGE_ID"),
+    }
+
+    for attr, value in attributes_to_add.items():
+        if value:
+            event_dict[attr] = value
+
+    return event_dict
+
+
+# ---------------------------------------------------------------------------
+# Utility functions at module level for main logger factory
+# ---------------------------------------------------------------------------
+
+
+DEFAULT_SHARED_PROCESSORS: Tuple[structlog.types.Processor | Any, ...] = (
+    structlog.contextvars.merge_contextvars,
+    peak_contexts_processor,
+    structlog.stdlib.filter_by_level,
+    structlog.stdlib.add_logger_name,
+    structlog.stdlib.add_log_level,
+    structlog.stdlib.PositionalArgumentsFormatter(),
+    pii_masking_processor,
+    structlog.processors.TimeStamper(fmt="iso", utc=True),
+    structlog.processors.StackInfoRenderer(),
+    structlog.processors.format_exc_info,
+    structlog.processors.UnicodeDecoder(),
+    structlog.processors.EventRenamer("message"),
+)
+
+_ORJSON_OPTS: Final[int] = (
+    orjson.OPT_SERIALIZE_NUMPY
+    | orjson.OPT_SERIALIZE_DATACLASS
+    | orjson.OPT_SERIALIZE_UUID
+    | orjson.OPT_NON_STR_KEYS
+    | orjson.OPT_SORT_KEYS
+)
+
+
+def _orjson_serializer(
+    obj: Any,
+    sort_keys: Optional[bool] = None,
+    default: Callable[[Any], Any] = str,
+) -> str:
+    """Custom serializer using orjson.dumps for structlog."""
+    apply_opts: int = (_ORJSON_OPTS | orjson.OPT_SORT_KEYS) if sort_keys else _ORJSON_OPTS
+
+    return orjson.dumps(obj, option=apply_opts, default=default).decode("utf-8")
+
+
+@functools.lru_cache(maxsize=2, typed=True)  # Only 2 different combinations of disable_masking are possible
+def default_processors_factory(
+    disable_masking: Optional[bool],
+) -> list[structlog.types.Processor | Any]:
+    """Return the default processors for PeakLogger.
+
+    Args:
+        disable_masking (Optional[bool], optional): Whether to disable masking of sensitive data. Defaults to False.
+
+    Returns:
+        list[structlog.types.Processor | Any]: List of processors to be used by the logger.
+    """
+    _processors = list(DEFAULT_SHARED_PROCESSORS)
+
+    if disable_masking:
+        _processors.remove(pii_masking_processor)
+
+    # add renderer based on the environment
+    if sys.stdout.isatty():
+        # Pretty printing when we run in a terminal session.
+        _processors.remove(structlog.processors.format_exc_info)
+        _processors.append(
+            structlog.dev.ConsoleRenderer(
+                colors=True,
+                event_key="message",
+                timestamp_key="timestamp",
+                exception_formatter=structlog.dev.RichTracebackFormatter(color_system="truecolor"),
+            ),
+        )
+    else:
+        # Print JSON when we run in production
+        _processors.append(structlog.processors.JSONRenderer(serializer=_orjson_serializer, sort_keys=True))
+
+    return _processors
+
+
+@functools.lru_cache(maxsize=128, typed=True)
+def _handle_and_patch_processor_factory_kwargs(
+    func: Callable[..., List[structlog.types.Processor | Any]],
+    **kwargs: Hashable,
+) -> List[structlog.types.Processor | Any]:
+    """Handle keyword arguments for custom_processors_factory using inspect.signature, additionally patch the processors list to include EventRenamer in the right position if not already present.
+
+    Unknown keyword arguments are ignored.
+
+    Args:
+        func (Callable[..., List[structlog.types.Processor | Any]]): Custom processor factory function.
+        **kwargs: Additional keyword arguments to be passed to the custom_processors_factory, if provided.
+            kwargs received by the factory function must be hashable else TypeError will be raised by this wrapper.
+
+    Returns:
+        List[structlog.types.Processor | Any]: List of processors to be used by the logger.
+
+    Raises:
+        ValueError: If multiple renderers are found in the processor factory's returned processors list.
+    """
+    func_params: MappingProxyType[str, inspect.Parameter] = inspect.signature(func).parameters
+    _processors = func(**{k: v for k, v in kwargs.items() if k in func_params})
+
+    if "structlog.processors.EventRenamer" not in str(_processors):
+
+        # find index of KeyValueRenderer/JSONRenderer/ConsoleRenderer and push EventRenamer to before either of them
+        indices_for_insertion: list[int] = [
+            _processors.index(processor)
+            for processor in _processors
+            if getattr(processor, "__name__", processor.__class__.__name__)
+            in ("KeyValueRenderer", "JSONRenderer", "ConsoleRenderer")
+        ]
+
+        if len(indices_for_insertion) > 1:
+            multiple_renderer_error_msg: str = f"""
+            Multiple renderers found in the processors list returned by the `custom_processors_factory` function: {func.__name__}.
+            Please ensure only one of KeyValueRenderer, JSONRenderer, or ConsoleRenderer is present in the processors list.
+            """
+            raise ValueError(multiple_renderer_error_msg)
+
+        _processors.insert(
+            min([*indices_for_insertion, len(_processors)]),
+            structlog.processors.EventRenamer("message"),
+        )
+
+    return _processors
+
+
+# ---------------------------------------------------------------------------
+# Logger factory function
+# ---------------------------------------------------------------------------
+
+
+def get_logger(
+    name: Optional[str] = None,
+    level: Optional[LogLevel] = LogLevel.INFO,
+    custom_processors_factory: Optional[Callable[..., List[structlog.types.Processor | Any]]] = None,
+    disable_masking: Optional[bool] = False,  # noqa: FBT002
+    handlers: Optional[List[LogHandler]] = None,
+    file_name: Optional[str] = None,
+    **kwargs: Any,
+) -> PeakLogger:
+    """Return a logger with the specified settings.
+
+    When using the default implementation, pretty-printing is automatically enabled when logger is run in a terminal session (sys.stdout.isatty() == True)
+    and JSON printing is enabled when logger is run in production via the `structlog.processors.JSONRenderer` processor.
+
+    Args:
+        name (Optional[str], optional): Name of the logger. Defaults to None.
+        level (LogLevel): Log level. Defaults to LogLevel.INFO.
+        custom_processors_factory (Optional[Callable[..., List[structlog.types.Processor | Any]]], optional): A factory function that returns a list of custom processors.
+            Defaults to None. This disables the default processors provided with the default implementation.
+        disable_masking (Optional[bool], optional): Whether to disable masking of sensitive data. Defaults to False.
+            Only applicable when using the default processors, as custom processors can be used to handle masking on their own.
+        handlers (Optional[List[Handlers]], optional): List of log handlers (CONSOLE, FILE). Defaults to CONSOLE.
+        file_name (Optional[str], optional): Filename for FILE handler. Required if FILE handler is used. Defaults to None.
+        **kwargs: Additional keyword arguments to be passed to the custom_processors_factory, if provided.
+            `disable_masking` is automatically passed to the custom_processors_factory and should not be provided here.
+            if `custom_processors_factory` does not accept any keyword arguments, they will all be ignored.
+            Additionally, all kwargs receivable by the factory function must be hashable else TypeError will be raised by the `_handle_and_patch_processor_factory_kwargs` wrapper.
+
+    Returns:
+        PeakLogger: A logger instance configured with the specified settings.
+
+    Raises:
+        ValueError: If the `file_name` is not provided for FILE handler or if `multiple renderers` are found in the `processor`(s) list returned by the `custom_processors_factory`.
+    """
+    _log_level: int = (
+        level.value
+        if level is not None
+        else logging.DEBUG if os.getenv("DEBUG", "false").lower() == "true" else LogLevel.INFO.value
+    )
+    _processors: list[structlog.types.Processor | Any] = (
+        _handle_and_patch_processor_factory_kwargs(custom_processors_factory, disable_masking=disable_masking, **kwargs)
+        if custom_processors_factory is not None
+        else default_processors_factory(
+            disable_masking=disable_masking,
+        )
+    )
+    handlers_list: list[Any] = []
+    if not handlers or LogHandler.CONSOLE in handlers:
+        handlers_list.append(logging.StreamHandler())  # Console handler
+    if handlers and LogHandler.FILE in handlers:
+        if file_name:
+            handlers_list.append(logging.FileHandler(file_name))  # File handler
+        else:
+            msg = "filename must be provided for FILE handler."
+            raise ValueError(msg)
+
+    # Set the log level and add the handlers to the root logger
+    # This is required to ensure that the log level and handlers are applied to the root logger
+    logging.basicConfig(level=_log_level, handlers=handlers_list, format="", force=True)
+
+    # configure structlog with the specified settings
+    structlog.configure(
+        processors=[*_processors],
+        context_class=dict,
+        logger_factory=structlog.stdlib.LoggerFactory(),
+        wrapper_class=structlog.stdlib.BoundLogger,
+        cache_logger_on_first_use=True,
+    )
+
+    return PeakLogger(structlog.get_logger(name))
+
+
+# ---------------------------------------------------------------------------
+# Wrapper Logger class
+# Basically delegate everything to `structlog`.
+# ---------------------------------------------------------------------------
+
+
+class PeakLogger:
+    """Wrapper class for logging with various log levels."""
+
+    # use __slots__ to avoid dynamic attribute creation
+    __slots__: list[str] = ["_logger"]
+
+    def __init__(self, logger: Any) -> None:
+        """Initialize with a logger object.
+
+        Args:
+            logger (Any): Logger object to wrap.
+        """
+        self._logger: structlog.stdlib.BoundLogger = logger
+
+    def __getattribute__(self, __name: str) -> Any:
+        """Return the attribute from the wrapped logger object."""
+        if __name in [*PeakLogger.__slots__, *PeakLogger.__dict__.keys()]:
+            return object.__getattribute__(self, __name)
+        return getattr(self._logger, __name)
+
+    def bind(self, context: Union[dict[str, Any], None] = None, **kwargs: Any) -> None:
+        """Bind contextual information to the logger, enriching log messages.
+
+        This method allows attaching context data to the logger, such as additional information
+        or system details, to provide more context in log messages.
+
+        Args:
+            context (Union[dict[str, Any], None]): A dictionary or None for contextual information.
+            **kwargs: Additional key-value pairs to enhance context.
+        """
+        if context is None:
+            context = {}
+
+        if kwargs:
+            # file deepcode ignore AttributeLoadOnNone: false positive
+            context.update(kwargs)
+
+        self._logger = self._logger.bind(**context)
+
+    def unbind(self, keys: list[str]) -> None:
+        """Unbind specified keys from the logger's context.
+
+        Args:
+            keys (list[str]): List of keys to unbind.
+        """
+        context: dict[str, Any] | dict[Any, Any] = structlog.get_context(self._logger)
+
+        for key in keys:
+            if key in context:
+                del context[key]
+
+        # Rebind the modified context to the logger
+        self._logger = self._logger.bind(**context)
+
+    def clone_with_context(self, context: Union[dict[str, Any], None] = None, **kwargs: Any) -> PeakLogger:
+        """Return a frozen copy of this logger with the specified context added."""
+        new_logger = PeakLogger(self._logger.new())
+        new_logger.bind(context, **kwargs)
+        return new_logger
+
+    def set_log_level(self, level: LogLevel) -> None:
+        """Set the log level of the root logger.
+
+        Args:
+            level (LogLevel): Log level to set.
+        """
+        if self._is_valid_log_level(level):
+            logging.getLogger().setLevel(level.value)
+
+    def _is_valid_log_level(self, level: LogLevel) -> bool:
+        """Check if a given log level is valid."""
+        return level in LogLevel
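
A usage sketch for the new logger factory and `PeakLogger` wrapper (not part of the diff; it assumes `peak.tools.logging` re-exports the names listed in `__all__` above, and `"app.log"` is an arbitrary example filename):

    from peak.tools.logging import LogHandler, LogLevel, get_logger

    # Console plus file output; file_name is required when LogHandler.FILE is used.
    logger = get_logger(
        name="orders-service",
        level=LogLevel.DEBUG,
        handlers=[LogHandler.CONSOLE, LogHandler.FILE],
        file_name="app.log",
    )

    # Bind context once; it is attached to every subsequent message.
    logger.bind({"tenant": "acme"}, attempt=1)
    logger.info("order processed", order_id=1234)

    # Drop keys that are no longer relevant and raise the root log level.
    logger.unbind(["attempt"])
    logger.set_log_level(LogLevel.WARN)

With a terminal attached, the default processors render colourised console output; otherwise the `orjson`-backed `JSONRenderer` emits one JSON object per line.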
{peak_sdk-1.5.0.dist-info → peak_sdk-1.7.0.dist-info}/METADATA
@@ -1,8 +1,8 @@
 Metadata-Version: 2.1
 Name: peak-sdk
-Version: 1.5.0
+Version: 1.7.0
 Summary: Python SDK for interacting with the Peak platform
-Home-page: https://docs.peak.ai/sdk
+Home-page: https://docs.peak.ai/sdk/latest/
 License: Apache-2.0
 Author: Peak
 Author-email: support@peak.ai
@@ -16,29 +16,28 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
 Classifier: Typing :: Typed
-Requires-Dist: certifi (>=2023.7.22)
+Requires-Dist: certifi (>=2024.2.2)
 Requires-Dist: jinja2 (>=3.1,<4.0)
-Requires-Dist: pathspec (>=0.11,<0.12)
+Requires-Dist: orjson (>=3.9.15,<4.0.0)
+Requires-Dist: pathspec
 Requires-Dist: pyyaml (>=6.0,<7.0)
-Requires-Dist: requests (>=2.30,<3.0)
+Requires-Dist: requests (>=2.31,<3.0)
 Requires-Dist: requests-toolbelt (>=1.0,<2.0)
-Requires-Dist: shellingham (<1.5.2)
+Requires-Dist: shellingham (<1.5.4)
 Requires-Dist: structlog (>=24.1.0,<25.0.0)
 Requires-Dist: typer[all] (>=0.9,<0.10)
 Requires-Dist: urllib3 (<2)
-Project-URL: Documentation, https://docs.peak.ai/sdk/
+Project-URL: Documentation, https://docs.peak.ai/sdk/latest/
 Description-Content-Type: text/markdown
 
 # Peak SDK
 
 [![PyPI](https://img.shields.io/pypi/v/peak-sdk.svg)](https://pypi.org/project/peak-sdk/)
-[![Python Version](https://img.shields.io/pypi/pyversions/peak-sdk)](https://docs.peak.ai/sdk/#platform-support)
-[![License](https://img.shields.io/pypi/l/peak-sdk)](https://docs.peak.ai/sdk/license.html)
+[![Python Version](https://img.shields.io/pypi/pyversions/peak-sdk)](https://docs.peak.ai/sdk/latest/#platform-support)
+[![License](https://img.shields.io/pypi/l/peak-sdk)](https://docs.peak.ai/sdk/latest/license.html)
 
 ## What is Peak SDK?
 
@@ -105,15 +104,15 @@ Follow these steps to create a virtual environment using Python's built-in `venv`
 
 ### Documentation
 
-You can access the documentation for the SDK and CLI at [https://docs.peak.ai/sdk/](https://docs.peak.ai/sdk/).
+You can access the documentation for the SDK and CLI at [https://docs.peak.ai/sdk/latest/](https://docs.peak.ai/sdk/latest/).
 Here are some quick links to help you navigate easily:
 
-- [SDK Reference](https://docs.peak.ai/sdk/reference.html)
-- [CLI Reference](https://docs.peak.ai/sdk/cli/reference.html)
-- [Usage](https://docs.peak.ai/sdk/usage.html)
-- [CLI Usage](https://docs.peak.ai/sdk/cli/usage.html)
-- [Migration Guide](https://docs.peak.ai/sdk/migration-guide.html)
-- [FAQ](https://docs.peak.ai/sdk/faq.html)
+- [SDK Reference](https://docs.peak.ai/sdk/latest/reference.html)
+- [CLI Reference](https://docs.peak.ai/sdk/latest/cli/reference.html)
+- [Usage](https://docs.peak.ai/sdk/latest/usage.html)
+- [CLI Usage](https://docs.peak.ai/sdk/latest/cli/usage.html)
+- [Migration Guide](https://docs.peak.ai/sdk/latest/migration-guide.html)
+- [FAQ](https://docs.peak.ai/sdk/latest/faq.html)
 
 ### Platform Support
 
@@ -195,6 +194,6 @@ Here are some quick links to help you navigate easily:
 
 ## More Resources
 
-- [License](https://docs.peak.ai/sdk/license.html)
-- [Changelog](https://docs.peak.ai/sdk/changelog.html)
+- [License](https://docs.peak.ai/sdk/latest/license.html)
+- [Changelog](https://docs.peak.ai/sdk/latest/changelog.html)
 