peak-sdk 1.16.1__py3-none-any.whl → 1.18.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,650 @@
1
+ #
2
+ # # Copyright © 2025 Peak AI Limited. or its affiliates. All Rights Reserved.
3
+ # #
4
+ # # Licensed under the Apache License, Version 2.0 (the "License"). You
5
+ # # may not use this file except in compliance with the License. A copy of
6
+ # # the License is located at:
7
+ # #
8
+ # # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
9
+ # #
10
+ # # or in the "license" file accompanying this file. This file is
11
+ # # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
12
+ # # ANY KIND, either express or implied. See the License for the specific
13
+ # # language governing permissions and limitations under the License.
14
+ # #
15
+ # # This file is part of the peak-sdk.
16
+ # # see (https://github.com/PeakBI/peak-sdk)
17
+ # #
18
+ # # You should have received a copy of the APACHE LICENSE, VERSION 2.0
19
+ # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
20
+ #
21
+ """Peak Cache Client.
22
+
23
+ Provides caching functionality with tenant-based key prefixing.
24
+ Supports cache operations with JSON serialization and connection management.
25
+ """
26
+
27
+ from __future__ import annotations
28
+
29
+ import json
30
+ import logging
31
+ from typing import Any, Dict, List, Optional, cast
32
+ from urllib.parse import quote_plus, unquote_plus, urlparse
33
+
34
+ import valkey
35
+ from valkey.exceptions import ConnectionError, TimeoutError, ValkeyError
36
+
37
+ from peak.base_client import BaseClient
38
+ from peak.constants import ContentType, HttpMethods
39
+ from peak.session import Session
40
+
41
# Module-level logger; handlers and level are configured by the host application.
logger = logging.getLogger(__name__)

# Standard Valkey/Redis port (plaintext) and the TLS port. When credentials
# omit a full URL, SECURE_VALKEY_PORT selects an SSL ("rediss://") connection.
DEFAULT_VALKEY_PORT = 6379
SECURE_VALKEY_PORT = 6380
45
+
46
+
47
class CacheError(Exception):
    """Root of the cache exception hierarchy; raised when a cache operation fails."""
49
+
50
+
51
class CacheConnectionError(CacheError):
    """Raised when connecting to the cache fails or credentials are missing/incomplete."""
53
+
54
+
55
def _raise_connection_error(message: str) -> None:
    """Raise a :class:`CacheConnectionError` carrying *message*."""
    raise CacheConnectionError(message)
58
+
59
+
60
def _raise_cache_error(message: str) -> None:
    """Raise a :class:`CacheError` carrying *message*."""
    raise CacheError(message)
63
+
64
+
65
class CacheClient(BaseClient):
    """Peak Cache Client for caching operations.

    Provides auto key prefixing based on tenant names to ensure
    proper isolation and access control patterns.

    Inherits from BaseClient to use the default session pattern.
    """

    def __init__(
        self,
        session: Optional[Session] = None,
        *,
        debug_logs: bool = True,
        additional_prefix: Optional[str] = None,
        connection_config: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Initialize cache client.

        Args:
            session: Peak session for authentication (optional)
            debug_logs: Enable or disable debug logging (default: True)
            additional_prefix: Additional prefix to add after tenant name (optional)
            connection_config: Custom connection configuration overrides (optional)
                Available options:
                - decode_responses: bool (default: True)
                - socket_timeout: float (default: 5.0)
                - socket_connect_timeout: float (default: 5.0)
                - retry_on_timeout: bool (default: True)
                - health_check_interval: int (default: 60)
                - max_connections: int (default: None)
                - retry_on_error: list (default: None)
                - socket_keepalive: bool (default: None)
                - socket_keepalive_options: dict (default: None)
        """
        super().__init__(session)
        # Client and connection config are created lazily on first use.
        self._client: Optional[valkey.Valkey] = None
        self._connection_config: Optional[Dict[str, Any]] = None
        self._tenant_name: Optional[str] = None
        self._debug_logs = debug_logs
        self._additional_prefix = additional_prefix
        self._custom_connection_config = connection_config or {}

    def _debug_log(self, message: str) -> None:
        """Log debug message if debug logging is enabled."""
        if self._debug_logs:
            logger.debug(message)

    def _get_connection_config(self) -> Dict[str, Any]:
        """Get cache connection configuration from the credentials endpoint.

        The resolved configuration (and tenant name) is cached on the instance,
        so the credentials endpoint is called at most once per client.

        Returns:
            Dict[str, Any]: Keyword arguments for ``valkey.Valkey``.

        Raises:
            CacheConnectionError: If credentials cannot be fetched or are incomplete.
        """
        if self._connection_config is None:
            try:
                self._debug_log("Getting cache credentials...")
                response = self.session.create_request(
                    endpoint="connections/api/v1/connections/valkey-credentials",
                    method=HttpMethods.GET,
                    content_type=ContentType.APPLICATION_JSON,
                    subdomain="service",
                )

                self._tenant_name = response.get("tenant")
                if not self._tenant_name:
                    _raise_connection_error("Tenant information not found in cache credentials response")

                engine = response.get("engine", "valkey")

                url = response.get("url")
                if not url:
                    # No ready-made URL: assemble one from discrete fields.
                    host = response.get("host")
                    port = response.get("port", DEFAULT_VALKEY_PORT)
                    username = response.get("userId")
                    password = response.get("password")

                    if not all([host, username, password]):
                        _raise_connection_error("Missing required cache connection credentials")

                    # Credentials may contain reserved URL characters.
                    encoded_username = quote_plus(username)
                    encoded_password = quote_plus(password)
                    use_ssl = port == SECURE_VALKEY_PORT or engine == "valkey"
                    scheme = "rediss" if use_ssl else "redis"
                    url = f"{scheme}://{encoded_username}:{encoded_password}@{host}:{port}"

                parsed = urlparse(url)

                decoded_username = unquote_plus(parsed.username) if parsed.username else None
                decoded_password = unquote_plus(parsed.password) if parsed.password else None

                if engine == "valkey":
                    # For the valkey engine the tenant name is used as the user.
                    decoded_username = self._tenant_name

                self._validate_connection_config(
                    {
                        "host": parsed.hostname,
                        "port": parsed.port,
                        "username": decoded_username,
                        "password": decoded_password,
                    },
                )

                # SSL is determined by the (possibly provided) URL scheme.
                use_ssl = parsed.scheme == "rediss"

                config = {
                    "host": parsed.hostname,
                    "port": parsed.port or DEFAULT_VALKEY_PORT,
                    "password": decoded_password,
                    "username": decoded_username,
                    "ssl": use_ssl,
                    "decode_responses": True,
                    "socket_timeout": 5.0,
                    "socket_connect_timeout": 5.0,
                    "retry_on_timeout": True,
                    "health_check_interval": 60,
                }

                # Merge custom configuration (only allow safe overrides)
                safe_overrides = {
                    "decode_responses",
                    "socket_timeout",
                    "socket_connect_timeout",
                    "retry_on_timeout",
                    "health_check_interval",
                    "max_connections",
                    "retry_on_error",
                    "socket_keepalive",
                    "socket_keepalive_options",
                }

                for key, value in self._custom_connection_config.items():
                    if key in safe_overrides:
                        config[key] = value
                        self._debug_log(f"Cache config override: {key} = {value}")
                    else:
                        logger.warning("Ignoring unsafe connection config override: %s", key)

                self._connection_config = config

                logger.info("Cache configured for tenant: %s", self._tenant_name)

            except CacheConnectionError:
                # Already a meaningful connection error (missing tenant or
                # credentials) -- do not re-wrap it with a nested message.
                logger.exception("Failed to get cache credentials")
                raise
            except Exception as e:
                logger.exception("Failed to get cache credentials")
                msg = f"Failed to get cache credentials: {e}"
                raise CacheConnectionError(msg) from e

        return self._connection_config

    def _validate_connection_config(self, config: Dict[str, Any]) -> None:
        """Validate that all required connection fields are present and truthy.

        Raises:
            CacheConnectionError: If any required field is missing.
        """
        required_fields = ["host", "port", "password", "username"]
        missing = [field for field in required_fields if not config.get(field)]
        if missing:
            _raise_connection_error(f"Missing required connection fields: {missing}")

    def _get_client(self) -> valkey.Valkey:
        """Get or lazily create the underlying valkey client.

        Raises:
            CacheConnectionError: If the client cannot be created.
        """
        if self._client is None:
            try:
                config = self._get_connection_config()
                self._client = valkey.Valkey(**config)
                self._debug_log("Cache client created successfully")
            except CacheConnectionError:
                # Credential errors already carry context -- re-raise as-is.
                logger.exception("Failed to create cache client")
                raise
            except Exception as e:
                logger.exception("Failed to create cache client")
                msg = f"Failed to create cache client: {e}"
                raise CacheConnectionError(msg) from e
        return self._client

    def _prefix_key(self, key: str) -> str:
        """Add tenant prefix to key.

        Keys are namespaced as ``<tenant>:[<additional_prefix>:]<key>`` so
        tenants cannot collide with each other's entries.
        """
        if not self._tenant_name:
            # Resolving the connection config also populates the tenant name.
            self._get_connection_config()
        prefix = f"{self._tenant_name}:"
        if self._additional_prefix:
            prefix += f"{self._additional_prefix}:"
        return f"{prefix}{key}"

    def _prefix_keys(self, keys: List[str]) -> List[str]:
        """Add tenant prefix to multiple keys."""
        return [self._prefix_key(key) for key in keys]

    @staticmethod
    def _deserialize_value(raw: Any) -> "tuple[Any, bool]":
        """Decode a raw cache value, JSON-deserializing when it looks like JSON.

        Shared by :meth:`get` and :meth:`mget` so both apply identical rules.

        Returns:
            tuple[Any, bool]: ``(value, True)`` if JSON deserialization was
            applied, otherwise ``(string_value, False)``. Values that start
            with ``{``/``[`` but fail to parse fall back to their string form.
        """
        if isinstance(raw, str):
            value_str = raw
        else:
            value_str = raw.decode("utf-8") if isinstance(raw, bytes) else str(raw)

        if value_str.startswith(("{", "[")):
            try:
                return json.loads(value_str), True
            except json.JSONDecodeError:
                pass
        return value_str, False

    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """Set a key-value pair in the cache.

        Args:
            key: The key to set
            value: The value to set (will be JSON serialized if not string)
            ttl: Time to live in seconds (optional)

        Returns:
            bool: True if successful, False otherwise

        Raises:
            CacheError: If the operation fails
        """
        try:
            client = self._get_client()
            prefixed_key = self._prefix_key(key)

            serialized_value = value if isinstance(value, str) else json.dumps(value)

            result = client.set(prefixed_key, serialized_value, ex=ttl)
            self._debug_log(f"Set key: {key} (prefixed: {prefixed_key})")
            return bool(result)

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache set operation failed for key: %s", key)
            msg = f"Failed to set cache key: {e}"
            raise CacheError(msg) from e

    def get(self, key: str, default: Any = None) -> Any:
        """Get a value from the cache.

        Args:
            key: The key to get
            default: Default value if key doesn't exist

        Returns:
            Any: The value (JSON deserialized if applicable) or default

        Raises:
            CacheError: If the operation fails
        """
        try:
            client = self._get_client()
            prefixed_key = self._prefix_key(key)

            value = client.get(prefixed_key)
            if value is None:
                self._debug_log(f"Key not found: {key}")
                return default

            deserialized, was_json = self._deserialize_value(value)

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache get operation failed for key: %s", key)
            msg = f"Failed to get cache key: {e}"
            raise CacheError(msg) from e
        else:
            label = "JSON deserialized" if was_json else "as string"
            self._debug_log(f"Got key: {key} ({label})")
            return deserialized

    def delete(self, *keys: str) -> int:
        """Delete one or more keys from the cache.

        Args:
            keys: Keys to delete

        Returns:
            int: Number of keys deleted

        Raises:
            CacheError: If the operation fails
        """
        if not keys:
            return 0

        try:
            client = self._get_client()
            prefixed_keys = self._prefix_keys(list(keys))

            result = client.delete(*prefixed_keys)
            self._debug_log(f"Deleted {result} keys: {list(keys)}")
            return int(cast(int, result))

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache delete operation failed for keys: %s", keys)
            msg = f"Failed to delete cache keys: {e}"
            raise CacheError(msg) from e

    def exists(self, *keys: str) -> int:
        """Check if one or more keys exist in the cache.

        Args:
            keys: Keys to check

        Returns:
            int: Number of keys that exist

        Raises:
            CacheError: If the operation fails
        """
        if not keys:
            return 0

        try:
            client = self._get_client()
            prefixed_keys = self._prefix_keys(list(keys))

            result = client.exists(*prefixed_keys)
            self._debug_log(f"Checked existence of {len(keys)} keys, {result} exist")
            return int(cast(int, result))

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache exists operation failed for keys: %s", keys)
            msg = f"Failed to check cache key existence: {e}"
            raise CacheError(msg) from e

    def expire(self, key: str, ttl: int) -> bool:
        """Set expiration time for a key.

        Args:
            key: The key to set expiration for
            ttl: Time to live in seconds

        Returns:
            bool: True if successful, False if key doesn't exist

        Raises:
            CacheError: If the operation fails
        """
        try:
            client = self._get_client()
            prefixed_key = self._prefix_key(key)

            result = client.expire(prefixed_key, ttl)
            self._debug_log(f"Set expiration for key: {key} (TTL: {ttl}s)")
            return bool(result)

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache expire operation failed for key: %s", key)
            msg = f"Failed to set cache key expiration: {e}"
            raise CacheError(msg) from e

    def ttl(self, key: str) -> int:
        """Get the time to live for a key.

        Args:
            key: The key to check

        Returns:
            int: TTL in seconds (-1 if no expiration, -2 if key doesn't exist)

        Raises:
            CacheError: If the operation fails
        """
        try:
            client = self._get_client()
            prefixed_key = self._prefix_key(key)

            result = client.ttl(prefixed_key)
            self._debug_log(f"Got TTL for key: {key} (TTL: {result}s)")
            return int(cast(int, result))

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache TTL operation failed for key: %s", key)
            msg = f"Failed to get cache key TTL: {e}"
            raise CacheError(msg) from e

    def mget(self, *keys: str) -> List[Any]:
        """Get multiple values from the cache.

        Args:
            keys: Keys to get

        Returns:
            List[Any]: List of values (None for missing keys)

        Raises:
            CacheError: If the operation fails
        """
        if not keys:
            return []

        try:
            client = self._get_client()
            prefixed_keys = self._prefix_keys(list(keys))

            values = client.mget(prefixed_keys)
            # Apply the same deserialization rules as get(); missing keys stay None.
            results: List[Any] = [
                None if value is None else self._deserialize_value(value)[0]
                for value in cast(List[Any], values)
            ]

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache mget operation failed for keys: %s", keys)
            msg = f"Failed to get multiple cache keys: {e}"
            raise CacheError(msg) from e
        else:
            self._debug_log(f"Got {len(keys)} keys via mget")
            return results

    def mset(self, mapping: Dict[str, Any], ttl: Optional[int] = None) -> bool:
        """Set multiple key-value pairs in the cache.

        Args:
            mapping: Dictionary of key-value pairs to set
            ttl: Time to live in seconds (optional, applies to all keys)

        Returns:
            bool: True if successful

        Raises:
            CacheError: If the operation fails
        """
        if not mapping:
            return True

        try:
            client = self._get_client()
            prefixed_mapping = {}

            for key, value in mapping.items():
                prefixed_key = self._prefix_key(key)
                serialized_value = json.dumps(value) if not isinstance(value, str) else value
                prefixed_mapping[prefixed_key] = serialized_value

            result = client.mset(prefixed_mapping)

            if ttl is not None:
                # Batch the per-key EXPIRE commands into one round trip.
                pipe = client.pipeline()
                for prefixed_key in prefixed_mapping:
                    pipe.expire(prefixed_key, ttl)
                pipe.execute()

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache mset operation failed")
            msg = f"Failed to set multiple cache keys: {e}"
            raise CacheError(msg) from e
        else:
            self._debug_log(f"Set {len(mapping)} keys via mset")
            return bool(result)

    def flush_tenant(self) -> int:
        """Flush all keys for the current tenant.

        Returns:
            int: Number of keys deleted

        Raises:
            CacheError: If the operation fails
        """
        try:
            client = self._get_client()
            if not self._tenant_name:
                self._get_connection_config()

            # SCAN (not KEYS) to avoid blocking the server on large keyspaces.
            pattern = f"{self._tenant_name}:*"
            keys = list(client.scan_iter(match=pattern))

            if not keys:
                self._debug_log("No keys found for tenant flush")
                return 0

            result = client.delete(*keys)
            self._debug_log(f"Flushed {result} keys for tenant: {self._tenant_name}")
            return int(cast(int, result))

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache flush_tenant operation failed")
            msg = f"Failed to flush tenant cache: {e}"
            raise CacheError(msg) from e

    def flush_by_pattern(self, pattern: str) -> int:
        """Flush keys matching a pattern within the tenant namespace.

        Args:
            pattern: Pattern to match (will be prefixed with tenant name)

        Returns:
            int: Number of keys deleted

        Raises:
            CacheError: If the operation fails
        """
        try:
            client = self._get_client()
            if not self._tenant_name:
                self._get_connection_config()

            prefixed_pattern = self._prefix_key(pattern)
            keys = list(client.scan_iter(match=prefixed_pattern))

            if not keys:
                self._debug_log(f"No keys found for pattern: {pattern}")
                return 0

            result = client.delete(*keys)
            self._debug_log(f"Flushed {result} keys for pattern: {pattern}")
            return int(cast(int, result))

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache flush_by_pattern operation failed for pattern: %s", pattern)
            msg = f"Failed to flush cache by pattern: {e}"
            raise CacheError(msg) from e

    def set_additional_prefix(self, additional_prefix: Optional[str]) -> None:
        """Set additional prefix for cache keys.

        Args:
            additional_prefix: Additional prefix to add after tenant name
        """
        self._additional_prefix = additional_prefix
        self._debug_log(f"Set additional prefix: {additional_prefix}")

    def get_additional_prefix(self) -> Optional[str]:
        """Get current additional prefix.

        Returns:
            Optional[str]: Current additional prefix
        """
        return self._additional_prefix

    def ping(self) -> bool:
        """Test the connection to the cache.

        Returns:
            bool: True if connection is successful

        Raises:
            CacheError: If the connection fails
        """
        try:
            client = self._get_client()
            result = client.ping()
            self._debug_log("Cache ping successful")
            return bool(result)

        except (ConnectionError, TimeoutError, ValkeyError) as e:
            logger.exception("Cache ping failed")
            msg = f"Cache connection test failed: {e}"
            raise CacheError(msg) from e

    def close(self) -> None:
        """Close the cache connection."""
        if self._client is not None:
            try:
                self._client.close()  # type: ignore[no-untyped-call]
                self._debug_log("Cache connection closed")
            except (ConnectionError, TimeoutError, ValkeyError) as e:
                # Best-effort close: log and continue so the client is dropped.
                logger.debug("Error closing cache connection: %s", e)
            finally:
                self._client = None
622
+
623
+
624
def get_client(
    session: Optional[Session] = None,
    *,
    debug_logs: bool = True,
    additional_prefix: Optional[str] = None,
    connection_config: Optional[Dict[str, Any]] = None,
) -> CacheClient:
    """Get a cache client instance.

    Args:
        session: Peak session for authentication (optional)
        debug_logs: Enable or disable debug logging (default: True)
        additional_prefix: Additional prefix to add after tenant name (optional)
        connection_config: Custom connection configuration overrides (optional)

    Returns:
        CacheClient: Cache client instance
    """
    client = CacheClient(
        session=session,
        debug_logs=debug_logs,
        additional_prefix=additional_prefix,
        connection_config=connection_config,
    )
    return client
648
+
649
+
650
# Explicit public API of this module.
__all__: List[str] = ["CacheClient", "CacheError", "CacheConnectionError", "get_client"]
@@ -2,5 +2,10 @@
2
2
 
3
3
  body:
4
4
  namespace: dev
5
+ namespaceDescription: "This is a sample namespace"
6
+ namespaceMetadata:
7
+ key: "description"
8
+ value: "This is a sample namespace"
9
+
5
10
  artifact:
6
11
  path: metrics
@@ -0,0 +1,7 @@
1
+ # update-namespace.yaml
2
+
3
+ namespace: dev
4
+ description: "This is the updated description of the namespace"
5
+ metadata:
6
+ key: "description"
7
+ value: "This is updated metadata"
@@ -239,7 +239,7 @@ def _handle_and_patch_processor_factory_kwargs(
239
239
 
240
240
  def get_logger(
241
241
  name: Optional[str] = None,
242
- level: Optional[LogLevel] = LogLevel.INFO,
242
+ level: Optional[LogLevel] = None,
243
243
  custom_processors_factory: Optional[Callable[..., List[structlog.types.Processor | Any]]] = None,
244
244
  disable_masking: Optional[bool] = False, # noqa: FBT002
245
245
  handlers: Optional[List[LogHandler]] = None,
@@ -271,11 +271,18 @@ def get_logger(
271
271
  Raises:
272
272
  ValueError: If the `file_name` is not provided for FILE handler or if `multiple renderers` are found in the `processor`(s) list returned by the `custom_processors_factory`.
273
273
  """
274
- _log_level: int = (
275
- level.value
276
- if level is not None
277
- else logging.DEBUG if os.getenv("DEBUG", "false").lower() == "true" else LogLevel.INFO.value
278
- )
274
+ if level is not None:
275
+ _log_level = level.value
276
+ elif os.getenv("LOG_LEVEL") is not None and os.getenv("LOG_LEVEL", "INFO").upper() in LOG_LEVEL_NAMES_TO_LOG_LEVEL:
277
+ _log_level = LOG_LEVEL_NAMES_TO_LOG_LEVEL.get(
278
+ os.getenv("LOG_LEVEL", "INFO").upper(), # type: ignore # noqa: PGH003
279
+ "INFO",
280
+ ).value
281
+ elif os.getenv("DEBUG", "false").lower() == "true":
282
+ _log_level = logging.DEBUG
283
+ else:
284
+ _log_level = LogLevel.INFO.value
285
+
279
286
  _processors: list[structlog.types.Processor | Any] = (
280
287
  _handle_and_patch_processor_factory_kwargs(custom_processors_factory, disable_masking=disable_masking, **kwargs)
281
288
  if custom_processors_factory is not None