miso-client 0.1.0-py3-none-any.whl → 0.4.0-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release.
This version of miso-client might be problematic.
- miso_client/__init__.py +104 -84
- miso_client/errors.py +30 -4
- miso_client/models/__init__.py +4 -0
- miso_client/models/config.py +56 -35
- miso_client/models/error_response.py +41 -0
- miso_client/services/__init__.py +5 -5
- miso_client/services/auth.py +65 -48
- miso_client/services/cache.py +42 -41
- miso_client/services/encryption.py +18 -17
- miso_client/services/logger.py +115 -100
- miso_client/services/permission.py +27 -36
- miso_client/services/redis.py +17 -15
- miso_client/services/role.py +25 -36
- miso_client/utils/__init__.py +3 -3
- miso_client/utils/config_loader.py +24 -16
- miso_client/utils/data_masker.py +104 -33
- miso_client/utils/http_client.py +462 -254
- miso_client/utils/internal_http_client.py +471 -0
- miso_client/utils/jwt_tools.py +14 -17
- miso_client/utils/sensitive_fields_loader.py +116 -0
- {miso_client-0.1.0.dist-info → miso_client-0.4.0.dist-info}/METADATA +165 -3
- miso_client-0.4.0.dist-info/RECORD +26 -0
- miso_client-0.1.0.dist-info/RECORD +0 -23
- {miso_client-0.1.0.dist-info → miso_client-0.4.0.dist-info}/WHEEL +0 -0
- {miso_client-0.1.0.dist-info → miso_client-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {miso_client-0.1.0.dist-info → miso_client-0.4.0.dist-info}/top_level.txt +0 -0
miso_client/services/logger.py
CHANGED
@@ -9,91 +9,93 @@ import os
 import random
 import sys
 from datetime import datetime
-from typing import
-
-
-    ClientLoggingOptions
-)
+from typing import Any, Dict, Literal, Optional
+
+from ..models.config import ClientLoggingOptions, LogEntry
 from ..services.redis import RedisService
-from ..utils.http_client import HttpClient
 from ..utils.data_masker import DataMasker
+from ..utils.internal_http_client import InternalHttpClient
 from ..utils.jwt_tools import decode_token
 
 
 class LoggerService:
     """Logger service for application logging and audit events."""
-
-    def __init__(self,
+
+    def __init__(self, internal_http_client: InternalHttpClient, redis: RedisService):
         """
         Initialize logger service.
-
+
         Args:
-
+            internal_http_client: Internal HTTP client instance (used for log sending)
             redis: Redis service instance
         """
-        self.config =
-        self.
+        self.config = internal_http_client.config
+        self.internal_http_client = internal_http_client
         self.redis = redis
         self.mask_sensitive_data = True  # Default: mask sensitive data
         self.correlation_counter = 0
         self.performance_metrics: Dict[str, Dict[str, Any]] = {}
-
+
     def set_masking(self, enabled: bool) -> None:
         """
         Enable or disable sensitive data masking.
-
+
         Args:
             enabled: Whether to enable data masking
         """
         self.mask_sensitive_data = enabled
-
+
     def _generate_correlation_id(self) -> str:
         """
         Generate unique correlation ID for request tracking.
-
+
         Format: {clientId[0:10]}-{timestamp}-{counter}-{random}
-
+
         Returns:
             Correlation ID string
         """
         self.correlation_counter = (self.correlation_counter + 1) % 10000
         timestamp = int(datetime.now().timestamp() * 1000)
-        random_part =
-        client_prefix =
+        random_part = "".join(random.choices("abcdefghijklmnopqrstuvwxyz0123456789", k=6))
+        client_prefix = (
+            self.config.client_id[:10] if len(self.config.client_id) > 10 else self.config.client_id
+        )
         return f"{client_prefix}-{timestamp}-{self.correlation_counter}-{random_part}"
-
+
     def _extract_jwt_context(self, token: Optional[str]) -> Dict[str, Any]:
         """
         Extract JWT token information.
-
+
         Args:
             token: JWT token string
-
+
         Returns:
             Dictionary with userId, applicationId, sessionId, roles, permissions
         """
         if not token:
             return {}
-
+
         try:
             decoded = decode_token(token)
             if not decoded:
                 return {}
-
+
             # Extract roles - handle different formats
             roles = []
             if "roles" in decoded:
                 roles = decoded["roles"] if isinstance(decoded["roles"], list) else []
             elif "realm_access" in decoded and isinstance(decoded["realm_access"], dict):
                 roles = decoded["realm_access"].get("roles", [])
-
+
             # Extract permissions - handle different formats
             permissions = []
             if "permissions" in decoded:
-                permissions =
+                permissions = (
+                    decoded["permissions"] if isinstance(decoded["permissions"], list) else []
+                )
             elif "scope" in decoded and isinstance(decoded["scope"], str):
                 permissions = decoded["scope"].split()
-
+
             return {
                 "userId": decoded.get("sub") or decoded.get("userId") or decoded.get("user_id"),
                 "applicationId": decoded.get("applicationId") or decoded.get("app_id"),
@@ -104,98 +106,104 @@ class LoggerService:
         except Exception:
             # JWT parsing failed, return empty context
             return {}
-
+
     def _extract_metadata(self) -> Dict[str, Any]:
         """
         Extract metadata from environment (browser or Node.js).
-
+
         Returns:
             Dictionary with hostname, userAgent, etc.
         """
         metadata: Dict[str, Any] = {}
-
+
         # Try to extract Node.js/Python metadata
         if hasattr(os, "environ"):
             metadata["hostname"] = os.environ.get("HOSTNAME", "unknown")
-
+
         # In Python, we don't have browser metadata like in TypeScript
         # But we can capture some environment info
         metadata["platform"] = sys.platform
         metadata["python_version"] = sys.version
-
+
         return metadata
-
+
     def start_performance_tracking(self, operation_id: str) -> None:
         """
         Start performance tracking.
-
+
         Args:
             operation_id: Unique identifier for this operation
         """
         try:
             import psutil
+
             process = psutil.Process()
             memory_info = process.memory_info()
             memory_usage = {
                 "rss": memory_info.rss,
                 "heapTotal": memory_info.rss,  # Approximation
-                "heapUsed": memory_info.rss - memory_info.available
+                "heapUsed": memory_info.rss - memory_info.available
+                if hasattr(memory_info, "available")
+                else memory_info.rss,
                 "external": 0,
                 "arrayBuffers": 0,
             }
         except ImportError:
             # psutil not available
             memory_usage = None
-
+
         self.performance_metrics[operation_id] = {
             "startTime": int(datetime.now().timestamp() * 1000),
             "memoryUsage": memory_usage,
         }
-
+
     def end_performance_tracking(self, operation_id: str) -> Optional[Dict[str, Any]]:
         """
         End performance tracking and get metrics.
-
+
         Args:
             operation_id: Unique identifier for this operation
-
+
         Returns:
             Performance metrics dictionary or None if not found
         """
         if operation_id not in self.performance_metrics:
             return None
-
+
         metrics = self.performance_metrics[operation_id]
         metrics["endTime"] = int(datetime.now().timestamp() * 1000)
         metrics["duration"] = metrics["endTime"] - metrics["startTime"]
-
+
         try:
             import psutil
+
             process = psutil.Process()
             memory_info = process.memory_info()
             metrics["memoryUsage"] = {
                 "rss": memory_info.rss,
                 "heapTotal": memory_info.rss,
-                "heapUsed": memory_info.rss - memory_info.available
+                "heapUsed": memory_info.rss - memory_info.available
+                if hasattr(memory_info, "available")
+                else memory_info.rss,
                 "external": 0,
                 "arrayBuffers": 0,
             }
         except (ImportError, Exception):
             pass  # psutil not available or error getting memory info
-
+
         del self.performance_metrics[operation_id]
         return metrics
-
+
     async def error(
         self,
         message: str,
         context: Optional[Dict[str, Any]] = None,
         stack_trace: Optional[str] = None,
-        options: Optional[ClientLoggingOptions] = None
+        options: Optional[ClientLoggingOptions] = None,
     ) -> None:
         """
         Log error message with optional stack trace and enhanced options.
-
+
         Args:
             message: Error message
             context: Additional context data
@@ -203,55 +211,51 @@ class LoggerService:
             options: Logging options
         """
         await self._log("error", message, context, stack_trace, options)
-
+
     async def audit(
         self,
         action: str,
         resource: str,
         context: Optional[Dict[str, Any]] = None,
-        options: Optional[ClientLoggingOptions] = None
+        options: Optional[ClientLoggingOptions] = None,
     ) -> None:
         """
         Log audit event with enhanced options.
-
+
         Args:
             action: Action performed
             resource: Resource affected
             context: Additional context data
             options: Logging options
         """
-        audit_context = {
-            "action": action,
-            "resource": resource,
-            **(context or {})
-        }
+        audit_context = {"action": action, "resource": resource, **(context or {})}
         await self._log("audit", f"Audit: {action} on {resource}", audit_context, None, options)
-
+
     async def info(
         self,
         message: str,
         context: Optional[Dict[str, Any]] = None,
-        options: Optional[ClientLoggingOptions] = None
+        options: Optional[ClientLoggingOptions] = None,
     ) -> None:
         """
         Log info message with enhanced options.
-
+
         Args:
             message: Info message
            context: Additional context data
            options: Logging options
        """
        await self._log("info", message, context, None, options)
-
+
    async def debug(
        self,
        message: str,
        context: Optional[Dict[str, Any]] = None,
-        options: Optional[ClientLoggingOptions] = None
+        options: Optional[ClientLoggingOptions] = None,
    ) -> None:
        """
        Log debug message with enhanced options.
-
+
        Args:
            message: Debug message
            context: Additional context data
@@ -259,18 +263,18 @@ class LoggerService:
         """
         if self.config.log_level == "debug":
             await self._log("debug", message, context, None, options)
-
+
     async def _log(
         self,
         level: Literal["error", "audit", "info", "debug"],
         message: str,
         context: Optional[Dict[str, Any]] = None,
         stack_trace: Optional[str] = None,
-        options: Optional[ClientLoggingOptions] = None
+        options: Optional[ClientLoggingOptions] = None,
     ) -> None:
         """
         Internal log method with enhanced features.
-
+
         Args:
             level: Log level
             message: Log message
@@ -279,26 +283,32 @@ class LoggerService:
             options: Logging options
         """
         # Extract JWT context if token provided
-        jwt_context =
-
+        jwt_context = (
+            self._extract_jwt_context(options.token if options else None) if options else {}
+        )
+
         # Extract environment metadata
         metadata = self._extract_metadata()
-
+
         # Generate correlation ID if not provided
-        correlation_id = (
-
+        correlation_id = (
+            options.correlationId if options else None
+        ) or self._generate_correlation_id()
+
         # Mask sensitive data in context if enabled
-        mask_sensitive = (
+        mask_sensitive = (
+            options.maskSensitiveData if options else None
+        ) is not False and self.mask_sensitive_data
         masked_context = (
-            DataMasker.mask_sensitive_data(context) if mask_sensitive and context
-            else context
+            DataMasker.mask_sensitive_data(context) if mask_sensitive and context else context
         )
-
+
         # Add performance metrics if requested
         enhanced_context = masked_context
         if options and options.performanceMetrics:
             try:
                 import psutil
+
                 process = psutil.Process()
                 memory_info = process.memory_info()
                 enhanced_context = {
@@ -307,14 +317,16 @@ class LoggerService:
                         "memoryUsage": {
                             "rss": memory_info.rss,
                             "heapTotal": memory_info.rss,
-                            "heapUsed": memory_info.rss - memory_info.available
+                            "heapUsed": memory_info.rss - memory_info.available
+                            if hasattr(memory_info, "available")
+                            else memory_info.rss,
                         },
                         "uptime": psutil.boot_time() if hasattr(psutil, "boot_time") else 0,
-                }
+                },
                 }
             except (ImportError, Exception):
                 pass  # psutil not available or error getting memory info
-
+
         log_entry_data = {
             "timestamp": datetime.utcnow().isoformat(),
             "level": level,
@@ -328,47 +340,50 @@ class LoggerService:
             "userId": (options.userId if options else None) or jwt_context.get("userId"),
             "sessionId": (options.sessionId if options else None) or jwt_context.get("sessionId"),
             "requestId": options.requestId if options else None,
-            **metadata
+            **metadata,
         }
-
+
         # Remove None values
         log_entry_data = {k: v for k, v in log_entry_data.items() if v is not None}
-
+
         log_entry = LogEntry(**log_entry_data)
-
+
         # Try Redis first (if available)
         if self.redis.is_connected():
             queue_name = f"logs:{self.config.client_id}"
             success = await self.redis.rpush(queue_name, log_entry.model_dump_json())
-
+
             if success:
                 return  # Successfully queued in Redis
-
+
         # Fallback to unified logging endpoint with client credentials
+        # Use InternalHttpClient to avoid circular dependency with HttpClient
         try:
             # Backend extracts environment and application from client credentials
-            log_payload = log_entry.model_dump(
-
+            log_payload = log_entry.model_dump(
+                exclude={"environment", "application"}, exclude_none=True
+            )
+            await self.internal_http_client.request("POST", "/api/logs", log_payload)
         except Exception:
             # Failed to send log to controller
             # Silently fail to avoid infinite logging loops
             # Application should implement retry or buffer strategy if needed
             pass
-
+
     def with_context(self, context: Dict[str, Any]) -> "LoggerChain":
         """Create logger chain with context."""
         return LoggerChain(self, context, ClientLoggingOptions())
-
+
     def with_token(self, token: str) -> "LoggerChain":
         """Create logger chain with token."""
         return LoggerChain(self, {}, ClientLoggingOptions(token=token))
-
+
     def with_performance(self) -> "LoggerChain":
         """Create logger chain with performance metrics."""
         opts = ClientLoggingOptions()
         opts.performanceMetrics = True
         return LoggerChain(self, {}, opts)
-
+
     def without_masking(self) -> "LoggerChain":
         """Create logger chain without data masking."""
         opts = ClientLoggingOptions()
@@ -378,16 +393,16 @@ class LoggerService:
 
 class LoggerChain:
     """Method chaining class for fluent logging API."""
-
+
     def __init__(
         self,
         logger: LoggerService,
         context: Optional[Dict[str, Any]] = None,
-        options: Optional[ClientLoggingOptions] = None
+        options: Optional[ClientLoggingOptions] = None,
     ):
         """
         Initialize logger chain.
-
+
         Args:
             logger: Logger service instance
             context: Initial context
@@ -396,62 +411,62 @@ class LoggerChain:
         self.logger = logger
         self.context = context or {}
         self.options = options or ClientLoggingOptions()
-
+
     def add_context(self, key: str, value: Any) -> "LoggerChain":
         """Add context key-value pair."""
         self.context[key] = value
         return self
-
+
     def add_user(self, user_id: str) -> "LoggerChain":
         """Add user ID."""
         if self.options is None:
             self.options = ClientLoggingOptions()
         self.options.userId = user_id
         return self
-
+
     def add_application(self, application_id: str) -> "LoggerChain":
         """Add application ID."""
         if self.options is None:
             self.options = ClientLoggingOptions()
         self.options.applicationId = application_id
         return self
-
+
     def add_correlation(self, correlation_id: str) -> "LoggerChain":
         """Add correlation ID."""
         if self.options is None:
             self.options = ClientLoggingOptions()
         self.options.correlationId = correlation_id
         return self
-
+
     def with_token(self, token: str) -> "LoggerChain":
         """Add token for context extraction."""
         if self.options is None:
             self.options = ClientLoggingOptions()
         self.options.token = token
         return self
-
+
     def with_performance(self) -> "LoggerChain":
         """Enable performance metrics."""
         if self.options is None:
             self.options = ClientLoggingOptions()
         self.options.performanceMetrics = True
         return self
-
+
     def without_masking(self) -> "LoggerChain":
         """Disable data masking."""
         if self.options is None:
             self.options = ClientLoggingOptions()
         self.options.maskSensitiveData = False
         return self
-
+
     async def error(self, message: str, stack_trace: Optional[str] = None) -> None:
         """Log error."""
         await self.logger.error(message, self.context, stack_trace, self.options)
-
+
     async def info(self, message: str) -> None:
         """Log info."""
         await self.logger.info(message, self.context, self.options)
-
+
     async def audit(self, action: str, resource: str) -> None:
         """Log audit."""
         await self.logger.audit(action, resource, self.context, self.options)
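
Taken together, the logger changes in 0.4.0 swap the shared HttpClient for the new InternalHttpClient (to break the circular dependency noted in the diff) while keeping the fluent LoggerChain API. The sketch below is only an orientation for how the updated service might be wired up and used, based on the signatures visible in this diff; the ClientConfig name, its constructor fields, and the InternalHttpClient/RedisService constructors are not shown here and are assumptions.

import asyncio

from miso_client.models.config import ClientConfig  # assumed config model name
from miso_client.services.logger import LoggerService
from miso_client.services.redis import RedisService
from miso_client.utils.internal_http_client import InternalHttpClient


async def main() -> None:
    # Assumed wiring: only LoggerService(internal_http_client, redis) comes from this diff.
    config = ClientConfig(client_id="my-app", log_level="debug")  # assumed fields
    internal_http = InternalHttpClient(config)  # assumed constructor
    redis = RedisService(config)  # assumed constructor
    logger = LoggerService(internal_http, redis)

    # Plain logging call (signature shown in the diff)
    await logger.info("service started", {"component": "bootstrap"})

    # Fluent chain: context, JWT-derived fields, performance metrics
    await (
        logger.with_context({"orderId": "123"})
        .with_token("eyJ...")  # token is decoded for userId/roles/permissions
        .with_performance()
        .error("payment failed")
    )


asyncio.run(main())

Logs go to the Redis queue logs:{client_id} when Redis is connected; otherwise they fall back to a POST to /api/logs through InternalHttpClient, and failures there are swallowed to avoid logging loops.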