alma-memory 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alma/__init__.py +296 -194
- alma/compression/__init__.py +33 -0
- alma/compression/pipeline.py +980 -0
- alma/confidence/__init__.py +47 -47
- alma/confidence/engine.py +540 -540
- alma/confidence/types.py +351 -351
- alma/config/loader.py +157 -157
- alma/consolidation/__init__.py +23 -23
- alma/consolidation/engine.py +678 -678
- alma/consolidation/prompts.py +84 -84
- alma/core.py +1189 -322
- alma/domains/__init__.py +30 -30
- alma/domains/factory.py +359 -359
- alma/domains/schemas.py +448 -448
- alma/domains/types.py +272 -272
- alma/events/__init__.py +75 -75
- alma/events/emitter.py +285 -284
- alma/events/storage_mixin.py +246 -246
- alma/events/types.py +126 -126
- alma/events/webhook.py +425 -425
- alma/exceptions.py +49 -49
- alma/extraction/__init__.py +31 -31
- alma/extraction/auto_learner.py +265 -264
- alma/extraction/extractor.py +420 -420
- alma/graph/__init__.py +106 -81
- alma/graph/backends/__init__.py +32 -18
- alma/graph/backends/kuzu.py +624 -0
- alma/graph/backends/memgraph.py +432 -0
- alma/graph/backends/memory.py +236 -236
- alma/graph/backends/neo4j.py +417 -417
- alma/graph/base.py +159 -159
- alma/graph/extraction.py +198 -198
- alma/graph/store.py +860 -860
- alma/harness/__init__.py +35 -35
- alma/harness/base.py +386 -386
- alma/harness/domains.py +705 -705
- alma/initializer/__init__.py +37 -37
- alma/initializer/initializer.py +418 -418
- alma/initializer/types.py +250 -250
- alma/integration/__init__.py +62 -62
- alma/integration/claude_agents.py +444 -432
- alma/integration/helena.py +423 -423
- alma/integration/victor.py +471 -471
- alma/learning/__init__.py +101 -86
- alma/learning/decay.py +878 -0
- alma/learning/forgetting.py +1446 -1446
- alma/learning/heuristic_extractor.py +390 -390
- alma/learning/protocols.py +374 -374
- alma/learning/validation.py +346 -346
- alma/mcp/__init__.py +123 -45
- alma/mcp/__main__.py +156 -156
- alma/mcp/resources.py +122 -122
- alma/mcp/server.py +955 -591
- alma/mcp/tools.py +3254 -511
- alma/observability/__init__.py +91 -0
- alma/observability/config.py +302 -0
- alma/observability/guidelines.py +170 -0
- alma/observability/logging.py +424 -0
- alma/observability/metrics.py +583 -0
- alma/observability/tracing.py +440 -0
- alma/progress/__init__.py +21 -21
- alma/progress/tracker.py +607 -607
- alma/progress/types.py +250 -250
- alma/retrieval/__init__.py +134 -53
- alma/retrieval/budget.py +525 -0
- alma/retrieval/cache.py +1304 -1061
- alma/retrieval/embeddings.py +202 -202
- alma/retrieval/engine.py +850 -366
- alma/retrieval/modes.py +365 -0
- alma/retrieval/progressive.py +560 -0
- alma/retrieval/scoring.py +344 -344
- alma/retrieval/trust_scoring.py +637 -0
- alma/retrieval/verification.py +797 -0
- alma/session/__init__.py +19 -19
- alma/session/manager.py +442 -399
- alma/session/types.py +288 -288
- alma/storage/__init__.py +101 -61
- alma/storage/archive.py +233 -0
- alma/storage/azure_cosmos.py +1259 -1048
- alma/storage/base.py +1083 -525
- alma/storage/chroma.py +1443 -1443
- alma/storage/constants.py +103 -0
- alma/storage/file_based.py +614 -619
- alma/storage/migrations/__init__.py +21 -0
- alma/storage/migrations/base.py +321 -0
- alma/storage/migrations/runner.py +323 -0
- alma/storage/migrations/version_stores.py +337 -0
- alma/storage/migrations/versions/__init__.py +11 -0
- alma/storage/migrations/versions/v1_0_0.py +373 -0
- alma/storage/migrations/versions/v1_1_0_workflow_context.py +551 -0
- alma/storage/pinecone.py +1080 -1080
- alma/storage/postgresql.py +1948 -1452
- alma/storage/qdrant.py +1306 -1306
- alma/storage/sqlite_local.py +3041 -1358
- alma/testing/__init__.py +46 -0
- alma/testing/factories.py +301 -0
- alma/testing/mocks.py +389 -0
- alma/types.py +292 -264
- alma/utils/__init__.py +19 -0
- alma/utils/tokenizer.py +521 -0
- alma/workflow/__init__.py +83 -0
- alma/workflow/artifacts.py +170 -0
- alma/workflow/checkpoint.py +311 -0
- alma/workflow/context.py +228 -0
- alma/workflow/outcomes.py +189 -0
- alma/workflow/reducers.py +393 -0
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/METADATA +244 -72
- alma_memory-0.7.0.dist-info/RECORD +112 -0
- alma_memory-0.5.0.dist-info/RECORD +0 -76
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/WHEEL +0 -0
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,424 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ALMA Structured Logging.
|
|
3
|
+
|
|
4
|
+
Provides structured JSON logging for log aggregation systems
|
|
5
|
+
(ELK, Splunk, DataDog, etc.) with contextual information.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
import sys
|
|
11
|
+
import traceback
|
|
12
|
+
from datetime import datetime, timezone
|
|
13
|
+
from typing import Any, Dict, Optional
|
|
14
|
+
|
|
15
|
+
# Global logger registry: one StructuredLogger per name, populated lazily
# by get_logger().
_loggers: Dict[str, "StructuredLogger"] = {}
# Flipped to True once setup_logging() has installed a handler.
_logging_configured: bool = False
# Service name reported in structured logs; overwritten by setup_logging().
_default_service_name: str = "alma-memory"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class JSONFormatter(logging.Formatter):
    """
    JSON formatter for structured logging.

    Outputs log records as JSON objects with consistent field names
    for easy parsing by log aggregation systems.
    """

    # Standard LogRecord attributes that are not user-supplied context and
    # therefore must not be copied into the "extra" payload.
    _SKIP_ATTRS = frozenset({
        "name", "msg", "args", "created", "filename", "funcName",
        "levelname", "levelno", "lineno", "module", "msecs", "pathname",
        "process", "processName", "relativeCreated", "stack_info",
        "exc_info", "exc_text", "thread", "threadName", "message", "taskName",
    })

    def __init__(
        self,
        service_name: str = "alma-memory",
        include_traceback: bool = True,
        extra_fields: Optional[Dict[str, Any]] = None,
    ):
        """
        Initialize JSON formatter.

        Args:
            service_name: Name of the service for logs
            include_traceback: Include full traceback for exceptions
            extra_fields: Additional fields to include in every log
        """
        super().__init__()
        self.service_name = service_name
        self.include_traceback = include_traceback
        self.extra_fields = extra_fields or {}

    def format(self, record: logging.LogRecord) -> str:
        """Format log record as a single-line JSON string."""
        # Use the record's own creation time (not "now") so buffered or
        # asynchronous handlers report when the event actually happened.
        log_entry: Dict[str, Any] = {
            "timestamp": datetime.fromtimestamp(
                record.created, tz=timezone.utc
            ).isoformat(),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "service": self.service_name,
        }

        # Source location of the logging call.
        log_entry["source"] = {
            "file": record.filename,
            "line": record.lineno,
            "function": record.funcName,
        }

        # Thread/process info for correlating concurrent logs.
        log_entry["thread"] = {
            "id": record.thread,
            "name": record.threadName,
        }
        log_entry["process"] = {
            "id": record.process,
            "name": record.processName,
        }

        # Exception details, if the record carries exc_info.
        if record.exc_info:
            log_entry["exception"] = {
                "type": record.exc_info[0].__name__ if record.exc_info[0] else None,
                "message": str(record.exc_info[1]) if record.exc_info[1] else None,
            }
            if self.include_traceback:
                log_entry["exception"]["traceback"] = traceback.format_exception(
                    *record.exc_info
                )

        # Copy user-supplied context (anything not a standard LogRecord
        # attribute), falling back to str() for non-JSON-serializable values.
        extra = {}
        for key, value in record.__dict__.items():
            if key not in self._SKIP_ATTRS and not key.startswith("_"):
                try:
                    json.dumps(value)
                    extra[key] = value
                except (TypeError, ValueError):
                    extra[key] = str(value)
        if extra:
            log_entry["extra"] = extra

        # Statically configured fields are applied last (may override).
        if self.extra_fields:
            log_entry.update(self.extra_fields)

        return json.dumps(log_entry, default=str)
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
class TextFormatter(logging.Formatter):
    """
    Enhanced text formatter with structured context.

    Provides readable text output with optional context fields
    for development and debugging.
    """

    # Standard LogRecord attributes; anything else on the record is
    # treated as user-supplied context.
    _SKIP_ATTRS = frozenset({
        "name", "msg", "args", "created", "filename", "funcName",
        "levelname", "levelno", "lineno", "module", "msecs", "pathname",
        "process", "processName", "relativeCreated", "stack_info",
        "exc_info", "exc_text", "thread", "threadName", "message", "taskName",
    })

    def __init__(
        self,
        include_context: bool = True,
        include_location: bool = True,
    ):
        """
        Initialize text formatter.

        Args:
            include_context: Include extra context fields
            include_location: Include source file and line
        """
        super().__init__()
        self.include_context = include_context
        self.include_location = include_location

    def format(self, record: logging.LogRecord) -> str:
        """Format log record as readable text."""
        # Use the record's own creation time (not "now") so the printed
        # timestamp matches when the event was actually logged.
        timestamp = datetime.fromtimestamp(record.created, tz=timezone.utc).strftime(
            "%Y-%m-%d %H:%M:%S.%f"
        )[:-3]
        parts = [
            f"{timestamp} {record.levelname:8s} [{record.name}] {record.getMessage()}"
        ]

        # Source location, if enabled.
        if self.include_location:
            parts.append(f" at {record.filename}:{record.lineno} in {record.funcName}")

        # Full traceback if the record carries exception info.
        if record.exc_info:
            parts.append(self.formatException(record.exc_info))

        # User-supplied context fields (anything not a standard attribute).
        if self.include_context:
            extra_items = [
                f"{key}={value}"
                for key, value in record.__dict__.items()
                if key not in self._SKIP_ATTRS and not key.startswith("_")
            ]
            if extra_items:
                parts.append(f" context: {', '.join(extra_items)}")

        return "\n".join(parts)
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
class StructuredLogger:
    """
    Structured logger wrapper with context management.

    Provides a convenient interface for logging with structured
    context that is automatically included in all log messages.
    """

    def __init__(
        self,
        name: str,
        level: int = logging.INFO,
    ):
        """
        Initialize structured logger.

        Args:
            name: Logger name (typically __name__)
            level: Logging level
        """
        self._logger = logging.getLogger(name)
        self._logger.setLevel(level)
        # Persistent fields merged into every record's "extra".
        self._context: Dict[str, Any] = {}

    def set_context(self, **kwargs):
        """Set persistent context fields for all subsequent logs."""
        self._context.update(kwargs)

    def clear_context(self):
        """Clear all context fields."""
        self._context.clear()

    def with_context(self, **kwargs) -> "LogContext":
        """
        Create a context manager for temporary context.

        Usage:
            with logger.with_context(request_id="123"):
                logger.info("Processing request")
        """
        return LogContext(self, kwargs)

    def _log(
        self,
        level: int,
        msg: str,
        *args,
        exc_info: bool = False,
        **kwargs,
    ):
        """Internal log method with context injection."""
        # Per-call kwargs win over persistent context on key collisions.
        extra = {**self._context, **kwargs}
        # stacklevel=3 skips this method and its public wrapper
        # (debug/info/...), so record.filename/lineno/funcName point at
        # the actual caller instead of this wrapper module.
        self._logger.log(
            level, msg, *args, exc_info=exc_info, extra=extra, stacklevel=3
        )

    def debug(self, msg: str, *args, **kwargs):
        """Log debug message."""
        self._log(logging.DEBUG, msg, *args, **kwargs)

    def info(self, msg: str, *args, **kwargs):
        """Log info message."""
        self._log(logging.INFO, msg, *args, **kwargs)

    def warning(self, msg: str, *args, **kwargs):
        """Log warning message."""
        self._log(logging.WARNING, msg, *args, **kwargs)

    def error(self, msg: str, *args, exc_info: bool = False, **kwargs):
        """Log error message."""
        self._log(logging.ERROR, msg, *args, exc_info=exc_info, **kwargs)

    def exception(self, msg: str, *args, **kwargs):
        """Log exception with traceback (call from an except block)."""
        self._log(logging.ERROR, msg, *args, exc_info=True, **kwargs)

    def critical(self, msg: str, *args, **kwargs):
        """Log critical message."""
        self._log(logging.CRITICAL, msg, *args, **kwargs)

    # Metrics-related log methods

    def metric(
        self,
        name: str,
        value: float,
        unit: str = "",
        tags: Optional[Dict[str, str]] = None,
    ):
        """
        Log a metric value.

        This is useful for logging metrics when OpenTelemetry
        metrics are not available.

        Args:
            name: Metric name
            value: Metric value
            unit: Optional unit suffix (e.g. "ms")
            tags: Optional key/value tags attached under metric_tags
        """
        self.info(
            f"METRIC {name}={value}{unit}",
            metric_name=name,
            metric_value=value,
            metric_unit=unit,
            metric_tags=tags or {},
        )

    def timing(
        self,
        operation: str,
        duration_ms: float,
        success: bool = True,
        tags: Optional[Dict[str, str]] = None,
    ):
        """
        Log an operation timing.

        Args:
            operation: Name of the timed operation
            duration_ms: Elapsed time in milliseconds
            success: Whether the operation succeeded
            tags: Extra fields merged into the record; keys must not
                collide with operation/duration_ms/success
        """
        self.info(
            f"TIMING {operation} completed in {duration_ms:.2f}ms",
            operation=operation,
            duration_ms=duration_ms,
            success=success,
            **(tags or {}),
        )
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
class LogContext:
    """Context manager that temporarily extends a StructuredLogger's context."""

    def __init__(self, logger: StructuredLogger, context: Dict[str, Any]):
        self._logger = logger
        self._context = context
        self._original_context: Dict[str, Any] = {}

    def __enter__(self):
        # Snapshot any values the temporary context is about to shadow.
        current = self._logger._context
        self._original_context.update(
            {key: current[key] for key in self._context if key in current}
        )
        self._logger.set_context(**self._context)
        return self._logger

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore shadowed values; drop keys this context introduced.
        for key in self._context:
            if key in self._original_context:
                self._logger._context[key] = self._original_context[key]
            else:
                self._logger._context.pop(key, None)
        # Never suppress exceptions from the with-body.
        return False
|
|
356
|
+
|
|
357
|
+
|
|
358
|
+
def setup_logging(
    level: str = "INFO",
    format_type: str = "json",
    service_name: str = "alma-memory",
    output: str = "stderr",
    extra_fields: Optional[Dict[str, Any]] = None,
):
    """
    Setup logging with the specified configuration.

    Args:
        level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        format_type: "json" or "text"
        service_name: Service name for logs
        output: "stderr", "stdout", or a file path
        extra_fields: Additional fields to include in JSON logs
    """
    global _logging_configured, _default_service_name

    _default_service_name = service_name

    # Configure the package root logger; child loggers inherit it.
    root_logger = logging.getLogger("alma")
    root_logger.setLevel(getattr(logging, level.upper(), logging.INFO))

    # Detach AND close existing handlers so repeated setup calls do not
    # duplicate output or leak open file handles (e.g. FileHandler).
    for old_handler in list(root_logger.handlers):
        root_logger.removeHandler(old_handler)
        old_handler.close()

    # Create handler based on output destination.
    if output == "stderr":
        handler = logging.StreamHandler(sys.stderr)
    elif output == "stdout":
        handler = logging.StreamHandler(sys.stdout)
    else:
        # Anything else is treated as a file path; force UTF-8 so log
        # files are portable across locales.
        handler = logging.FileHandler(output, encoding="utf-8")

    # Create formatter based on format type.
    if format_type.lower() == "json":
        formatter = JSONFormatter(
            service_name=service_name,
            extra_fields=extra_fields,
        )
    else:
        formatter = TextFormatter()

    handler.setFormatter(formatter)
    root_logger.addHandler(handler)

    _logging_configured = True
|
|
407
|
+
|
|
408
|
+
|
|
409
|
+
def get_logger(name: str) -> StructuredLogger:
    """
    Get a structured logger for the given name.

    Instances are cached in the module registry, so repeated calls with
    the same name return the same StructuredLogger.

    Args:
        name: Logger name (typically __name__)

    Returns:
        StructuredLogger instance
    """
    try:
        return _loggers[name]
    except KeyError:
        logger = _loggers[name] = StructuredLogger(name)
        return logger
|