ai-pipeline-core 0.1.12__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (90)
  1. ai_pipeline_core/__init__.py +83 -119
  2. ai_pipeline_core/deployment/__init__.py +34 -0
  3. ai_pipeline_core/deployment/base.py +861 -0
  4. ai_pipeline_core/deployment/contract.py +80 -0
  5. ai_pipeline_core/deployment/deploy.py +561 -0
  6. ai_pipeline_core/deployment/helpers.py +97 -0
  7. ai_pipeline_core/deployment/progress.py +126 -0
  8. ai_pipeline_core/deployment/remote.py +116 -0
  9. ai_pipeline_core/docs_generator/__init__.py +54 -0
  10. ai_pipeline_core/docs_generator/__main__.py +5 -0
  11. ai_pipeline_core/docs_generator/cli.py +196 -0
  12. ai_pipeline_core/docs_generator/extractor.py +324 -0
  13. ai_pipeline_core/docs_generator/guide_builder.py +644 -0
  14. ai_pipeline_core/docs_generator/trimmer.py +35 -0
  15. ai_pipeline_core/docs_generator/validator.py +114 -0
  16. ai_pipeline_core/document_store/__init__.py +13 -0
  17. ai_pipeline_core/document_store/_summary.py +9 -0
  18. ai_pipeline_core/document_store/_summary_worker.py +170 -0
  19. ai_pipeline_core/document_store/clickhouse.py +492 -0
  20. ai_pipeline_core/document_store/factory.py +38 -0
  21. ai_pipeline_core/document_store/local.py +312 -0
  22. ai_pipeline_core/document_store/memory.py +85 -0
  23. ai_pipeline_core/document_store/protocol.py +68 -0
  24. ai_pipeline_core/documents/__init__.py +14 -15
  25. ai_pipeline_core/documents/_context_vars.py +85 -0
  26. ai_pipeline_core/documents/_hashing.py +52 -0
  27. ai_pipeline_core/documents/attachment.py +85 -0
  28. ai_pipeline_core/documents/context.py +128 -0
  29. ai_pipeline_core/documents/document.py +349 -1062
  30. ai_pipeline_core/documents/mime_type.py +40 -85
  31. ai_pipeline_core/documents/utils.py +62 -7
  32. ai_pipeline_core/exceptions.py +10 -62
  33. ai_pipeline_core/images/__init__.py +309 -0
  34. ai_pipeline_core/images/_processing.py +151 -0
  35. ai_pipeline_core/llm/__init__.py +5 -3
  36. ai_pipeline_core/llm/ai_messages.py +284 -73
  37. ai_pipeline_core/llm/client.py +462 -209
  38. ai_pipeline_core/llm/model_options.py +86 -53
  39. ai_pipeline_core/llm/model_response.py +187 -241
  40. ai_pipeline_core/llm/model_types.py +34 -54
  41. ai_pipeline_core/logging/__init__.py +2 -9
  42. ai_pipeline_core/logging/logging.yml +1 -1
  43. ai_pipeline_core/logging/logging_config.py +27 -43
  44. ai_pipeline_core/logging/logging_mixin.py +17 -51
  45. ai_pipeline_core/observability/__init__.py +32 -0
  46. ai_pipeline_core/observability/_debug/__init__.py +30 -0
  47. ai_pipeline_core/observability/_debug/_auto_summary.py +94 -0
  48. ai_pipeline_core/observability/_debug/_config.py +95 -0
  49. ai_pipeline_core/observability/_debug/_content.py +764 -0
  50. ai_pipeline_core/observability/_debug/_processor.py +98 -0
  51. ai_pipeline_core/observability/_debug/_summary.py +312 -0
  52. ai_pipeline_core/observability/_debug/_types.py +75 -0
  53. ai_pipeline_core/observability/_debug/_writer.py +843 -0
  54. ai_pipeline_core/observability/_document_tracking.py +146 -0
  55. ai_pipeline_core/observability/_initialization.py +194 -0
  56. ai_pipeline_core/observability/_logging_bridge.py +57 -0
  57. ai_pipeline_core/observability/_summary.py +81 -0
  58. ai_pipeline_core/observability/_tracking/__init__.py +6 -0
  59. ai_pipeline_core/observability/_tracking/_client.py +178 -0
  60. ai_pipeline_core/observability/_tracking/_internal.py +28 -0
  61. ai_pipeline_core/observability/_tracking/_models.py +138 -0
  62. ai_pipeline_core/observability/_tracking/_processor.py +158 -0
  63. ai_pipeline_core/observability/_tracking/_service.py +311 -0
  64. ai_pipeline_core/observability/_tracking/_writer.py +229 -0
  65. ai_pipeline_core/observability/tracing.py +640 -0
  66. ai_pipeline_core/pipeline/__init__.py +10 -0
  67. ai_pipeline_core/pipeline/decorators.py +915 -0
  68. ai_pipeline_core/pipeline/options.py +16 -0
  69. ai_pipeline_core/prompt_manager.py +26 -105
  70. ai_pipeline_core/settings.py +41 -32
  71. ai_pipeline_core/testing.py +9 -0
  72. ai_pipeline_core-0.4.1.dist-info/METADATA +807 -0
  73. ai_pipeline_core-0.4.1.dist-info/RECORD +76 -0
  74. {ai_pipeline_core-0.1.12.dist-info → ai_pipeline_core-0.4.1.dist-info}/WHEEL +1 -1
  75. ai_pipeline_core/documents/document_list.py +0 -240
  76. ai_pipeline_core/documents/flow_document.py +0 -128
  77. ai_pipeline_core/documents/task_document.py +0 -133
  78. ai_pipeline_core/documents/temporary_document.py +0 -95
  79. ai_pipeline_core/flow/__init__.py +0 -9
  80. ai_pipeline_core/flow/config.py +0 -314
  81. ai_pipeline_core/flow/options.py +0 -75
  82. ai_pipeline_core/pipeline.py +0 -717
  83. ai_pipeline_core/prefect.py +0 -54
  84. ai_pipeline_core/simple_runner/__init__.py +0 -24
  85. ai_pipeline_core/simple_runner/cli.py +0 -255
  86. ai_pipeline_core/simple_runner/simple_runner.py +0 -385
  87. ai_pipeline_core/tracing.py +0 -475
  88. ai_pipeline_core-0.1.12.dist-info/METADATA +0 -450
  89. ai_pipeline_core-0.1.12.dist-info/RECORD +0 -36
  90. {ai_pipeline_core-0.1.12.dist-info → ai_pipeline_core-0.4.1.dist-info}/licenses/LICENSE +0 -0
ai_pipeline_core/tracing.py (removed)
@@ -1,475 +0,0 @@
- """Tracing utilities that integrate Laminar (``lmnr``) with our code-base.
-
- @public
-
- This module centralizes:
- - ``TraceInfo`` - a small helper object for propagating contextual metadata.
- - ``trace`` decorator - augments a callable with Laminar tracing, automatic
-   ``observe`` instrumentation, and optional support for test runs.
- """
-
- from __future__ import annotations
-
- import inspect
- import os
- from functools import wraps
- from typing import Any, Callable, Literal, ParamSpec, TypeVar, cast, overload
-
- from lmnr import Instruments, Laminar, observe
- from pydantic import BaseModel
-
- from ai_pipeline_core.settings import settings
-
- # ---------------------------------------------------------------------------
- # Typing helpers
- # ---------------------------------------------------------------------------
- P = ParamSpec("P")
- R = TypeVar("R")
-
- TraceLevel = Literal["always", "debug", "off"]
- """Control level for tracing activation.
-
- @public
-
- Values:
-     - "always": Always trace (default, production mode)
-     - "debug": Only trace when LMNR_DEBUG == "true"
-     - "off": Disable tracing completely
- """
-
-
- # ---------------------------------------------------------------------------
- # ``TraceInfo`` – metadata container
- # ---------------------------------------------------------------------------
- class TraceInfo(BaseModel):
-     """Container for propagating trace context through the pipeline.
-
-     TraceInfo provides a structured way to pass tracing metadata through
-     function calls, ensuring consistent observability across the entire
-     execution flow. It integrates with Laminar (LMNR) for distributed
-     tracing and debugging.
-
-     Attributes:
-         session_id: Unique identifier for the current session/conversation.
-             Falls back to LMNR_SESSION_ID environment variable.
-         user_id: Identifier for the user triggering the operation.
-             Falls back to LMNR_USER_ID environment variable.
-         metadata: Key-value pairs for additional trace context.
-             Useful for filtering and searching in LMNR dashboard.
-         tags: List of tags for categorizing traces (e.g., ["production", "v2"]).
-
-     Environment fallbacks:
-         - LMNR_SESSION_ID: Default session_id if not explicitly set
-         - LMNR_USER_ID: Default user_id if not explicitly set
-         - LMNR_DEBUG: Controls debug-level tracing when set to "true"
-
-     Note: These variables are read directly by the tracing layer and are
-     not part of the Settings configuration.
-
-     Example:
-         >>> # Create trace context
-         >>> trace_info = TraceInfo(
-         ... session_id="sess_123",
-         ... user_id="user_456",
-         ... metadata={"flow": "document_analysis", "version": "1.2"},
-         ... tags=["production", "high_priority"]
-         ... )
-         >>>
-         >>> # Pass through function calls
-         >>> @trace
-         >>> async def process(data, trace_info: TraceInfo):
-         ... # TraceInfo automatically propagates to nested calls
-         ... result = await analyze(data, trace_info=trace_info)
-         ... return result
-
-     Note:
-         TraceInfo is typically created at the entry point of a flow
-         and passed through all subsequent function calls for
-         consistent tracing context.
-     """
-
-     session_id: str | None = None
-     user_id: str | None = None
-     metadata: dict[str, str] = {}
-     tags: list[str] = []
-
-     def get_observe_kwargs(self) -> dict[str, Any]:
-         """Convert TraceInfo to kwargs for Laminar's observe decorator.
-
-         Transforms the TraceInfo fields into the format expected by
-         the lmnr.observe() decorator, applying environment variable
-         fallbacks for session_id and user_id.
-
-         Returns:
-             Dictionary with keys:
-             - session_id: From field or LMNR_SESSION_ID env var
-             - user_id: From field or LMNR_USER_ID env var
-             - metadata: Dictionary of custom metadata (if set)
-             - tags: List of tags (if set)
-
-         Only non-empty values are included in the output.
-
-         Example:
-             >>> trace_info = TraceInfo(session_id="sess_123", tags=["test"])
-             >>> kwargs = trace_info.get_observe_kwargs()
-             >>> # Returns: {"session_id": "sess_123", "tags": ["test"]}
-
-         Note:
-             This method is called internally by the trace decorator
-             to configure Laminar observation parameters.
-         """
-         kwargs: dict[str, Any] = {}
-
-         # Use environment variable fallback for session_id
-         session_id = self.session_id or os.getenv("LMNR_SESSION_ID")
-         if session_id:
-             kwargs["session_id"] = session_id
-
-         # Use environment variable fallback for user_id
-         user_id = self.user_id or os.getenv("LMNR_USER_ID")
-         if user_id:
-             kwargs["user_id"] = user_id
-
-         if self.metadata:
-             kwargs["metadata"] = self.metadata
-         if self.tags:
-             kwargs["tags"] = self.tags
-         return kwargs
-
-
- # ---------------------------------------------------------------------------
- # ``trace`` decorator
- # ---------------------------------------------------------------------------
-
-
- def _initialise_laminar() -> None:
-     """Initialize Laminar SDK with project configuration.
-
-     Sets up the Laminar observability client with the project API key
-     from settings. Disables automatic OpenAI instrumentation to avoid
-     conflicts with our custom tracing.
-
-     Configuration:
-         - Uses settings.lmnr_project_api_key for authentication
-         - Disables OPENAI instrument to prevent double-tracing
-         - Called automatically by trace decorator on first use
-
-     Note:
-         This is an internal function called once per process.
-         Multiple calls are safe (Laminar handles idempotency).
-     """
-     if settings.lmnr_project_api_key:
-         Laminar.initialize(
-             project_api_key=settings.lmnr_project_api_key,
-             disabled_instruments=[Instruments.OPENAI] if Instruments.OPENAI else [],
-         )
-
-
- # Overload for calls like @trace(name="...", level="debug")
- @overload
- def trace(
-     *,
-     level: TraceLevel = "always",
-     name: str | None = None,
-     session_id: str | None = None,
-     user_id: str | None = None,
-     metadata: dict[str, Any] | None = None,
-     tags: list[str] | None = None,
-     span_type: str | None = None,
-     ignore_input: bool = False,
-     ignore_output: bool = False,
-     ignore_inputs: list[str] | None = None,
-     input_formatter: Callable[..., str] | None = None,
-     output_formatter: Callable[..., str] | None = None,
-     ignore_exceptions: bool = False,
-     preserve_global_context: bool = True,
- ) -> Callable[[Callable[P, R]], Callable[P, R]]: ...
-
-
- # Overload for the bare @trace call
- @overload
- def trace(func: Callable[P, R]) -> Callable[P, R]: ...
-
-
- # Actual implementation
- def trace(
-     func: Callable[P, R] | None = None,
-     *,
-     level: TraceLevel = "always",
-     name: str | None = None,
-     session_id: str | None = None,
-     user_id: str | None = None,
-     metadata: dict[str, Any] | None = None,
-     tags: list[str] | None = None,
-     span_type: str | None = None,
-     ignore_input: bool = False,
-     ignore_output: bool = False,
-     ignore_inputs: list[str] | None = None,
-     input_formatter: Callable[..., str] | None = None,
-     output_formatter: Callable[..., str] | None = None,
-     ignore_exceptions: bool = False,
-     preserve_global_context: bool = True,
- ) -> Callable[[Callable[P, R]], Callable[P, R]] | Callable[P, R]:
-     """Add Laminar observability tracing to any function.
-
-     @public
-
-     The trace decorator integrates functions with Laminar (LMNR) for
-     distributed tracing, performance monitoring, and debugging. It
-     automatically handles both sync and async functions, propagates
-     trace context, and provides fine-grained control over what gets traced.
-
-     USAGE GUIDELINE - Defaults First:
-         In 90% of cases, use WITHOUT any parameters.
-         The defaults are optimized for most use cases.
-
-     Args:
-         func: Function to trace (when used without parentheses: @trace).
-
-         level: Controls when tracing is active:
-             - "always": Always trace (default, production mode)
-             - "debug": Only trace when LMNR_DEBUG == "true"
-             - "off": Disable tracing completely
-
-         name: Custom span name in traces (defaults to function.__name__).
-             Use descriptive names for better trace readability.
-
-         session_id: Override session ID for this function's traces.
-             Typically propagated via TraceInfo instead.
-
-         user_id: Override user ID for this function's traces.
-             Typically propagated via TraceInfo instead.
-
-         metadata: Additional key-value metadata attached to spans.
-             Searchable in LMNR dashboard. Merged with TraceInfo metadata.
-
-         tags: List of tags for categorizing spans (e.g., ["api", "critical"]).
-             Merged with TraceInfo tags.
-
-         span_type: Semantic type of the span (e.g., "LLM", "CHAIN", "TOOL").
-             Affects visualization in LMNR dashboard.
-
-         ignore_input: Don't record function inputs in trace (privacy/size).
-
-         ignore_output: Don't record function output in trace (privacy/size).
-
-         ignore_inputs: List of parameter names to exclude from trace.
-             Useful for sensitive data like API keys.
-
-         input_formatter: Custom function to format inputs for tracing.
-             Receives all function args, returns display string.
-
-         output_formatter: Custom function to format output for tracing.
-             Receives function result, returns display string.
-
-         ignore_exceptions: Don't record exceptions in traces (default False).
-
-         preserve_global_context: Maintain Laminar's global context across
-             calls (default True). Set False for isolated traces.
-
-     Returns:
-         Decorated function with same signature but added tracing.
-
-     TraceInfo propagation:
-         If the decorated function has a 'trace_info' parameter, the decorator
-         automatically creates or propagates a TraceInfo instance, ensuring
-         consistent session/user tracking across the call chain.
-
-     Example:
-         >>> # RECOMMENDED - No parameters needed for most cases!
-         >>> @trace
-         >>> async def process_document(doc):
-         ... return await analyze(doc)
-         >>>
-         >>> # With parameters (RARE - only when specifically needed):
-         >>> @trace(level="debug") # Only for debug-specific tracing
-         >>> async def debug_operation():
-         ... pass
-
-         >>> @trace(ignore_inputs=["api_key"]) # Only for sensitive data
-         >>> async def api_call(data, api_key):
-         ... return await external_api(data, api_key)
-         >>>
-         >>> # AVOID unnecessary configuration - defaults handle:
-         >>> # - Automatic naming from function name
-         >>> # - Standard trace level ("always")
-         >>> # - Full input/output capture
-         >>> # - Proper span type inference
-         >>>
-         >>> # Custom formatting
-         >>> @trace(
-         ... input_formatter=lambda doc: f"Document: {doc.id}",
-         ... output_formatter=lambda res: f"Results: {len(res)} items"
-         >>> )
-         >>> def analyze(doc):
-         ... return results
-
-     Environment variables:
-         - LMNR_DEBUG: Set to "true" to enable debug-level traces
-         - LMNR_SESSION_ID: Default session ID if not in TraceInfo
-         - LMNR_USER_ID: Default user ID if not in TraceInfo
-         - LMNR_PROJECT_API_KEY: Required for trace submission
-
-     Performance:
-         - Tracing overhead is minimal (~1-2ms per call)
-         - When level="off", decorator returns original function unchanged
-         - Large inputs/outputs can be excluded with ignore_* parameters
-
-     Note:
-         - Automatically initializes Laminar on first use
-         - Works with both sync and async functions
-         - Preserves function signature and metadata
-         - Thread-safe and async-safe
-
-     See Also:
-         - TraceInfo: Container for trace metadata
-         - pipeline_task: Task decorator with built-in tracing
-         - pipeline_flow: Flow decorator with built-in tracing
-     """
-     if level == "off":
-         if func:
-             return func
-         return lambda f: f
-
-     def decorator(f: Callable[P, R]) -> Callable[P, R]:
-         """Apply tracing to the target function.
-
-         Returns:
-             Wrapped function with LMNR observability.
-
-         Raises:
-             TypeError: If function is already decorated with @pipeline_task or @pipeline_flow.
-         """
-         # Check if this is already a traced pipeline_task or pipeline_flow
-         # This happens when @trace is applied after @pipeline_task/@pipeline_flow
-         if hasattr(f, "__is_traced__") and f.__is_traced__:  # type: ignore[attr-defined]
-             # Check if it's a Prefect Task or Flow object (they have specific attributes)
-             # Prefect objects have certain attributes that regular functions don't
-             is_prefect_task = hasattr(f, "fn") and hasattr(f, "submit") and hasattr(f, "map")
-             is_prefect_flow = hasattr(f, "fn") and hasattr(f, "serve")
-             if is_prefect_task or is_prefect_flow:
-                 fname = getattr(f, "__name__", "function")
-                 raise TypeError(
-                     f"Function '{fname}' is already decorated with @pipeline_task or "
-                     f"@pipeline_flow. Remove the @trace decorator - pipeline decorators "
-                     f"include tracing automatically."
-                 )
-
-         # Handle 'debug' level logic - only trace when LMNR_DEBUG is "true"
-         if level == "debug" and os.getenv("LMNR_DEBUG", "").lower() != "true":
-             return f
-
-         # --- Pre-computation (done once when the function is decorated) ---
-         _initialise_laminar()
-         sig = inspect.signature(f)
-         is_coroutine = inspect.iscoroutinefunction(f)
-         observe_name = name or f.__name__
-         _observe = observe
-
-         # Store the new parameters
-         _session_id = session_id
-         _user_id = user_id
-         _metadata = metadata
-         _tags = tags or []
-         _span_type = span_type
-         _ignore_input = ignore_input
-         _ignore_output = ignore_output
-         _ignore_inputs = ignore_inputs
-         _input_formatter = input_formatter
-         _output_formatter = output_formatter
-         _ignore_exceptions = ignore_exceptions
-         _preserve_global_context = preserve_global_context
-
-         # --- Helper function for runtime logic ---
-         def _prepare_and_get_observe_params(runtime_kwargs: dict[str, Any]) -> dict[str, Any]:
-             """Inspects runtime args, manages TraceInfo, and returns params for lmnr.observe.
-
-             Modifies runtime_kwargs in place to inject TraceInfo if the function expects it.
-
-             Returns:
-                 Dictionary of parameters for lmnr.observe decorator.
-             """
-             trace_info = runtime_kwargs.get("trace_info")
-             if not isinstance(trace_info, TraceInfo):
-                 trace_info = TraceInfo()
-             if "trace_info" in sig.parameters:
-                 runtime_kwargs["trace_info"] = trace_info
-
-             observe_params = trace_info.get_observe_kwargs()
-             observe_params["name"] = observe_name
-
-             # Override with decorator-level session_id and user_id if provided
-             if _session_id:
-                 observe_params["session_id"] = _session_id
-             if _user_id:
-                 observe_params["user_id"] = _user_id
-
-             # Merge decorator-level metadata and tags
-             if _metadata:
-                 observe_params["metadata"] = {**observe_params.get("metadata", {}), **_metadata}
-             if _tags:
-                 observe_params["tags"] = observe_params.get("tags", []) + _tags
-             if _span_type:
-                 observe_params["span_type"] = _span_type
-
-             # Add the new Laminar parameters
-             if _ignore_input:
-                 observe_params["ignore_input"] = _ignore_input
-             if _ignore_output:
-                 observe_params["ignore_output"] = _ignore_output
-             if _ignore_inputs is not None:
-                 observe_params["ignore_inputs"] = _ignore_inputs
-             if _input_formatter is not None:
-                 observe_params["input_formatter"] = _input_formatter
-             if _output_formatter is not None:
-                 observe_params["output_formatter"] = _output_formatter
-             if _ignore_exceptions:
-                 observe_params["ignore_exceptions"] = _ignore_exceptions
-             if _preserve_global_context:
-                 observe_params["preserve_global_context"] = _preserve_global_context
-
-             return observe_params
-
-         # --- The actual wrappers ---
-         @wraps(f)
-         def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
-             """Synchronous wrapper for traced function.
-
-             Returns:
-                 The result of the wrapped function.
-             """
-             observe_params = _prepare_and_get_observe_params(kwargs)
-             observed_func = _observe(**observe_params)(f)
-             return observed_func(*args, **kwargs)
-
-         @wraps(f)
-         async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
-             """Asynchronous wrapper for traced function.
-
-             Returns:
-                 The result of the wrapped function.
-             """
-             observe_params = _prepare_and_get_observe_params(kwargs)
-             observed_func = _observe(**observe_params)(f)
-             return await observed_func(*args, **kwargs)
-
-         wrapper = async_wrapper if is_coroutine else sync_wrapper
-
-         # Mark function as traced for detection by pipeline decorators
-         wrapper.__is_traced__ = True  # type: ignore[attr-defined]
-
-         # Preserve the original function signature
-         try:
-             wrapper.__signature__ = sig  # type: ignore[attr-defined]
-         except (AttributeError, ValueError):
-             pass
-
-         return cast(Callable[P, R], wrapper)
-
-     if func:
-         return decorator(func)  # Called as @trace
-     else:
-         return decorator  # Called as @trace(...)
-
-
- __all__ = ["trace", "TraceLevel", "TraceInfo"]
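
For orientation, the sketch below reconstructs how this removed module was meant to be used, based only on the docstrings visible in the diff above. It is illustrative, not the library's current API: `analyze` is a hypothetical downstream coroutine, and the imports target the 0.1.12 module that no longer exists in 0.4.1 (the added `ai_pipeline_core/observability/tracing.py` in the file list presumably supersedes it).

    # Illustrative sketch of the pre-0.4.x API removed in this diff.
    import asyncio

    from ai_pipeline_core.tracing import TraceInfo, trace  # removed in 0.4.1


    async def analyze(doc, trace_info: TraceInfo | None = None):
        # Hypothetical stand-in for real downstream pipeline work.
        return {"doc": doc, "ok": True}


    @trace  # defaults: level="always", span named after the function
    async def process_document(doc, trace_info: TraceInfo):
        # Because the signature declares trace_info, the decorator creates or
        # propagates a TraceInfo instance and forwards it to nested calls.
        return await analyze(doc, trace_info=trace_info)


    trace_info = TraceInfo(
        session_id="sess_123",
        metadata={"flow": "document_analysis"},
        tags=["production"],
    )
    # Only non-empty fields (plus LMNR_SESSION_ID / LMNR_USER_ID env fallbacks)
    # are forwarded to lmnr.observe():
    print(trace_info.get_observe_kwargs())

    asyncio.run(process_document("report.md", trace_info=trace_info))

The pattern mirrors the `Example:` sections preserved in the deleted docstrings: bare `@trace` with no arguments for the common case, and a `TraceInfo` created at the flow entry point and passed down the call chain.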