ai-pipeline-core 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ai_pipeline_core/tracing.py

@@ -11,7 +11,7 @@ from __future__ import annotations
  import inspect
  import os
  from functools import wraps
- from typing import Any, Callable, ParamSpec, TypeVar, cast, overload
+ from typing import Any, Callable, Literal, ParamSpec, TypeVar, cast, overload

  from lmnr import Instruments, Laminar, observe
  from pydantic import BaseModel
@@ -24,6 +24,8 @@ from ai_pipeline_core.settings import settings
  P = ParamSpec("P")
  R = TypeVar("R")

+ TraceLevel = Literal["always", "debug", "off"]
+

  # ---------------------------------------------------------------------------
  # ``TraceInfo`` – metadata container
@@ -67,22 +69,28 @@ def _initialise_laminar() -> None:
      if settings.lmnr_project_api_key:
          Laminar.initialize(
              project_api_key=settings.lmnr_project_api_key,
-             disabled_instruments=[Instruments.OPENAI],
+             disabled_instruments=[Instruments.OPENAI] if Instruments.OPENAI else [],
          )


- # Overload for calls like @trace(name="...", test=True)
+ # Overload for calls like @trace(name="...", level="debug")
  @overload
  def trace(
      *,
+     level: TraceLevel = "always",
      name: str | None = None,
-     test: bool = False,
-     debug_only: bool = False,
+     session_id: str | None = None,
+     user_id: str | None = None,
+     metadata: dict[str, Any] | None = None,
+     tags: list[str] | None = None,
+     span_type: str | None = None,
      ignore_input: bool = False,
      ignore_output: bool = False,
      ignore_inputs: list[str] | None = None,
      input_formatter: Callable[..., str] | None = None,
      output_formatter: Callable[..., str] | None = None,
+     ignore_exceptions: bool = False,
+     preserve_global_context: bool = True,
  ) -> Callable[[Callable[P, R]], Callable[P, R]]: ...


@@ -95,56 +103,76 @@ def trace(func: Callable[P, R]) -> Callable[P, R]: ...
  def trace(
      func: Callable[P, R] | None = None,
      *,
+     level: TraceLevel = "always",
      name: str | None = None,
-     test: bool = False,
-     debug_only: bool = False,
+     session_id: str | None = None,
+     user_id: str | None = None,
+     metadata: dict[str, Any] | None = None,
+     tags: list[str] | None = None,
+     span_type: str | None = None,
      ignore_input: bool = False,
      ignore_output: bool = False,
      ignore_inputs: list[str] | None = None,
      input_formatter: Callable[..., str] | None = None,
      output_formatter: Callable[..., str] | None = None,
+     ignore_exceptions: bool = False,
      preserve_global_context: bool = True,
  ) -> Callable[[Callable[P, R]], Callable[P, R]] | Callable[P, R]:
      """Decorator that wires Laminar tracing and observation into a function.

      Args:
          func: The function to be traced (when used as @trace)
+         level: Trace level control:
+             - "always": Always trace (default)
+             - "debug": Only trace when LMNR_DEBUG environment variable is NOT set to "true"
+             - "off": Never trace
          name: Custom name for the observation (defaults to function name)
-         test: Mark this trace as a test run
-         debug_only: Only trace when LMNR_DEBUG=true environment variable is set
+         metadata: Additional metadata for the trace
+         tags: Additional tags for the trace
+         span_type: Type of span for the trace
          ignore_input: Ignore all inputs in the trace
          ignore_output: Ignore the output in the trace
          ignore_inputs: List of specific input parameter names to ignore
          input_formatter: Custom formatter for inputs (takes any arguments, returns string)
          output_formatter: Custom formatter for outputs (takes any arguments, returns string)
+         ignore_exceptions: Whether to ignore exceptions in tracing
+         preserve_global_context: Whether to preserve global context

      Returns:
          The decorated function with Laminar tracing enabled
      """

+     if level == "off":
+         if func:
+             return func
+         return lambda f: f
+
      def decorator(f: Callable[P, R]) -> Callable[P, R]:
+         # Handle 'debug' level logic - only trace when LMNR_DEBUG is NOT "true"
+         if level == "debug" and os.getenv("LMNR_DEBUG", "").lower() == "true":
+             return f
+
          # --- Pre-computation (done once when the function is decorated) ---
          _initialise_laminar()
          sig = inspect.signature(f)
          is_coroutine = inspect.iscoroutinefunction(f)
-         decorator_test_flag = test
          observe_name = name or f.__name__
          _observe = observe

          # Store the new parameters
+         _session_id = session_id
+         _user_id = user_id
+         _metadata = metadata
+         _tags = tags or []
+         _span_type = span_type
          _ignore_input = ignore_input
          _ignore_output = ignore_output
          _ignore_inputs = ignore_inputs
          _input_formatter = input_formatter
          _output_formatter = output_formatter
+         _ignore_exceptions = ignore_exceptions
          _preserve_global_context = preserve_global_context

-         # --- Check debug_only flag and environment variable ---
-         if debug_only and os.getenv("LMNR_DEBUG", "").lower() != "true":
-             # If debug_only is True but LMNR_DEBUG is not set to "true",
-             # return the original function without tracing
-             return f
-
          # --- Helper function for runtime logic ---
          def _prepare_and_get_observe_params(runtime_kwargs: dict[str, Any]) -> dict[str, Any]:
              """
@@ -157,13 +185,23 @@ def trace(
              if "trace_info" in sig.parameters:
                  runtime_kwargs["trace_info"] = trace_info

-             runtime_test_flag = bool(runtime_kwargs.get("test", False))
-             if (decorator_test_flag or runtime_test_flag) and "test" not in trace_info.tags:
-                 trace_info.tags.append("test")
-
              observe_params = trace_info.get_observe_kwargs()
              observe_params["name"] = observe_name

+             # Override with decorator-level session_id and user_id if provided
+             if _session_id:
+                 observe_params["session_id"] = _session_id
+             if _user_id:
+                 observe_params["user_id"] = _user_id
+
+             # Merge decorator-level metadata and tags
+             if _metadata:
+                 observe_params["metadata"] = {**observe_params.get("metadata", {}), **_metadata}
+             if _tags:
+                 observe_params["tags"] = observe_params.get("tags", []) + _tags
+             if _span_type:
+                 observe_params["span_type"] = _span_type
+
              # Add the new Laminar parameters
              if _ignore_input:
                  observe_params["ignore_input"] = _ignore_input
@@ -175,6 +213,8 @@ def trace(
                  observe_params["input_formatter"] = _input_formatter
              if _output_formatter is not None:
                  observe_params["output_formatter"] = _output_formatter
+             if _ignore_exceptions:
+                 observe_params["ignore_exceptions"] = _ignore_exceptions
              if _preserve_global_context:
                  observe_params["preserve_global_context"] = _preserve_global_context

@@ -207,3 +247,6 @@ def trace(
          return decorator(func)  # Called as @trace
      else:
          return decorator  # Called as @trace(...)
+
+
+ __all__ = ["trace", "TraceLevel", "TraceInfo"]
ai_pipeline_core-0.1.5.dist-info/METADATA → ai_pipeline_core-0.1.7.dist-info/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ai-pipeline-core
- Version: 0.1.5
+ Version: 0.1.7
  Summary: Core utilities for AI-powered processing pipelines using prefect
  Project-URL: Homepage, https://github.com/bbarwik/ai-pipeline-core
  Project-URL: Repository, https://github.com/bbarwik/ai-pipeline-core
@@ -20,7 +20,7 @@ Classifier: Typing :: Typed
  Requires-Python: >=3.12
  Requires-Dist: httpx>=0.28.1
  Requires-Dist: jinja2>=3.1.6
- Requires-Dist: lmnr>=0.7.5
+ Requires-Dist: lmnr>=0.7.6
  Requires-Dist: openai>=1.99.9
  Requires-Dist: prefect>=3.4.13
  Requires-Dist: pydantic-settings>=2.10.1
@@ -151,40 +151,76 @@ async def process_document(doc: Document):
      return response.parsed
  ```

- ### Prefect Flow Integration
+ ### Enhanced Pipeline Decorators (New in v0.1.7)
  ```python
- from prefect import flow, task
- from ai_pipeline_core.documents import Document, DocumentList, FlowDocument
- from ai_pipeline_core.flow import FlowConfig
- from ai_pipeline_core.tracing import trace
+ from ai_pipeline_core import pipeline_flow, pipeline_task
+ from ai_pipeline_core.flow import FlowOptions
+ from ai_pipeline_core.documents import DocumentList, FlowDocument

- class OutputDocument(FlowDocument):
-     """Custom output document type"""
-     def get_type(self) -> str:
-         return "output"
+ class CustomFlowOptions(FlowOptions):
+     """Extend base options with your custom fields"""
+     batch_size: int = 100
+     temperature: float = 0.7

- class MyFlowConfig(FlowConfig):
-     INPUT_DOCUMENT_TYPES = [InputDocument]
-     OUTPUT_DOCUMENT_TYPE = OutputDocument
-
- @task
- @trace
+ @pipeline_task(trace_level="always", retries=3)
  async def process_task(doc: Document) -> Document:
-     # Task-level processing with automatic tracing
+     # Task with automatic tracing and retries
      result = await process_document(doc)
-     # Convert result to JSON string for document content
-     import json
-     return OutputDocument(name="result", content=json.dumps(result.model_dump()).encode())
+     return OutputDocument(name="result", content=result.encode())
+
+ @pipeline_flow(trace_level="always")
+ async def my_pipeline(
+     project_name: str,
+     documents: DocumentList,
+     flow_options: CustomFlowOptions  # Type-safe custom options
+ ) -> DocumentList:
+     # Pipeline flow with enforced signature and tracing
+     results = []
+     for doc in documents:
+         result = await process_task(doc)
+         results.append(result)
+     return DocumentList(results)
+ ```

- @flow
- async def my_pipeline(documents: DocumentList):
-     config = MyFlowConfig()
-     input_docs = config.get_input_documents(documents)
+ ### Simple Runner Utility (New in v0.1.7)
+ ```python
+ from ai_pipeline_core.simple_runner import run_cli, run_pipeline
+ from ai_pipeline_core.flow import FlowOptions
+
+ # CLI-based pipeline execution
+ if __name__ == "__main__":
+     run_cli(
+         flows=[my_pipeline],
+         flow_configs=[MyFlowConfig],
+         options_cls=CustomFlowOptions
+     )

-     results = await process_task.map(input_docs)
+ # Or programmatic execution
+ async def main():
+     result = await run_pipeline(
+         project_name="my-project",
+         output_dir=Path("./output"),
+         flow=my_pipeline,
+         flow_config=MyFlowConfig,
+         flow_options=CustomFlowOptions(batch_size=50)
+     )
+ ```

-     config.validate_output_documents(results)
-     return results
+ ### Clean Prefect Decorators (New in v0.1.7)
+ ```python
+ # Import clean Prefect decorators without tracing
+ from ai_pipeline_core.prefect import flow, task
+
+ # Or use pipeline decorators with tracing
+ from ai_pipeline_core import pipeline_flow, pipeline_task
+
+ @task  # Clean Prefect task
+ def compute(x: int) -> int:
+     return x * 2
+
+ @pipeline_task(trace_level="always")  # With tracing
+ def compute_traced(x: int) -> int:
+     return x * 2
  ```

  ## Core Modules
@@ -291,8 +327,14 @@ ai_pipeline_core/
  │   ├── client.py           # Async client implementation
  │   └── model_options.py    # Configuration models
  ├── flow/                   # Prefect flow utilities
- │   └── config.py           # Type-safe flow configuration
+ │   ├── config.py           # Type-safe flow configuration
+ │   └── options.py          # FlowOptions base class (v0.1.7)
+ ├── simple_runner/          # Pipeline execution utilities (v0.1.7)
+ │   ├── cli.py              # CLI interface
+ │   └── simple_runner.py    # Core runner logic
  ├── logging/                # Structured logging
+ ├── pipeline.py             # Enhanced decorators (v0.1.7)
+ ├── prefect.py              # Clean Prefect exports (v0.1.7)
  ├── tracing.py              # Observability decorators
  └── settings.py             # Centralized configuration
  ```
@@ -469,9 +511,29 @@ Built with:
  - [LiteLLM](https://litellm.ai/) - LLM proxy
  - [Pydantic](https://pydantic-docs.helpmanual.io/) - Data validation

+ ## What's New in v0.1.7
+
+ ### Major Additions
+ - **Enhanced Pipeline Decorators**: New `pipeline_flow` and `pipeline_task` decorators combining Prefect functionality with automatic LMNR tracing
+ - **FlowOptions Base Class**: Extensible configuration system for flows with type-safe inheritance
+ - **Simple Runner Module**: CLI and programmatic utilities for easy pipeline execution
+ - **Clean Prefect Exports**: Separate imports for Prefect decorators with and without tracing
+ - **Expanded Exports**: All major components now accessible from top-level package import
+
+ ### API Improvements
+ - Better type inference for document flows with custom options
+ - Support for custom FlowOptions inheritance in pipeline flows
+ - Improved error messages for invalid flow signatures
+ - Enhanced document utility functions (`canonical_name_key`, `sanitize_url`)
+
+ ### Developer Experience
+ - Simplified imports - most components available from `ai_pipeline_core` directly
+ - Better separation of concerns between clean Prefect and traced pipeline decorators
+ - More intuitive flow configuration with `FlowOptions` inheritance
+
  ## Stability Notice

- **Current Version**: 0.1.5
+ **Current Version**: 0.1.7
  **Status**: Internal Preview
  **API Stability**: Unstable - Breaking changes expected
  **Recommended Use**: Learning and reference only
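To tie the "What's New" items together, here is one hypothetical entry-point assembled from the README snippets in this release. It assumes `FlowConfig` is still exported from `ai_pipeline_core.flow` and still uses the `INPUT_DOCUMENT_TYPES` / `OUTPUT_DOCUMENT_TYPE` attributes shown in the 0.1.5 README (plausible, since `flow/config.py` carries the same hash in both wheels); the class bodies and names are placeholders rather than the package's documented API:

```python
from ai_pipeline_core import pipeline_flow
from ai_pipeline_core.documents import DocumentList, FlowDocument
from ai_pipeline_core.flow import FlowConfig, FlowOptions
from ai_pipeline_core.simple_runner import run_cli


class ReportDocument(FlowDocument):
    """Placeholder output document type."""


class ReportFlowConfig(FlowConfig):
    # Attribute names taken from the 0.1.5 README example
    INPUT_DOCUMENT_TYPES = [ReportDocument]
    OUTPUT_DOCUMENT_TYPE = ReportDocument


class ReportOptions(FlowOptions):
    # Custom fields layered on the v0.1.7 FlowOptions base class
    batch_size: int = 100


@pipeline_flow(trace_level="always")
async def report_flow(
    project_name: str,
    documents: DocumentList,
    flow_options: ReportOptions,
) -> DocumentList:
    # Enforced (project_name, documents, flow_options) -> DocumentList signature
    return documents


if __name__ == "__main__":
    run_cli(
        flows=[report_flow],
        flow_configs=[ReportFlowConfig],
        options_cls=ReportOptions,
    )
```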
ai_pipeline_core-0.1.5.dist-info/RECORD → ai_pipeline_core-0.1.7.dist-info/RECORD

@@ -1,21 +1,24 @@
- ai_pipeline_core/__init__.py,sha256=xjZh6D4fkepTm3LK42qaTpNHgN2meK4uthrW9pjwLfE,779
+ ai_pipeline_core/__init__.py,sha256=INcTtHr2TFY8bR0eCg7RwvIRYY6px8knCgjyIvSSKP4,1602
  ai_pipeline_core/exceptions.py,sha256=_vW0Hbw2LGb5tcVvH0YzTKMff7QOPfCRr3w-w_zPyCE,968
+ ai_pipeline_core/pipeline.py,sha256=GOrPC53j756Xhpg_CShnkAKxSdkC16XHEoPeIhkjLIA,16569
+ ai_pipeline_core/prefect.py,sha256=VHYkkRcUmSpdwyWosOOxuExVCncIQgT6MypqGdjcYnM,241
  ai_pipeline_core/prompt_manager.py,sha256=XmNUdMIC0WrE9fF0LIcfozAKOGrlYwj8AfXvCndIH-o,4693
  ai_pipeline_core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ai_pipeline_core/settings.py,sha256=Zl2BPa6IHzh-B5V7cg5mtySr1dhWZQYYKxXz3BwrHlQ,615
- ai_pipeline_core/tracing.py,sha256=_bijptKWXh7V_xENFQGF11-B70rOGwV6g0qdBoF-VCw,7890
- ai_pipeline_core/documents/__init__.py,sha256=rEnKj-sSlZ9WnFlZAmSGVi1P8vnsHmU9O9_YwtP40ms,242
- ai_pipeline_core/documents/document.py,sha256=cKa9v0GpDYh48Aqj0u_tCwh4_uCcn5YsKzC537f0v0I,10089
+ ai_pipeline_core/tracing.py,sha256=T-3fTyA37TejXxotkVzTNqL2a5nOfZ0bcHg9TClLvmg,9471
+ ai_pipeline_core/documents/__init__.py,sha256=TLW8eOEmthfDHOTssXjyBlqhgrZe9ZIyxlkd0LBJ3_s,340
+ ai_pipeline_core/documents/document.py,sha256=e3IBr0TThucBAaOHvdqv0X--iCcBrqh2jzFTyaOp7O0,12418
  ai_pipeline_core/documents/document_list.py,sha256=HOG_uZDazA9CJB7Lr_tNcDFzb5Ff9RUt0ELWQK_eYNM,4940
  ai_pipeline_core/documents/flow_document.py,sha256=qsV-2JYOMhkvAj7lW54ZNH_4QUclld9h06CoU59tWww,815
- ai_pipeline_core/documents/mime_type.py,sha256=tMWGH9PVmHe6a_IzdaJUqIHf4qnwQOwOCBhsgW2AyTE,2244
+ ai_pipeline_core/documents/mime_type.py,sha256=sBhNRoBJQ35JoHWhJzBGpp00WFDfMdEX0JZKKkR7QH0,3371
  ai_pipeline_core/documents/task_document.py,sha256=WjHqtl1d60XFBBqewNRdz1OqBErGI0jRx15oQYCTHo8,907
  ai_pipeline_core/documents/utils.py,sha256=BdE4taSl1vrBhxnFbOP5nDA7lXIcvY__AMRTHoaNb5M,2764
- ai_pipeline_core/flow/__init__.py,sha256=_Sji2yY1ICkvVX6QiiGWKzqIXtg9UAiuvhjHSK_gdO8,57
+ ai_pipeline_core/flow/__init__.py,sha256=54DRfZnjXQVrimgtKEVEm5u5ErImx31cjK2PpBvHjU4,116
  ai_pipeline_core/flow/config.py,sha256=crbe_OvNE6qulIKv1D8yKoe8xrEsIlvICyxjhqHHBxQ,2266
+ ai_pipeline_core/flow/options.py,sha256=WygJEwjqOa14l23a_Hp36hJX-WgxHMq-YzSieC31Z4Y,701
  ai_pipeline_core/llm/__init__.py,sha256=3XVK-bSJdOe0s6KmmO7PDbsXHfjlcZEG1MVBmaz3EeU,442
  ai_pipeline_core/llm/ai_messages.py,sha256=DwJJe05BtYdnMZeHbBbyEbDCqrW63SRvprxptoJUCn4,4586
- ai_pipeline_core/llm/client.py,sha256=IOcyjwyAKQWlqnwC5p2Hl4FeRCzOJAHC5Yqr_oCBQ8s,7703
+ ai_pipeline_core/llm/client.py,sha256=VMs1nQKCfoxbcvE2mypn5QF19u90Ua87-5IiZxWOj98,7784
  ai_pipeline_core/llm/model_options.py,sha256=TvAAlDFZN-TP9-J-RZBuU_dpSocskf6paaQMw1XY9UE,1321
  ai_pipeline_core/llm/model_response.py,sha256=fIWueaemgo0cMruvToMZyKsRPzKwL6IlvUJN7DLG710,5558
  ai_pipeline_core/llm/model_types.py,sha256=rIwY6voT8-xdfsKPDC0Gkdl2iTp9Q2LuvWGSRU9Mp3k,342
@@ -23,7 +26,10 @@ ai_pipeline_core/logging/__init__.py,sha256=DOO6ckgnMVXl29Sy7q6jhO-iW96h54pCHQDz
  ai_pipeline_core/logging/logging.yml,sha256=YTW48keO_K5bkkb-KXGM7ZuaYKiquLsjsURei8Ql0V4,1353
  ai_pipeline_core/logging/logging_config.py,sha256=6MBz9nnVNvqiLDoyy9-R3sWkn6927Re5hdz4hwTptpI,4903
  ai_pipeline_core/logging/logging_mixin.py,sha256=RDaR2ju2-vKTJRzXGa0DquGPT8_UxahWjvKJnaD0IV8,7810
- ai_pipeline_core-0.1.5.dist-info/METADATA,sha256=U1OIPjGwAGsuyJ3QnhUtJQWMzj-OqkXDuyH6cW8Dq70,15869
- ai_pipeline_core-0.1.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- ai_pipeline_core-0.1.5.dist-info/licenses/LICENSE,sha256=kKj8mfbdWwkyG3U6n7ztB3bAZlEwShTkAsvaY657i3I,1074
- ai_pipeline_core-0.1.5.dist-info/RECORD,,
+ ai_pipeline_core/simple_runner/__init__.py,sha256=OPbTCZvqpnYdwi1Knnkj-MpmD0Nvtg5O7UwIdAKz_AY,384
+ ai_pipeline_core/simple_runner/cli.py,sha256=TjiSh7lr1VnTbO1jA2DuVzC2AA6V_5sA5Z8XSuldQmc,3054
+ ai_pipeline_core/simple_runner/simple_runner.py,sha256=70BHT1iz-G368H2t4tsWAVni0jw2VkWVdnKICuVtLPw,5009
+ ai_pipeline_core-0.1.7.dist-info/METADATA,sha256=2Pi815TCTBlKnTp2duTaUJiKaextafqZ5yfPZdD_--o,18361
+ ai_pipeline_core-0.1.7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ ai_pipeline_core-0.1.7.dist-info/licenses/LICENSE,sha256=kKj8mfbdWwkyG3U6n7ztB3bAZlEwShTkAsvaY657i3I,1074
+ ai_pipeline_core-0.1.7.dist-info/RECORD,,