ai-pipeline-core 0.1.10__py3-none-any.whl → 0.1.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. ai_pipeline_core/__init__.py +84 -4
  2. ai_pipeline_core/documents/__init__.py +9 -0
  3. ai_pipeline_core/documents/document.py +1034 -151
  4. ai_pipeline_core/documents/document_list.py +147 -38
  5. ai_pipeline_core/documents/flow_document.py +112 -11
  6. ai_pipeline_core/documents/mime_type.py +173 -15
  7. ai_pipeline_core/documents/task_document.py +117 -12
  8. ai_pipeline_core/documents/temporary_document.py +84 -5
  9. ai_pipeline_core/documents/utils.py +41 -9
  10. ai_pipeline_core/exceptions.py +47 -11
  11. ai_pipeline_core/flow/__init__.py +2 -0
  12. ai_pipeline_core/flow/config.py +232 -23
  13. ai_pipeline_core/flow/options.py +50 -1
  14. ai_pipeline_core/llm/__init__.py +6 -0
  15. ai_pipeline_core/llm/ai_messages.py +125 -27
  16. ai_pipeline_core/llm/client.py +278 -26
  17. ai_pipeline_core/llm/model_options.py +130 -1
  18. ai_pipeline_core/llm/model_response.py +239 -35
  19. ai_pipeline_core/llm/model_types.py +67 -0
  20. ai_pipeline_core/logging/__init__.py +13 -0
  21. ai_pipeline_core/logging/logging_config.py +72 -20
  22. ai_pipeline_core/logging/logging_mixin.py +38 -32
  23. ai_pipeline_core/pipeline.py +308 -60
  24. ai_pipeline_core/prefect.py +48 -1
  25. ai_pipeline_core/prompt_manager.py +209 -24
  26. ai_pipeline_core/settings.py +108 -4
  27. ai_pipeline_core/simple_runner/__init__.py +5 -0
  28. ai_pipeline_core/simple_runner/cli.py +96 -11
  29. ai_pipeline_core/simple_runner/simple_runner.py +237 -4
  30. ai_pipeline_core/tracing.py +232 -30
  31. ai_pipeline_core-0.1.11.dist-info/METADATA +450 -0
  32. ai_pipeline_core-0.1.11.dist-info/RECORD +36 -0
  33. ai_pipeline_core-0.1.10.dist-info/METADATA +0 -538
  34. ai_pipeline_core-0.1.10.dist-info/RECORD +0 -36
  35. {ai_pipeline_core-0.1.10.dist-info → ai_pipeline_core-0.1.11.dist-info}/WHEEL +0 -0
  36. {ai_pipeline_core-0.1.10.dist-info → ai_pipeline_core-0.1.11.dist-info}/licenses/LICENSE +0 -0
ai_pipeline_core/__init__.py

@@ -1,4 +1,84 @@
- """Pipeline Core - Shared infrastructure for AI pipelines."""
+ """AI Pipeline Core - Production-ready framework for building AI pipelines with LLMs.
+
+ @public
+
+ AI Pipeline Core is a high-performance async framework for building type-safe AI pipelines.
+ It combines document processing, LLM integration, and workflow orchestration into a unified
+ system designed for production use.
+
+ The framework enforces best practices through strong typing (Pydantic), automatic retries,
+ cost tracking, and distributed tracing. All I/O operations are async for maximum throughput.
+
+ **CRITICAL IMPORT RULE**:
+ Always import from the top-level package:
+ **CORRECT**:
+ from ai_pipeline_core import llm, pipeline_flow, FlowDocument, DocumentList
+
+ **WRONG** - Never import from submodules:
+ from ai_pipeline_core.llm import generate # NO!
+ from ai_pipeline_core.documents import FlowDocument # NO!
+
+ FRAMEWORK RULES (90% Use Cases):
+ 1. Decorators: Use @trace, @pipeline_task, @pipeline_flow WITHOUT parameters
+ 2. Logging: Use get_pipeline_logger(__name__) - NEVER print() or logging module
+ 3. LLM calls: Use AIMessages or str. Wrap Documents in AIMessages; do not call .text yourself
+ 4. Options: Omit ModelOptions unless specifically needed (defaults are optimal)
+ 5. Documents: Create with just name and content - skip description
+ 6. FlowConfig: OUTPUT_DOCUMENT_TYPE must differ from all INPUT_DOCUMENT_TYPES
+ 7. Initialization: PromptManager and logger at module scope, not in functions
+ 8. DocumentList: Use default constructor - no validation flags needed
+ 9. setup_logging(): Only in application main(), never at import time
+
+ Messages parameter type: AIMessages or str. Do not pass Document or DocumentList directly.
+
+ Core Capabilities:
+ - **Document Processing**: Type-safe handling of text, JSON, YAML, PDFs, and images
+ - **LLM Integration**: Unified interface to any model via LiteLLM with caching
+ - **Structured Output**: Type-safe generation with Pydantic model validation
+ - **Workflow Orchestration**: Prefect-based flows and tasks with retries
+ - **Observability**: Distributed tracing via Laminar (LMNR) for debugging
+ - **Local Development**: Simple runner for testing without infrastructure
+
+ Quick Start:
+ >>> from ai_pipeline_core import (
+ ... pipeline_flow, FlowDocument, DocumentList, FlowOptions, llm, AIMessages
+ ... )
+ >>>
+ >>> class OutputDoc(FlowDocument):
+ ... '''Analysis result document.'''
+ >>>
+ >>> @pipeline_flow
+ >>> async def analyze_flow(
+ ... project_name: str,
+ ... documents: DocumentList,
+ ... flow_options: FlowOptions
+ ... ) -> DocumentList:
+ ... # Messages accept AIMessages or str. Wrap documents: AIMessages([doc])
+ ... response = await llm.generate(
+ ... model="gpt-5",
+ ... messages=AIMessages([documents[0]])
+ ... )
+ ... result = OutputDoc.create(
+ ... name="analysis.txt",
+ ... content=response.content
+ ... )
+ ... return DocumentList([result])
+
+ Environment Variables (when using LiteLLM proxy):
+ - OPENAI_BASE_URL: LiteLLM proxy endpoint (e.g., http://localhost:4000)
+ - OPENAI_API_KEY: API key for LiteLLM proxy
+
+ Note: LiteLLM proxy uses OpenAI-compatible API format, hence the OPENAI_*
+ variable names are correct regardless of which LLM provider you're using.
+
+ Optional Environment Variables:
+ - PREFECT_API_URL: Prefect server for orchestration
+ - PREFECT_API_KEY: Prefect API authentication key
+ - LMNR_PROJECT_API_KEY: Laminar (LMNR) API key for tracing
+ - LMNR_DEBUG: Set to "true" to enable debug-level traces
+ - LMNR_SESSION_ID: Default session ID for traces
+ - LMNR_USER_ID: Default user ID for traces
+ """

  from . import llm
  from .documents import (
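The rules listed in the new docstring are easiest to see applied together. Below is a minimal sketch that follows them; the `SummaryDoc` class, the task body, and the assumption that `@pipeline_task` can wrap a plain async function are illustrative, not taken from the package source shown in this diff.

```python
# Illustrative sketch of the docstring's framework rules; SummaryDoc and the
# task body are hypothetical, and @pipeline_task wrapping a plain async
# function is an assumption based on the docstring.
from ai_pipeline_core import (
    AIMessages,
    DocumentList,
    FlowDocument,
    get_pipeline_logger,
    llm,
    pipeline_task,
)

logger = get_pipeline_logger(__name__)  # rules 2 and 7: module-scope logger, never print()


class SummaryDoc(FlowDocument):
    """Summary produced by the task (hypothetical)."""


@pipeline_task  # rule 1: decorator used without parameters
async def summarize(documents: DocumentList) -> DocumentList:
    # rule 3: wrap the Document in AIMessages rather than passing it (or .text) directly
    response = await llm.generate(
        model="gpt-5",
        messages=AIMessages([documents[0]]),
    )
    # rule 5: documents are created with just a name and content
    return DocumentList([SummaryDoc.create(name="summary.md", content=response.content)])
```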
@@ -30,14 +110,14 @@ from .logging import get_pipeline_logger as get_logger
  from .pipeline import pipeline_flow, pipeline_task
  from .prefect import disable_run_logger, prefect_test_harness
  from .prompt_manager import PromptManager
- from .settings import settings
+ from .settings import Settings
  from .tracing import TraceInfo, TraceLevel, trace

- __version__ = "0.1.10"
+ __version__ = "0.1.11"

  __all__ = [
  # Config/Settings
- "settings",
+ "Settings",
  # Logging
  "get_logger",
  "get_pipeline_logger",
ai_pipeline_core/documents/__init__.py

@@ -1,3 +1,12 @@
+ """Document abstraction system for AI pipeline flows.
+
+ @public
+
+ The documents package provides immutable, type-safe data structures for handling
+ various content types in AI pipelines, including text, images, PDFs, and other
+ binary data with automatic MIME type detection.
+ """
+
  from .document import Document
  from .document_list import DocumentList
  from .flow_document import FlowDocument
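The new package docstring highlights automatic MIME type detection across text, images, PDFs, and binary data. A small sketch of what that implies for document creation, reusing the `create()` classmethod shown in the top-level Quick Start; the `ReportDoc` class and the bytes-content example are assumptions, since this hunk only shows the docstring and imports.

```python
from ai_pipeline_core import FlowDocument


class ReportDoc(FlowDocument):
    """Hypothetical document class for illustration."""


# Per the docstring, MIME handling is automatic; the exact detection API is not
# shown in this diff, so only creation is sketched here.
text_doc = ReportDoc.create(name="report.md", content="# Quarterly report")
pdf_doc = ReportDoc.create(name="report.pdf", content=b"%PDF-1.7 ...")  # bytes content is an assumption
```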