ai-pipeline-core 0.2.6__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. ai_pipeline_core/__init__.py +78 -125
  2. ai_pipeline_core/deployment/__init__.py +34 -0
  3. ai_pipeline_core/deployment/base.py +861 -0
  4. ai_pipeline_core/deployment/contract.py +80 -0
  5. ai_pipeline_core/deployment/deploy.py +561 -0
  6. ai_pipeline_core/deployment/helpers.py +97 -0
  7. ai_pipeline_core/deployment/progress.py +126 -0
  8. ai_pipeline_core/deployment/remote.py +116 -0
  9. ai_pipeline_core/docs_generator/__init__.py +54 -0
  10. ai_pipeline_core/docs_generator/__main__.py +5 -0
  11. ai_pipeline_core/docs_generator/cli.py +196 -0
  12. ai_pipeline_core/docs_generator/extractor.py +324 -0
  13. ai_pipeline_core/docs_generator/guide_builder.py +644 -0
  14. ai_pipeline_core/docs_generator/trimmer.py +35 -0
  15. ai_pipeline_core/docs_generator/validator.py +114 -0
  16. ai_pipeline_core/document_store/__init__.py +13 -0
  17. ai_pipeline_core/document_store/_summary.py +9 -0
  18. ai_pipeline_core/document_store/_summary_worker.py +170 -0
  19. ai_pipeline_core/document_store/clickhouse.py +492 -0
  20. ai_pipeline_core/document_store/factory.py +38 -0
  21. ai_pipeline_core/document_store/local.py +312 -0
  22. ai_pipeline_core/document_store/memory.py +85 -0
  23. ai_pipeline_core/document_store/protocol.py +68 -0
  24. ai_pipeline_core/documents/__init__.py +12 -14
  25. ai_pipeline_core/documents/_context_vars.py +85 -0
  26. ai_pipeline_core/documents/_hashing.py +52 -0
  27. ai_pipeline_core/documents/attachment.py +85 -0
  28. ai_pipeline_core/documents/context.py +128 -0
  29. ai_pipeline_core/documents/document.py +318 -1434
  30. ai_pipeline_core/documents/mime_type.py +37 -82
  31. ai_pipeline_core/documents/utils.py +4 -12
  32. ai_pipeline_core/exceptions.py +10 -62
  33. ai_pipeline_core/images/__init__.py +309 -0
  34. ai_pipeline_core/images/_processing.py +151 -0
  35. ai_pipeline_core/llm/__init__.py +6 -4
  36. ai_pipeline_core/llm/ai_messages.py +130 -81
  37. ai_pipeline_core/llm/client.py +327 -193
  38. ai_pipeline_core/llm/model_options.py +14 -86
  39. ai_pipeline_core/llm/model_response.py +60 -103
  40. ai_pipeline_core/llm/model_types.py +16 -34
  41. ai_pipeline_core/logging/__init__.py +2 -7
  42. ai_pipeline_core/logging/logging.yml +1 -1
  43. ai_pipeline_core/logging/logging_config.py +27 -37
  44. ai_pipeline_core/logging/logging_mixin.py +15 -41
  45. ai_pipeline_core/observability/__init__.py +32 -0
  46. ai_pipeline_core/observability/_debug/__init__.py +30 -0
  47. ai_pipeline_core/observability/_debug/_auto_summary.py +94 -0
  48. ai_pipeline_core/observability/_debug/_config.py +95 -0
  49. ai_pipeline_core/observability/_debug/_content.py +764 -0
  50. ai_pipeline_core/observability/_debug/_processor.py +98 -0
  51. ai_pipeline_core/observability/_debug/_summary.py +312 -0
  52. ai_pipeline_core/observability/_debug/_types.py +75 -0
  53. ai_pipeline_core/observability/_debug/_writer.py +843 -0
  54. ai_pipeline_core/observability/_document_tracking.py +146 -0
  55. ai_pipeline_core/observability/_initialization.py +194 -0
  56. ai_pipeline_core/observability/_logging_bridge.py +57 -0
  57. ai_pipeline_core/observability/_summary.py +81 -0
  58. ai_pipeline_core/observability/_tracking/__init__.py +6 -0
  59. ai_pipeline_core/observability/_tracking/_client.py +178 -0
  60. ai_pipeline_core/observability/_tracking/_internal.py +28 -0
  61. ai_pipeline_core/observability/_tracking/_models.py +138 -0
  62. ai_pipeline_core/observability/_tracking/_processor.py +158 -0
  63. ai_pipeline_core/observability/_tracking/_service.py +311 -0
  64. ai_pipeline_core/observability/_tracking/_writer.py +229 -0
  65. ai_pipeline_core/{tracing.py → observability/tracing.py} +139 -283
  66. ai_pipeline_core/pipeline/__init__.py +10 -0
  67. ai_pipeline_core/pipeline/decorators.py +915 -0
  68. ai_pipeline_core/pipeline/options.py +16 -0
  69. ai_pipeline_core/prompt_manager.py +16 -102
  70. ai_pipeline_core/settings.py +26 -31
  71. ai_pipeline_core/testing.py +9 -0
  72. ai_pipeline_core-0.4.1.dist-info/METADATA +807 -0
  73. ai_pipeline_core-0.4.1.dist-info/RECORD +76 -0
  74. {ai_pipeline_core-0.2.6.dist-info → ai_pipeline_core-0.4.1.dist-info}/WHEEL +1 -1
  75. ai_pipeline_core/documents/document_list.py +0 -420
  76. ai_pipeline_core/documents/flow_document.py +0 -112
  77. ai_pipeline_core/documents/task_document.py +0 -117
  78. ai_pipeline_core/documents/temporary_document.py +0 -74
  79. ai_pipeline_core/flow/__init__.py +0 -9
  80. ai_pipeline_core/flow/config.py +0 -483
  81. ai_pipeline_core/flow/options.py +0 -75
  82. ai_pipeline_core/pipeline.py +0 -718
  83. ai_pipeline_core/prefect.py +0 -63
  84. ai_pipeline_core/simple_runner/__init__.py +0 -14
  85. ai_pipeline_core/simple_runner/cli.py +0 -254
  86. ai_pipeline_core/simple_runner/simple_runner.py +0 -247
  87. ai_pipeline_core/storage/__init__.py +0 -8
  88. ai_pipeline_core/storage/storage.py +0 -628
  89. ai_pipeline_core/utils/__init__.py +0 -8
  90. ai_pipeline_core/utils/deploy.py +0 -373
  91. ai_pipeline_core/utils/remote_deployment.py +0 -269
  92. ai_pipeline_core-0.2.6.dist-info/METADATA +0 -500
  93. ai_pipeline_core-0.2.6.dist-info/RECORD +0 -41
  94. {ai_pipeline_core-0.2.6.dist-info → ai_pipeline_core-0.4.1.dist-info}/licenses/LICENSE +0 -0
@@ -1,500 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: ai-pipeline-core
3
- Version: 0.2.6
4
- Summary: Core utilities for AI-powered processing pipelines using prefect
5
- Project-URL: Homepage, https://github.com/bbarwik/ai-pipeline-core
6
- Project-URL: Repository, https://github.com/bbarwik/ai-pipeline-core
7
- Project-URL: Issues, https://github.com/bbarwik/ai-pipeline-core/issues
8
- Author-email: bbarwik <bbarwik@gmail.com>
9
- License: MIT
10
- License-File: LICENSE
11
- Classifier: Development Status :: 4 - Beta
12
- Classifier: Intended Audience :: Developers
13
- Classifier: License :: OSI Approved :: MIT License
14
- Classifier: Programming Language :: Python :: 3
15
- Classifier: Programming Language :: Python :: 3.12
16
- Classifier: Programming Language :: Python :: 3.13
17
- Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
18
- Classifier: Topic :: Software Development :: Libraries :: Python Modules
19
- Classifier: Typing :: Typed
20
- Requires-Python: >=3.12
21
- Requires-Dist: httpx>=0.28.1
22
- Requires-Dist: jinja2>=3.1.6
23
- Requires-Dist: lmnr>=0.7.18
24
- Requires-Dist: openai>=1.109.1
25
- Requires-Dist: prefect-gcp[cloud-storage]>=0.6.10
26
- Requires-Dist: prefect>=3.4.21
27
- Requires-Dist: pydantic-settings>=2.10.1
28
- Requires-Dist: pydantic>=2.11.9
29
- Requires-Dist: python-magic>=0.4.27
30
- Requires-Dist: ruamel-yaml>=0.18.14
31
- Requires-Dist: tiktoken>=0.12.0
32
- Provides-Extra: dev
33
- Requires-Dist: basedpyright>=1.31.2; extra == 'dev'
34
- Requires-Dist: bump2version>=1.0.1; extra == 'dev'
35
- Requires-Dist: interrogate>=1.5.0; extra == 'dev'
36
- Requires-Dist: pre-commit>=4.3.0; extra == 'dev'
37
- Requires-Dist: pydoc-markdown[jinja]>=4.8.0; extra == 'dev'
38
- Requires-Dist: pytest-asyncio>=1.1.0; extra == 'dev'
39
- Requires-Dist: pytest-cov>=5.0.0; extra == 'dev'
40
- Requires-Dist: pytest-mock>=3.14.0; extra == 'dev'
41
- Requires-Dist: pytest-xdist>=3.8.0; extra == 'dev'
42
- Requires-Dist: pytest>=8.4.1; extra == 'dev'
43
- Requires-Dist: ruff>=0.14.1; extra == 'dev'
44
- Description-Content-Type: text/markdown
45
-
46
- # AI Pipeline Core
47
-
48
- A high-performance async framework for building type-safe AI pipelines with LLMs, document processing, and workflow orchestration.
49
-
50
- [![Python Version](https://img.shields.io/badge/python-3.12%2B-blue)](https://www.python.org/downloads/)
51
- [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
52
- [![Code Style: Ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff)
53
- [![Type Checked: Basedpyright](https://img.shields.io/badge/type%20checked-basedpyright-blue)](https://github.com/DetachHead/basedpyright)
54
-
55
- ## Overview
56
-
57
- AI Pipeline Core is a production-ready framework that combines document processing, LLM integration, and workflow orchestration into a unified system. Built with strong typing (Pydantic), automatic retries, cost tracking, and distributed tracing, it enforces best practices while maintaining high performance through fully async operations.
58
-
59
- ### Key Features
60
-
61
- - **Document Processing**: Type-safe handling of text, JSON, YAML, PDFs, and images with automatic MIME type detection and provenance tracking
62
- - **LLM Integration**: Unified interface to any model via LiteLLM proxy with configurable context caching
63
- - **Structured Output**: Type-safe generation with Pydantic model validation
64
- - **Workflow Orchestration**: Prefect-based flows and tasks with automatic retries
65
- - **Observability**: Built-in distributed tracing via Laminar (LMNR) with cost tracking for debugging and monitoring
66
- - **Local Development**: Simple runner for testing pipelines without infrastructure
67
-
68
- ## Installation
69
-
70
- ```bash
71
- pip install ai-pipeline-core
72
- ```
73
-
74
- ### Requirements
75
-
76
- - Python 3.12 or higher
77
- - Linux/macOS (Windows via WSL2)
78
-
79
- ### Development Installation
80
-
81
- ```bash
82
- git clone https://github.com/bbarwik/ai-pipeline-core.git
83
- cd ai-pipeline-core
84
- pip install -e ".[dev]"
85
- make install-dev # Installs pre-commit hooks
86
- ```
87
-
88
- ## Quick Start
89
-
90
- ### Basic Pipeline
91
-
92
- ```python
93
- from ai_pipeline_core import (
94
- pipeline_flow,
95
- FlowDocument,
96
- DocumentList,
97
- FlowOptions,
98
- FlowConfig,
99
- llm,
100
- AIMessages
101
- )
102
-
103
- # Define document types
104
- class InputDoc(FlowDocument):
105
- """Input document for processing."""
106
-
107
- class OutputDoc(FlowDocument):
108
- """Analysis result document."""
109
-
110
- # Define flow configuration
111
- class AnalysisConfig(FlowConfig):
112
- INPUT_DOCUMENT_TYPES = [InputDoc]
113
- OUTPUT_DOCUMENT_TYPE = OutputDoc
114
-
115
- # Create pipeline flow with required config
116
- @pipeline_flow(config=AnalysisConfig)
117
- async def analyze_flow(
118
- project_name: str,
119
- documents: DocumentList,
120
- flow_options: FlowOptions
121
- ) -> DocumentList:
122
- # Process documents
123
- outputs = []
124
- for doc in documents:
125
- # Use AIMessages for LLM interaction
126
- response = await llm.generate(
127
- model="gpt-5",
128
- messages=AIMessages([doc])
129
- )
130
-
131
- output = OutputDoc.create(
132
- name=f"analysis_{doc.name}",
133
- content=response.content
134
- )
135
- outputs.append(output)
136
-
137
- # RECOMMENDED: Always validate output
138
- return AnalysisConfig.create_and_validate_output(outputs)
139
- ```
140
-
141
- ### Structured Output
142
-
143
- ```python
144
- from pydantic import BaseModel
145
- from ai_pipeline_core import llm
146
-
147
- class Analysis(BaseModel):
148
- summary: str
149
- sentiment: float
150
- key_points: list[str]
151
-
152
- # Generate structured output
153
- response = await llm.generate_structured(
154
- model="gpt-5",
155
- response_format=Analysis,
156
- messages="Analyze this product review: ..."
157
- )
158
-
159
- # Access parsed result with type safety
160
- analysis = response.parsed
161
- print(f"Sentiment: {analysis.sentiment}")
162
- for point in analysis.key_points:
163
- print(f"- {point}")
164
- ```
165
-
166
- ### Document Handling
167
-
168
- ```python
169
- from ai_pipeline_core import FlowDocument, TemporaryDocument
170
-
171
- # Create documents with automatic conversion
172
- doc = MyDocument.create(
173
- name="data.json",
174
- content={"key": "value"} # Automatically converted to JSON bytes
175
- )
176
-
177
- # Parse back to original type
178
- data = doc.parse(dict) # Returns {"key": "value"}
179
-
180
- # Document provenance tracking (new in v0.1.14)
181
- doc_with_sources = MyDocument.create(
182
- name="derived.json",
183
- content={"result": "processed"},
184
- sources=[source_doc.sha256, "https://api.example.com/data"]
185
- )
186
-
187
- # Check provenance
188
- for hash in doc_with_sources.get_source_documents():
189
- print(f"Derived from document: {hash}")
190
- for ref in doc_with_sources.get_source_references():
191
- print(f"External source: {ref}")
192
-
193
- # Temporary documents (never persisted)
194
- temp = TemporaryDocument.create(
195
- name="api_response.json",
196
- content={"status": "ok"}
197
- )
198
- ```
199
-
200
- ## Core Concepts
201
-
202
- ### Documents
203
-
204
- Documents are immutable Pydantic models that wrap binary content with metadata:
205
-
206
- - **FlowDocument**: Persists across flow runs, saved to filesystem
207
- - **TaskDocument**: Temporary within task execution, not persisted
208
- - **TemporaryDocument**: Never persisted, useful for sensitive data
209
-
210
- ```python
211
- class MyDocument(FlowDocument):
212
- """Custom document type."""
213
-
214
- # Use create() for automatic conversion
215
- doc = MyDocument.create(
216
- name="data.json",
217
- content={"key": "value"} # Auto-converts to JSON
218
- )
219
-
220
- # Access content
221
- if doc.is_text:
222
- print(doc.text)
223
-
224
- # Parse structured data
225
- data = doc.as_json() # or as_yaml(), as_pydantic_model()
226
-
227
- # Convert between document types (new in v0.2.1)
228
- task_doc = flow_doc.model_convert(TaskDocument) # Convert FlowDocument to TaskDocument
229
- new_doc = doc.model_convert(OtherDocType, content={"new": "data"}) # With content update
230
-
231
- # Enhanced filtering (new in v0.1.14)
232
- filtered = documents.filter_by([Doc1, Doc2, Doc3]) # Multiple types
233
- named = documents.filter_by(["file1.txt", "file2.txt"]) # Multiple names
234
-
235
- # Immutable collections (new in v0.2.1)
236
- frozen_docs = DocumentList(docs, frozen=True) # Immutable document list
237
- frozen_msgs = AIMessages(messages, frozen=True) # Immutable message list
238
- ```
239
-
240
- ### LLM Integration
241
-
242
- The framework provides a unified interface for LLM interactions with smart caching:
243
-
244
- ```python
245
- from ai_pipeline_core import llm, AIMessages, ModelOptions
246
-
247
- # Simple generation
248
- response = await llm.generate(
249
- model="gpt-5",
250
- messages="Explain quantum computing"
251
- )
252
- print(response.content)
253
-
254
- # With context caching (saves 50-90% tokens)
255
- static_context = AIMessages([large_document])
256
-
257
- # First call: caches context
258
- r1 = await llm.generate(
259
- model="gpt-5",
260
- context=static_context, # Cached for 120 seconds by default
261
- messages="Summarize" # Dynamic query
262
- )
263
-
264
- # Second call: reuses cache
265
- r2 = await llm.generate(
266
- model="gpt-5",
267
- context=static_context, # Reused from cache!
268
- messages="Key points?" # Different query
269
- )
270
-
271
- # Custom cache TTL (new in v0.1.14)
272
- response = await llm.generate(
273
- model="gpt-5",
274
- context=static_context,
275
- messages="Analyze",
276
- options=ModelOptions(cache_ttl="300s") # Cache for 5 minutes
277
- )
278
-
279
- # Disable caching for dynamic contexts
280
- response = await llm.generate(
281
- model="gpt-5",
282
- context=dynamic_context,
283
- messages="Process",
284
- options=ModelOptions(cache_ttl=None) # No caching
285
- )
286
- ```
287
-
288
- ### Flow Configuration
289
-
290
- Type-safe flow configuration ensures proper document flow:
291
-
292
- ```python
293
- from ai_pipeline_core import FlowConfig
294
-
295
- class ProcessingConfig(FlowConfig):
296
- INPUT_DOCUMENT_TYPES = [RawDataDocument]
297
- OUTPUT_DOCUMENT_TYPE = ProcessedDocument # Must be different!
298
-
299
- # Use in flows for validation
300
- @pipeline_flow(config=ProcessingConfig)
301
- async def process(
302
- project_name: str,
303
- documents: DocumentList,
304
- flow_options: FlowOptions
305
- ) -> DocumentList:
306
- # ... processing logic ...
307
- return ProcessingConfig.create_and_validate_output(outputs)
308
- ```
309
-
310
- ### Pipeline Decorators
311
-
312
- Enhanced decorators with built-in tracing and monitoring:
313
-
314
- ```python
315
- from ai_pipeline_core import pipeline_flow, pipeline_task, set_trace_cost
316
-
317
- @pipeline_task # Automatic retry, tracing, and monitoring
318
- async def process_chunk(data: str) -> str:
319
- result = await transform(data)
320
- set_trace_cost(0.05) # Track costs (new in v0.1.14)
321
- return result
322
-
323
- @pipeline_flow(
324
- config=MyFlowConfig,
325
- trace_trim_documents=True # Trim large documents in traces (new in v0.2.1)
326
- )
327
- async def main_flow(
328
- project_name: str,
329
- documents: DocumentList,
330
- flow_options: FlowOptions
331
- ) -> DocumentList:
332
- # Your pipeline logic
333
- # Large documents are automatically trimmed to 100 chars in traces
334
- # for better observability without overwhelming the tracing UI
335
- return DocumentList(results)
336
- ```
337
-
338
- ## Configuration
339
-
340
- ### Environment Variables
341
-
342
- ```bash
343
- # LLM Configuration (via LiteLLM proxy)
344
- OPENAI_BASE_URL=http://localhost:4000
345
- OPENAI_API_KEY=your-api-key
346
-
347
- # Optional: Observability
348
- LMNR_PROJECT_API_KEY=your-lmnr-key
349
- LMNR_DEBUG=true # Enable debug traces
350
-
351
- # Optional: Orchestration
352
- PREFECT_API_URL=http://localhost:4200/api
353
- PREFECT_API_KEY=your-prefect-key
354
-
355
- # Optional: Storage (for Google Cloud Storage)
356
- GCS_SERVICE_ACCOUNT_FILE=/path/to/service-account.json # GCS auth file
357
- ```
358
-
359
- ### Settings Management
360
-
361
- Create custom settings by inheriting from the base Settings class:
362
-
363
- ```python
364
- from ai_pipeline_core import Settings
365
-
366
- class ProjectSettings(Settings):
367
- """Project-specific configuration."""
368
- app_name: str = "my-app"
369
- max_retries: int = 3
370
- enable_cache: bool = True
371
-
372
- # Create singleton instance
373
- settings = ProjectSettings()
374
-
375
- # Access configuration
376
- print(settings.openai_base_url)
377
- print(settings.app_name)
378
- ```
379
-
380
- ## Best Practices
381
-
382
- ### Framework Rules (90% Use Cases)
383
-
384
- 1. **Decorators**: Use `@pipeline_task` WITHOUT parameters, `@pipeline_flow` WITH config
385
- 2. **Logging**: Use `get_pipeline_logger(__name__)` - NEVER `print()` or `logging` module
386
- 3. **LLM calls**: Use `AIMessages` or `str`. Wrap Documents in `AIMessages`
387
- 4. **Options**: Omit `ModelOptions` unless specifically needed (defaults are optimal)
388
- 5. **Documents**: Create with just `name` and `content` - skip `description`
389
- 6. **FlowConfig**: `OUTPUT_DOCUMENT_TYPE` must differ from all `INPUT_DOCUMENT_TYPES`
390
- 7. **Initialization**: `PromptManager` and logger at module scope, not in functions
391
- 8. **DocumentList**: Use default constructor - no validation flags needed
392
- 9. **setup_logging()**: Only in application `main()`, never at import time
393
-
394
- ### Import Convention
395
-
396
- Always import from the top-level package:
397
-
398
- ```python
399
- # CORRECT
400
- from ai_pipeline_core import llm, pipeline_flow, FlowDocument
401
-
402
- # WRONG - Never import from submodules
403
- from ai_pipeline_core.llm import generate # NO!
404
- from ai_pipeline_core.documents import FlowDocument # NO!
405
- ```
406
-
407
- ## Development
408
-
409
- ### Running Tests
410
-
411
- ```bash
412
- make test # Run all tests
413
- make test-cov # Run with coverage report
414
- make test-showcase # Test showcase example
415
- ```
416
-
417
- ### Code Quality
418
-
419
- ```bash
420
- make lint # Run linting
421
- make format # Auto-format code
422
- make typecheck # Type checking with basedpyright
423
- ```
424
-
425
- ### Building Documentation
426
-
427
- ```bash
428
- make docs-build # Generate API.md
429
- make docs-check # Verify documentation is up-to-date
430
- ```
431
-
432
- ## Examples
433
-
434
- The `examples/` directory contains:
435
-
436
- - `showcase.py` - Comprehensive example demonstrating all major features
437
- - Run with: `cd examples && python showcase.py /path/to/documents`
438
-
439
- ## API Reference
440
-
441
- See [API.md](API.md) for complete API documentation.
442
-
443
- ### Navigation Tips
444
-
445
- For humans:
446
- ```bash
447
- grep -n '^##' API.md # List all main sections
448
- grep -n '^###' API.md # List all classes and functions
449
- ```
450
-
451
- For AI assistants:
452
- - Use pattern `^##` to find module sections
453
- - Use pattern `^###` for classes and functions
454
- - Use pattern `^####` for methods and properties
455
-
456
- ## Project Structure
457
-
458
- ```
459
- ai-pipeline-core/
460
- ├── ai_pipeline_core/
461
- │ ├── documents/ # Document abstraction system
462
- │ ├── flow/ # Flow configuration and options
463
- │ ├── llm/ # LLM client and response handling
464
- │ ├── logging/ # Logging infrastructure
465
- │ ├── tracing.py # Distributed tracing
466
- │ ├── pipeline.py # Pipeline decorators
467
- │ ├── prompt_manager.py # Jinja2 template management
468
- │ └── settings.py # Configuration management
469
- ├── tests/ # Comprehensive test suite
470
- ├── examples/ # Usage examples
471
- ├── API.md # Complete API reference
472
- └── pyproject.toml # Project configuration
473
- ```
474
-
475
- ## Contributing
476
-
477
- 1. Fork the repository
478
- 2. Create a feature branch (`git checkout -b feature/amazing-feature`)
479
- 3. Make changes following the project's style guide
480
- 4. Run tests and linting (`make test lint typecheck`)
481
- 5. Commit your changes
482
- 6. Push to the branch (`git push origin feature/amazing-feature`)
483
- 7. Open a Pull Request
484
-
485
- ## License
486
-
487
- This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
488
-
489
- ## Support
490
-
491
- - **Issues**: [GitHub Issues](https://github.com/bbarwik/ai-pipeline-core/issues)
492
- - **Discussions**: [GitHub Discussions](https://github.com/bbarwik/ai-pipeline-core/discussions)
493
- - **Documentation**: [API Reference](API.md)
494
-
495
- ## Acknowledgments
496
-
497
- - Built on [Prefect](https://www.prefect.io/) for workflow orchestration
498
- - Uses [LiteLLM](https://github.com/BerriAI/litellm) for LLM provider abstraction
499
- - Integrates [Laminar (LMNR)](https://www.lmnr.ai/) for observability
500
- - Type checking with [Pydantic](https://pydantic.dev/) and [basedpyright](https://github.com/DetachHead/basedpyright)
@@ -1,41 +0,0 @@
1
- ai_pipeline_core/__init__.py,sha256=BWdVF64no62Cg9b_6GLiRub7ytBQQPk1RC38V0FGoAA,5720
2
- ai_pipeline_core/exceptions.py,sha256=vx-XLTw2fJSPs-vwtXVYtqoQUcOc0JeI7UmHqRqQYWU,1569
3
- ai_pipeline_core/pipeline.py,sha256=fWTVmrnOEIFge6o2NUYW2ndGef5UurpL8_fK5tkXbzI,28700
4
- ai_pipeline_core/prefect.py,sha256=91ZgLJHsDsRUW77CpNmkKxYs3RCJuucPM3pjKmNBeDg,2199
5
- ai_pipeline_core/prompt_manager.py,sha256=FAtb1yK7bGuAeuIJ523LOX9bd7TrcHG-TqZ7Lz4RJC0,12087
6
- ai_pipeline_core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
- ai_pipeline_core/settings.py,sha256=IMrFaX0i-WIlaOA5O53ipNSta6KQVSFHc1aJXmS3nSo,5078
8
- ai_pipeline_core/tracing.py,sha256=HT8heSwsVot6D6u8dPi-BHVlaemkPsPs5aXtG-iIzNk,31494
9
- ai_pipeline_core/documents/__init__.py,sha256=WHStvGZiSyybOcMTYxSV24U6MA3Am_0_Az5p-DuMFrk,738
10
- ai_pipeline_core/documents/document.py,sha256=hdTh36KGEcrDollTnQmTI66DJIqYfe4X42Y0q7Cm4fY,68153
11
- ai_pipeline_core/documents/document_list.py,sha256=Y_NCjfM_CjkIwHRD2iyGgYBuIykN8lT2IIH_uWOiGis,16254
12
- ai_pipeline_core/documents/flow_document.py,sha256=vSPzE4kGuDjGUfFykfpPaSfMuIO9_kDfTvdc8kZaE8U,4144
13
- ai_pipeline_core/documents/mime_type.py,sha256=JFEOq4HwlIW2snobyNfWwySdT7urZSWkobiRMVs2fSE,7959
14
- ai_pipeline_core/documents/task_document.py,sha256=4j94N-hkqXVmzjyUjbA9YW2oR4dqnOhqA3D5OWrmGkw,4303
15
- ai_pipeline_core/documents/temporary_document.py,sha256=Sam344Mm5AlZTm3_l01YdDWeF26F6pR2tytGRL1doQY,2711
16
- ai_pipeline_core/documents/utils.py,sha256=ZyJNjFN7ihWno0K7dJZed7twYmmPLA0z40UzFw1A3A8,5465
17
- ai_pipeline_core/flow/__init__.py,sha256=2BfWYMOPYW5teGzwo-qzpn_bom1lxxry0bPsjVgcsCk,188
18
- ai_pipeline_core/flow/config.py,sha256=3PCDph2n8dj-txqAvd9Wflbi_6lmfXFR9rUhM-szGSQ,18887
19
- ai_pipeline_core/flow/options.py,sha256=2rKR2GifhXcyw8avI_oiEDMLC2jm5Qzpw8z56pbxUMo,2285
20
- ai_pipeline_core/llm/__init__.py,sha256=3B_vtEzxrzidP1qOUNQ4RxlUmxZ2MBKQcUhQiTybM9g,661
21
- ai_pipeline_core/llm/ai_messages.py,sha256=tseyncD-T1IjaXKzUkzEgS3CGvz-WEDsA6v8wt9Emx0,14295
22
- ai_pipeline_core/llm/client.py,sha256=Ojb0Ew2w88KlTfNP7VyRsIjjHh5Z8A1WhfHsNQwfeak,23673
23
- ai_pipeline_core/llm/model_options.py,sha256=5XTBDdGZMxAo8JvhfEBIrtTOJC6piaMsgJ_J2cGiByo,11775
24
- ai_pipeline_core/llm/model_response.py,sha256=VsuEp8tMbxWOkpZBFH1WIJisbZSvg7Z4JPqPejz7YM8,13273
25
- ai_pipeline_core/llm/model_types.py,sha256=OCRdTbQ1ZZ95nT_2PgOm16n2et25QOQyBtB1zsqm_3U,2791
26
- ai_pipeline_core/logging/__init__.py,sha256=Nz6-ghAoENsgNmLD2ma9TW9M0U2_QfxuQ5DDW6Vt6M0,651
27
- ai_pipeline_core/logging/logging.yml,sha256=YTW48keO_K5bkkb-KXGM7ZuaYKiquLsjsURei8Ql0V4,1353
28
- ai_pipeline_core/logging/logging_config.py,sha256=pV2x6GgMPXrzPH27sicCSXfw56beio4C2JKCJ3NsXrg,6207
29
- ai_pipeline_core/logging/logging_mixin.py,sha256=OTye2pbUbG5oYZkI06TNkGCEa4y0ldePz5IAfdmNUPU,8090
30
- ai_pipeline_core/simple_runner/__init__.py,sha256=9krT-CcDAZ0jB2MjWqFYhaK5qtUDMpB5qWzjRLa4Zhk,322
31
- ai_pipeline_core/simple_runner/cli.py,sha256=p9Z1jtRMH10T5Bl3QfHPxyW6LL4qYvvXeOXbPGeeXeE,9308
32
- ai_pipeline_core/simple_runner/simple_runner.py,sha256=f6cIodYkul-Apu1d63T6kR5DZpiaCWpphUcEPp5XjFo,9102
33
- ai_pipeline_core/storage/__init__.py,sha256=tcIkjJ3zPBLCyetwiJDewBvS2sbRJrDlBh3gEsQm08E,184
34
- ai_pipeline_core/storage/storage.py,sha256=ClMr419Y-eU2RuOjZYd51dC0stWQk28Vb56PvQaoUwc,20007
35
- ai_pipeline_core/utils/__init__.py,sha256=TJSmEm1Quf-gKwXrxM96u2IGzVolUyeNNfLMPoLstXI,254
36
- ai_pipeline_core/utils/deploy.py,sha256=Y15-xoIWfs-HkEQeTobjq4UdTiXSTKzTghL0Jo8iMgU,13565
37
- ai_pipeline_core/utils/remote_deployment.py,sha256=cPTgnS5InK08qiWnuPz3e8YKjoT3sPBloSaDfNTzghs,10137
38
- ai_pipeline_core-0.2.6.dist-info/METADATA,sha256=rC6CVxTMsoL6iCIIw3b_Lr5zWdBBmzqCgboiS4vFXpY,15159
39
- ai_pipeline_core-0.2.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
40
- ai_pipeline_core-0.2.6.dist-info/licenses/LICENSE,sha256=kKj8mfbdWwkyG3U6n7ztB3bAZlEwShTkAsvaY657i3I,1074
41
- ai_pipeline_core-0.2.6.dist-info/RECORD,,