ai-pipeline-core 0.1.3__tar.gz → 0.1.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/PKG-INFO +1 -1
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/llm/ai_messages.py +6 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/llm/client.py +13 -10
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/pyproject.toml +2 -2
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/.gitignore +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/LICENSE +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/README.md +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/__init__.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/documents/__init__.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/documents/document.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/documents/document_list.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/documents/flow_document.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/documents/mime_type.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/documents/task_document.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/documents/utils.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/exceptions.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/flow/__init__.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/flow/config.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/llm/__init__.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/llm/model_options.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/llm/model_response.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/llm/model_types.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/logging/__init__.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/logging/logging.yml +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/logging/logging_config.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/logging/logging_mixin.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/prompt_manager.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/py.typed +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/settings.py +0 -0
- {ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/tracing.py +0 -0
{ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ai-pipeline-core
-Version: 0.1.3
+Version: 0.1.4
 Summary: Core utilities for AI-powered processing pipelines using prefect
 Project-URL: Homepage, https://github.com/bbarwik/ai-pipeline-core
 Project-URL: Repository, https://github.com/bbarwik/ai-pipeline-core

{ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/llm/ai_messages.py

@@ -1,4 +1,5 @@
 import base64
+import hashlib
 import json
 
 from openai.types.chat import (

@@ -59,6 +60,11 @@ class AIMessages(list[AIMessageType]):
         messages.append(message)
         return messages
 
+    def get_prompt_cache_key(self, system_prompt: str | None = None) -> str:
+        if not system_prompt:
+            system_prompt = ""
+        return hashlib.sha256((system_prompt + json.dumps(self.to_prompt())).encode()).hexdigest()
+
     @staticmethod
     def document_to_prompt(document: Document) -> list[ChatCompletionContentPartParam]:
         """

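The new get_prompt_cache_key helper hashes the system prompt together with the JSON-serialized context, so identical inputs always produce the same SHA-256 key. A minimal usage sketch, assuming AIMessages is importable from ai_pipeline_core.llm and accepts plain string messages (both assumptions, not shown in this diff):

    # Hypothetical usage; the import path, string-message support, and prompt
    # text are illustrative assumptions rather than part of the diff.
    from ai_pipeline_core.llm import AIMessages

    context = AIMessages(["Here is the shared background document ..."])

    # Identical context + identical system prompt -> identical key, so repeated
    # calls can be routed to the same provider-side prompt cache entry.
    key_a = context.get_prompt_cache_key(system_prompt="You are a careful analyst.")
    key_b = context.get_prompt_cache_key(system_prompt="You are a careful analyst.")
    assert key_a == key_b

    # A missing system prompt is treated as an empty string and still yields a
    # stable (but different) key.
    assert context.get_prompt_cache_key() != key_a
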
{ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/ai_pipeline_core/llm/client.py

@@ -48,15 +48,13 @@ def _process_messages(
     # Use AIMessages.to_prompt() for context
     context_messages = context.to_prompt()
 
-    # Apply caching to context
-
-
-
-
-
-    }
-    processed_messages.append(msg)
+    # Apply caching to last context message
+    context_messages[-1]["cache_control"] = {  # type: ignore
+        "type": "ephemeral",
+        "ttl": "120s",  # Cache for 2m
+    }
+
+    processed_messages.extend(context_messages)
 
     # Process regular messages without caching
     if messages:

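This hunk switches from per-message handling to marking only the final context message for provider-side prompt caching. A rough sketch of the resulting message shape, assuming OpenAI-style chat message dicts; everything except the cache_control payload taken from the diff above is illustrative:

    # Illustrative result of the new context handling: only the last context
    # message carries the ephemeral cache marker.
    context_messages = [
        {"role": "user", "content": "Background document, part 1 ..."},
        {"role": "user", "content": "Background document, part 2 ..."},
    ]

    context_messages[-1]["cache_control"] = {
        "type": "ephemeral",
        "ttl": "120s",  # cache the shared prefix for roughly two minutes
    }

    processed_messages: list[dict] = []
    processed_messages.extend(context_messages)  # uncached messages are appended later
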
@@ -108,9 +106,14 @@ async def _generate_with_retry(
         **options.to_openai_completion_kwargs(),
     }
 
+    if context:
+        completion_kwargs["prompt_cache_key"] = context.get_prompt_cache_key(options.system_prompt)
+
     for attempt in range(options.retries):
         try:
-            with Laminar.start_as_current_span(
+            with Laminar.start_as_current_span(
+                model, span_type="LLM", input=processed_messages
+            ) as span:
                 response = await _generate(model, processed_messages, completion_kwargs)
                 span.set_attributes(response.get_laminar_metadata())
                 Laminar.set_span_output(response.content)

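Here the cache key derived from the context is attached to the completion kwargs, and the Laminar span call is reflowed across lines. A condensed sketch of the request assembly, assuming an OpenAI-compatible endpoint and a recent openai SDK that accepts prompt_cache_key as a completion parameter (older SDKs would need extra_body); the client construction and wrapper function are placeholders for the library's internal _generate path:

    # Condensed sketch; AsyncOpenAI usage and this wrapper function are
    # placeholders, not the library's actual wiring.
    from openai import AsyncOpenAI

    async def generate_once(model, processed_messages, context, system_prompt=None):
        completion_kwargs = {}
        if context:
            # Same context + system prompt -> same key -> better prompt-cache reuse.
            completion_kwargs["prompt_cache_key"] = context.get_prompt_cache_key(system_prompt)

        client = AsyncOpenAI()  # reads OPENAI_API_KEY / OPENAI_BASE_URL from the environment
        return await client.chat.completions.create(
            model=model,
            messages=processed_messages,
            **completion_kwargs,
        )
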
{ai_pipeline_core-0.1.3 → ai_pipeline_core-0.1.4}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "ai-pipeline-core"
-version = "0.1.3"
+version = "0.1.4"
 description = "Core utilities for AI-powered processing pipelines using prefect"
 readme = "README.md"
 license = {text = "MIT"}

@@ -140,7 +140,7 @@ reportIncompatibleVariableOverride = "error"
 reportMissingParameterType = "warning"
 
 [tool.bumpversion]
-current_version = "0.1.3"
+current_version = "0.1.4"
 commit = true
 tag = true
 tag_name = "v{new_version}"

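The project version and the [tool.bumpversion] current_version are bumped together, and the config above produces a release commit plus a v{new_version} tag. Once 0.1.4 is installed, the new version is what standard packaging metadata reports; a small check using only the distribution name from this diff:

    # Prints the installed distribution's version, e.g. "0.1.4" for this release.
    from importlib.metadata import version

    print(version("ai-pipeline-core"))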