planar-0.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- planar/.__init__.py.un~ +0 -0
- planar/._version.py.un~ +0 -0
- planar/.app.py.un~ +0 -0
- planar/.cli.py.un~ +0 -0
- planar/.config.py.un~ +0 -0
- planar/.context.py.un~ +0 -0
- planar/.db.py.un~ +0 -0
- planar/.di.py.un~ +0 -0
- planar/.engine.py.un~ +0 -0
- planar/.files.py.un~ +0 -0
- planar/.log_context.py.un~ +0 -0
- planar/.log_metadata.py.un~ +0 -0
- planar/.logging.py.un~ +0 -0
- planar/.object_registry.py.un~ +0 -0
- planar/.otel.py.un~ +0 -0
- planar/.server.py.un~ +0 -0
- planar/.session.py.un~ +0 -0
- planar/.sqlalchemy.py.un~ +0 -0
- planar/.task_local.py.un~ +0 -0
- planar/.test_app.py.un~ +0 -0
- planar/.test_config.py.un~ +0 -0
- planar/.test_object_config.py.un~ +0 -0
- planar/.test_sqlalchemy.py.un~ +0 -0
- planar/.test_utils.py.un~ +0 -0
- planar/.util.py.un~ +0 -0
- planar/.utils.py.un~ +0 -0
- planar/__init__.py +26 -0
- planar/_version.py +1 -0
- planar/ai/.__init__.py.un~ +0 -0
- planar/ai/._models.py.un~ +0 -0
- planar/ai/.agent.py.un~ +0 -0
- planar/ai/.agent_utils.py.un~ +0 -0
- planar/ai/.events.py.un~ +0 -0
- planar/ai/.files.py.un~ +0 -0
- planar/ai/.models.py.un~ +0 -0
- planar/ai/.providers.py.un~ +0 -0
- planar/ai/.pydantic_ai.py.un~ +0 -0
- planar/ai/.pydantic_ai_agent.py.un~ +0 -0
- planar/ai/.pydantic_ai_provider.py.un~ +0 -0
- planar/ai/.step.py.un~ +0 -0
- planar/ai/.test_agent.py.un~ +0 -0
- planar/ai/.test_agent_serialization.py.un~ +0 -0
- planar/ai/.test_providers.py.un~ +0 -0
- planar/ai/.utils.py.un~ +0 -0
- planar/ai/__init__.py +15 -0
- planar/ai/agent.py +457 -0
- planar/ai/agent_utils.py +205 -0
- planar/ai/models.py +140 -0
- planar/ai/providers.py +1088 -0
- planar/ai/test_agent.py +1298 -0
- planar/ai/test_agent_serialization.py +229 -0
- planar/ai/test_providers.py +463 -0
- planar/ai/utils.py +102 -0
- planar/app.py +494 -0
- planar/cli.py +282 -0
- planar/config.py +544 -0
- planar/db/.db.py.un~ +0 -0
- planar/db/__init__.py +17 -0
- planar/db/alembic/env.py +136 -0
- planar/db/alembic/script.py.mako +28 -0
- planar/db/alembic/versions/3476068c153c_initial_system_tables_migration.py +339 -0
- planar/db/alembic.ini +128 -0
- planar/db/db.py +318 -0
- planar/files/.config.py.un~ +0 -0
- planar/files/.local.py.un~ +0 -0
- planar/files/.local_filesystem.py.un~ +0 -0
- planar/files/.model.py.un~ +0 -0
- planar/files/.models.py.un~ +0 -0
- planar/files/.s3.py.un~ +0 -0
- planar/files/.storage.py.un~ +0 -0
- planar/files/.test_files.py.un~ +0 -0
- planar/files/__init__.py +2 -0
- planar/files/models.py +162 -0
- planar/files/storage/.__init__.py.un~ +0 -0
- planar/files/storage/.base.py.un~ +0 -0
- planar/files/storage/.config.py.un~ +0 -0
- planar/files/storage/.context.py.un~ +0 -0
- planar/files/storage/.local_directory.py.un~ +0 -0
- planar/files/storage/.test_local_directory.py.un~ +0 -0
- planar/files/storage/.test_s3.py.un~ +0 -0
- planar/files/storage/base.py +61 -0
- planar/files/storage/config.py +44 -0
- planar/files/storage/context.py +15 -0
- planar/files/storage/local_directory.py +188 -0
- planar/files/storage/s3.py +220 -0
- planar/files/storage/test_local_directory.py +162 -0
- planar/files/storage/test_s3.py +299 -0
- planar/files/test_files.py +283 -0
- planar/human/.human.py.un~ +0 -0
- planar/human/.test_human.py.un~ +0 -0
- planar/human/__init__.py +2 -0
- planar/human/human.py +458 -0
- planar/human/models.py +80 -0
- planar/human/test_human.py +385 -0
- planar/logging/.__init__.py.un~ +0 -0
- planar/logging/.attributes.py.un~ +0 -0
- planar/logging/.formatter.py.un~ +0 -0
- planar/logging/.logger.py.un~ +0 -0
- planar/logging/.otel.py.un~ +0 -0
- planar/logging/.tracer.py.un~ +0 -0
- planar/logging/__init__.py +10 -0
- planar/logging/attributes.py +54 -0
- planar/logging/context.py +14 -0
- planar/logging/formatter.py +113 -0
- planar/logging/logger.py +114 -0
- planar/logging/otel.py +51 -0
- planar/modeling/.mixin.py.un~ +0 -0
- planar/modeling/.storage.py.un~ +0 -0
- planar/modeling/__init__.py +0 -0
- planar/modeling/field_helpers.py +59 -0
- planar/modeling/json_schema_generator.py +94 -0
- planar/modeling/mixins/__init__.py +10 -0
- planar/modeling/mixins/auditable.py +52 -0
- planar/modeling/mixins/test_auditable.py +97 -0
- planar/modeling/mixins/test_timestamp.py +134 -0
- planar/modeling/mixins/test_uuid_primary_key.py +52 -0
- planar/modeling/mixins/timestamp.py +53 -0
- planar/modeling/mixins/uuid_primary_key.py +19 -0
- planar/modeling/orm/.planar_base_model.py.un~ +0 -0
- planar/modeling/orm/__init__.py +18 -0
- planar/modeling/orm/planar_base_entity.py +29 -0
- planar/modeling/orm/query_filter_builder.py +122 -0
- planar/modeling/orm/reexports.py +15 -0
- planar/object_config/.object_config.py.un~ +0 -0
- planar/object_config/__init__.py +11 -0
- planar/object_config/models.py +114 -0
- planar/object_config/object_config.py +378 -0
- planar/object_registry.py +100 -0
- planar/registry_items.py +65 -0
- planar/routers/.__init__.py.un~ +0 -0
- planar/routers/.agents_router.py.un~ +0 -0
- planar/routers/.crud.py.un~ +0 -0
- planar/routers/.decision.py.un~ +0 -0
- planar/routers/.event.py.un~ +0 -0
- planar/routers/.file_attachment.py.un~ +0 -0
- planar/routers/.files.py.un~ +0 -0
- planar/routers/.files_router.py.un~ +0 -0
- planar/routers/.human.py.un~ +0 -0
- planar/routers/.info.py.un~ +0 -0
- planar/routers/.models.py.un~ +0 -0
- planar/routers/.object_config_router.py.un~ +0 -0
- planar/routers/.rule.py.un~ +0 -0
- planar/routers/.test_object_config_router.py.un~ +0 -0
- planar/routers/.test_workflow_router.py.un~ +0 -0
- planar/routers/.workflow.py.un~ +0 -0
- planar/routers/__init__.py +13 -0
- planar/routers/agents_router.py +197 -0
- planar/routers/entity_router.py +143 -0
- planar/routers/event.py +91 -0
- planar/routers/files.py +142 -0
- planar/routers/human.py +151 -0
- planar/routers/info.py +131 -0
- planar/routers/models.py +170 -0
- planar/routers/object_config_router.py +133 -0
- planar/routers/rule.py +108 -0
- planar/routers/test_agents_router.py +174 -0
- planar/routers/test_object_config_router.py +367 -0
- planar/routers/test_routes_security.py +169 -0
- planar/routers/test_rule_router.py +470 -0
- planar/routers/test_workflow_router.py +274 -0
- planar/routers/workflow.py +468 -0
- planar/rules/.decorator.py.un~ +0 -0
- planar/rules/.runner.py.un~ +0 -0
- planar/rules/.test_rules.py.un~ +0 -0
- planar/rules/__init__.py +23 -0
- planar/rules/decorator.py +184 -0
- planar/rules/models.py +355 -0
- planar/rules/rule_configuration.py +191 -0
- planar/rules/runner.py +64 -0
- planar/rules/test_rules.py +750 -0
- planar/scaffold_templates/app/__init__.py.j2 +0 -0
- planar/scaffold_templates/app/db/entities.py.j2 +11 -0
- planar/scaffold_templates/app/flows/process_invoice.py.j2 +67 -0
- planar/scaffold_templates/main.py.j2 +13 -0
- planar/scaffold_templates/planar.dev.yaml.j2 +34 -0
- planar/scaffold_templates/planar.prod.yaml.j2 +28 -0
- planar/scaffold_templates/pyproject.toml.j2 +10 -0
- planar/security/.jwt_middleware.py.un~ +0 -0
- planar/security/auth_context.py +148 -0
- planar/security/authorization.py +388 -0
- planar/security/default_policies.cedar +77 -0
- planar/security/jwt_middleware.py +116 -0
- planar/security/security_context.py +18 -0
- planar/security/tests/test_authorization_context.py +78 -0
- planar/security/tests/test_cedar_basics.py +41 -0
- planar/security/tests/test_cedar_policies.py +158 -0
- planar/security/tests/test_jwt_principal_context.py +179 -0
- planar/session.py +40 -0
- planar/sse/.constants.py.un~ +0 -0
- planar/sse/.example.html.un~ +0 -0
- planar/sse/.hub.py.un~ +0 -0
- planar/sse/.model.py.un~ +0 -0
- planar/sse/.proxy.py.un~ +0 -0
- planar/sse/constants.py +1 -0
- planar/sse/example.html +126 -0
- planar/sse/hub.py +216 -0
- planar/sse/model.py +8 -0
- planar/sse/proxy.py +257 -0
- planar/task_local.py +37 -0
- planar/test_app.py +51 -0
- planar/test_cli.py +372 -0
- planar/test_config.py +512 -0
- planar/test_object_config.py +527 -0
- planar/test_object_registry.py +14 -0
- planar/test_sqlalchemy.py +158 -0
- planar/test_utils.py +105 -0
- planar/testing/.client.py.un~ +0 -0
- planar/testing/.memory_storage.py.un~ +0 -0
- planar/testing/.planar_test_client.py.un~ +0 -0
- planar/testing/.predictable_tracer.py.un~ +0 -0
- planar/testing/.synchronizable_tracer.py.un~ +0 -0
- planar/testing/.test_memory_storage.py.un~ +0 -0
- planar/testing/.workflow_observer.py.un~ +0 -0
- planar/testing/__init__.py +0 -0
- planar/testing/memory_storage.py +78 -0
- planar/testing/planar_test_client.py +54 -0
- planar/testing/synchronizable_tracer.py +153 -0
- planar/testing/test_memory_storage.py +143 -0
- planar/testing/workflow_observer.py +73 -0
- planar/utils.py +70 -0
- planar/workflows/.__init__.py.un~ +0 -0
- planar/workflows/.builtin_steps.py.un~ +0 -0
- planar/workflows/.concurrency_tracing.py.un~ +0 -0
- planar/workflows/.context.py.un~ +0 -0
- planar/workflows/.contrib.py.un~ +0 -0
- planar/workflows/.decorators.py.un~ +0 -0
- planar/workflows/.durable_test.py.un~ +0 -0
- planar/workflows/.errors.py.un~ +0 -0
- planar/workflows/.events.py.un~ +0 -0
- planar/workflows/.exceptions.py.un~ +0 -0
- planar/workflows/.execution.py.un~ +0 -0
- planar/workflows/.human.py.un~ +0 -0
- planar/workflows/.lock.py.un~ +0 -0
- planar/workflows/.misc.py.un~ +0 -0
- planar/workflows/.model.py.un~ +0 -0
- planar/workflows/.models.py.un~ +0 -0
- planar/workflows/.notifications.py.un~ +0 -0
- planar/workflows/.orchestrator.py.un~ +0 -0
- planar/workflows/.runtime.py.un~ +0 -0
- planar/workflows/.serialization.py.un~ +0 -0
- planar/workflows/.step.py.un~ +0 -0
- planar/workflows/.step_core.py.un~ +0 -0
- planar/workflows/.sub_workflow_runner.py.un~ +0 -0
- planar/workflows/.sub_workflow_scheduler.py.un~ +0 -0
- planar/workflows/.test_concurrency.py.un~ +0 -0
- planar/workflows/.test_concurrency_detection.py.un~ +0 -0
- planar/workflows/.test_human.py.un~ +0 -0
- planar/workflows/.test_lock_timeout.py.un~ +0 -0
- planar/workflows/.test_orchestrator.py.un~ +0 -0
- planar/workflows/.test_race_conditions.py.un~ +0 -0
- planar/workflows/.test_serialization.py.un~ +0 -0
- planar/workflows/.test_suspend_deserialization.py.un~ +0 -0
- planar/workflows/.test_workflow.py.un~ +0 -0
- planar/workflows/.tracing.py.un~ +0 -0
- planar/workflows/.types.py.un~ +0 -0
- planar/workflows/.util.py.un~ +0 -0
- planar/workflows/.utils.py.un~ +0 -0
- planar/workflows/.workflow.py.un~ +0 -0
- planar/workflows/.workflow_wrapper.py.un~ +0 -0
- planar/workflows/.wrappers.py.un~ +0 -0
- planar/workflows/__init__.py +42 -0
- planar/workflows/context.py +44 -0
- planar/workflows/contrib.py +190 -0
- planar/workflows/decorators.py +217 -0
- planar/workflows/events.py +185 -0
- planar/workflows/exceptions.py +34 -0
- planar/workflows/execution.py +198 -0
- planar/workflows/lock.py +229 -0
- planar/workflows/misc.py +5 -0
- planar/workflows/models.py +154 -0
- planar/workflows/notifications.py +96 -0
- planar/workflows/orchestrator.py +383 -0
- planar/workflows/query.py +256 -0
- planar/workflows/serialization.py +409 -0
- planar/workflows/step_core.py +373 -0
- planar/workflows/step_metadata.py +357 -0
- planar/workflows/step_testing_utils.py +86 -0
- planar/workflows/sub_workflow_runner.py +191 -0
- planar/workflows/test_concurrency_detection.py +120 -0
- planar/workflows/test_lock_timeout.py +140 -0
- planar/workflows/test_serialization.py +1195 -0
- planar/workflows/test_suspend_deserialization.py +231 -0
- planar/workflows/test_workflow.py +1967 -0
- planar/workflows/tracing.py +106 -0
- planar/workflows/wrappers.py +41 -0
- planar-0.5.0.dist-info/METADATA +285 -0
- planar-0.5.0.dist-info/RECORD +289 -0
- planar-0.5.0.dist-info/WHEEL +4 -0
- planar-0.5.0.dist-info/entry_points.txt +3 -0
planar/test_utils.py
ADDED
@@ -0,0 +1,105 @@
import asyncio
import time
from datetime import UTC, datetime

import pytest

from planar.utils import asyncify, utc_now


async def test_asyncify_converts_sync_to_async():
    """Test that asyncify correctly converts a synchronous function to an asynchronous one."""

    def sync_function(x, y):
        return x + y

    async_function = asyncify(sync_function)

    # Check that the function is now a coroutine function
    assert asyncio.iscoroutinefunction(async_function)
    assert not asyncio.iscoroutinefunction(sync_function)

    # Check that it can be awaited
    result = await async_function(5, 3)
    assert result == 8


async def test_asyncify_with_args_and_kwargs():
    """Test that asyncify correctly passes positional and keyword arguments."""

    def complex_function(a, b, c=0, d=0):
        return a + b + c + d

    async_function = asyncify(complex_function)

    # Test with positional args only
    result1 = await async_function(1, 2)
    assert result1 == 3

    # Test with positional and keyword args
    result2 = await async_function(1, 2, c=3, d=4)
    assert result2 == 10


async def test_asyncify_preserves_exceptions():
    """Test that asyncify preserves exceptions raised by the wrapped function."""

    def failing_function():
        raise ValueError("Expected error")

    async_function = asyncify(failing_function)

    with pytest.raises(ValueError, match="Expected error"):
        await async_function()


async def test_asyncify_non_blocking():
    """Test that asyncify runs the function in a way that doesn't block the event loop."""
    # This counter will be incremented by a task running concurrently with our slow function
    counter = 0

    @asyncify
    def slow_function():
        time.sleep(0.5)  # This would block the event loop if not run in executor
        return counter

    # This task will increment the counter while the slow function is running
    async def increment_counter():
        nonlocal counter
        await asyncio.sleep(0.1)  # Short sleep to allow the slow function to start
        for _ in range(10):
            counter += 1
            await asyncio.sleep(0.01)  # Short sleep to yield control

    # Create increment task
    task = asyncio.create_task(increment_counter())

    # Run the async function
    assert counter == 0
    result = await slow_function()
    # If the event loop was blocked, the counter would be 0
    assert counter == 10
    assert result == 10

    await task


def test_raises_when_applied_to_async_function():
    """Test that asyncify raises an error when applied to an async function."""

    async def async_function():
        pass

    with pytest.raises(ValueError, match="Function is already async"):
        asyncify(async_function)


def test_utc_now_returns_naive_utc():
    """utc_now should return a naive datetime captured within two timestamps."""

    before = datetime.now(UTC).replace(tzinfo=None)
    result = utc_now()
    after = datetime.now(UTC).replace(tzinfo=None)

    assert result.tzinfo is None
    assert before <= result <= after
Binary file (×7, contents not shown)

File without changes
planar/testing/memory_storage.py
ADDED
@@ -0,0 +1,78 @@
import io
import uuid
from typing import AsyncGenerator, Dict, Tuple

from planar.files.storage.base import Storage
from planar.logging import get_logger

logger = get_logger(__name__)


class MemoryStorage(Storage):
    """Stores files and mime types entirely in memory."""

    def __init__(self):
        """Initializes MemoryStorage."""
        self._blobs: Dict[str, bytes] = {}
        self._mime_types: Dict[str, str] = {}

    async def put(
        self, stream: AsyncGenerator[bytes, None], mime_type: str | None = None
    ) -> str:
        """
        Stores a stream and its mime type in memory dictionaries.

        The storage reference returned is a unique UUID string.
        """
        ref = str(uuid.uuid4())
        buffer = io.BytesIO()
        try:
            async for chunk in stream:
                buffer.write(chunk)
            self._blobs[ref] = buffer.getvalue()
            if mime_type:
                self._mime_types[ref] = mime_type
            logger.debug("stored ref in memory", ref=ref)
            return ref
        except Exception as e:
            logger.exception("error during memory put operation", ref=ref)
            # Clean up if storage failed mid-way (though less likely in memory)
            self._blobs.pop(ref, None)
            self._mime_types.pop(ref, None)
            raise IOError(
                f"Failed to store file or mime type in memory for ref {ref}"
            ) from e

    async def get(self, ref: str) -> Tuple[AsyncGenerator[bytes, None], str | None]:
        """
        Retrieves a stream and its mime type from memory.
        """
        if ref not in self._blobs:
            raise FileNotFoundError(f"Storage reference not found in memory: {ref}")

        blob_data = self._blobs[ref]
        mime_type = self._mime_types.get(ref)

        async def _stream():
            # Yield the entire blob data as a single chunk for simplicity
            # Could be chunked if needed, but for memory storage, this is fine.
            yield blob_data

        logger.debug("retrieved ref from memory", ref=ref)
        return _stream(), mime_type

    async def delete(self, ref: str) -> None:
        """
        Deletes the blob data and mime type from memory. Idempotent.
        """
        blob_deleted = self._blobs.pop(ref, None) is not None
        mime_deleted = self._mime_types.pop(ref, None) is not None
        if blob_deleted or mime_deleted:
            logger.debug("deleted ref from memory", ref=ref)
        else:
            logger.debug("attempted to delete non-existent ref from memory", ref=ref)
        # No FileNotFoundError raised if ref doesn't exist, deletion is idempotent.

    async def external_url(self, ref: str) -> str | None:
        """Memory storage does not provide external URLs."""
        return None
planar/testing/planar_test_client.py
ADDED
@@ -0,0 +1,54 @@
import asyncio
from contextlib import asynccontextmanager

from httpx import ASGITransport, AsyncClient

from planar import PlanarApp
from planar.db import DatabaseManager
from planar.session import session_var
from planar.testing.workflow_observer import WorkflowObserver


class PlanarTestClient(AsyncClient):
    def __init__(self, app: PlanarApp):
        self.app = app
        super().__init__(
            base_url="http://testserver", transport=ASGITransport(app=app.fastapi)
        )


@asynccontextmanager
async def planar_test_client(
    app: PlanarApp, connection_string: str, observer: WorkflowObserver
):
    # Override the db manager with a new one that uses test database
    app.db_manager = DatabaseManager(connection_string)
    app.db_manager.connect()
    app.on_workflow_notification = observer.on_workflow_notification

    async with PlanarTestClient(app) as client:
        # run the app lifespan
        async with app._lifespan(app.fastapi):
            # Create a session and set it in the contextvar
            async with app.db_manager.get_session() as session:
                token = session_var.set(session)
                try:
                    yield client
                finally:
                    session_var.reset(token)
                    await wait_all_event_loop_tasks()


async def wait_all_event_loop_tasks():
    # Workaround prevent the event loop from exiting before aiosqlite
    # has a chance to cleanup its background threads:
    # Keep yielding back to the event loop until the only task left is this one
    current_task = asyncio.current_task()
    while True:
        other_tasks = [task for task in asyncio.all_tasks() if task is not current_task]
        if not other_tasks:
            break
        try:
            await asyncio.gather(*other_tasks)
        except asyncio.CancelledError:
            pass
planar/testing/synchronizable_tracer.py
ADDED
@@ -0,0 +1,153 @@
import asyncio
from dataclasses import dataclass
from typing import Any

from planar.workflows.tracing import LoggingTracer, Tracer


@dataclass(kw_only=True, frozen=True)
class TraceSpec:
    module_name: str | None = None
    function_name: str | None = None
    message: str | None = None
    kwargs: dict[str, Any] | None = None


def matches(
    spec: TraceSpec,
    module_name: str,
    function_name: str,
    message: str,
    kwargs: dict[str, Any],
) -> bool:
    if spec.module_name is not None and module_name != spec.module_name:
        return False
    if spec.function_name is not None and function_name != spec.function_name:
        return False
    if spec.message is not None and message != spec.message:
        return False
    if spec.kwargs is not None:
        for key, value in spec.kwargs.items():
            if key in kwargs and kwargs[key] != value:
                return False
    return True


class JoinTask:
    def __init__(self, task: asyncio.Task, signal: asyncio.Future):
        self.__task = task
        self.__signal = signal

    def start(self):
        if not self.__signal.done():
            self.__signal.set_result(None)
        return self.__task


class TraceController:
    def __init__(self, watcher: asyncio.Future, spec: TraceSpec):
        self.spec = spec
        self.watcher = watcher
        self.join_task: JoinTask | None = None
        self.__resume_trace = None
        self.__resumed = False
        self.__auto_resume = False

    async def wait(
        self, auto_resume: bool = True, timeout: float = 10, raise_timeout: bool = True
    ):
        try:
            self.__resume_trace = await asyncio.wait_for(self.watcher, timeout=timeout)
            if auto_resume or self.__auto_resume:
                self.resume()
        except asyncio.TimeoutError:
            formatted_trace = Tracer.format(
                module_name=self.spec.module_name or "(any)",
                function_name=self.spec.function_name or "(any)",
                task_name="(any)",
                pid=0,
                message=self.spec.message or "(any)",
                kwargs=self.spec.kwargs or {},
            )
            if raise_timeout:
                raise TimeoutError(f"timeout waiting for trace: {formatted_trace}")

    def resume(self):
        if self.__resumed:
            raise ValueError("resume called multiple times")
        if self.__resume_trace is None:
            self.__auto_resume = True
        else:
            self.__resume_trace.set_result(None)
            self.__resumed = True


class SynchronizableTracer(LoggingTracer):
    def __init__(self):
        self.timeout = 5
        self.races_detected = 0
        self.__controllers: list[TraceController] = []

    def instrument(self, spec: TraceSpec):
        future = asyncio.Future()
        controller = TraceController(future, spec)
        self.__controllers.append(controller)
        return controller

    def join(self, *trace_specs: TraceSpec):
        async def join(signal: asyncio.Future, controllers: list[TraceController]):
            await signal
            tasks = [
                asyncio.create_task(
                    controller.wait(
                        auto_resume=False, timeout=self.timeout * 2, raise_timeout=False
                    )
                )
                for controller in controllers
            ]
            done, _ = await asyncio.wait(tasks, timeout=self.timeout)
            for controller in controllers:
                controller.resume()
            if len(done) == len(controllers):
                self.races_detected += 1

        controllers = [self.instrument(spec) for spec in trace_specs]
        signal = asyncio.Future()
        task = JoinTask(asyncio.create_task(join(signal, controllers)), signal)
        for controller in controllers:
            controller.join_task = task

    async def trace(
        self,
        module_name: str,
        function_name: str,
        message: str,
        task_name: str,
        pid: int,
        kwargs: dict[str, Any],
    ):
        i = 0
        futures = []
        while i < len(self.__controllers):
            controller = self.__controllers[i]
            if not matches(
                controller.spec, module_name, function_name, message, kwargs
            ):
                i += 1
                continue
            self.__controllers.pop(i)
            future = asyncio.Future()
            try:
                controller.watcher.set_result(future)
            except asyncio.InvalidStateError:
                pass
            if controller.join_task:
                future = controller.join_task.start()
            futures.append(future)

        if futures:
            await asyncio.wait(futures, return_when=asyncio.ALL_COMPLETED)

        return await super().trace(
            module_name, function_name, message, task_name, pid, kwargs
        )
planar/testing/test_memory_storage.py
ADDED
@@ -0,0 +1,143 @@
import asyncio
import uuid

import pytest

from planar.testing.memory_storage import MemoryStorage


@pytest.fixture
async def storage() -> MemoryStorage:
    """Provides an instance of MemoryStorage."""
    return MemoryStorage()


async def test_put_get_bytes(storage: MemoryStorage):
    """Test storing and retrieving raw bytes."""
    test_data = b"some binary data \x00\xff for memory"
    mime_type = "application/octet-stream"

    ref = await storage.put_bytes(test_data, mime_type=mime_type)
    assert isinstance(ref, str)
    try:
        uuid.UUID(ref)  # Check if ref is a valid UUID string
    except ValueError:
        pytest.fail(f"Returned ref '{ref}' is not a valid UUID string")

    retrieved_data, retrieved_mime = await storage.get_bytes(ref)

    assert retrieved_data == test_data
    assert retrieved_mime == mime_type

    # Check internal state (optional)
    assert ref in storage._blobs
    assert ref in storage._mime_types
    assert storage._blobs[ref] == test_data
    assert storage._mime_types[ref] == mime_type


async def test_put_get_string(storage: MemoryStorage):
    """Test storing and retrieving a string."""
    test_string = "Hello, memory! This is a test string with Unicode: éàçü."
    mime_type = "text/plain"
    encoding = "utf-16"

    # Store with explicit encoding and mime type
    ref = await storage.put_string(test_string, encoding=encoding, mime_type=mime_type)
    expected_mime_type = f"{mime_type}; charset={encoding}"

    retrieved_string, retrieved_mime = await storage.get_string(ref, encoding=encoding)

    assert retrieved_string == test_string
    assert retrieved_mime == expected_mime_type

    # Test default encoding (utf-8)
    ref_utf8 = await storage.put_string(test_string, mime_type="text/html")
    expected_mime_utf8 = "text/html; charset=utf-8"
    retrieved_string_utf8, retrieved_mime_utf8 = await storage.get_string(ref_utf8)
    assert retrieved_string_utf8 == test_string
    assert retrieved_mime_utf8 == expected_mime_utf8


async def test_put_get_stream(storage: MemoryStorage):
    """Test storing data from an async generator stream."""
    test_chunks = [b"mem_chunk1 ", b"mem_chunk2 ", b"mem_chunk3"]
    full_data = b"".join(test_chunks)
    mime_type = "image/gif"

    async def _test_stream():
        for chunk in test_chunks:
            yield chunk
            await asyncio.sleep(0.01)  # Simulate async work

    ref = await storage.put(_test_stream(), mime_type=mime_type)

    stream, retrieved_mime = await storage.get(ref)
    retrieved_data = b""
    async for chunk in stream:
        retrieved_data += chunk

    assert retrieved_data == full_data
    assert retrieved_mime == mime_type


async def test_put_no_mime_type(storage: MemoryStorage):
    """Test storing data without providing a mime type."""
    test_data = b"memory data without mime"

    ref = await storage.put_bytes(test_data)
    retrieved_data, retrieved_mime = await storage.get_bytes(ref)

    assert retrieved_data == test_data
    assert retrieved_mime is None

    # Check internal state
    assert ref in storage._blobs
    assert ref not in storage._mime_types


async def test_delete(storage: MemoryStorage):
    """Test deleting stored data."""
    ref = await storage.put_bytes(b"to be deleted from memory", mime_type="text/plain")

    # Verify data exists before delete (optional)
    assert ref in storage._blobs
    assert ref in storage._mime_types

    await storage.delete(ref)

    # Verify data is gone after delete
    assert ref not in storage._blobs
    assert ref not in storage._mime_types

    # Try getting deleted ref
    with pytest.raises(FileNotFoundError):
        await storage.get(ref)


async def test_get_non_existent(storage: MemoryStorage):
    """Test getting a reference that does not exist."""
    non_existent_ref = str(uuid.uuid4())
    with pytest.raises(FileNotFoundError):
        await storage.get(non_existent_ref)


async def test_delete_non_existent(storage: MemoryStorage):
    """Test deleting a reference that does not exist (should not raise error)."""
    non_existent_ref = str(uuid.uuid4())
    initial_blob_count = len(storage._blobs)
    initial_mime_count = len(storage._mime_types)
    try:
        await storage.delete(non_existent_ref)
        # Ensure no data was actually deleted
        assert len(storage._blobs) == initial_blob_count
        assert len(storage._mime_types) == initial_mime_count
    except Exception as e:
        pytest.fail(f"Deleting non-existent ref raised an exception: {e}")


async def test_external_url(storage: MemoryStorage):
    """Test that external_url returns None for memory storage."""
    ref = await storage.put_bytes(b"some data for url test")
    url = await storage.external_url(ref)
    assert url is None
planar/testing/workflow_observer.py
ADDED
@@ -0,0 +1,73 @@
from asyncio import Future, wait_for
from collections import defaultdict
from uuid import UUID

from planar.workflows import Workflow, WorkflowNotification
from planar.workflows.models import WorkflowStep


class WorkflowObserver:
    def __init__(self):
        # Scope notification queues and waiters by workflow_id

        self.notification_queues: defaultdict[UUID, list[WorkflowNotification]] = (
            defaultdict(list)
        )
        self.waiters: dict[UUID, Future[None]] = {}
        self.timeout = 10

    def _get_workflow_id_from_notification(
        self, notification: WorkflowNotification
    ) -> UUID:
        """Extract workflow_id from notification data"""
        if isinstance(notification.data, Workflow):
            return notification.data.id
        else:
            return notification.data.workflow_id

    def on_workflow_notification(self, notification: WorkflowNotification):
        workflow_id = UUID(str(self._get_workflow_id_from_notification(notification)))

        # Add to the appropriate workflow's queue
        self.notification_queues[workflow_id].append(notification)

        # Wake up any waiter for this workflow
        waiter = self.waiters.get(workflow_id)
        if waiter is not None:
            waiter.set_result(None)
            del self.waiters[workflow_id]

    async def wait(
        self, kind: str, workflow_id: UUID, step_id: int | None = None
    ) -> WorkflowNotification:
        workflow_id = UUID(str(workflow_id))  # ensure workflow_id is an UUID instance

        while True:
            matched = None
            queue = self.notification_queues[workflow_id]
            for i, notification in enumerate(queue):
                if notification.kind == kind:
                    # Only check step_id filter for non-Workflow notifications
                    if isinstance(notification.data, WorkflowStep):
                        if step_id is not None and notification.data.step_id != step_id:
                            continue
                    matched = i
                    break
            if matched is not None:
                notification = queue[matched]
                # Prune all previous notifications from this workflow's queue
                self.notification_queues[workflow_id] = queue[matched + 1 :]
                return notification
            # notification hasn't arrived yet, lets create a future and sleep until
            # on_workflow_notification is called
            assert workflow_id not in self.waiters, (
                f"waiter for workflow {workflow_id} should not be present"
            )
            waiter = Future()
            self.waiters[workflow_id] = waiter
            try:
                await wait_for(waiter, timeout=self.timeout)
            except TimeoutError:
                assert False, (
                    f"Timeout waiting for notification {kind} with workflow_id={workflow_id} and step_id={step_id}"
                )
planar/utils.py
ADDED
@@ -0,0 +1,70 @@
import asyncio
import functools
import inspect
import random
from datetime import UTC, datetime
from typing import Any, Callable, Coroutine, ParamSpec, TypeVar

from inflection import pluralize, underscore


def snake_case_to_camel_case(snake_case: str) -> str:
    return "".join(word.capitalize() for word in snake_case.split("_"))


def create_path_prefix(model_name: str) -> str:
    """
    Create a URL path prefix from a model name.

    Example: 'Supplier' -> 'suppliers'
    """
    return f"{pluralize(underscore(model_name))}"


P = ParamSpec("P")
T = TypeVar("T")
U = TypeVar("U")
R = TypeVar("R")


def asyncify(
    func: Callable[P, R],
) -> Callable[P, Coroutine[Any, Any, R]]:
    if inspect.iscoroutinefunction(func):
        raise ValueError("Function is already async")

    @functools.wraps(func)
    async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        return await asyncio.to_thread(func, *args, **kwargs)

    return wrapper


def utc_now() -> datetime:
    return datetime.now(UTC).replace(tzinfo=None)


def exponential_backoff_with_jitter(
    attempt: int, base_delay: int = 1, max_delay: int = 60, jitter_factor=0.1
):
    """
    Calculate exponential backoff delay with random jitter.

    Args:
        attempt: Current attempt number (starts at 0)
        base_delay: Initial delay in seconds (default: 1)
        max_delay: Maximum delay in seconds (default: 60)
        jitter_factor: Fraction of delay to use for jitter (default: 0.1)

    Returns:
        Delay in seconds with jitter applied. Minimum possible delay is 1
        second.
    """
    # Calculate exponential backoff: base_delay * 2^attempt
    delay = min(base_delay * (2**attempt), max_delay)

    # Add random jitter: ±jitter_factor * delay
    jitter = delay * jitter_factor
    actual_delay = delay + random.uniform(-jitter, jitter)

    return max(1, actual_delay)  # Ensure at least 1 second