planar 0.9.1__py3-none-any.whl → 0.9.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
planar/ai/agent.py CHANGED
@@ -289,6 +289,7 @@ class Agent[
  tool_result = await self.as_step_if_durable(
      tool_fn,
      step_type=StepType.TOOL_CALL,
+     display_name=tool_call.name,
  )(**tool_call.arguments)
  logger.info(
      "tool executed by agent",
planar/ai/agent_base.py CHANGED
@@ -12,6 +12,7 @@ from typing import (
  )

  from pydantic import BaseModel
+ from pydantic_ai.settings import ModelSettings

  from planar.ai.models import AgentConfig, AgentEventEmitter, AgentRunResult
  from planar.logging import get_logger
@@ -38,7 +39,10 @@ class AgentBase[
  user_prompt: str = ""
  tools: list[Callable] = field(default_factory=list)
  max_turns: int = 2
- model_parameters: dict[str, Any] = field(default_factory=dict)
+ # `ModelSettings` is a TypedDict; use a typed empty dict as default
+ model_parameters: ModelSettings = field(
+     default_factory=lambda: cast(ModelSettings, {})
+ )
  event_emitter: AgentEventEmitter | None = None
  durable: bool = True
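The change above swaps the untyped `dict[str, Any]` default for a `ModelSettings`-typed empty dict. Because `ModelSettings` is a TypedDict, an empty `{}` literal must be `cast` to satisfy the type checker. A minimal sketch of the pattern, assuming pydantic_ai is installed; `ExampleAgent` is a hypothetical stand-in for `AgentBase`, only the `model_parameters` field mirrors the diff:

```python
# Minimal sketch of the TypedDict-default pattern; ExampleAgent is hypothetical.
from dataclasses import dataclass, field
from typing import cast

from pydantic_ai.settings import ModelSettings


@dataclass
class ExampleAgent:
    # An empty dict literal is not inferred as ModelSettings, so cast() gives
    # the default the right TypedDict type.
    model_parameters: ModelSettings = field(
        default_factory=lambda: cast(ModelSettings, {})
    )


agent = ExampleAgent()
agent.model_parameters["temperature"] = 0.2  # keys are defined by ModelSettings
```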
planar/ai/agent_utils.py CHANGED
@@ -1,13 +1,10 @@
  import inspect
- from typing import (
-     Any,
-     Callable,
-     Dict,
- )
+ from typing import Any, Callable, Dict, cast

  from jinja2 import StrictUndefined, TemplateError
  from jinja2.sandbox import SandboxedEnvironment
- from pydantic import BaseModel, create_model
+ from pydantic import BaseModel, Field, create_model
+ from pydantic_ai.settings import ModelSettings

  from planar.ai.models import (
      AgentConfig,
@@ -24,8 +21,9 @@ logger = get_logger(__name__)
  class ModelSpec(BaseModel):
      """Pydantic model for AI model specifications."""

+     model_config = {"arbitrary_types_allowed": True}
      model_id: str
-     parameters: dict[str, Any] = {}
+     parameters: ModelSettings = Field(default_factory=lambda: cast(ModelSettings, {}))


  def extract_files_from_model(
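With `parameters` now typed as `ModelSettings` and `arbitrary_types_allowed` enabled, a `ModelSpec` can carry provider settings that are checked against the TypedDict keys. A hedged usage sketch; the model id string and the chosen settings are only examples, not values taken from the package:

```python
# Hedged usage sketch of the updated ModelSpec; assumes planar and
# pydantic_ai are installed. The model id is an arbitrary example.
from planar.ai.agent_utils import ModelSpec

spec = ModelSpec(
    model_id="openai:gpt-4o-mini",
    parameters={"temperature": 0.0, "max_tokens": 256},  # ModelSettings keys
)
print(spec.model_dump())
```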
planar/ai/models.py CHANGED
@@ -1,5 +1,3 @@
- from __future__ import annotations
-
  from enum import Enum
  from typing import (
      Annotated,
@@ -11,9 +9,11 @@ from typing import (
      Protocol,
      TypeVar,
      Union,
+     cast,
  )

  from pydantic import BaseModel, Field
+ from pydantic_ai.settings import ModelSettings

  from planar.files.models import PlanarFile
  from planar.modeling.field_helpers import JsonSchema
@@ -29,11 +29,17 @@ T = TypeVar("T", bound=Union[str, BaseModel])
  # This model allows storing configurations that override the default
  # settings defined in Agent instances.
  class AgentConfig(BaseModel):
+     # ModelSettings TypedDict has some fields that use non-serializable types
+     # so we need to allow arbitrary types
+     model_config = {"arbitrary_types_allowed": True}
      system_prompt: str
      user_prompt: str = Field()
      model: str = Field()
      max_turns: int = Field()
-     model_parameters: Dict[str, Any] = Field(default_factory=dict)
+     # `ModelSettings` is a TypedDict; use a typed empty dict as default
+     model_parameters: ModelSettings = Field(
+         default_factory=lambda: cast(ModelSettings, {})
+     )


  class ToolDefinition(BaseModel):
planar/ai/pydantic_ai.py CHANGED
@@ -384,7 +384,7 @@ class ModelRunResponse[TOutput: BaseModel | str](BaseModel):
  async def model_run[TOutput: BaseModel | str](
      model: Model | KnownModelName,
      max_extra_turns: int,
-     model_settings: dict[str, Any] | None = None,
+     model_settings: ModelSettings | None = None,
      messages: list[m.ModelMessage] = [],
      tools: list[m.ToolDefinition] = [],
      event_handler: m.AgentEventEmitter | None = None,
@@ -443,7 +443,7 @@ async def model_run[TOutput: BaseModel | str](
      model=model,
      messages=history,
      model_request_parameters=request_params,
-     model_settings=cast(ModelSettings, model_settings),
+     model_settings=model_settings,
  ) as stream:
      async for event in stream:
          match event:
planar/ai/test_agent_tool_step_display.py ADDED
@@ -0,0 +1,78 @@
+ import os
+ from unittest.mock import patch
+
+ from sqlmodel import col, select
+
+ from planar.ai import models as m
+ from planar.ai.agent import Agent
+ from planar.ai.pydantic_ai import ModelRunResponse
+ from planar.workflows.decorators import workflow
+ from planar.workflows.execution import execute
+ from planar.workflows.models import StepType, WorkflowStep
+
+
+ async def test_agent_tool_step_has_display_name(session):
+     async def add(a: int, b: int) -> int:
+         return a + b
+
+     # Prepare mocked model responses: first triggers a tool call, then returns final content
+     first = ModelRunResponse[str](
+         response=m.CompletionResponse[str](
+             content=None,
+             tool_calls=[
+                 m.ToolCall(id="call_1", name="add", arguments={"a": 2, "b": 3})
+             ],
+             text_content="",
+             reasoning_content=None,
+         ),
+         extra_turns_used=0,
+     )
+     second = ModelRunResponse[str](
+         response=m.CompletionResponse[str](
+             content="5",
+             tool_calls=[],
+             text_content="5",
+             reasoning_content=None,
+         ),
+         extra_turns_used=0,
+     )
+
+     responses = [first, second]
+
+     async def fake_model_run(*args, **kwargs):
+         assert responses, "No more fake responses configured"
+         return responses.pop(0)
+
+     # Patch the model run to avoid any network/model dependency
+     # Use unittest.mock.patch context managers to ensure cleanup
+     with (
+         patch.dict(os.environ, {"OPENAI_API_KEY": "test-key"}, clear=False),
+         patch("planar.ai.agent.model_run", side_effect=fake_model_run),
+     ):
+         agent = Agent[str, str](
+             name="test_agent",
+             system_prompt="",
+             user_prompt="",
+             model="openai:gpt-4o-mini",
+             tools=[add],
+             max_turns=3,
+         )
+
+         @workflow()
+         async def run_agent():
+             result = await agent("please add")
+             return result.output
+
+         wf = await run_agent.start()
+         result = await execute(wf)
+         assert result == "5"
+
+         steps = (
+             await session.exec(select(WorkflowStep).order_by(col(WorkflowStep.step_id)))
+         ).all()
+         # Ensure there is a tool call step with the display name set to the tool name
+         tool_steps = [s for s in steps if s.step_type == StepType.TOOL_CALL]
+         assert tool_steps, "Expected at least one TOOL_CALL step recorded"
+         assert any(s.display_name == "add" for s in tool_steps), (
+             f"Expected a TOOL_CALL step with display_name 'add', got {[s.display_name for s in tool_steps]}"
+         )
planar/cli.py CHANGED
@@ -175,9 +175,17 @@ def run_command(
 
  try:
      result = subprocess.run(
-         ["uv", "run", str(app_path)], env=os.environ.copy(), check=True
+         ["uv", "run", str(app_path)], env=os.environ.copy(), check=False
      )
+     if result.returncode != 0:
+         typer.echo(
+             f"Error running script: Process exited with code {result.returncode}",
+             err=True,
+         )
      raise typer.Exit(code=result.returncode)
+ except typer.Exit:
+     # Re-raise typer.Exit without modification
+     raise
  except subprocess.CalledProcessError as e:
      typer.echo(f"Error running script: {e}", err=True)
      raise typer.Exit(code=e.returncode)
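The CLI now runs the script with `check=False` and inspects `returncode` itself, so a failing app exits the CLI with the same code instead of surfacing a `CalledProcessError` traceback. A minimal sketch of the pattern; `run_child` and its argument are hypothetical, the error-reporting flow mirrors the diff:

```python
# Minimal sketch of the check=False pattern used in run_command above.
import subprocess

import typer


def run_child(cmd: list[str]) -> None:
    result = subprocess.run(cmd, check=False)  # never raises on non-zero exit
    if result.returncode != 0:
        typer.echo(
            f"Error running script: Process exited with code {result.returncode}",
            err=True,
        )
    # typer.Exit propagates the child's exit code to the shell
    raise typer.Exit(code=result.returncode)
```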
planar/db/alembic/env.py CHANGED
@@ -41,6 +41,13 @@ def run_migrations_offline() -> None:
  )


+ def include_name(name, type_, _):
+     if type_ == "schema":
+         return name == PLANAR_SCHEMA
+     else:
+         return True
+
+
  def run_migrations_online() -> None:
      """Run migrations in 'online' mode.

@@ -60,8 +67,8 @@ def run_migrations_online() -> None:
      target_metadata=target_metadata,
      # For SQLite, don't use schema since it's not supported
      version_table_schema=None if is_sqlite else PLANAR_SCHEMA,
-     include_schemas=not is_sqlite,
-     compare_type=True,
+     include_schemas=True,
+     include_name=include_name,
      # SQLite doesn't support alter table, so we need to use render_as_batch
      # to create the tables in a single transaction. For other databases,
      # the batch op is no-op.
@@ -95,7 +102,7 @@ def run_migrations_online() -> None:
      config_dict = config.get_section(config.config_ini_section, {})
      url = config_dict["sqlalchemy.url"]
      is_sqlite = url.startswith("sqlite://")
-     translate_map = {"planar": None} if is_sqlite else {}
+     translate_map = {PLANAR_SCHEMA: None} if is_sqlite else {}
      connectable = engine_from_config(
          config_dict,
          prefix="sqlalchemy.",
@@ -110,15 +117,15 @@ def run_migrations_online() -> None:
      with connectable.connect() as connection:
          is_sqlite = connection.dialect.name == "sqlite"
          if is_sqlite:
-             connection.dialect.default_schema_name = "planar"
+             connection.dialect.default_schema_name = PLANAR_SCHEMA

          context.configure(
              connection=connection,
              target_metadata=target_metadata,
              # For SQLite, don't use schema since it's not supported
              version_table_schema=None if is_sqlite else PLANAR_SCHEMA,
-             include_schemas=not is_sqlite,
-             compare_type=True,
+             include_schemas=True,
+             include_name=include_name,
              # SQLite doesn't support alter table, so we need to use render_as_batch
              # to create the tables in a single transaction. For other databases,
              # the batch op is no-op.
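With `include_schemas=True`, Alembic autogenerate would normally scan every schema on the connection; the new `include_name` hook restricts reflection to the `planar` schema on both the offline and online paths. A minimal sketch of how the hook plugs into `context.configure`, assuming `PLANAR_SCHEMA = "planar"` as in `planar.db` and an Alembic `env.py` context:

```python
# Minimal sketch of the include_name filter; runs inside an Alembic env.py.
from alembic import context

PLANAR_SCHEMA = "planar"


def include_name(name, type_, _):
    # Only consider the planar schema; let all other object types through.
    if type_ == "schema":
        return name == PLANAR_SCHEMA
    return True


def configure(connection, target_metadata):
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        include_schemas=True,
        include_name=include_name,
    )
```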
planar/db/alembic/script.py.mako CHANGED
@@ -5,12 +5,12 @@ Revises: ${down_revision | comma,n}
  Create Date: ${create_date}

  """
+
  from typing import Sequence, Union

- from alembic import op
  import sqlalchemy as sa
  import sqlmodel.sql.sqltypes
- import planar.object_config.models
+ from alembic import op
  ${imports if imports else ""}

  # revision identifiers, used by Alembic.
planar/db/alembic/versions/8855a78a408f_message_step_type.py ADDED
@@ -0,0 +1,30 @@
+ """Add MESSAGE to step_type enum
+
+ Revision ID: 8855a78a408f
+ Revises: 3476068c153c
+ Create Date: 2025-09-16 16:19:25.917861
+
+ """
+
+ from typing import Sequence, Union
+
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision: str = "8855a78a408f"
+ down_revision: Union[str, None] = "3476068c153c"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+     if op.get_context().dialect.name != "sqlite":
+         op.execute("ALTER TYPE steptype ADD VALUE 'MESSAGE'")
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     # Rolling this back would require updating any MESSAGE `WorkflowStep` rows to a different
+     # step type or deleting them before running a migration to drop the MESSAGE value.
+     pass
+     # ### end Alembic commands ###
planar/db/alembic.ini CHANGED
@@ -62,12 +62,12 @@ version_path_separator = os
  # output_encoding = utf-8

  # Development database for generating system migrations
- # It's safer for us to use a local postgres database for generating and testing migrations rather than sqlite,
+ # It's safer for us to use a local postgres database for generating and testing migrations rather than sqlite,
  # to be sure they'll work in production deployments.
- # Using postgres as the dev database for autogenerating revisions also is better because
- # we don't have the weird schema issues of Sqlite. Alembic doesn't fully support `schema_translate_map`
- # feature in SA that we use to remap `planar`->None in SQLite (due to it not supporting schemas),
- # so it sometimes incorrectly thinks it needs to re-generate things (like indices) that already
+ # Using postgres as the dev database for autogenerating revisions also is better because
+ # we don't have the weird schema issues of Sqlite. Alembic doesn't fully support `schema_translate_map`
+ # feature in SA that we use to remap `planar`->None in SQLite (due to it not supporting schemas),
+ # so it sometimes incorrectly thinks it needs to re-generate things (like indices) that already
  # exist in the database from a prior migration. Using postgres obviates that issue.
  # https://github.com/sqlalchemy/alembic/issues/555
  sqlalchemy.url = postgresql+psycopg2://postgres:postgres@localhost:5432/postgres
planar/files/__init__.py CHANGED
@@ -1,2 +1,5 @@
  from .models import PlanarFile, PlanarFileMetadata  # noqa: F401
  from .storage.context import get_storage  # noqa: F401
+
+ # re-export PlanarFile
+ __all__ = ["PlanarFile", "PlanarFileMetadata", "get_storage"]
planar/py.typed ADDED
File without changes
planar/scaffold_templates/main.py.j2 CHANGED
@@ -10,4 +10,9 @@ app = (
      .register_entity(Invoice)
      .register_workflow(process_invoice)
      .register_agent(invoice_agent)
- )
+ )
+
+
+ if __name__ == "__main__":
+     print("Planar app is ready!")
+     exit(0)
planar/testing/fixtures.py CHANGED
@@ -132,7 +132,7 @@ def tmp_postgresql_container():
      "--name",
      container_name,
      "-e",
-     "POSTGRES_PASSWORD=123",
+     "POSTGRES_PASSWORD=postgres",
      "-p",
      "127.0.0.1:5432:5432",
      "docker.io/library/postgres",
@@ -205,7 +205,7 @@ def tmp_postgresql_url(request):
  if process.returncode != 0:
      raise Exception("Failed to create database")

- url = f"postgresql+asyncpg://postgres:123@127.0.0.1:5432/{db_name}"
+ url = f"postgresql+asyncpg://postgres:postgres@127.0.0.1:5432/{db_name}"

  try:
      yield url
planar/utils.py CHANGED
@@ -108,3 +108,20 @@ def partition[T](
          false_items.append(item)

      return false_items, true_items
+
+
+ def one_or_raise[T](iterable: Iterable[T]) -> T:
+     """Extract the single element from an iterable or raise an exception."""
+     iterator = iter(iterable)
+     try:
+         value = next(iterator)
+     except StopIteration:
+         raise ValueError("Expected exactly one element, but iterable is empty")
+
+     try:
+         next(iterator)
+         raise ValueError(
+             "Expected exactly one element, but iterable contains multiple elements"
+         )
+     except StopIteration:
+         return value
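The new `one_or_raise` helper unwraps an iterable that is expected to contain exactly one element and raises `ValueError` otherwise. Its expected behaviour, as a short sketch:

```python
# Expected behaviour of planar.utils.one_or_raise (added above).
from planar.utils import one_or_raise

assert one_or_raise([42]) == 42
# one_or_raise([])      -> ValueError: iterable is empty
# one_or_raise([1, 2])  -> ValueError: iterable contains multiple elements
```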
planar/workflows/contrib.py CHANGED
@@ -2,12 +2,15 @@ from datetime import datetime, timedelta
  from functools import wraps
  from typing import Any, Callable, Coroutine, Dict

+ from pydantic.main import BaseModel
+
  from planar.logging import get_logger
  from planar.session import get_session
  from planar.utils import P, T, U, utc_now
  from planar.workflows import step
  from planar.workflows.context import get_context
  from planar.workflows.events import check_event_exists, get_latest_event
+ from planar.workflows.models import StepType
  from planar.workflows.step_core import Suspend, suspend_workflow
  from planar.workflows.tracing import trace

@@ -19,6 +22,11 @@ async def get_deadline(max_wait_time: float) -> datetime:
      return utc_now() + timedelta(seconds=max_wait_time)


+ @step(step_type=StepType.MESSAGE)
+ async def message(message: str | BaseModel):
+     pass
+
+
  @step(display_name="Wait for event")
  async def wait_for_event(
      event_key: str,
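The new `message` step is a no-op whose only effect is the durable record: calling it inside a workflow persists a `WorkflowStep` of type `MESSAGE` with the payload captured in its `args` (see the `test_message` case added to `planar/workflows/test_workflow.py` further down). A hedged usage sketch; the workflow and the `Progress` model are hypothetical, the imports mirror the tests in this diff:

```python
# Hedged usage sketch of the new message step.
from pydantic import BaseModel

from planar.workflows.contrib import message
from planar.workflows.decorators import workflow


class Progress(BaseModel):
    stage: str


@workflow()
async def annotated_workflow():
    await message("starting")                  # recorded as a MESSAGE step
    await message(Progress(stage="halfway"))   # BaseModel payloads work too
    return "done"
```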
planar/workflows/models.py CHANGED
@@ -30,6 +30,7 @@ class StepType(str, Enum):
      RULE = "rule"
      HUMAN_IN_THE_LOOP = "human_in_the_loop"
      TOOL_CALL = "tool_call"
+     MESSAGE = "message"


  class WorkflowStatus(str, Enum):
planar/workflows/test_workflow.py CHANGED
@@ -14,7 +14,8 @@ from sqlmodel.ext.asyncio.session import AsyncSession
  from planar.session import get_session
  from planar.testing.workflow_observer import WorkflowObserver
- from planar.utils import utc_now
+ from planar.utils import one_or_raise, utc_now
+ from planar.workflows.contrib import message
  from planar.workflows.decorators import (
      __AS_STEP_CACHE,
      __is_workflow_step,
@@ -1965,3 +1966,40 @@ async def test_child_workflow_called_as_start_step(session: AsyncSession):
      assert child_wf.parent_id is None
      assert child_wf.status == WorkflowStatus.SUCCEEDED
      assert child_wf.result == "child_result"
+
+
+ # =============================================================================
+ # Test for message steps
+ # =============================================================================
+ class Example(BaseModel):
+     id: int
+     msg: str
+
+
+ @pytest.mark.parametrize("input", ["hello", Example(id=1, msg="hello")])
+ async def test_message(session: AsyncSession, input: str | BaseModel):
+     @workflow()
+     async def msg_workflow(msg: str | BaseModel):
+         await message(msg)
+
+     async with WorkflowOrchestrator.ensure_started() as orchestrator:
+         wf = await msg_workflow.start(input)
+         await orchestrator.wait_for_completion(wf.id)
+
+     await session.refresh(wf)
+     steps = (
+         await session.exec(
+             select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
+         )
+     ).all()
+
+     step = one_or_raise(steps)
+     # We recorded a single `WorkflowStep` of type `MESSAGE` to the DB.
+     assert step.status is StepStatus.SUCCEEDED
+     assert step.step_type is StepType.MESSAGE
+     if isinstance(input, str):
+         assert step.args == [input]
+     else:
+         assert step.args == [input.model_dump()]
+     assert not step.kwargs
+     assert step.result is None
@@ -1,9 +1,8 @@
  Metadata-Version: 2.4
  Name: planar
- Version: 0.9.1
+ Version: 0.9.3
  Summary: Add your description here
  License-Expression: LicenseRef-Proprietary
- Requires-Python: >=3.12
  Requires-Dist: aiofiles>=24.1.0
  Requires-Dist: aiosqlite>=0.21.0
  Requires-Dist: alembic>=1.14.1
@@ -23,19 +22,20 @@ Requires-Dist: sqlmodel>=0.0.22
  Requires-Dist: typer>=0.15.2
  Requires-Dist: typing-extensions>=4.12.2
  Requires-Dist: zen-engine>=0.40.0
+ Requires-Dist: azure-storage-blob>=12.19.0 ; extra == 'azure'
+ Requires-Dist: azure-identity>=1.15.0 ; extra == 'azure'
+ Requires-Dist: aiohttp>=3.8.0 ; extra == 'azure'
+ Requires-Dist: ducklake>=0.1.1 ; extra == 'data'
+ Requires-Dist: ibis-framework[duckdb]>=10.8.0 ; extra == 'data'
+ Requires-Dist: polars>=1.31.0 ; extra == 'data'
+ Requires-Dist: opentelemetry-api>=1.34.1 ; extra == 'otel'
+ Requires-Dist: opentelemetry-exporter-otlp>=1.34.1 ; extra == 'otel'
+ Requires-Dist: opentelemetry-instrumentation-logging>=0.55b1 ; extra == 'otel'
+ Requires-Dist: opentelemetry-sdk>=1.34.1 ; extra == 'otel'
+ Requires-Python: >=3.12
  Provides-Extra: azure
- Requires-Dist: aiohttp>=3.8.0; extra == 'azure'
- Requires-Dist: azure-identity>=1.15.0; extra == 'azure'
- Requires-Dist: azure-storage-blob>=12.19.0; extra == 'azure'
  Provides-Extra: data
- Requires-Dist: ducklake>=0.1.1; extra == 'data'
- Requires-Dist: ibis-framework[duckdb]>=10.8.0; extra == 'data'
- Requires-Dist: polars>=1.31.0; extra == 'data'
  Provides-Extra: otel
- Requires-Dist: opentelemetry-api>=1.34.1; extra == 'otel'
- Requires-Dist: opentelemetry-exporter-otlp>=1.34.1; extra == 'otel'
- Requires-Dist: opentelemetry-instrumentation-logging>=0.55b1; extra == 'otel'
- Requires-Dist: opentelemetry-sdk>=1.34.1; extra == 'otel'
  Description-Content-Type: text/markdown

  # Planar
@@ -55,21 +55,22 @@ The workflow system in Planar is a sophisticated orchestration framework that en
  1. Core Concept: Implements a durable workflow system that can survive process restarts by storing workflow state in a database. It allows workflows to
  be suspended and resumed.
  2. Key Features:
-   - Persistent Steps: Each step in a workflow is tracked in the database
-   - Automatic Retries: Failed steps can be retried automatically
-   - Suspendable Workflows: Workflows can be suspended and resumed later
-   - Concurrency Control: Uses a locking mechanism to prevent multiple executions
-   - Recovery: Can recover from crashes by detecting stalled workflows
+   - Persistent Steps: Each step in a workflow is tracked in the database
+   - Automatic Retries: Failed steps can be retried automatically
+   - Suspendable Workflows: Workflows can be suspended and resumed later
+   - Concurrency Control: Uses a locking mechanism to prevent multiple executions
+   - Recovery: Can recover from crashes by detecting stalled workflows
  3. Main Components:
-   - `@workflow` decorator: Marks a function as a workflow with persistence
-   - `@step` decorator: Wraps function calls inside a workflow to make them resumable
-   - Suspend class: Allows pausing workflow execution
-   - workflow_orchestrator: Background task that finds and resumes suspended workflows
+   - `@workflow` decorator: Marks a function as a workflow with persistence
+   - `@step` decorator: Wraps function calls inside a workflow to make them resumable
+   - Suspend class: Allows pausing workflow execution
+   - workflow_orchestrator: Background task that finds and resumes suspended workflows
  4. REST API Integration:
-   - Automatically creates API endpoints for starting workflows
-   - Provides status endpoints to check workflow progress
-   This is essentially a state machine for managing long-running business processes that need to be resilient to failures and can span multiple
-   requests/processes.
+   - Automatically creates API endpoints for starting workflows
+   - Provides status endpoints to check workflow progress
+
+   This is essentially a state machine for managing long-running business processes that need to be resilient to failures and can span multiple
+   requests/processes.

  ### Coroutines and the suspension mechanism
  Coroutines are the heart of Planar's workflow system. Here's how they work:
@@ -278,7 +279,7 @@ We use pytest for testing Planar:
  To test with PostgreSQL locally, you'll need a PostgreSQL container running:

  ```bash
- docker run --restart=always --name planar-postgres -e POSTGRES_PASSWORD=123 -p 127.0.0.1:5432:5432 -d docker.io/library/postgres
+ docker run --restart=always --name planar-postgres -e POSTGRES_PASSWORD=postgres -p 127.0.0.1:5432:5432 -d docker.io/library/postgres
  ```

  Ensure the container name is `planar-postgres`.
@@ -320,5 +321,3 @@ To install cairo, run the following command:
  brew install cairo libffi pkg-config
  export DYLD_FALLBACK_LIBRARY_PATH="/opt/homebrew/lib:${DYLD_FALLBACK_LIBRARY_PATH}"
  ```
-
-
@@ -1,43 +1,31 @@
  planar/__init__.py,sha256=FAYRGjuJOH2Y_XYFA0-BrRFjuKdPzIShNbaYwJbtu6A,499
- planar/_version.py,sha256=MDn0Ro0DvGxuAuRTGL8IBqcm5nbo1P640CIS7xBBu2k,18
- planar/app.py,sha256=VEs4jDlcisyOy9I9zEGMG_-Qm8ULKT36CSHjqrYit3o,18491
- planar/cli.py,sha256=2ObR5XkLGbdbnDqp5mrBzDVhSacHCNsVNSHnXkrMQzQ,9593
- planar/config.py,sha256=6J42G9rEVUiOyCAY3EwUTU3PPmWthGTnrHMzST9TMcc,17809
- planar/dependencies.py,sha256=PH78fGk3bQfGnz-AphxH49307Y0XVgl3EY0LdGJnoik,1008
- planar/object_registry.py,sha256=RMleX5XE8OKDxlnMeyLpJ1Y280duub-tx1smR1zTlDg,3219
- planar/registry_items.py,sha256=UhZRIpbSoa_CV9OTl17pJfRLxItYp4Pxd9f5ZbJkGaM,2055
- planar/session.py,sha256=xLS9WPvaiy9nr2Olju1-C-7_sU5VXK8RuNdjuKndul4,1020
- planar/task_local.py,sha256=pyvT0bdzAn15HL2yQUs9YrU5MVXh9njQt9MH51AGljs,1102
- planar/test_app.py,sha256=5dYhOW6lRbAx2X270DfqktkJ5IfuqfowX6bwxM1WQAM,4865
- planar/test_cli.py,sha256=faR6CSuooHHyyB5Yt-p8CIr7mGtKrrU2TLQbc4Oe9bA,13834
- planar/test_config.py,sha256=HcmDu1nwKZZhzHQLGVyP9oxje-_g_XubEsvzRj28QPg,14328
- planar/test_object_config.py,sha256=izn4s2HmSDWpGtgpOTDmKeUYN2-63WDR1QtVQrT-x00,20135
- planar/test_object_registry.py,sha256=R7IwbB2GACm2HUuVZTeVY4V12XB9_JgSSeppPxiCdfs,480
- planar/test_sqlalchemy.py,sha256=QTloaipWiFmlLTBGH6YCRkwi1R27gmQZnwprO7lPLfU,7058
- planar/test_utils.py,sha256=gKenXotj36SN_bb3bQpYPfD8t06IjnGBQqEgWpujHcA,3086
- planar/utils.py,sha256=v7q9AJyWgQWl9VPSN_0qxw3rBvYe-_Pb_KcwqSsjOFU,3103
  planar/ai/__init__.py,sha256=ABOKvqQOLlVJkptcvXcuLjVZZWEsK8h-1RyFGK7kib8,231
- planar/ai/agent.py,sha256=flgHU00LRT-UcP0TjMqDigi2jwWq6UoMpmCZSOTyyB0,12428
- planar/ai/agent_base.py,sha256=iOOiUwbTiqckrZ-ZtlpkPCjSNE117gMwxrdgegO-P-0,5303
- planar/ai/agent_utils.py,sha256=Yug1lt3uT7zLJ0X9uUBpKEomxucKaZiEUBIcf-RZILo,4052
- planar/ai/models.py,sha256=aH61vkHJEhmupvGJHS87Nv7bpCpcfBJDO-N8k3k2ixc,4292
- planar/ai/pydantic_ai.py,sha256=lYWtnIclOLRiEpBJi5r6Ey8gDBVlQIHTFa3iEzUNqWY,23525
+ planar/ai/agent.py,sha256=5U2dKIr_vy8ItLaj91uSbVX90DIy1OGRBzLbMyk2gbQ,12481
+ planar/ai/agent_base.py,sha256=rdK5ExCpkPf5sdVy-Wo5MKAx2O_GULFCwA24s0XO6Ek,5462
+ planar/ai/agent_utils.py,sha256=MYNerdAm2TPVbDSKAmBCUlGmR56NAc8seZmDAFOWvUA,4199
+ planar/ai/models.py,sha256=bZd4MoBBJMqzXJqsmsbMdZtOaRrNeX438CHAqOvmpfw,4598
+ planar/ai/pydantic_ai.py,sha256=FpD0pE7wWNYwmEUZ90D7_J8gbAoqKmWtrLr2fhAd7rg,23503
  planar/ai/test_agent_serialization.py,sha256=zYLIxhYdFhOZzBrEBoQNyYLyNcNxWwaMTkjt_ARTkZk,8073
+ planar/ai/test_agent_tool_step_display.py,sha256=GswT9wET4-vFnohOwIgP6-0r_-wt1vwpThmAD9ubATw,2582
  planar/ai/utils.py,sha256=WVBW0TGaoKytC4bNd_a9lXrBf5QsDRut4GBcA53U2Ww,3116
+ planar/app.py,sha256=VEs4jDlcisyOy9I9zEGMG_-Qm8ULKT36CSHjqrYit3o,18491
+ planar/cli.py,sha256=SIyQOY3MbNDuohNcXFRIrHJuGxFNNC8C_ihfCXIUvbE,9900
+ planar/config.py,sha256=6J42G9rEVUiOyCAY3EwUTU3PPmWthGTnrHMzST9TMcc,17809
  planar/data/__init__.py,sha256=LwrWl925w1CN0aW645Wpj_kDp0B8j5SsPzjr9iyrcmI,285
  planar/data/config.py,sha256=zp6ChI_2MUMbupEVQNY-BxzcdLvejXG33DCp0BujGVU,1209
  planar/data/dataset.py,sha256=P0NVE2OvJcXMKqVylYczY2lSGR0pSWlPAHM_upKoBWQ,9507
  planar/data/exceptions.py,sha256=AlhGQ_TReyEzfPSlqoXCjoZ1206Ut7dS4lrukVfGHaw,358
  planar/data/test_dataset.py,sha256=w2kay2PE-BhkryM3cOKX0nzSr2G0nCJxDuW1QCeFbyk,9985
  planar/db/__init__.py,sha256=SNgB6unQ1f1E9dB9O-KrsPsYM17KLsgOW1u0ajqs57I,318
- planar/db/alembic.ini,sha256=8G9IWbmF61Vwp1BXbkNOXTTgCEUMBQhOK_e-nnpnSYY,4309
- planar/db/db.py,sha256=VNpHH1R62tdWVLIV1I2ULmw3B8M6-RsM2ALG3VAVjSg,12790
- planar/db/alembic/env.py,sha256=cowI6O_4BMJPqDAukkbg69lzdsE44soi3ysxKGXbS_w,5207
- planar/db/alembic/script.py.mako,sha256=Cl7ixgLNtLk1gF5xFNXOnC9YYLX4cpFd8yHtEyY0_dY,699
+ planar/db/alembic/env.py,sha256=UlOrLBfFJ-WbNK0R1cgS2MC3yrqeE4-6rIirB3rGLYo,5344
+ planar/db/alembic/script.py.mako,sha256=BgXfi4ClINnJU-PaaWqh1-Sjqu4brkWpbVd-0rEPzLU,665
  planar/db/alembic/versions/3476068c153c_initial_system_tables_migration.py,sha256=1FbzJyfapjegM-Mxd3HMMVA-8zVU6AnrnzEgIoc6eoQ,13204
- planar/files/__init__.py,sha256=fms64l32M8hPK0SINXxNCykr2EpjBTcdgnezVgaCwkc,120
+ planar/db/alembic/versions/8855a78a408f_message_step_type.py,sha256=iH13r8swy79lw8icGNKW1lqN09TX93MvR1zi-qvWNlU,869
+ planar/db/alembic.ini,sha256=FI1S0DlTn7IVp3-eT17PNxbVBbqhn84k2VzwRHpNz_Q,4304
+ planar/db/db.py,sha256=VNpHH1R62tdWVLIV1I2ULmw3B8M6-RsM2ALG3VAVjSg,12790
+ planar/dependencies.py,sha256=PH78fGk3bQfGnz-AphxH49307Y0XVgl3EY0LdGJnoik,1008
+ planar/files/__init__.py,sha256=uXqwnoIaJAuDYXFA-9gqcSq1R4mZLNyfem1zZyGI5Ek,206
  planar/files/models.py,sha256=zbZvMkoqoSnn7yOo26SRtEgtlHJbFIvwSht75APHQXk,6145
- planar/files/test_files.py,sha256=nclsbLnbijCWQ-Aj8Yvo06hs72PygL1Wps7uk7716sc,8957
  planar/files/storage/azure_blob.py,sha256=PzCm8ZpyAMH9-N6VscTlLpud-CBLcQX9qC6YjbOSfZg,12316
  planar/files/storage/base.py,sha256=KO7jyKwjKg5fNSLvhxJWE-lsypv6LXXf7bgA34aflwY,2495
  planar/files/storage/config.py,sha256=jE9Dn6cG_a4x9pdaZkasOxjyWkK6hmplLrPjEsRXGLM,3473
@@ -47,6 +35,7 @@ planar/files/storage/s3.py,sha256=1861rSw3kplXtugUWD7mdSD_EnPSHME1mGc82V69r5g,82
  planar/files/storage/test_azure_blob.py,sha256=OFYpns6JyeCCBHCoLz56uUHR6tWWeSZldUant5llczI,14200
  planar/files/storage/test_local_directory.py,sha256=KtzRfjtZUew1U-KETtD2mb6ywwX6HmjzaaeixOP0Ebg,5751
  planar/files/storage/test_s3.py,sha256=QG-CH7fiaRmQRwffnqG2mLRrw9LIlR2-xRyHs6Wuspo,10565
+ planar/files/test_files.py,sha256=nclsbLnbijCWQ-Aj8Yvo06hs72PygL1Wps7uk7716sc,8957
  planar/human/__init__.py,sha256=FwpV-FFssKKlvKSjWoI4gJB1XTMaNb1UNCSBxjAtIBw,147
  planar/human/human.py,sha256=-oRtN_8bCtSV7Sxku7yG4rof7T5pr4j18Cfm3u4Z3PM,14925
  planar/human/models.py,sha256=Cec1Y9NGGtuAl1ZhqNc9PWIq__BbiWVTh7IYKR4yl3w,2317
@@ -75,6 +64,9 @@ planar/modeling/orm/reexports.py,sha256=sP7nw8e1yp1cahpfsefO84P5n4TNnBRk1jVHuCuH
  planar/object_config/__init__.py,sha256=8LbI3teg3jCKoUznZ7cal22C1URnHtJMpBokCHZQUWo,352
  planar/object_config/models.py,sha256=nCyK82JitZwzGwbaBa-dZVxHPnL51ZJ6h87a-KEwHAw,3078
  planar/object_config/object_config.py,sha256=MgaL-jBFJJtP6ipZ2eJs-KMhj94V_sT3QCSoVTpYP3Y,13609
+ planar/object_registry.py,sha256=RMleX5XE8OKDxlnMeyLpJ1Y280duub-tx1smR1zTlDg,3219
+ planar/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ planar/registry_items.py,sha256=UhZRIpbSoa_CV9OTl17pJfRLxItYp4Pxd9f5ZbJkGaM,2055
  planar/routers/__init__.py,sha256=B_ZEbBuosX4ahPfvWZsyMIPmQm0rt6ail4nJA6NLfOk,379
  planar/routers/agents_router.py,sha256=trb1JPYVlaV7O2uoYvKIrLuTNGP_PmQSLZmXYFWrHkg,8251
  planar/routers/entity_router.py,sha256=7Y1LDSqI_ovoOGr9DGylGM8BmRxF-WSPQSwITJHc6NE,4841
@@ -97,7 +89,6 @@ planar/rules/decorator.py,sha256=nxT17n9uwfXMOlk5lliw_cRS7Y83gMI6CQdrf_pB5yk,666
  planar/rules/models.py,sha256=vC38JLeGzmU87L8BX4AyVJLJHmRYjWRmoHQ6S6ZlhPg,10186
  planar/rules/rule_configuration.py,sha256=B2G6mPnfxA277nF-Gr-B_Uely-ZOhz2jAhiwQMZuY-k,6508
  planar/rules/runner.py,sha256=KIPrt_ri50qotvDLOY9xly40bNTWRh8GVT2kEJFFtFo,1714
- planar/rules/test_rules.py,sha256=6M7CSg1bwn7O7DOoNi38vyVG4UmPQfRFxEO9qGE6rz0,52011
  planar/rules/test_data/account_dormancy_management.json,sha256=9aMMELZrF5DTBluMKUXJptxwULEcva4GHEyaapIeerY,4776
  planar/rules/test_data/airline_loyalty_points_calculator.json,sha256=7S1koMe60yR3h2VQys34oLy5ynhsEQ5wadMLPHCRQZA,5689
  planar/rules/test_data/applicant_risk_assessment.json,sha256=rj-Q13NczdNt00x5wrvGLalw5IfdT1j-_RvpwCZa7Fc,9994
@@ -112,13 +103,14 @@ planar/rules/test_data/order_consolidation_system.json,sha256=kWJuVHAfAqsDW2xVdx
  planar/rules/test_data/portfolio_risk_monitor.json,sha256=tTvQOJJLhakGxG4CnA9fdBIECstJnp0B8ogFADkdy8s,15168
  planar/rules/test_data/supply_chain_risk.json,sha256=fO0wV5ZnsZQpOP19Zp2troTMADaX0-KMpCxG_uHG198,7263
  planar/rules/test_data/warehouse_cross_docking.json,sha256=IPfcgNkY2sds301BeW6CjgFtK_zRyr27gI3UcqCB2Uo,5549
- planar/scaffold_templates/main.py.j2,sha256=HcV0PVzcyRDaJvNdDQIFiDR1MJlLquNQzNO9oNkCKDQ,322
- planar/scaffold_templates/planar.dev.yaml.j2,sha256=I5-IqX7GJm6qA91WtUMw43L4hKACqgnER_H2racim4c,998
- planar/scaffold_templates/planar.prod.yaml.j2,sha256=FahJ2atDtvVH7IUCatGq6h9hmyF8meeiWC8RLfWphOQ,867
- planar/scaffold_templates/pyproject.toml.j2,sha256=nFfHWLp0sFK8cqjkdwBm6Hi6xsPzTNkaBeSgdTWTS-Q,183
+ planar/rules/test_rules.py,sha256=6M7CSg1bwn7O7DOoNi38vyVG4UmPQfRFxEO9qGE6rz0,52011
  planar/scaffold_templates/app/__init__.py.j2,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  planar/scaffold_templates/app/db/entities.py.j2,sha256=wg9O3JtRaRMKlDtoWHHodyNRL0s1UILvsr9fCQ_O2-4,279
  planar/scaffold_templates/app/flows/process_invoice.py.j2,sha256=R3EII_O2DHV1kvffW_AApZyaS6rR9eikcpxI08XH9dI,1691
+ planar/scaffold_templates/main.py.j2,sha256=zrqsuv3Fp4lcknvB37RrRHy11msdFB1yDguYmTLLPhw,398
+ planar/scaffold_templates/planar.dev.yaml.j2,sha256=I5-IqX7GJm6qA91WtUMw43L4hKACqgnER_H2racim4c,998
+ planar/scaffold_templates/planar.prod.yaml.j2,sha256=FahJ2atDtvVH7IUCatGq6h9hmyF8meeiWC8RLfWphOQ,867
+ planar/scaffold_templates/pyproject.toml.j2,sha256=nFfHWLp0sFK8cqjkdwBm6Hi6xsPzTNkaBeSgdTWTS-Q,183
  planar/security/auth_context.py,sha256=i63JkHQ3oXNlTis7GIKRkZJbkcvZhD2jVDuO7blgbSc,5068
  planar/security/auth_middleware.py,sha256=Grrm0i2bstWZ83ukrNZsHvFbNzffN0rvbbCcb2OxRY0,5746
  planar/security/authorization.py,sha256=zoej88_VINVNSDXm7u2LJbwOpMqmXBKj_pmCaPTar7M,11721
@@ -129,28 +121,38 @@ planar/security/tests/test_authorization_context.py,sha256=cnsC3V13NBJwzyIwZaM9w
  planar/security/tests/test_cedar_basics.py,sha256=i1jLPjlJT1n_97onbeDYVpnwAzU2PmHvIPvaJSH1J2U,1026
  planar/security/tests/test_cedar_policies.py,sha256=-Vn_CQgCUAVg7YhdUd34FsOjNL1EmY_o92r-fzmknP8,4848
  planar/security/tests/test_jwt_principal_context.py,sha256=nGElTLtXbabkAxd3kXVpSFdH7kvSzHzSkp89g5Vu5Hc,4691
+ planar/session.py,sha256=xLS9WPvaiy9nr2Olju1-C-7_sU5VXK8RuNdjuKndul4,1020
  planar/sse/constants.py,sha256=jE3SooTEWPuuL_Bi6DisJYMR9pKOiHVfboU2h5QTJRg,22
  planar/sse/example.html,sha256=SgTJbdJ3B1F1DxLC2YWuX2F1XVwKcTjX34CbJCXoCTM,4144
  planar/sse/hub.py,sha256=5jhfk7zdCivau3TT1MxU2qtvETSskhqEiXzt-t0sRpE,6859
  planar/sse/model.py,sha256=fU_Fx9LS2ouS6-Dj1TIF-PLGul9YratKWafoWfZR1gc,123
  planar/sse/proxy.py,sha256=aJGo_-JIeQ0xSmE4HJdulZxIgCVRsBMMXqqSqtPvTvo,9177
+ planar/task_local.py,sha256=pyvT0bdzAn15HL2yQUs9YrU5MVXh9njQt9MH51AGljs,1102
+ planar/test_app.py,sha256=5dYhOW6lRbAx2X270DfqktkJ5IfuqfowX6bwxM1WQAM,4865
+ planar/test_cli.py,sha256=faR6CSuooHHyyB5Yt-p8CIr7mGtKrrU2TLQbc4Oe9bA,13834
+ planar/test_config.py,sha256=HcmDu1nwKZZhzHQLGVyP9oxje-_g_XubEsvzRj28QPg,14328
+ planar/test_object_config.py,sha256=izn4s2HmSDWpGtgpOTDmKeUYN2-63WDR1QtVQrT-x00,20135
+ planar/test_object_registry.py,sha256=R7IwbB2GACm2HUuVZTeVY4V12XB9_JgSSeppPxiCdfs,480
+ planar/test_sqlalchemy.py,sha256=QTloaipWiFmlLTBGH6YCRkwi1R27gmQZnwprO7lPLfU,7058
+ planar/test_utils.py,sha256=gKenXotj36SN_bb3bQpYPfD8t06IjnGBQqEgWpujHcA,3086
  planar/testing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- planar/testing/fixtures.py,sha256=spK7iL1NSv-d8fd139ep-SDogZR2ZycGkD_voSAPPF4,8662
+ planar/testing/fixtures.py,sha256=YtSGbSUlGXdreDfTVNKZUaCflvAftYQegaTOQQLOrCA,8672
  planar/testing/memory_storage.py,sha256=apcuFisC3hW9KiU3kO8zwHQ6oK9Lu20NSX5fJ0LSZUY,2824
  planar/testing/planar_test_client.py,sha256=qPkI_ZHZho_38PpdSmEjcRBO1iHcIx3dOwo7c02Am10,1979
  planar/testing/synchronizable_tracer.py,sha256=SWeta1CgwGsN5duC0FR8NyXOQ1b1L8nDpvGdjZVJ9Bg,4938
  planar/testing/test_memory_storage.py,sha256=So32XL0gbLDFMTl-WJN445x9jL6O8Qsqw8IRaiZnsPs,4797
  planar/testing/workflow_observer.py,sha256=0Q2xsYuZzNGXHZVwvXBqL9KXPsdIXuSZGBJAxHopzJw,2976
+ planar/utils.py,sha256=YP37-ODS8nYOIfHPo11CwCpQRsg8oc57lQ0wkXwqCyo,3607
  planar/workflows/__init__.py,sha256=yFrrtKYUCx4jBPpHdEWDfKQgZXzGyr9voj5lFe9C-_w,826
  planar/workflows/context.py,sha256=93kPSmYniqjX_lv6--eUUPnzZEKZJi6IPaAjrT-hFRY,1271
- planar/workflows/contrib.py,sha256=b7WhCancxNCKO63mJCez9MahwMQc5_3zQxr_soJoXCY,6478
+ planar/workflows/contrib.py,sha256=tUqMZ42Jh8KMy1JP1VFJOD4rsiYxzMTd5pJfe2t3yzk,6650
  planar/workflows/decorators.py,sha256=Lsq9ZZdY60rv8-9Ok029x_kE4bHBvRqbfWZ8O0QRNfw,7960
  planar/workflows/events.py,sha256=xYGGTwbKFnqhFFI7SuoFIaEeS5oWOLS-1nP9MW0uOhs,6007
  planar/workflows/exceptions.py,sha256=G2Q4ZhaJwybMLpnpzXJNvKtFqUsEw7Vh0cRMkVxP7PU,927
  planar/workflows/execution.py,sha256=8c4a2L1qRMPQrCEJ8-sEk-gJi_xKq5gYKDSWSbSspVI,7479
  planar/workflows/lock.py,sha256=QU5_y_n8RHOC7ppLicH7yWX-Ni7N93hlB14D2zXOQ8A,8773
  planar/workflows/misc.py,sha256=g3XVRMeU9mgDRi_6RgFdydLEqvTAg49wbIGlmC7kOu8,140
- planar/workflows/models.py,sha256=54z19XaMp-OP9qE_HT2yhK12u8NC4ZD7SgwY8sGjyw4,5567
+ planar/workflows/models.py,sha256=SKBJTGhd4nVWxtlDkaQrU2RvRTtoj07PJhLT73cF_ac,5591
  planar/workflows/notifications.py,sha256=JrObfWD-uRZJlZLMSDJDqjDuXfYAoRSLfgEeyoy98Vs,3795
  planar/workflows/orchestrator.py,sha256=rneB1yOPDZiJcHFbD6UDZ4juU77iSBK1eu1gOFm58vM,15480
  planar/workflows/query.py,sha256=38B5SLwXf3AlA_1ChR5DicFWdcUqzpQzMkuAUCNHafI,8838
@@ -163,10 +165,10 @@ planar/workflows/test_concurrency_detection.py,sha256=yfgvLOMkPaK7EiW4ihm1KQx82Y
  planar/workflows/test_lock_timeout.py,sha256=H78N090wJtiEg6SaJosfRWijpX6HwnyWyNNb7WaGPe0,5746
  planar/workflows/test_serialization.py,sha256=JfaveBRQTNMkucqkTorIMGcvi8S0j6uRtboFaWpCmes,39586
  planar/workflows/test_suspend_deserialization.py,sha256=ddw2jToSJ-ebQ0RfT7KWTRMCOs1nis1lprQiGIGuaJ0,7751
- planar/workflows/test_workflow.py,sha256=KArm9m44IBXKY9j4v_O74MAweFN6jEb7tVRomziaeFU,64011
+ planar/workflows/test_workflow.py,sha256=hBLPQYqUsWEQ_SopKgi69ckRC5OpmQEBlsPcftGMu_Q,65266
  planar/workflows/tracing.py,sha256=E7E_kj2VBQisDqrllviIshbvOmB9QcEeRwMapunqio4,2732
  planar/workflows/wrappers.py,sha256=KON6RGg1D6yStboNbuMEeTXRpPTEa8S6Elh1tOnMAlM,1149
- planar-0.9.1.dist-info/METADATA,sha256=dXMHhfpYKGO-5bwwHaSSJLH2LyKxODUOwj0zTtT6gnY,12303
- planar-0.9.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- planar-0.9.1.dist-info/entry_points.txt,sha256=ZtFgrZ0eeoVmhLA51ESipK0nHg2t_prjW0Cm8WhpP54,95
- planar-0.9.1.dist-info/RECORD,,
+ planar-0.9.3.dist-info/WHEEL,sha256=Pi5uDq5Fdo_Rr-HD5h9BiPn9Et29Y9Sh8NhcJNnFU1c,79
+ planar-0.9.3.dist-info/entry_points.txt,sha256=L3T0w9u2UPKWXv6JbXFWKU1d5xyEAq1xVWbpYS6mLNg,96
+ planar-0.9.3.dist-info/METADATA,sha256=TjcslwOgn9XhcvGPMUOyQks5nVakgTeFOFSzahwxcmE,12347
+ planar-0.9.3.dist-info/RECORD,,
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: uv 0.8.17
+ Root-Is-Purelib: true
+ Tag: py3-none-any
@@ -1,3 +1,4 @@
  [console_scripts]
  generate-llm-prompt = docs.generate_llm_prompt:main
  planar = planar.cli:main
+
planar/_version.py DELETED
@@ -1 +0,0 @@
- VERSION = "0.9.1"
@@ -1,4 +0,0 @@
- Wheel-Version: 1.0
- Generator: hatchling 1.27.0
- Root-Is-Purelib: true
- Tag: py3-none-any