planar 0.8.0__py3-none-any.whl → 0.9.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. planar/_version.py +1 -1
  2. planar/ai/agent.py +19 -3
  3. planar/ai/agent_base.py +1 -5
  4. planar/ai/agent_utils.py +0 -72
  5. planar/ai/models.py +30 -0
  6. planar/ai/pydantic_ai.py +12 -11
  7. planar/app.py +6 -11
  8. planar/config.py +6 -1
  9. planar/data/__init__.py +17 -0
  10. planar/data/config.py +49 -0
  11. planar/data/dataset.py +263 -0
  12. planar/data/exceptions.py +19 -0
  13. planar/data/test_dataset.py +354 -0
  14. planar/db/db.py +39 -21
  15. planar/dependencies.py +30 -0
  16. planar/files/test_files.py +6 -7
  17. planar/modeling/mixins/test_auditable.py +2 -2
  18. planar/modeling/orm/planar_base_entity.py +4 -1
  19. planar/routers/agents_router.py +52 -4
  20. planar/routers/test_agents_router.py +2 -2
  21. planar/routers/test_files_router.py +2 -2
  22. planar/routers/test_object_config_router.py +2 -2
  23. planar/routers/test_routes_security.py +3 -2
  24. planar/routers/test_rule_router.py +2 -2
  25. planar/routers/test_workflow_router.py +6 -8
  26. planar/rules/__init__.py +12 -18
  27. planar/scaffold_templates/app/flows/process_invoice.py.j2 +1 -2
  28. planar/scaffold_templates/planar.dev.yaml.j2 +9 -0
  29. planar/scaffold_templates/planar.prod.yaml.j2 +14 -0
  30. planar/scaffold_templates/pyproject.toml.j2 +2 -2
  31. planar/test_sqlalchemy.py +36 -1
  32. planar/testing/fixtures.py +3 -17
  33. planar/testing/workflow_observer.py +2 -2
  34. planar/workflows/notifications.py +39 -3
  35. planar/workflows/test_lock_timeout.py +4 -4
  36. {planar-0.8.0.dist-info → planar-0.9.1.dist-info}/METADATA +27 -13
  37. {planar-0.8.0.dist-info → planar-0.9.1.dist-info}/RECORD +39 -33
  38. {planar-0.8.0.dist-info → planar-0.9.1.dist-info}/WHEEL +0 -0
  39. {planar-0.8.0.dist-info → planar-0.9.1.dist-info}/entry_points.txt +0 -0
planar/_version.py CHANGED
@@ -1 +1 @@
1
- VERSION = "0.8.0"
1
+ VERSION = "0.9.1"
planar/ai/agent.py CHANGED
@@ -7,19 +7,20 @@ from pydantic_ai import models
7
7
 
8
8
  from planar.ai.agent_base import AgentBase
9
9
  from planar.ai.agent_utils import (
10
- AgentEventType,
11
10
  ModelSpec,
12
- ToolCallResult,
13
11
  create_tool_definition,
14
12
  extract_files_from_model,
15
13
  get_agent_config,
16
14
  render_template,
17
15
  )
18
16
  from planar.ai.models import (
17
+ AgentEventEmitter,
18
+ AgentEventType,
19
19
  AgentRunResult,
20
20
  AssistantMessage,
21
21
  ModelMessage,
22
22
  SystemMessage,
23
+ ToolCallResult,
23
24
  ToolDefinition,
24
25
  ToolMessage,
25
26
  ToolResponse,
@@ -29,10 +30,22 @@ from planar.ai.pydantic_ai import ModelRunResponse, model_run
29
30
  from planar.logging import get_logger
30
31
  from planar.utils import utc_now
31
32
  from planar.workflows.models import StepType
33
+ from planar.workflows.notifications import agent_text, agent_think
32
34
 
33
35
  logger = get_logger(__name__)
34
36
 
35
37
 
38
+ class AgentWorkflowNotifier(AgentEventEmitter):
39
+ def emit(self, event_type, data):
40
+ match event_type:
41
+ case AgentEventType.THINK:
42
+ agent_think(str(data))
43
+ case AgentEventType.TEXT:
44
+ agent_text(str(data))
45
+ case _:
46
+ ...
47
+
48
+
36
49
  @dataclass
37
50
  class Agent[
38
51
  TInput: BaseModel | str,
@@ -53,7 +66,10 @@ class Agent[
53
66
  Returns:
54
67
  AgentRunResult containing the agent's response
55
68
  """
56
- event_emitter = self.event_emitter
69
+ if self.event_emitter:
70
+ event_emitter = self.event_emitter
71
+ else:
72
+ event_emitter = AgentWorkflowNotifier()
57
73
  logger.debug(
58
74
  "agent run_step called", agent_name=self.name, input_type=type(input_value)
59
75
  )
planar/ai/agent_base.py CHANGED
@@ -13,11 +13,7 @@ from typing import (
13
13
 
14
14
  from pydantic import BaseModel
15
15
 
16
- from planar.ai.agent_utils import AgentEventEmitter
17
- from planar.ai.models import (
18
- AgentConfig,
19
- AgentRunResult,
20
- )
16
+ from planar.ai.models import AgentConfig, AgentEventEmitter, AgentRunResult
21
17
  from planar.logging import get_logger
22
18
  from planar.modeling.field_helpers import JsonSchema
23
19
  from planar.utils import P, R, T, U
planar/ai/agent_utils.py CHANGED
@@ -1,8 +1,4 @@
1
- import asyncio
2
1
  import inspect
3
- import json
4
- from collections.abc import AsyncGenerator
5
- from enum import Enum
6
2
  from typing import (
7
3
  Any,
8
4
  Callable,
@@ -20,7 +16,6 @@ from planar.ai.models import (
20
16
  from planar.files.models import PlanarFile
21
17
  from planar.logging import get_logger
22
18
  from planar.object_config import ConfigurableObjectType, ObjectConfigurationIO
23
- from planar.utils import utc_now
24
19
  from planar.workflows import step
25
20
 
26
21
  logger = get_logger(__name__)
@@ -33,73 +28,6 @@ class ModelSpec(BaseModel):
33
28
  parameters: dict[str, Any] = {}
34
29
 
35
30
 
36
- class AgentEventType(str, Enum):
37
- """Valid event types that can be emitted by an Agent."""
38
-
39
- RESPONSE = "response"
40
- TOOL_RESPONSE = "tool_response"
41
- COMPLETED = "completed"
42
- ERROR = "error"
43
- THINK = "think"
44
- TEXT = "text"
45
-
46
-
47
- class AgentEvent:
48
- def __init__(
49
- self,
50
- event_type: AgentEventType,
51
- data: BaseModel | str | None,
52
- ):
53
- self.event_type = event_type
54
- self.data = data
55
- self.timestamp = utc_now().isoformat()
56
-
57
-
58
- class AgentEventEmitter:
59
- def __init__(self):
60
- self.queue: asyncio.Queue[AgentEvent] = asyncio.Queue()
61
-
62
- def emit(self, event_type: AgentEventType, data: BaseModel | str | None):
63
- event = AgentEvent(event_type, data)
64
- self.queue.put_nowait(event)
65
-
66
- async def get_events(self) -> AsyncGenerator[str, None]:
67
- while True:
68
- event = await self.queue.get()
69
-
70
- if isinstance(event.data, BaseModel):
71
- data = {
72
- "data": event.data.model_dump(),
73
- "event_type": event.event_type,
74
- }
75
- else:
76
- data = {
77
- "data": event.data,
78
- "event_type": event.event_type,
79
- }
80
-
81
- yield f"data: {json.dumps(data)}\n\n"
82
-
83
- self.queue.task_done()
84
-
85
- if event.event_type in (AgentEventType.COMPLETED, AgentEventType.ERROR):
86
- break
87
-
88
- def is_empty(self) -> bool:
89
- """Check if the queue is empty."""
90
- return self.queue.empty()
91
-
92
-
93
- # Define JsonData type as a union of valid JSON values
94
- JsonData = str | int | float | bool | None | dict[str, Any] | list[Any]
95
-
96
-
97
- class ToolCallResult(BaseModel):
98
- tool_call_id: str
99
- tool_call_name: str
100
- content: BaseModel | JsonData
101
-
102
-
103
31
  def extract_files_from_model(
104
32
  model: BaseModel | str | None,
105
33
  ) -> list[PlanarFile]:
planar/ai/models.py CHANGED
@@ -1,5 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
+ from enum import Enum
3
4
  from typing import (
4
5
  Annotated,
5
6
  Any,
@@ -7,6 +8,7 @@ from typing import (
7
8
  List,
8
9
  Literal,
9
10
  Optional,
11
+ Protocol,
10
12
  TypeVar,
11
13
  Union,
12
14
  )
@@ -88,10 +90,23 @@ class ToolMessage(ModelMessage):
88
90
  tool_call_id: str # ID of the tool call this is responding to
89
91
 
90
92
 
93
+ # Define JsonData type as a union of valid JSON values
94
+ JsonData = str | int | float | bool | None | dict[str, Any] | list[Any]
95
+
96
+
97
+ class ToolCallResult(BaseModel):
98
+ tool_call_id: str
99
+ tool_call_name: str
100
+ content: BaseModel | JsonData
101
+
102
+
91
103
  class CompletionResponse[T: BaseModel | str](BaseModel):
92
104
  """Response object that may contain content or tool calls."""
93
105
 
94
106
  content: Optional[T] = None # Content as str or parsed Pydantic model
107
+ text_content: Optional[str] = (
108
+ None # Optional text content, if separate from structured output
109
+ )
95
110
  reasoning_content: Optional[str] = None # Optional reasoning content
96
111
  tool_calls: Optional[List[ToolCall]] = None # List of tool calls, if any
97
112
 
@@ -138,3 +153,18 @@ class AgentSerializeable(BaseModel):
138
153
 
139
154
  # TODO: actually fetch built_in_vars from agent object
140
155
  built_in_vars: dict[str, str] = Field(default_factory=dict)
156
+
157
+
158
+ class AgentEventType(str, Enum):
159
+ """Valid event types that can be emitted by an Agent."""
160
+
161
+ RESPONSE = "response"
162
+ TOOL_RESPONSE = "tool_response"
163
+ COMPLETED = "completed"
164
+ ERROR = "error"
165
+ THINK = "think"
166
+ TEXT = "text"
167
+
168
+
169
+ class AgentEventEmitter(Protocol):
170
+ def emit(self, event_type: AgentEventType, data: BaseModel | str | None): ...
planar/ai/pydantic_ai.py CHANGED
@@ -3,7 +3,7 @@ import json
3
3
  import os
4
4
  import re
5
5
  import textwrap
6
- from typing import Any, Literal, Protocol, Type, cast
6
+ from typing import Any, Type, cast
7
7
 
8
8
  from pydantic import BaseModel, ValidationError
9
9
  from pydantic_ai import BinaryContent
@@ -265,10 +265,6 @@ async def prepare_messages(
265
265
  return pydantic_messages
266
266
 
267
267
 
268
- class StreamEventHandler(Protocol):
269
- def emit(self, event: Literal["text", "think"], data: str) -> None: ...
270
-
271
-
272
268
  def setup_native_structured_output(
273
269
  request_params: ModelRequestParameters,
274
270
  output_type: Type[BaseModel],
@@ -330,12 +326,14 @@ def return_native_structured_output[TOutput: BaseModel](
330
326
  result = m.CompletionResponse(
331
327
  content=output_type.model_validate_json(content),
332
328
  tool_calls=final_tool_calls,
329
+ text_content=content,
333
330
  reasoning_content=thinking,
334
331
  )
335
332
  logger.info(
336
333
  "model run completed with structured output",
337
334
  content=result.content,
338
335
  reasoning_content=result.reasoning_content,
336
+ text_content=content,
339
337
  tool_calls=result.tool_calls,
340
338
  )
341
339
  return result
@@ -359,6 +357,7 @@ def return_tool_structured_output[TOutput: BaseModel](
359
357
  result = m.CompletionResponse(
360
358
  content=output_type.model_validate(final_result_tc.arguments),
361
359
  tool_calls=tool_calls,
360
+ text_content=content,
362
361
  reasoning_content=thinking,
363
362
  )
364
363
  logger.info(
@@ -388,7 +387,7 @@ async def model_run[TOutput: BaseModel | str](
388
387
  model_settings: dict[str, Any] | None = None,
389
388
  messages: list[m.ModelMessage] = [],
390
389
  tools: list[m.ToolDefinition] = [],
391
- event_handler: StreamEventHandler | None = None,
390
+ event_handler: m.AgentEventEmitter | None = None,
392
391
  output_type: Type[TOutput] = str,
393
392
  ) -> ModelRunResponse[TOutput]:
394
393
  # assert that the caller doesn't provide a tool called "final_result"
@@ -418,7 +417,7 @@ async def model_run[TOutput: BaseModel | str](
418
417
 
419
418
  structured_output = issubclass(output_type, BaseModel)
420
419
 
421
- def emit(event_type: Literal["text", "think"], content: str):
420
+ def emit(event_type: m.AgentEventType, content: str):
422
421
  if event_handler:
423
422
  event_handler.emit(event_type, content)
424
423
 
@@ -451,10 +450,10 @@ async def model_run[TOutput: BaseModel | str](
451
450
  case PartStartEvent(part=part):
452
451
  response_parts.append(part)
453
452
  if isinstance(part, TextPart):
454
- emit("text", part.content)
453
+ emit(m.AgentEventType.TEXT, part.content)
455
454
  text_buffer.append(part.content)
456
455
  elif isinstance(part, ThinkingPart):
457
- emit("think", part.content)
456
+ emit(m.AgentEventType.THINK, part.content)
458
457
  think_buffer.append(part.content)
459
458
  elif isinstance(part, ToolCallPart):
460
459
  if current_tool_call is not None:
@@ -480,14 +479,14 @@ async def model_run[TOutput: BaseModel | str](
480
479
  current = response_parts[-1]
481
480
  if isinstance(delta, TextPartDelta):
482
481
  assert isinstance(current, TextPart)
483
- emit("text", delta.content_delta)
482
+ emit(m.AgentEventType.TEXT, delta.content_delta)
484
483
  text_buffer.append(delta.content_delta)
485
484
  current.content += delta.content_delta
486
485
  elif (
487
486
  isinstance(delta, ThinkingPartDelta) and delta.content_delta
488
487
  ):
489
488
  assert isinstance(current, ThinkingPart)
490
- emit("think", delta.content_delta)
489
+ emit(m.AgentEventType.THINK, delta.content_delta)
491
490
  think_buffer.append(delta.content_delta)
492
491
  current.content += delta.content_delta
493
492
  elif isinstance(delta, ToolCallPartDelta):
@@ -547,6 +546,7 @@ async def model_run[TOutput: BaseModel | str](
547
546
  return ModelRunResponse(
548
547
  response=m.CompletionResponse(
549
548
  tool_calls=final_tool_calls,
549
+ text_content=content,
550
550
  reasoning_content=thinking,
551
551
  ),
552
552
  extra_turns_used=extra_turns_used,
@@ -623,6 +623,7 @@ async def model_run[TOutput: BaseModel | str](
623
623
  m.CompletionResponse(
624
624
  content=content,
625
625
  tool_calls=final_tool_calls,
626
+ text_content=content,
626
627
  reasoning_content=thinking,
627
628
  ),
628
629
  )
planar/app.py CHANGED
@@ -37,7 +37,6 @@ from planar.security.authorization import PolicyService, policy_service_context
37
37
  from planar.session import config_var, session_context
38
38
  from planar.sse.proxy import SSEProxy
39
39
  from planar.workflows import (
40
- Workflow,
41
40
  WorkflowNotification,
42
41
  WorkflowNotificationCallback,
43
42
  WorkflowOrchestrator,
@@ -81,7 +80,10 @@ class PlanarApp:
81
80
  )
82
81
  self.policy_service: PolicyService | None = None
83
82
 
84
- self.db_manager = DatabaseManager(db_url=self.config.connection_url())
83
+ self.db_manager = DatabaseManager(
84
+ db_url=self.config.connection_url(),
85
+ entity_schema=self.config.app.entity_schema,
86
+ )
85
87
 
86
88
  if self.config.storage:
87
89
  self.storage = create_from_config(self.config.storage)
@@ -169,13 +171,8 @@ class PlanarApp:
169
171
  return
170
172
 
171
173
  def on_workflow_notification(notification: WorkflowNotification):
172
- workflow_id = (
173
- notification.data.id
174
- if isinstance(notification.data, Workflow)
175
- else notification.data.workflow_id
176
- )
177
174
  self.sse_proxy.push(
178
- f"{notification.kind.value}:{workflow_id}",
175
+ f"{notification.kind.value}:{notification.workflow_id}",
179
176
  notification.data.model_dump(mode="json"),
180
177
  )
181
178
 
@@ -226,9 +223,7 @@ class PlanarApp:
226
223
 
227
224
  # Begin the normal lifespan logic
228
225
  self.db_manager.connect()
229
- await self.db_manager.migrate(
230
- self.config.use_alembic if self.config.use_alembic is not None else True
231
- )
226
+ await self.db_manager.migrate()
232
227
 
233
228
  self.orchestrator = WorkflowOrchestrator(self.db_manager.get_engine())
234
229
  config_tok = config_var.set(self.config)
planar/config.py CHANGED
@@ -21,6 +21,7 @@ from pydantic import (
21
21
  )
22
22
  from sqlalchemy import URL, make_url
23
23
 
24
+ from planar.data.config import DataConfig
24
25
  from planar.files.storage.config import LocalDirectoryConfig, StorageConfig
25
26
  from planar.logging import get_logger
26
27
 
@@ -126,6 +127,10 @@ DatabaseConfig = Annotated[
126
127
  class AppConfig(BaseModel):
127
128
  db_connection: str
128
129
  max_db_conflict_retries: int | None = None
130
+ # Default schema for user-defined entities (PlanarBaseEntity)
131
+ # Postgres: used as the target schema for user tables
132
+ # SQLite: ignored (SQLite has no schemas)
133
+ entity_schema: str = "planar_entity"
129
134
 
130
135
 
131
136
  def default_storage_config() -> StorageConfig:
@@ -223,8 +228,8 @@ class PlanarConfig(BaseModel):
223
228
  environment: Environment = Environment.DEV
224
229
  security: SecurityConfig = SecurityConfig()
225
230
  logging: dict[str, LoggerConfig] | None = None
226
- use_alembic: bool | None = True
227
231
  otel: OtelConfig | None = None
232
+ data: DataConfig | None = None
228
233
 
229
234
  # forbid extra keys in the config to prevent accidental misconfiguration
230
235
  model_config = ConfigDict(extra="forbid")
@@ -0,0 +1,17 @@
1
+ from typing import TYPE_CHECKING
2
+
3
+ from planar.dependencies import lazy_exports
4
+
5
+ lazy_exports(
6
+ __name__,
7
+ {
8
+ "PlanarDataset": (".dataset", "PlanarDataset"),
9
+ },
10
+ )
11
+
12
+ if TYPE_CHECKING:
13
+ from .dataset import PlanarDataset
14
+
15
+ __all__ = [
16
+ "PlanarDataset",
17
+ ]
planar/data/config.py ADDED
@@ -0,0 +1,49 @@
1
+ """Configuration for Planar data module."""
2
+
3
+ from typing import Annotated, Literal
4
+
5
+ from pydantic import BaseModel, Field
6
+
7
+ from planar.files.storage.config import StorageConfig
8
+
9
+
10
+ class DuckDBCatalogConfig(BaseModel):
11
+ """Configuration for DuckDB catalog backend."""
12
+
13
+ type: Literal["duckdb"]
14
+ path: str # Path to .ducklake file
15
+
16
+
17
+ class PostgresCatalogConfig(BaseModel):
18
+ """Configuration for PostgreSQL catalog backend."""
19
+
20
+ type: Literal["postgres"]
21
+ host: str | None = None
22
+ port: int | None = None
23
+ user: str | None = None
24
+ password: str | None = None
25
+ db: str
26
+
27
+
28
+ class SQLiteCatalogConfig(BaseModel):
29
+ """Configuration for SQLite catalog backend."""
30
+
31
+ type: Literal["sqlite"]
32
+ path: str # Path to .sqlite file
33
+
34
+
35
+ # Discriminated union for catalog configurations
36
+ CatalogConfig = Annotated[
37
+ DuckDBCatalogConfig | PostgresCatalogConfig | SQLiteCatalogConfig,
38
+ Field(discriminator="type"),
39
+ ]
40
+
41
+
42
+ class DataConfig(BaseModel):
43
+ """Configuration for data features."""
44
+
45
+ catalog: CatalogConfig
46
+ storage: StorageConfig # Reuse existing StorageConfig from files
47
+
48
+ # Optional settings
49
+ catalog_name: str = "planar_data" # Default catalog name in Ducklake