planar 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (289) hide show
  1. planar/.__init__.py.un~ +0 -0
  2. planar/._version.py.un~ +0 -0
  3. planar/.app.py.un~ +0 -0
  4. planar/.cli.py.un~ +0 -0
  5. planar/.config.py.un~ +0 -0
  6. planar/.context.py.un~ +0 -0
  7. planar/.db.py.un~ +0 -0
  8. planar/.di.py.un~ +0 -0
  9. planar/.engine.py.un~ +0 -0
  10. planar/.files.py.un~ +0 -0
  11. planar/.log_context.py.un~ +0 -0
  12. planar/.log_metadata.py.un~ +0 -0
  13. planar/.logging.py.un~ +0 -0
  14. planar/.object_registry.py.un~ +0 -0
  15. planar/.otel.py.un~ +0 -0
  16. planar/.server.py.un~ +0 -0
  17. planar/.session.py.un~ +0 -0
  18. planar/.sqlalchemy.py.un~ +0 -0
  19. planar/.task_local.py.un~ +0 -0
  20. planar/.test_app.py.un~ +0 -0
  21. planar/.test_config.py.un~ +0 -0
  22. planar/.test_object_config.py.un~ +0 -0
  23. planar/.test_sqlalchemy.py.un~ +0 -0
  24. planar/.test_utils.py.un~ +0 -0
  25. planar/.util.py.un~ +0 -0
  26. planar/.utils.py.un~ +0 -0
  27. planar/__init__.py +26 -0
  28. planar/_version.py +1 -0
  29. planar/ai/.__init__.py.un~ +0 -0
  30. planar/ai/._models.py.un~ +0 -0
  31. planar/ai/.agent.py.un~ +0 -0
  32. planar/ai/.agent_utils.py.un~ +0 -0
  33. planar/ai/.events.py.un~ +0 -0
  34. planar/ai/.files.py.un~ +0 -0
  35. planar/ai/.models.py.un~ +0 -0
  36. planar/ai/.providers.py.un~ +0 -0
  37. planar/ai/.pydantic_ai.py.un~ +0 -0
  38. planar/ai/.pydantic_ai_agent.py.un~ +0 -0
  39. planar/ai/.pydantic_ai_provider.py.un~ +0 -0
  40. planar/ai/.step.py.un~ +0 -0
  41. planar/ai/.test_agent.py.un~ +0 -0
  42. planar/ai/.test_agent_serialization.py.un~ +0 -0
  43. planar/ai/.test_providers.py.un~ +0 -0
  44. planar/ai/.utils.py.un~ +0 -0
  45. planar/ai/__init__.py +15 -0
  46. planar/ai/agent.py +457 -0
  47. planar/ai/agent_utils.py +205 -0
  48. planar/ai/models.py +140 -0
  49. planar/ai/providers.py +1088 -0
  50. planar/ai/test_agent.py +1298 -0
  51. planar/ai/test_agent_serialization.py +229 -0
  52. planar/ai/test_providers.py +463 -0
  53. planar/ai/utils.py +102 -0
  54. planar/app.py +494 -0
  55. planar/cli.py +282 -0
  56. planar/config.py +544 -0
  57. planar/db/.db.py.un~ +0 -0
  58. planar/db/__init__.py +17 -0
  59. planar/db/alembic/env.py +136 -0
  60. planar/db/alembic/script.py.mako +28 -0
  61. planar/db/alembic/versions/3476068c153c_initial_system_tables_migration.py +339 -0
  62. planar/db/alembic.ini +128 -0
  63. planar/db/db.py +318 -0
  64. planar/files/.config.py.un~ +0 -0
  65. planar/files/.local.py.un~ +0 -0
  66. planar/files/.local_filesystem.py.un~ +0 -0
  67. planar/files/.model.py.un~ +0 -0
  68. planar/files/.models.py.un~ +0 -0
  69. planar/files/.s3.py.un~ +0 -0
  70. planar/files/.storage.py.un~ +0 -0
  71. planar/files/.test_files.py.un~ +0 -0
  72. planar/files/__init__.py +2 -0
  73. planar/files/models.py +162 -0
  74. planar/files/storage/.__init__.py.un~ +0 -0
  75. planar/files/storage/.base.py.un~ +0 -0
  76. planar/files/storage/.config.py.un~ +0 -0
  77. planar/files/storage/.context.py.un~ +0 -0
  78. planar/files/storage/.local_directory.py.un~ +0 -0
  79. planar/files/storage/.test_local_directory.py.un~ +0 -0
  80. planar/files/storage/.test_s3.py.un~ +0 -0
  81. planar/files/storage/base.py +61 -0
  82. planar/files/storage/config.py +44 -0
  83. planar/files/storage/context.py +15 -0
  84. planar/files/storage/local_directory.py +188 -0
  85. planar/files/storage/s3.py +220 -0
  86. planar/files/storage/test_local_directory.py +162 -0
  87. planar/files/storage/test_s3.py +299 -0
  88. planar/files/test_files.py +283 -0
  89. planar/human/.human.py.un~ +0 -0
  90. planar/human/.test_human.py.un~ +0 -0
  91. planar/human/__init__.py +2 -0
  92. planar/human/human.py +458 -0
  93. planar/human/models.py +80 -0
  94. planar/human/test_human.py +385 -0
  95. planar/logging/.__init__.py.un~ +0 -0
  96. planar/logging/.attributes.py.un~ +0 -0
  97. planar/logging/.formatter.py.un~ +0 -0
  98. planar/logging/.logger.py.un~ +0 -0
  99. planar/logging/.otel.py.un~ +0 -0
  100. planar/logging/.tracer.py.un~ +0 -0
  101. planar/logging/__init__.py +10 -0
  102. planar/logging/attributes.py +54 -0
  103. planar/logging/context.py +14 -0
  104. planar/logging/formatter.py +113 -0
  105. planar/logging/logger.py +114 -0
  106. planar/logging/otel.py +51 -0
  107. planar/modeling/.mixin.py.un~ +0 -0
  108. planar/modeling/.storage.py.un~ +0 -0
  109. planar/modeling/__init__.py +0 -0
  110. planar/modeling/field_helpers.py +59 -0
  111. planar/modeling/json_schema_generator.py +94 -0
  112. planar/modeling/mixins/__init__.py +10 -0
  113. planar/modeling/mixins/auditable.py +52 -0
  114. planar/modeling/mixins/test_auditable.py +97 -0
  115. planar/modeling/mixins/test_timestamp.py +134 -0
  116. planar/modeling/mixins/test_uuid_primary_key.py +52 -0
  117. planar/modeling/mixins/timestamp.py +53 -0
  118. planar/modeling/mixins/uuid_primary_key.py +19 -0
  119. planar/modeling/orm/.planar_base_model.py.un~ +0 -0
  120. planar/modeling/orm/__init__.py +18 -0
  121. planar/modeling/orm/planar_base_entity.py +29 -0
  122. planar/modeling/orm/query_filter_builder.py +122 -0
  123. planar/modeling/orm/reexports.py +15 -0
  124. planar/object_config/.object_config.py.un~ +0 -0
  125. planar/object_config/__init__.py +11 -0
  126. planar/object_config/models.py +114 -0
  127. planar/object_config/object_config.py +378 -0
  128. planar/object_registry.py +100 -0
  129. planar/registry_items.py +65 -0
  130. planar/routers/.__init__.py.un~ +0 -0
  131. planar/routers/.agents_router.py.un~ +0 -0
  132. planar/routers/.crud.py.un~ +0 -0
  133. planar/routers/.decision.py.un~ +0 -0
  134. planar/routers/.event.py.un~ +0 -0
  135. planar/routers/.file_attachment.py.un~ +0 -0
  136. planar/routers/.files.py.un~ +0 -0
  137. planar/routers/.files_router.py.un~ +0 -0
  138. planar/routers/.human.py.un~ +0 -0
  139. planar/routers/.info.py.un~ +0 -0
  140. planar/routers/.models.py.un~ +0 -0
  141. planar/routers/.object_config_router.py.un~ +0 -0
  142. planar/routers/.rule.py.un~ +0 -0
  143. planar/routers/.test_object_config_router.py.un~ +0 -0
  144. planar/routers/.test_workflow_router.py.un~ +0 -0
  145. planar/routers/.workflow.py.un~ +0 -0
  146. planar/routers/__init__.py +13 -0
  147. planar/routers/agents_router.py +197 -0
  148. planar/routers/entity_router.py +143 -0
  149. planar/routers/event.py +91 -0
  150. planar/routers/files.py +142 -0
  151. planar/routers/human.py +151 -0
  152. planar/routers/info.py +131 -0
  153. planar/routers/models.py +170 -0
  154. planar/routers/object_config_router.py +133 -0
  155. planar/routers/rule.py +108 -0
  156. planar/routers/test_agents_router.py +174 -0
  157. planar/routers/test_object_config_router.py +367 -0
  158. planar/routers/test_routes_security.py +169 -0
  159. planar/routers/test_rule_router.py +470 -0
  160. planar/routers/test_workflow_router.py +274 -0
  161. planar/routers/workflow.py +468 -0
  162. planar/rules/.decorator.py.un~ +0 -0
  163. planar/rules/.runner.py.un~ +0 -0
  164. planar/rules/.test_rules.py.un~ +0 -0
  165. planar/rules/__init__.py +23 -0
  166. planar/rules/decorator.py +184 -0
  167. planar/rules/models.py +355 -0
  168. planar/rules/rule_configuration.py +191 -0
  169. planar/rules/runner.py +64 -0
  170. planar/rules/test_rules.py +750 -0
  171. planar/scaffold_templates/app/__init__.py.j2 +0 -0
  172. planar/scaffold_templates/app/db/entities.py.j2 +11 -0
  173. planar/scaffold_templates/app/flows/process_invoice.py.j2 +67 -0
  174. planar/scaffold_templates/main.py.j2 +13 -0
  175. planar/scaffold_templates/planar.dev.yaml.j2 +34 -0
  176. planar/scaffold_templates/planar.prod.yaml.j2 +28 -0
  177. planar/scaffold_templates/pyproject.toml.j2 +10 -0
  178. planar/security/.jwt_middleware.py.un~ +0 -0
  179. planar/security/auth_context.py +148 -0
  180. planar/security/authorization.py +388 -0
  181. planar/security/default_policies.cedar +77 -0
  182. planar/security/jwt_middleware.py +116 -0
  183. planar/security/security_context.py +18 -0
  184. planar/security/tests/test_authorization_context.py +78 -0
  185. planar/security/tests/test_cedar_basics.py +41 -0
  186. planar/security/tests/test_cedar_policies.py +158 -0
  187. planar/security/tests/test_jwt_principal_context.py +179 -0
  188. planar/session.py +40 -0
  189. planar/sse/.constants.py.un~ +0 -0
  190. planar/sse/.example.html.un~ +0 -0
  191. planar/sse/.hub.py.un~ +0 -0
  192. planar/sse/.model.py.un~ +0 -0
  193. planar/sse/.proxy.py.un~ +0 -0
  194. planar/sse/constants.py +1 -0
  195. planar/sse/example.html +126 -0
  196. planar/sse/hub.py +216 -0
  197. planar/sse/model.py +8 -0
  198. planar/sse/proxy.py +257 -0
  199. planar/task_local.py +37 -0
  200. planar/test_app.py +51 -0
  201. planar/test_cli.py +372 -0
  202. planar/test_config.py +512 -0
  203. planar/test_object_config.py +527 -0
  204. planar/test_object_registry.py +14 -0
  205. planar/test_sqlalchemy.py +158 -0
  206. planar/test_utils.py +105 -0
  207. planar/testing/.client.py.un~ +0 -0
  208. planar/testing/.memory_storage.py.un~ +0 -0
  209. planar/testing/.planar_test_client.py.un~ +0 -0
  210. planar/testing/.predictable_tracer.py.un~ +0 -0
  211. planar/testing/.synchronizable_tracer.py.un~ +0 -0
  212. planar/testing/.test_memory_storage.py.un~ +0 -0
  213. planar/testing/.workflow_observer.py.un~ +0 -0
  214. planar/testing/__init__.py +0 -0
  215. planar/testing/memory_storage.py +78 -0
  216. planar/testing/planar_test_client.py +54 -0
  217. planar/testing/synchronizable_tracer.py +153 -0
  218. planar/testing/test_memory_storage.py +143 -0
  219. planar/testing/workflow_observer.py +73 -0
  220. planar/utils.py +70 -0
  221. planar/workflows/.__init__.py.un~ +0 -0
  222. planar/workflows/.builtin_steps.py.un~ +0 -0
  223. planar/workflows/.concurrency_tracing.py.un~ +0 -0
  224. planar/workflows/.context.py.un~ +0 -0
  225. planar/workflows/.contrib.py.un~ +0 -0
  226. planar/workflows/.decorators.py.un~ +0 -0
  227. planar/workflows/.durable_test.py.un~ +0 -0
  228. planar/workflows/.errors.py.un~ +0 -0
  229. planar/workflows/.events.py.un~ +0 -0
  230. planar/workflows/.exceptions.py.un~ +0 -0
  231. planar/workflows/.execution.py.un~ +0 -0
  232. planar/workflows/.human.py.un~ +0 -0
  233. planar/workflows/.lock.py.un~ +0 -0
  234. planar/workflows/.misc.py.un~ +0 -0
  235. planar/workflows/.model.py.un~ +0 -0
  236. planar/workflows/.models.py.un~ +0 -0
  237. planar/workflows/.notifications.py.un~ +0 -0
  238. planar/workflows/.orchestrator.py.un~ +0 -0
  239. planar/workflows/.runtime.py.un~ +0 -0
  240. planar/workflows/.serialization.py.un~ +0 -0
  241. planar/workflows/.step.py.un~ +0 -0
  242. planar/workflows/.step_core.py.un~ +0 -0
  243. planar/workflows/.sub_workflow_runner.py.un~ +0 -0
  244. planar/workflows/.sub_workflow_scheduler.py.un~ +0 -0
  245. planar/workflows/.test_concurrency.py.un~ +0 -0
  246. planar/workflows/.test_concurrency_detection.py.un~ +0 -0
  247. planar/workflows/.test_human.py.un~ +0 -0
  248. planar/workflows/.test_lock_timeout.py.un~ +0 -0
  249. planar/workflows/.test_orchestrator.py.un~ +0 -0
  250. planar/workflows/.test_race_conditions.py.un~ +0 -0
  251. planar/workflows/.test_serialization.py.un~ +0 -0
  252. planar/workflows/.test_suspend_deserialization.py.un~ +0 -0
  253. planar/workflows/.test_workflow.py.un~ +0 -0
  254. planar/workflows/.tracing.py.un~ +0 -0
  255. planar/workflows/.types.py.un~ +0 -0
  256. planar/workflows/.util.py.un~ +0 -0
  257. planar/workflows/.utils.py.un~ +0 -0
  258. planar/workflows/.workflow.py.un~ +0 -0
  259. planar/workflows/.workflow_wrapper.py.un~ +0 -0
  260. planar/workflows/.wrappers.py.un~ +0 -0
  261. planar/workflows/__init__.py +42 -0
  262. planar/workflows/context.py +44 -0
  263. planar/workflows/contrib.py +190 -0
  264. planar/workflows/decorators.py +217 -0
  265. planar/workflows/events.py +185 -0
  266. planar/workflows/exceptions.py +34 -0
  267. planar/workflows/execution.py +198 -0
  268. planar/workflows/lock.py +229 -0
  269. planar/workflows/misc.py +5 -0
  270. planar/workflows/models.py +154 -0
  271. planar/workflows/notifications.py +96 -0
  272. planar/workflows/orchestrator.py +383 -0
  273. planar/workflows/query.py +256 -0
  274. planar/workflows/serialization.py +409 -0
  275. planar/workflows/step_core.py +373 -0
  276. planar/workflows/step_metadata.py +357 -0
  277. planar/workflows/step_testing_utils.py +86 -0
  278. planar/workflows/sub_workflow_runner.py +191 -0
  279. planar/workflows/test_concurrency_detection.py +120 -0
  280. planar/workflows/test_lock_timeout.py +140 -0
  281. planar/workflows/test_serialization.py +1195 -0
  282. planar/workflows/test_suspend_deserialization.py +231 -0
  283. planar/workflows/test_workflow.py +1967 -0
  284. planar/workflows/tracing.py +106 -0
  285. planar/workflows/wrappers.py +41 -0
  286. planar-0.5.0.dist-info/METADATA +285 -0
  287. planar-0.5.0.dist-info/RECORD +289 -0
  288. planar-0.5.0.dist-info/WHEEL +4 -0
  289. planar-0.5.0.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,154 @@
1
+ from datetime import datetime
2
+ from enum import Enum
3
+ from typing import Any, Dict, cast
4
+ from uuid import UUID, uuid4
5
+
6
+ from sqlalchemy import types
7
+ from sqlmodel import (
8
+ JSON,
9
+ Column,
10
+ Field,
11
+ Integer,
12
+ col,
13
+ func,
14
+ literal,
15
+ )
16
+
17
+ from planar.db import PlanarInternalBase
18
+ from planar.modeling.mixins import TimestampMixin, timestamp_column
19
+
20
+
21
class StepStatus(str, Enum):
    """Lifecycle state of a single workflow step."""

    SUCCEEDED = "succeeded"  # finished execution without error
    RUNNING = "running"  # currently executing
    FAILED = "failed"  # encountered an error
25
+
26
+
27
class StepType(str, Enum):
    """Category of a workflow step (what kind of work it performs)."""

    COMPUTE = "compute"
    AGENT = "agent"
    RULE = "rule"
    HUMAN_IN_THE_LOOP = "human_in_the_loop"
    TOOL_CALL = "tool_call"
33
+
34
+
35
class WorkflowStatus(str, Enum):
    """Status of a workflow.

    PENDING/SUCCEEDED/FAILED are persisted in the database.  RUNNING and
    SUSPENDED are virtual: they are derived from other fields (lock state,
    wakeup_at, waiting_for_event) and are never stored.
    """

    # Persisted statuses (stored in database)
    PENDING = "pending"
    SUCCEEDED = "succeeded"
    FAILED = "failed"

    # Virtual statuses (computed from other fields, never persisted)
    RUNNING = "running"
    SUSPENDED = "suspended"
44
+
45
+
46
class Workflow(PlanarInternalBase, TimestampMixin, table=True):
    """
    Represents a workflow instance with its execution state.

    Only PENDING/SUCCEEDED/FAILED are stored in `status`; suspension is
    encoded in `wakeup_at` / `waiting_for_event`, and RUNNING is derived
    from lock state (see WorkflowStatus).
    """

    # Registered name of the workflow function to execute.
    function_name: str
    id: UUID = Field(default_factory=uuid4, primary_key=True)
    # Set when this workflow was started as a sub-workflow of another one.
    parent_id: UUID | None = Field(
        default=None, index=True, foreign_key="planar.workflow.id"
    )
    status: WorkflowStatus = Field(default=WorkflowStatus.PENDING, index=True)
    # Invocation arguments, serialized as JSON.
    args: list[Any] | None = Field(sa_column=Column(JSON))
    kwargs: Dict[str, Any] | None = Field(sa_column=Column(JSON))
    # JSON-serialized return value (set on success).
    result: Any | None = Field(sa_column=Column(JSON), default=None)
    # JSON-serialized error details (set on failure).
    error: Dict[str, Any] | None = Field(sa_column=Column(JSON), default=None)
    # When set, the workflow is suspended until this time has passed.
    wakeup_at: datetime | None = Field(default=None, nullable=True, index=True)
    # Event key this workflow is waiting for, if any
    waiting_for_event: str | None = Field(default=None, index=True)
64
+
65
+
66
class WorkflowStep(PlanarInternalBase, TimestampMixin, table=True):
    """
    Represents a single step within a workflow execution.

    Steps are keyed by (workflow_id, step_id); step_id is the step's
    sequence number inside its workflow.
    """

    step_id: int = Field(primary_key=True)
    workflow_id: UUID = Field(primary_key=True, foreign_key="planar.workflow.id")
    # Set when this step was spawned by another step (nested steps).
    parent_step_id: int | None = Field(default=None, index=True)
    function_name: str
    display_name: str | None = Field(
        default=None,
        description="Custom display name, for scenarios where we don't want to use the simplified function name as the display name",
    )
    # Steps are created in RUNNING state and transition to SUCCEEDED/FAILED.
    status: StepStatus = Field(default=StepStatus.RUNNING)
    step_type: StepType
    # Call arguments / result / error, serialized as JSON.
    args: list[Any] | None = Field(sa_column=Column(JSON))
    kwargs: Dict[str, Any] | None = Field(sa_column=Column(JSON))
    result: Any | None = Field(sa_column=Column(JSON), default=None)
    # Number of sub-steps spawned by this step.
    sub_step_count: int = Field(default=0)
    error: Dict[str, Any] | None = Field(sa_column=Column(JSON), default=None)
    # How many times this step has been re-attempted after failure.
    retry_count: int = Field(default=0)
87
+
88
+
89
class WorkflowEvent(PlanarInternalBase, table=True):
    """
    Immutable record of events that workflows might be waiting for.
    Events form an append-only log that the workflow orchestrator can use
    to identify and wake up workflows that are waiting for specific events.
    """

    # Unique identifier for this event occurrence
    id: UUID = Field(default_factory=uuid4, primary_key=True)

    # Event type identifier (e.g., "order_approved", "payment_received");
    # presumably matched against Workflow.waiting_for_event — confirm in
    # the orchestrator/event-delivery code.
    event_key: str = Field(index=True)

    # Optional association with a specific workflow
    workflow_id: UUID | None = Field(
        default=None, index=True, foreign_key="planar.workflow.id"
    )

    # Optional payload data associated with the event
    payload: Dict[str, Any] | None = Field(sa_column=Column(JSON), default=None)

    # When the event was created
    timestamp: datetime = timestamp_column(index=True)
112
+
113
+
114
class LockedResource(PlanarInternalBase, table=True):
    """
    Represents a locked resource with expiration.
    Used for workflow execution locks and other concurrency control mechanisms.
    """

    # Unique name of the locked resource (for workflow execution locks this
    # is "workflow-execution:<normalized uuid>"; see workflow_exec_lock_key).
    lock_key: str = Field(primary_key=True)

    # lock_until field is used to ensure that workflows are not stuck if a worker crashes
    # after setting the status to RUNNING, but before setting the status to SUCCESS or FAILED
    lock_until: datetime | None = Field(default=None, nullable=True, index=True)

    # Enable SQLAlchemy row version tracking to detect concurrency conflicts
    # (optimistic locking: the mapper bumps version_id on every UPDATE and
    # fails if another transaction changed the row first).
    version_id: int = Field(default=1, sa_column=Column(Integer, nullable=False))
    # cast() silences the type checker so the underlying Column object can be
    # pulled out of the SQLModel Field and used as the version counter.
    __mapper_args__ = {"version_id_col": cast(Any, version_id).sa_column}
129
+
130
+
131
__WORKFLOW_EXEC_LOCK_PREFIX = "workflow-execution:"


def workflow_exec_lock_key(workflow_id: UUID) -> str:
    """Build the LockedResource key guarding execution of *workflow_id*.

    The UUID is normalized (dashes stripped, lowercased) so keys compare
    consistently across database backends.
    """
    normalized = str(workflow_id).replace("-", "").lower()
    return f"{__WORKFLOW_EXEC_LOCK_PREFIX}{normalized}"
136
+
137
+
138
def workflow_lock_join_cond():
    """Join condition matching LockedResource rows to their Workflow rows.

    SQLite stores UUID columns as strings with the "-" characters removed,
    while databases with native UUID support may keep dashes when casting
    to text.  Normalizing with replace()/lower() keeps the comparison
    consistent across backends and matches workflow_exec_lock_key().
    """
    workflow_id_text = func.cast(col(Workflow.id), types.Text)
    normalized_id = func.lower(func.replace(workflow_id_text, "-", ""))
    expected_key = literal(__WORKFLOW_EXEC_LOCK_PREFIX) + normalized_id
    return col(LockedResource.lock_key) == expected_key
@@ -0,0 +1,96 @@
1
+ from contextlib import asynccontextmanager
2
+ from contextvars import ContextVar
3
+ from enum import Enum
4
+ from typing import Callable, Union
5
+
6
+ from pydantic import BaseModel
7
+
8
+ from planar.logging import get_logger
9
+ from planar.workflows.models import Workflow, WorkflowStep
10
+
11
+ logger = get_logger(__name__)
12
+
13
+
14
class Notification(str, Enum):
    """Kinds of events emitted by the workflow runtime to observers."""

    WORKFLOW_STARTED = "workflow-started"
    WORKFLOW_SUSPENDED = "workflow-suspended"
    WORKFLOW_RESUMED = "workflow-resumed"
    WORKFLOW_SUCCEEDED = "workflow-succeeded"
    WORKFLOW_FAILED = "workflow-failed"
    STEP_RUNNING = "step-running"
    STEP_SUCCEEDED = "step-succeeded"
    STEP_FAILED = "step-failed"
23
+
24
+
25
class WorkflowNotification(BaseModel):
    """A single notification emitted by the workflow runtime.

    `data` carries the Workflow for workflow-* notifications and the
    WorkflowStep for step-* notifications (see workflow_notify/step_notify).
    """

    kind: Notification
    data: Union[Workflow, WorkflowStep]
28
+
29
+
30
# Signature of callbacks that receive workflow/step notifications.
WorkflowNotificationCallback = Callable[[WorkflowNotification], None]

# Callback for the current async context.  Unset by default, so
# notifications are silently dropped unless a callback is installed
# (e.g. via workflow_notification_context()).
workflow_notification_callback_var: ContextVar[WorkflowNotificationCallback] = (
    ContextVar("workflow_notification_callback")
)
35
+
36
+
37
def workflow_notify(workflow: Workflow, kind: Notification):
    """Deliver a workflow-level notification to the registered callback, if any."""
    callback = workflow_notification_callback_var.get(None)
    if callback is None:
        return
    logger.debug("notifying workflow event", kind=kind, workflow_id=workflow.id)
    callback(WorkflowNotification(kind=kind, data=workflow))
42
+
43
+
44
def workflow_started(workflow: Workflow):
    """Notify observers that *workflow* has started executing."""
    return workflow_notify(workflow, Notification.WORKFLOW_STARTED)


def workflow_suspended(workflow: Workflow):
    """Notify observers that *workflow* has suspended."""
    return workflow_notify(workflow, Notification.WORKFLOW_SUSPENDED)


def workflow_resumed(workflow: Workflow):
    """Notify observers that *workflow* has resumed execution."""
    return workflow_notify(workflow, Notification.WORKFLOW_RESUMED)


def workflow_succeeded(workflow: Workflow):
    """Notify observers that *workflow* finished successfully."""
    return workflow_notify(workflow, Notification.WORKFLOW_SUCCEEDED)


def workflow_failed(workflow: Workflow):
    """Notify observers that *workflow* failed."""
    return workflow_notify(workflow, Notification.WORKFLOW_FAILED)
62
+
63
+
64
def step_notify(step: WorkflowStep, kind: Notification):
    """Deliver a step-level notification to the registered callback, if any."""
    callback = workflow_notification_callback_var.get(None)
    if callback is None:
        return
    logger.debug(
        "notifying step event",
        kind=kind,
        workflow_id=step.workflow_id,
        step_id=step.step_id,
    )
    callback(WorkflowNotification(kind=kind, data=step))
74
+
75
+
76
def step_running(step: WorkflowStep):
    """Notify observers that *step* has started running."""
    return step_notify(step, Notification.STEP_RUNNING)


def step_succeeded(step: WorkflowStep):
    """Notify observers that *step* finished successfully."""
    return step_notify(step, Notification.STEP_SUCCEEDED)


def step_failed(step: WorkflowStep):
    """Notify observers that *step* failed."""
    return step_notify(step, Notification.STEP_FAILED)
86
+
87
+
88
@asynccontextmanager
async def workflow_notification_context(callback: WorkflowNotificationCallback):
    """Install *callback* as the notification sink for the duration of the
    context, restoring the previous context state on exit."""

    token = workflow_notification_callback_var.set(callback)
    try:
        yield
    finally:
        workflow_notification_callback_var.reset(token)
@@ -0,0 +1,383 @@
1
+ from asyncio import (
2
+ FIRST_COMPLETED,
3
+ AbstractEventLoop,
4
+ CancelledError,
5
+ Task,
6
+ create_task,
7
+ get_running_loop,
8
+ sleep,
9
+ wait,
10
+ )
11
+ from contextlib import asynccontextmanager
12
+ from contextvars import ContextVar
13
+ from datetime import timedelta
14
+ from heapq import heappop, heappush
15
+ from time import monotonic
16
+ from uuid import UUID
17
+
18
+ from sqlalchemy.ext.asyncio import AsyncEngine
19
+ from sqlalchemy.orm import aliased
20
+ from sqlmodel import col, delete, exists, select
21
+
22
+ from planar.db import new_session
23
+ from planar.logging import get_logger
24
+ from planar.session import engine_var, get_engine, get_session, session_context
25
+ from planar.utils import utc_now
26
+ from planar.workflows.execution import (
27
+ _DEFAULT_LOCK_DURATION,
28
+ lock_and_execute,
29
+ workflow_result,
30
+ )
31
+ from planar.workflows.models import (
32
+ LockedResource,
33
+ Workflow,
34
+ WorkflowStatus,
35
+ workflow_lock_join_cond,
36
+ )
37
+ from planar.workflows.notifications import (
38
+ WorkflowNotificationCallback,
39
+ workflow_notification_context,
40
+ )
41
+ from planar.workflows.step_core import Suspend
42
+ from planar.workflows.tracing import trace
43
+
44
+ logger = get_logger(__name__)
45
+
46
+
47
def workflow_can_be_executed():
    """Build the SQL filter selecting workflows that are eligible to run.

    A workflow is eligible when all of the following hold:
      1. its status is PENDING;
      2. it is not suspended (wakeup_at and waiting_for_event both NULL),
         or its wakeup time has passed;
      3. it holds no lock (lock_until NULL) or the lock has expired;
      4. it has no PENDING child workflows.

    Note: utc_now() is evaluated when the expression is built, so callers
    must rebuild this condition for every poll.
    """
    ChildWorkflow = aliased(Workflow)

    # condition 1: workflow must be pending.
    is_pending = col(Workflow.status) == WorkflowStatus.PENDING

    # condition 2: not suspended, or wakeup time already passed.
    not_suspended = (
        col(Workflow.wakeup_at).is_(None)
        & col(Workflow.waiting_for_event).is_(None)
    ) | (col(Workflow.wakeup_at) < utc_now())

    # condition 3: no lock, or the lock has expired.
    lock_free = (col(LockedResource.lock_until).is_(None)) | (
        col(LockedResource.lock_until) < utc_now()
    )

    # condition 4: no pending children.
    no_pending_children = ~(
        exists().where(
            (col(ChildWorkflow.status) == WorkflowStatus.PENDING)
            & (col(ChildWorkflow.parent_id) == col(Workflow.id))
        )
    )

    return is_pending & not_suspended & lock_free & no_pending_children
80
+
81
+
82
class WorkflowOrchestrator:
    """Polls the database for runnable workflows and executes them.

    One orchestrator is bound to the current async context via `context_var`,
    so code in the same task tree can request an early poll (`poll_soon`) or
    wait for a workflow's completion (`wait_for_completion`).
    """

    # Orchestrator instance bound to the current async context.
    context_var = ContextVar["WorkflowOrchestrator"]("orchestrator")

    def __init__(self, engine: AsyncEngine):
        self.__engine = engine
        # Event loop run() was started on; used to reject reuse from another loop.
        self.__event_loop: AbstractEventLoop | None = None
        self.__running = False
        # Monotonic deadline for the next regular poll.
        self.__next_poll_time: float = 0
        # This will be managed with heapq push/pop, making it behave like a
        # priority queue. In other words, the list will always have the
        # smallest poll time at index 0
        self.__extra_polls: list[float] = []
        # keep track of workflows currently being processed.
        self.__active_workflows: dict[UUID, Task] = {}

    @staticmethod
    def get():
        """Return the context orchestrator (raises LookupError if none is set)."""
        return WorkflowOrchestrator.context_var.get()

    @staticmethod
    def is_set():
        """Return True if an orchestrator is bound to the current context."""
        return WorkflowOrchestrator.context_var.get(None) is not None

    @staticmethod
    def set(orchestrator: "WorkflowOrchestrator"):
        """Bind *orchestrator* to the current context; returns a reset token."""
        return WorkflowOrchestrator.context_var.set(orchestrator)

    @staticmethod
    def reset(token):
        """Undo a previous set() using the token it returned."""
        return WorkflowOrchestrator.context_var.reset(token)

    # NOTE(review): asynccontextmanager wraps the staticmethod object here,
    # which only works because staticmethod is directly callable on
    # Python 3.10+ — confirm the project's minimum supported version.
    @asynccontextmanager
    @staticmethod
    async def ensure_started(**run_kwargs):
        """Yield the context orchestrator, starting a temporary one if unset.

        When no orchestrator is bound, a fresh one is created from the
        context engine, its run() loop is launched as a background task, and
        both are torn down (context reset, task cancelled and awaited) on
        exit.  An already-bound orchestrator is yielded untouched.
        """
        is_set = WorkflowOrchestrator.context_var.get(None) is not None
        orchestrator = None
        tok = None
        task = None
        if not is_set:
            orchestrator = WorkflowOrchestrator(get_engine())
            task = create_task(orchestrator.run(**run_kwargs))
            tok = WorkflowOrchestrator.set(orchestrator)
        try:
            yield WorkflowOrchestrator.get()
        finally:
            if task:
                WorkflowOrchestrator.reset(tok)
                task.cancel()
                try:
                    await task
                except CancelledError:
                    pass

    async def __enqueue_suspended_workflows(
        self,
        query_limit: int,
        lock_duration: timedelta,
    ):
        """Find up to *query_limit* runnable workflows and start resume tasks.

        Also garbage-collects expired locks first.  Returns the number of
        workflows enqueued.
        """
        # exclude workflows that are currently being processed
        # or that have been enqueued for processing
        active_workflow_ids = set(self.__active_workflows.keys())

        condition = workflow_can_be_executed()
        if active_workflow_ids:
            condition &= col(Workflow.id).not_in(active_workflow_ids)
        async with new_session(self.__engine) as session:
            # delete expired locks
            async with session.begin():
                deleted = (
                    await session.exec(
                        delete(LockedResource)  # type: ignore
                        .where(col(LockedResource.lock_until) < utc_now())
                        .returning(col(LockedResource.lock_key)),
                    )
                ).all()
                await trace(
                    "delete-expired-lock",
                    deleted_count=len(deleted),
                )

            workflow_ids = (
                await session.exec(
                    select(Workflow.id)
                    .select_from(Workflow)
                    .outerjoin(LockedResource, workflow_lock_join_cond())
                    .where(condition)
                    .limit(query_limit)
                )
            ).all()

            for workflow_id in workflow_ids:
                task = create_task(
                    self.__resume_workflow(
                        workflow_id,
                        lock_duration=lock_duration,
                    )
                )
                # add the current task to the active dictionary
                self.__active_workflows[workflow_id] = task
            return len(workflow_ids)

    async def __resume_workflow(
        self,
        workflow_id: UUID,
        lock_duration: timedelta = _DEFAULT_LOCK_DURATION,
    ):
        """Load, lock, and execute one workflow; schedule follow-up polls.

        Exceptions are logged and swallowed here (GeneratorExit excepted) —
        the workflow's own failure state is handled by the execution layer.
        """
        async with session_context(self.__engine) as session:
            try:
                logger.debug("resuming workflow", workflow_id=workflow_id)
                async with session.begin():
                    # Wrap this in a transaction to ensure we hold no locks
                    # when entering "execute", which will first try to acquire
                    # the lock before starting actual execution.
                    workflow = await session.get(Workflow, workflow_id)
                    if not workflow:
                        raise ValueError(f"Workflow {workflow_id} not found")

                    parent_id = workflow.parent_id

                result = await lock_and_execute(
                    workflow,
                    lock_duration=lock_duration,
                )

                if isinstance(result, Suspend):
                    if result.wakeup_at is not None:
                        # calculate in how many seconds it is supposed to wakeup
                        interval_seconds = (
                            result.wakeup_at - utc_now()
                        ).total_seconds()
                        logger.info(
                            "workflow suspended",
                            workflow_id=workflow_id,
                            interval_seconds=interval_seconds,
                        )
                        # get current monotonic time
                        monotonic_now = monotonic()
                        # compute next poll time required to wakeup the workflow
                        next_poll_time = monotonic_now + interval_seconds
                        self.poll_soon(next_poll_time)
                        logger.info(
                            "scheduling poll",
                            workflow_id=workflow_id,
                            next_poll_time=next_poll_time,
                        )
                elif parent_id is not None:
                    # Workflow has a parent, adjust poll time.
                    # We could also call self.enqueue_workflow here, but that
                    # would be assuming that the parent workflow is ready to be
                    # executed, and I'd rather leave the decision to the query
                    # logic.
                    logger.info(
                        "adjusting poll time to run parent",
                        workflow_id=workflow_id,
                        parent_id=parent_id,
                    )
                    self.poll_soon()

            except BaseException as e:
                if isinstance(e, GeneratorExit):
                    # GeneratorExit should never be handled
                    raise
                logger.exception(
                    "exception during workflow resumption", workflow_id=workflow_id
                )
            finally:
                # remove the task from the active dictionary
                logger.debug("removing from active workflows", workflow_id=workflow_id)
                self.__active_workflows.pop(workflow_id, None)

    async def wait_for_completion(self, workflow_id: UUID):
        """Block until *workflow_id* leaves PENDING, then return its result."""
        self.poll_soon()
        session = get_session()
        async with session.begin_read():
            workflow = await session.get(Workflow, workflow_id)
            assert workflow
        while True:
            async with session.begin_read():
                await session.refresh(workflow)
            if workflow.status != WorkflowStatus.PENDING:
                return workflow_result(workflow)
            # Currently this method is only used in tests and when calling subworkflows.
            # When calling subworkflows, the parent always suspends when the child has not
            # completed, so in practice this poll won't be used a lot.
            await sleep(1)

    def poll_soon(self, time: float | None = None):
        """Schedule an extra DB poll at monotonic *time* (default: now)."""
        if time is None:
            time = monotonic()
        heappush(self.__extra_polls, time)

    async def __run(
        self,
        *,
        poll_interval: float,
        max_concurrent_workflows: int,
        lock_duration: timedelta,
    ):
        """Main polling loop; runs until the surrounding task is cancelled."""
        event_loop = get_running_loop()
        if self.__event_loop is not None and self.__event_loop != event_loop:
            raise RuntimeError("Orchestrator already started on a different event loop")
        if self.__running:
            raise RuntimeError("Orchestrator already running")
        self.__event_loop = event_loop
        self.__running = True
        orchestrator_tok = WorkflowOrchestrator.set(self)
        engine_tok = engine_var.set(self.__engine)

        # This loop will sleep for 1 second between each iteration, sending a
        # poll query to the database when current time >= self.__next_poll_time or when
        # another poll was scheduled in `self.__extra_polls`
        # self.__next_poll_time advanced by poll_interval seconds
        while True:
            sleep_seconds = 1
            # get monotonic time
            monotonic_now = monotonic()
            next_poll = self.__next_poll_time
            # check if we have any extra polls to do
            while self.__extra_polls and self.__extra_polls[0] <= monotonic_now:
                next_poll = heappop(self.__extra_polls)

            if monotonic_now >= next_poll:
                free_slots = max_concurrent_workflows - len(self.__active_workflows)
                if free_slots == 0:
                    # No free slots, wait until at least one task completes
                    #
                    # Note that we collect the active tasks in a normal set to
                    # ensure that the `asyncio.wait` coroutine object will receive
                    # the same tasks that are in the collection at the time of the call.
                    # To understand this better, consider the following scenario:

                    # - The max_concurrent_workflows is set to 1
                    # - We have 1 task in the set, meaning this branch will be taken
                    # - We pass the 1 task WeakSet to `asyncio.wait` coroutine factory,
                    #   which creates the coroutine object referencing the WeakSet
                    # - We yield back to the event loop (`await`)
                    # - Before the `asyncio.wait` coroutine has a chance to start,
                    #   the task finishes and is garbage collected, causing the WeakSet
                    #   to be empty
                    # - asyncio.wait coroutine object starts with an empty set, causing
                    #   an exception to be raised.
                    #
                    # Even though the above situation is extremely unlikely,
                    # especially in the default case of 100 max_concurrent_workflows,
                    # (and might be impossible, depending on how the order asyncio
                    # runs things), it is still a theoretical possibility from the POV
                    # of the caller, so we have to do the correct thing.
                    #
                    # Another possibility would be to surround this on a try/except, but
                    # this would be less elegant.
                    #
                    # NOTE(review): __active_workflows is a plain dict here, not a
                    # WeakSet — the comment above appears to predate that change;
                    # tasks are removed explicitly in __resume_workflow's finally.
                    logger.debug("no free slots, waiting for active tasks to complete")
                    await wait(
                        set(self.__active_workflows.values()),
                        return_when=FIRST_COMPLETED,
                    )
                    continue
                logger.debug("polling workflows", poll_time=monotonic_now)
                await self.__enqueue_suspended_workflows(free_slots, lock_duration)

                if monotonic_now >= self.__next_poll_time:
                    self.__next_poll_time = monotonic_now + poll_interval
                    logger.debug("next poll time", next_poll_time=self.__next_poll_time)
                # Not really used in practice, but tests can set poll
                # interval to < 1 second, so we handle that here
                sleep_seconds = min(1, poll_interval)
            await sleep(sleep_seconds)

        # NOTE(review): unreachable — the loop above has no break and only
        # exits via task cancellation, so this cleanup never runs.
        self.__running = False
        WorkflowOrchestrator.reset(orchestrator_tok)
        engine_var.reset(engine_tok)

    async def run(
        self,
        *,
        poll_interval: float = 300,
        max_concurrent_workflows: int = 100,
        lock_duration: timedelta = _DEFAULT_LOCK_DURATION,
        notification_callback: WorkflowNotificationCallback | None = None,
    ):
        """Run the polling loop, optionally installing a notification callback
        for the loop's lifetime."""
        if notification_callback:
            async with workflow_notification_context(notification_callback):
                await self.__run(
                    poll_interval=poll_interval,
                    max_concurrent_workflows=max_concurrent_workflows,
                    lock_duration=lock_duration,
                )
        else:
            await self.__run(
                poll_interval=poll_interval,
                max_concurrent_workflows=max_concurrent_workflows,
                lock_duration=lock_duration,
            )
374
+
375
+
376
@asynccontextmanager
async def orchestrator_context(orchestrator: WorkflowOrchestrator):
    """Bind *orchestrator* as the current context orchestrator for the
    duration of the context, restoring the previous binding on exit."""
    token = WorkflowOrchestrator.set(orchestrator)
    try:
        yield orchestrator
    finally:
        WorkflowOrchestrator.reset(token)