loom_core-0.1.0-py3-none-any.whl

Files changed (50)
  1. loom_core-0.1.0.dist-info/METADATA +342 -0
  2. loom_core-0.1.0.dist-info/RECORD +50 -0
  3. loom_core-0.1.0.dist-info/WHEEL +5 -0
  4. loom_core-0.1.0.dist-info/entry_points.txt +2 -0
  5. loom_core-0.1.0.dist-info/licenses/LICENSE +21 -0
  6. loom_core-0.1.0.dist-info/top_level.txt +1 -0
  7. src/__init__.py +45 -0
  8. src/cli/__init__.py +5 -0
  9. src/cli/cli.py +246 -0
  10. src/common/activity.py +30 -0
  11. src/common/config.py +9 -0
  12. src/common/errors.py +64 -0
  13. src/common/workflow.py +56 -0
  14. src/core/__init__.py +0 -0
  15. src/core/compiled.py +41 -0
  16. src/core/context.py +256 -0
  17. src/core/engine.py +106 -0
  18. src/core/handle.py +166 -0
  19. src/core/logger.py +60 -0
  20. src/core/runner.py +53 -0
  21. src/core/state.py +96 -0
  22. src/core/worker.py +147 -0
  23. src/core/workflow.py +168 -0
  24. src/database/__init__.py +0 -0
  25. src/database/db.py +716 -0
  26. src/decorators/__init__.py +0 -0
  27. src/decorators/activity.py +126 -0
  28. src/decorators/workflow.py +46 -0
  29. src/lib/progress.py +109 -0
  30. src/lib/utils.py +25 -0
  31. src/migrations/down/001_setup_pragma.sql +5 -0
  32. src/migrations/down/002_create_workflows.sql +3 -0
  33. src/migrations/down/003.create_events.sql +3 -0
  34. src/migrations/down/004.create_tasks.sql +3 -0
  35. src/migrations/down/005.create_indexes.sql +5 -0
  36. src/migrations/down/006_auto_update_triggers.sql +4 -0
  37. src/migrations/down/007_create_logs.sql +1 -0
  38. src/migrations/up/001_setup_pragma.sql +11 -0
  39. src/migrations/up/002_create_workflows.sql +15 -0
  40. src/migrations/up/003_create_events.sql +13 -0
  41. src/migrations/up/004_create_tasks.sql +23 -0
  42. src/migrations/up/005_create_indexes.sql +11 -0
  43. src/migrations/up/006_auto_update_triggers.sql +19 -0
  44. src/migrations/up/007_create_logs.sql +10 -0
  45. src/schemas/__init__.py +0 -0
  46. src/schemas/activity.py +13 -0
  47. src/schemas/database.py +17 -0
  48. src/schemas/events.py +70 -0
  49. src/schemas/tasks.py +58 -0
  50. src/schemas/workflow.py +33 -0
src/core/state.py ADDED
@@ -0,0 +1,96 @@
+ import inspect
+ from contextlib import asynccontextmanager
+ from typing import Any, Callable, Generic
+
+ from ..common.errors import StopReplay
+ from ..schemas.workflow import InputT, StateT
+
+
+ class StateProxy(Generic[InputT, StateT]):
+     """
+     Proxy class for managing state interactions.
+     Provides methods to get and set state values in the database.
+     """
+
+     _data: StateT
+     _ctx: Any
+     _batch = None
+
+     def __init__(self, ctx: Any, data: StateT) -> None:
+         self._data = data
+         self._ctx = ctx
+
+     def __getattr__(self, name: str) -> Any:
+         return self._data.get(name)
+
+     def get(self, name: str, default: Any = None) -> Any:
+         return self._data.get(name, default)
+
+     def snapshot(self) -> StateT:
+         return self._data
+
+     async def set(self, name: str, value: Any) -> None:
+         event = self._ctx._peek()
+         if event and event["type"] == "STATE_SET" and event["payload"]["key"] == name:
+             # Already recorded: consume the replayed event and move on.
+             self._ctx._consume()
+             return
+
+         event = ("STATE_SET", {"key": name, "value": value})
+
+         if self._batch is not None:
+             self._batch.append(event)
+         else:
+             await self._ctx._append_event(*event)
+             raise StopReplay
+
+     async def update(self, **updaters: Callable[..., Any]) -> None:
+         """
+         Example:
+             await ctx.state.update(
+                 count=lambda c: (c or 0) + 1,
+                 name=lambda _: "Satadeep",
+             )
+         """
+         event = self._ctx._peek()
+
+         if event and event["type"] == "STATE_UPDATE":
+             payload = event["payload"]
+             if set(payload["values"].keys()) == set(updaters.keys()):
+                 self._ctx._consume()
+                 return
+
+         new_values = {}
+         for key, fn in updaters.items():
+             old = self._data.get(key)
+             result = fn(old)
+             # Updaters may be plain callables (as in the example above)
+             # or coroutine functions; await only when needed.
+             if inspect.isawaitable(result):
+                 result = await result
+             new_values[key] = result
+
+         event = ("STATE_UPDATE", {"values": new_values})
+
+         if self._batch is not None:
+             self._batch.append(event)
+         else:
+             await self._ctx._append_event(*event)
+             raise StopReplay
+
+     @asynccontextmanager
+     async def batch(self):
+         """
+         Context manager to batch multiple state writes, deferring them to a
+         single replay interruption when the block exits.
+         Example:
+             async with ctx.state.batch():
+                 await ctx.state.set("a", 1)
+                 await ctx.state.set("b", 2)
+                 await ctx.state.update(
+                     count=lambda c: (c or 0) + 1,
+                 )
+         """
+         if self._batch is not None:
+             raise RuntimeError("Nested batches are not supported.")
+         self._batch = []  # type: ignore
+
+         try:
+             yield
+         finally:
+             pending, self._batch = self._batch, None  # type: ignore
+             for event_type, payload in pending:
+                 await self._ctx._append_event(event_type, payload)
+             # Raise only if new events were written; a fully replayed batch
+             # appends nothing and must not interrupt replay again.
+             if pending:
+                 raise StopReplay
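
For orientation, here is a minimal sketch of how a step might drive this proxy, assuming the engine exposes it as `ctx.state` (the step shape and `ctx` wiring are illustrative, taken from the docstrings above rather than from this diff):

    # Hypothetical step body; `ctx.state` is assumed to be a StateProxy.
    async def tally(self, ctx):
        # Outside a batch, each write appends one event and raises
        # StopReplay so the engine can restart deterministic replay.
        await ctx.state.set("status", "running")

        # Inside a batch, writes are buffered and flushed together when
        # the block exits, with a single StopReplay at the end.
        async with ctx.state.batch():
            await ctx.state.set("attempts", 1)
            await ctx.state.update(count=lambda c: (c or 0) + 1)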
src/core/worker.py ADDED
@@ -0,0 +1,147 @@
+ """Distributed workflow worker with graceful shutdown and concurrent task execution."""
+
+ import asyncio
+ import signal
+ import sys
+ from datetime import datetime, timezone
+
+ from .runner import run_once
+
+
+ class WorkflowWorker:
+     """Distributed worker for processing workflow tasks with concurrency control.
+
+     Features:
+     - Concurrent task processing with configurable worker count
+     - Graceful shutdown on SIGINT/SIGTERM
+     - Automatic retry on transient failures
+     - Configurable polling interval
+     - Health monitoring and statistics
+     """
+
+     def __init__(
+         self,
+         workers: int = 4,
+         poll_interval: float = 0.5,
+         shutdown_timeout: float = 30.0,
+     ):
+         """Initialize the workflow worker.
+
+         Args:
+             workers: Number of concurrent task processors (default: 4)
+             poll_interval: Seconds between task queue polls (default: 0.5)
+             shutdown_timeout: Max seconds to wait for graceful shutdown (default: 30)
+         """
+         self.workers = workers
+         self.poll_interval = poll_interval
+         self.shutdown_timeout = shutdown_timeout
+         self._shutdown_event = asyncio.Event()
+         self._tasks: set[asyncio.Task] = set()
+         self._stats: dict[str, int | datetime | None] = {
+             "tasks_completed": 0,
+             "tasks_failed": 0,
+             "started_at": None,
+         }
+
+     async def start(self) -> None:
+         """Start the worker and process tasks until shutdown signal."""
+         self._stats["started_at"] = datetime.now(timezone.utc)
+
+         # Register signal handlers for graceful shutdown
+         self._register_signal_handlers()
+
+         print(f"Workflow worker started with {self.workers} concurrent workers")
+         print(f"Polling interval: {self.poll_interval}s")
+
+         try:
+             # Start worker tasks
+             for i in range(self.workers):
+                 task = asyncio.create_task(
+                     self._worker_loop(worker_id=i), name=f"worker-{i}"
+                 )
+                 self._tasks.add(task)
+
+             # Wait for shutdown signal
+             await self._shutdown_event.wait()
+
+         finally:
+             await self._graceful_shutdown()
+
+     async def _worker_loop(self, worker_id: int) -> None:
+         """Main processing loop for a single worker.
+
+         Args:
+             worker_id: Unique identifier for this worker instance
+         """
+         while not self._shutdown_event.is_set():
+             try:
+                 # Try to claim and execute a task
+                 task_executed = await run_once()
+
+                 if task_executed:
+                     self._stats["tasks_completed"] = int(self._stats["tasks_completed"]) + 1  # type: ignore
+                 else:
+                     # No tasks available, wait before polling again
+                     await asyncio.sleep(self.poll_interval)
+
+             except asyncio.CancelledError:
+                 # Graceful shutdown requested
+                 break
+             except Exception as e:
+                 self._stats["tasks_failed"] = int(self._stats["tasks_failed"]) + 1  # type: ignore
+                 print(f"Worker {worker_id} error: {e}")
+                 # Brief pause before retrying
+                 await asyncio.sleep(1.0)
+
+     async def _graceful_shutdown(self) -> None:
+         """Gracefully shut down all worker tasks."""
+         print("\nShutting down gracefully...")
+
+         # Cancel all worker tasks
+         for task in self._tasks:
+             task.cancel()
+
+         # Wait for tasks to complete with timeout
+         try:
+             await asyncio.wait_for(
+                 asyncio.gather(*self._tasks, return_exceptions=True),
+                 timeout=self.shutdown_timeout,
+             )
+         except asyncio.TimeoutError:
+             print(f"Shutdown timeout reached ({self.shutdown_timeout}s)")
+
+         self._print_stats()
+         print("Worker shutdown complete")
+
+     def _register_signal_handlers(self) -> None:
+         """Register handlers for SIGINT and SIGTERM."""
+
+         def signal_handler(sig, frame):
+             print(f"\nReceived signal {signal.Signals(sig).name}")
+             self._shutdown_event.set()
+
+         # Handle Ctrl+C and termination signals
+         signal.signal(signal.SIGINT, signal_handler)
+         if sys.platform != "win32":
+             signal.signal(signal.SIGTERM, signal_handler)
+
+     def _print_stats(self) -> None:
+         """Print worker statistics."""
+         started_at = self._stats["started_at"]
+         if started_at and isinstance(started_at, datetime):
+             uptime = datetime.now(timezone.utc) - started_at
+             print("\nWorker Statistics:")
+             print(f"  Uptime: {uptime}")
+             print(f"  Tasks completed: {self._stats['tasks_completed']}")
+             print(f"  Tasks failed: {self._stats['tasks_failed']}")
+
+
+ async def start_worker(workers: int = 4, poll_interval: float = 0.5) -> None:
+     """Start a workflow worker process.
+
+     Args:
+         workers: Number of concurrent task processors
+         poll_interval: Seconds between task queue polls
+     """
+     worker = WorkflowWorker(workers=workers, poll_interval=poll_interval)
+     await worker.start()
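
A minimal entry point for this worker might look like the sketch below; the import path is an assumption (this diff ships the code under `src/`), and only `WorkflowWorker` and `start_worker` themselves appear above:

    import asyncio

    # Assumed import path; adjust to wherever the package installs.
    from src.core.worker import WorkflowWorker, start_worker

    async def main() -> None:
        # Construct the worker directly for full control over shutdown...
        worker = WorkflowWorker(workers=8, poll_interval=0.2, shutdown_timeout=10.0)
        await worker.start()

    if __name__ == "__main__":
        # ...or use the convenience wrapper with defaults:
        # asyncio.run(start_worker(workers=4))
        asyncio.run(main())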
src/core/workflow.py ADDED
@@ -0,0 +1,168 @@
+ import inspect
+ from typing import Generic, List, TypeVar
+
+ from ..schemas.workflow import InputT, StateT, Step
+ from .compiled import CompiledWorkflow
+
+ # For better type inference in classmethods
+ Self = TypeVar("Self", bound="Workflow")
+
+
+ class Workflow(Generic[InputT, StateT]):
+     """
+     Abstract base class for defining typed workflows.
+
+     This class provides the foundation for creating deterministic, durable workflows
+     with strong typing support. Workflows are parameterized by:
+     - InputT: The immutable input type for the workflow
+     - StateT: The mutable state type that evolves during execution
+
+     Example:
+         @dataclass
+         class MyInput:
+             user_id: str
+
+         @dataclass
+         class MyState:
+             processed: bool = False
+             result: str = ""
+
+         @loom.workflow
+         class MyWorkflow(Workflow[MyInput, MyState]):
+             @loom.step
+             async def process(self, ctx: WorkflowContext[MyState]):
+                 # Workflow logic here
+                 pass
+     """
+
+     @classmethod
+     def compile(cls) -> CompiledWorkflow[InputT, StateT]:
+         """
+         Compile the workflow definition directly from the class.
+
+         This is a convenience method that allows calling SomeWorkflow.compile()
+         instead of SomeWorkflow().compile().
+
+         Returns:
+             CompiledWorkflow: A compiled, immutable workflow definition ready for execution
+
+         Raises:
+             ValueError: If the workflow has no steps defined or is malformed
+         """
+         # Create instance and delegate to instance method
+         instance = cls()
+         return instance._compile_instance()
+
+     def _compile_instance(self) -> CompiledWorkflow[InputT, StateT]:
+         """
+         Internal instance compilation method.
+
+         This method introspects the class to extract:
+         - Workflow metadata (name, description, version)
+         - Step definitions and their order
+         - Validation of workflow structure
+
+         Returns:
+             CompiledWorkflow: A compiled, immutable workflow definition ready for execution
+
+         Raises:
+             ValueError: If the workflow has no steps defined or is malformed
+         """
+         # Extract workflow metadata with sensible defaults
+         name = self._get_workflow_name()
+         description = self._get_workflow_description()
+         version = self._get_workflow_version()
+         module = self._get_workflow_module()
+
+         # Discover and validate workflow steps
+         steps = self._discover_workflow_steps()
+
+         # Validate workflow structure
+         self._validate_workflow(steps)
+
+         return CompiledWorkflow[InputT, StateT](
+             name=name,
+             description=description,
+             version=version,
+             module=module,
+             steps=steps,
+         )
+
+     def _get_workflow_name(self) -> str:
+         """Get the workflow name from metadata or class name."""
+         return getattr(self, "_workflow_name", self.__class__.__name__)
+
+     def _get_workflow_description(self) -> str:
+         """Get the workflow description from metadata or docstring."""
+         explicit_desc = getattr(self, "_workflow_description", "")
+         if explicit_desc:
+             return explicit_desc
+
+         # Fallback to class docstring first line
+         docstring = self.__class__.__doc__
+         if docstring:
+             return docstring.strip().split("\n")[0]
+
+         return ""
+
+     def _get_workflow_version(self) -> str:
+         """Get the workflow version from metadata."""
+         return getattr(self, "_workflow_version", "1.0.0")
+
+     def _get_workflow_module(self) -> str:
+         """Get the workflow module path."""
+         return getattr(self, "_workflow_module", self.__class__.__module__)
+
+     def _discover_workflow_steps(self) -> List[Step]:
+         """
+         Discover all workflow steps by introspecting decorated methods.
+
+         Returns:
+             List[Step]: Ordered list of step definitions
+         """
+         steps: List[Step] = []
+         # Get all callable attributes that are decorated as steps
+         for attr_name in self.__class__.__dict__:
+             if attr_name.startswith("_"):
+                 continue
+
+             attr = getattr(self, attr_name)
+             if callable(attr) and hasattr(attr, "_step_name"):
+                 step_info: Step = {
+                     "name": getattr(attr, "_step_name"),
+                     "description": getattr(attr, "_step_description", ""),
+                     "fn": attr.__name__,
+                 }
+                 steps.append(step_info)
+
+         return steps
+
+     def _validate_workflow(self, steps: List[Step]) -> None:
+         """
+         Validate the workflow structure and step signatures.
+
+         Args:
+             steps (List[Step]): The list of discovered steps
+
+         Raises:
+             ValueError: If the workflow is malformed
+         """
+         if not steps:
+             raise ValueError(
+                 f"Workflow '{self.__class__.__name__}' must have at least one step"
+             )
+
+         seen = set()
+         for step in steps:
+             name = step["name"]
+             if name in seen:
+                 raise ValueError(f"Duplicate step name: {name}")
+             seen.add(name)
+
+             fn = getattr(self, step["fn"])
+             sig = inspect.signature(fn)
+             params = list(sig.parameters.values())
+
+             # fn is a bound method, so `self` is already excluded from the
+             # signature; the single remaining parameter should be `ctx`.
+             if len(params) != 1:
+                 raise ValueError(
+                     f"Step '{name}' must have signature (self, ctx), got {sig}"
+                 )
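
Putting the class to work, a definition-and-compile round trip might look like this sketch, adapted from the Workflow docstring (the `loom.workflow` and `loom.step` decorator names come from that docstring; everything else here is assumed for illustration):

    from dataclasses import dataclass

    @dataclass
    class MyInput:
        user_id: str

    @dataclass
    class MyState:
        processed: bool = False

    @loom.workflow
    class MyWorkflow(Workflow[MyInput, MyState]):
        """Process a single user."""  # first line becomes the description

        @loom.step
        async def process(self, ctx):
            # Exactly one parameter besides `self`, as _validate_workflow enforces.
            ...

    # compile() instantiates the class, discovers decorated methods in
    # definition order, validates them, and returns a CompiledWorkflow.
    compiled = MyWorkflow.compile()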