krons 0.1.1__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- krons/__init__.py +49 -0
- krons/agent/__init__.py +144 -0
- krons/agent/mcps/__init__.py +14 -0
- krons/agent/mcps/loader.py +287 -0
- krons/agent/mcps/wrapper.py +799 -0
- krons/agent/message/__init__.py +20 -0
- krons/agent/message/action.py +69 -0
- krons/agent/message/assistant.py +52 -0
- krons/agent/message/common.py +49 -0
- krons/agent/message/instruction.py +130 -0
- krons/agent/message/prepare_msg.py +187 -0
- krons/agent/message/role.py +53 -0
- krons/agent/message/system.py +53 -0
- krons/agent/operations/__init__.py +82 -0
- krons/agent/operations/act.py +100 -0
- krons/agent/operations/generate.py +145 -0
- krons/agent/operations/llm_reparse.py +89 -0
- krons/agent/operations/operate.py +247 -0
- krons/agent/operations/parse.py +243 -0
- krons/agent/operations/react.py +286 -0
- krons/agent/operations/specs.py +235 -0
- krons/agent/operations/structure.py +151 -0
- krons/agent/operations/utils.py +79 -0
- krons/agent/providers/__init__.py +17 -0
- krons/agent/providers/anthropic_messages.py +146 -0
- krons/agent/providers/claude_code.py +276 -0
- krons/agent/providers/gemini.py +268 -0
- krons/agent/providers/match.py +75 -0
- krons/agent/providers/oai_chat.py +174 -0
- krons/agent/third_party/__init__.py +2 -0
- krons/agent/third_party/anthropic_models.py +154 -0
- krons/agent/third_party/claude_code.py +682 -0
- krons/agent/third_party/gemini_models.py +508 -0
- krons/agent/third_party/openai_models.py +295 -0
- krons/agent/tool.py +291 -0
- krons/core/__init__.py +56 -74
- krons/core/base/__init__.py +121 -0
- krons/core/{broadcaster.py → base/broadcaster.py} +7 -3
- krons/core/{element.py → base/element.py} +13 -5
- krons/core/{event.py → base/event.py} +39 -6
- krons/core/{eventbus.py → base/eventbus.py} +3 -1
- krons/core/{flow.py → base/flow.py} +11 -4
- krons/core/{graph.py → base/graph.py} +24 -8
- krons/core/{node.py → base/node.py} +44 -19
- krons/core/{pile.py → base/pile.py} +22 -8
- krons/core/{processor.py → base/processor.py} +21 -7
- krons/core/{progression.py → base/progression.py} +3 -1
- krons/{specs → core/specs}/__init__.py +0 -5
- krons/{specs → core/specs}/adapters/dataclass_field.py +16 -8
- krons/{specs → core/specs}/adapters/pydantic_adapter.py +11 -5
- krons/{specs → core/specs}/adapters/sql_ddl.py +14 -8
- krons/{specs → core/specs}/catalog/__init__.py +2 -2
- krons/{specs → core/specs}/catalog/_audit.py +2 -2
- krons/{specs → core/specs}/catalog/_common.py +2 -2
- krons/{specs → core/specs}/catalog/_content.py +4 -4
- krons/{specs → core/specs}/catalog/_enforcement.py +3 -3
- krons/{specs → core/specs}/factory.py +5 -5
- krons/{specs → core/specs}/operable.py +8 -2
- krons/{specs → core/specs}/protocol.py +4 -2
- krons/{specs → core/specs}/spec.py +23 -11
- krons/{types → core/types}/base.py +4 -2
- krons/{types → core/types}/db_types.py +2 -2
- krons/errors.py +13 -13
- krons/protocols.py +9 -4
- krons/resource/__init__.py +89 -0
- krons/{services → resource}/backend.py +48 -22
- krons/{services → resource}/endpoint.py +28 -14
- krons/{services → resource}/hook.py +20 -7
- krons/{services → resource}/imodel.py +46 -28
- krons/{services → resource}/registry.py +26 -24
- krons/{services → resource}/utilities/rate_limited_executor.py +7 -3
- krons/{services → resource}/utilities/rate_limiter.py +3 -1
- krons/{services → resource}/utilities/resilience.py +15 -5
- krons/resource/utilities/token_calculator.py +185 -0
- krons/session/__init__.py +12 -17
- krons/session/constraints.py +70 -0
- krons/session/exchange.py +11 -3
- krons/session/message.py +3 -1
- krons/session/registry.py +35 -0
- krons/session/session.py +165 -174
- krons/utils/__init__.py +45 -0
- krons/utils/_function_arg_parser.py +99 -0
- krons/utils/_pythonic_function_call.py +249 -0
- krons/utils/_to_list.py +9 -3
- krons/utils/_utils.py +6 -2
- krons/utils/concurrency/_async_call.py +4 -2
- krons/utils/concurrency/_errors.py +3 -1
- krons/utils/concurrency/_patterns.py +3 -1
- krons/utils/concurrency/_resource_tracker.py +6 -2
- krons/utils/display.py +257 -0
- krons/utils/fuzzy/__init__.py +6 -1
- krons/utils/fuzzy/_fuzzy_match.py +14 -8
- krons/utils/fuzzy/_string_similarity.py +3 -1
- krons/utils/fuzzy/_to_dict.py +3 -1
- krons/utils/schemas/__init__.py +26 -0
- krons/utils/schemas/_breakdown_pydantic_annotation.py +131 -0
- krons/utils/schemas/_formatter.py +72 -0
- krons/utils/schemas/_minimal_yaml.py +151 -0
- krons/utils/schemas/_typescript.py +153 -0
- krons/utils/validators/__init__.py +3 -0
- krons/utils/validators/_validate_image_url.py +56 -0
- krons/work/__init__.py +115 -0
- krons/work/engine.py +333 -0
- krons/work/form.py +242 -0
- krons/{operations → work/operations}/__init__.py +7 -4
- krons/{operations → work/operations}/builder.py +1 -1
- krons/{enforcement → work/operations}/context.py +36 -5
- krons/{operations → work/operations}/flow.py +13 -5
- krons/{operations → work/operations}/node.py +45 -43
- krons/work/operations/registry.py +103 -0
- krons/work/report.py +268 -0
- krons/work/rules/__init__.py +47 -0
- krons/{enforcement → work/rules}/common/boolean.py +3 -1
- krons/{enforcement → work/rules}/common/choice.py +9 -3
- krons/{enforcement → work/rules}/common/number.py +3 -1
- krons/{enforcement → work/rules}/common/string.py +9 -3
- krons/{enforcement → work/rules}/rule.py +1 -1
- krons/{enforcement → work/rules}/validator.py +20 -5
- krons/work/worker.py +266 -0
- {krons-0.1.1.dist-info → krons-0.2.1.dist-info}/METADATA +15 -1
- krons-0.2.1.dist-info/RECORD +151 -0
- krons/enforcement/__init__.py +0 -57
- krons/enforcement/policy.py +0 -80
- krons/enforcement/service.py +0 -370
- krons/operations/registry.py +0 -92
- krons/services/__init__.py +0 -81
- krons/specs/phrase.py +0 -405
- krons-0.1.1.dist-info/RECORD +0 -101
- /krons/{specs → core/specs}/adapters/__init__.py +0 -0
- /krons/{specs → core/specs}/adapters/_utils.py +0 -0
- /krons/{specs → core/specs}/adapters/factory.py +0 -0
- /krons/{types → core/types}/__init__.py +0 -0
- /krons/{types → core/types}/_sentinel.py +0 -0
- /krons/{types → core/types}/identity.py +0 -0
- /krons/{services → resource}/utilities/__init__.py +0 -0
- /krons/{services → resource}/utilities/header_factory.py +0 -0
- /krons/{enforcement → work/rules}/common/__init__.py +0 -0
- /krons/{enforcement → work/rules}/common/mapping.py +0 -0
- /krons/{enforcement → work/rules}/common/model.py +0 -0
- /krons/{enforcement → work/rules}/registry.py +0 -0
- {krons-0.1.1.dist-info → krons-0.2.1.dist-info}/WHEEL +0 -0
- {krons-0.1.1.dist-info → krons-0.2.1.dist-info}/licenses/LICENSE +0 -0
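Most of this release is a reorganization rather than new behavior: `specs` and `types` move under `core`, `services` becomes `resource`, and `enforcement` plus `operations` are folded into the new `work` package (alongside the new `agent` tree). A rough sketch of the module paths implied by the renames above; whether 0.2.1 ships compatibility shims at the old locations is not visible in this diff, so treat the old paths as gone and the import lines below as illustrative only.

# Module paths implied by the file moves listed above (0.1.1 -> 0.2.1).
# Class-level re-exports are not shown in this section, so only module
# imports are sketched here.
from krons.core.specs import spec           # was krons.specs.spec
from krons.core.types import base           # was krons.types.base
from krons.resource import imodel           # was krons.services.imodel
from krons.work.rules import rule           # was krons.enforcement.rule
from krons.work.operations import builder   # was krons.operations.builder
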
krons/work/__init__.py
ADDED
@@ -0,0 +1,115 @@
+# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
+# SPDX-License-Identifier: Apache-2.0
+
+"""Work system - Declarative workflow orchestration.
+
+Two complementary patterns at different abstraction levels:
+
+**Report** (artifact state):
+    Declarative workflow definition via form_assignments DSL.
+    Tracks one specific job's progress through the workflow.
+    Dependencies implicit from field names.
+
+    class HiringBriefReport(Report):
+        role_classification: RoleClassification | None = None
+        strategic_context: StrategicContext | None = None
+
+        assignment: str = "job_input -> executive_summary"
+
+        form_assignments: list[str] = [
+            "classifier: job_input -> role_classification | api:fast",
+            "strategist: job_input, role_classification -> strategic_context | api:synthesis",
+        ]
+
+**Worker** (execution capability):
+    Functional station that can execute forms.
+    Has internal DAG for retries/error handling.
+    Matches to forms via resource hints.
+
+    class ClassifierWorker(Worker):
+        @work(assignment="job_input -> role_classification")
+        async def classify(self, job_input, **kwargs):
+            return await self.llm.chat(**kwargs)
+
+Core concepts:
+- Form: Data binding + scheduling (stateful artifact)
+- Report: Multi-step workflow declaration (stateful artifact)
+- Worker: Execution capability (stateless station)
+- WorkerEngine: Execution driver
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+# Lazy import mapping
+_LAZY_IMPORTS: dict[str, tuple[str, str]] = {
+    # engine
+    "WorkerEngine": ("krons.work.engine", "WorkerEngine"),
+    "WorkerTask": ("krons.work.engine", "WorkerTask"),
+    # form
+    "Form": ("krons.work.form", "Form"),
+    "ParsedAssignment": ("krons.work.form", "ParsedAssignment"),
+    "parse_assignment": ("krons.work.form", "parse_assignment"),
+    "parse_full_assignment": ("krons.work.form", "parse_full_assignment"),
+    # report
+    "Report": ("krons.work.report", "Report"),
+    # worker
+    "Worker": ("krons.work.worker", "Worker"),
+    "WorkConfig": ("krons.work.worker", "WorkConfig"),
+    "WorkLink": ("krons.work.worker", "WorkLink"),
+    "work": ("krons.work.worker", "work"),
+    "worklink": ("krons.work.worker", "worklink"),
+}
+
+_LOADED: dict[str, object] = {}
+
+
+def __getattr__(name: str) -> object:
+    """Lazy import attributes on first access."""
+    if name in _LOADED:
+        return _LOADED[name]
+
+    if name in _LAZY_IMPORTS:
+        from importlib import import_module
+
+        module_name, attr_name = _LAZY_IMPORTS[name]
+        module = import_module(module_name)
+        value = getattr(module, attr_name)
+        _LOADED[name] = value
+        return value
+
+    raise AttributeError(f"module 'krons.work' has no attribute {name!r}")
+
+
+def __dir__() -> list[str]:
+    """Return all available attributes for autocomplete."""
+    return list(__all__)
+
+
+# TYPE_CHECKING block for static analysis
+if TYPE_CHECKING:
+    from krons.work.engine import WorkerEngine, WorkerTask
+    from krons.work.form import (
+        Form,
+        ParsedAssignment,
+        parse_assignment,
+        parse_full_assignment,
+    )
+    from krons.work.report import Report
+    from krons.work.worker import WorkConfig, Worker, WorkLink, work, worklink
+
+__all__ = (
+    "Form",
+    "ParsedAssignment",
+    "Report",
+    "WorkConfig",
+    "WorkLink",
+    "Worker",
+    "WorkerEngine",
+    "WorkerTask",
+    "parse_assignment",
+    "parse_full_assignment",
+    "work",
+    "worklink",
+)
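Note: the `_LAZY_IMPORTS` table above means nothing under `krons.work` is imported until first attribute access, while the `TYPE_CHECKING` block keeps static analyzers aware of the same names. A minimal usage sketch of that surface, reusing the assignment DSL string from the module docstring; it assumes only the names in `__all__` above and the `parse_full_assignment` behavior shown in the `krons/work/form.py` diff further down.

# First attribute access goes through krons.work.__getattr__, imports the
# owning submodule, and caches the resolved object in _LOADED.
from krons.work import parse_full_assignment

parsed = parse_full_assignment(
    "classifier: job_input -> role_classification | api:fast"
)
print(parsed.branch)    # "classifier"
print(parsed.inputs)    # ["job_input"]
print(parsed.outputs)   # ["role_classification"]
print(parsed.resource)  # "api:fast"
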
krons/work/engine.py
ADDED
@@ -0,0 +1,333 @@
+# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
+# SPDX-License-Identifier: Apache-2.0
+
+"""WorkerEngine - Execution driver for Worker workflows.
+
+The engine manages task execution, following worklinks to traverse the
+workflow graph defined by @work and @worklink decorators.
+
+Example:
+    worker = FileCoder()
+    engine = WorkerEngine(worker=worker, refresh_time=0.3)
+
+    # Add a task starting at a specific function
+    task = await engine.add_task(
+        form=my_form,
+        task_function="start_task",
+        task_max_steps=20,
+    )
+
+    # Run until all tasks complete
+    await engine.execute()
+
+    # Or run indefinitely
+    await engine.execute_lasting()
+"""
+
+from __future__ import annotations
+
+import asyncio
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Any
+from uuid import UUID, uuid4
+
+from krons.utils import concurrency
+
+if TYPE_CHECKING:
+    from .worker import Worker
+
+__all__ = ("WorkerEngine", "WorkerTask")
+
+
+@dataclass
+class WorkerTask:
+    """A task being executed by the engine.
+
+    Attributes:
+        id: Unique task identifier
+        function: Current method name to execute
+        kwargs: Arguments for the method
+        status: PENDING, PROCESSING, COMPLETED, FAILED
+        result: Final result when completed
+        error: Exception if failed
+        max_steps: Max workflow steps before stopping
+        current_step: Current step count
+        history: List of (function, result) tuples for debugging
+    """
+
+    id: UUID = field(default_factory=uuid4)
+    function: str = ""
+    kwargs: dict[str, Any] = field(default_factory=dict)
+    status: str = "PENDING"
+    result: Any = None
+    error: Exception | None = None
+    max_steps: int = 100
+    current_step: int = 0
+    history: list[tuple[str, Any]] = field(default_factory=list)
+
+
+class WorkerEngine:
+    """Execution driver for Worker workflows.
+
+    Manages a queue of tasks, executing them through the workflow graph
+    defined by the worker's @work and @worklink decorators.
+
+    Attributes:
+        worker: The Worker instance to execute
+        refresh_time: Seconds between processing cycles
+        tasks: Dict of active tasks by ID
+        _task_queue: Async queue for pending work
+        _stopped: Stop flag
+
+    Example:
+        engine = WorkerEngine(worker=my_worker)
+        task = await engine.add_task(
+            form=my_form,
+            task_function="entry_point",
+        )
+        await engine.execute()
+    """
+
+    def __init__(
+        self,
+        worker: Worker,
+        refresh_time: float = 0.1,
+        max_concurrent: int = 10,
+    ) -> None:
+        """Initialize the engine.
+
+        Args:
+            worker: Worker instance with @work/@worklink methods
+            refresh_time: Seconds between processing cycles
+            max_concurrent: Max concurrent task executions
+        """
+        self.worker = worker
+        self.refresh_time = refresh_time
+        self.max_concurrent = max_concurrent
+
+        self.tasks: dict[UUID, WorkerTask] = {}
+        self._task_queue: asyncio.Queue[UUID] = asyncio.Queue()
+        self._stopped = False
+        self._semaphore = asyncio.Semaphore(max_concurrent)
+
+    async def add_task(
+        self,
+        task_function: str,
+        task_max_steps: int = 100,
+        **kwargs: Any,
+    ) -> WorkerTask:
+        """Add a new task to the execution queue.
+
+        Args:
+            task_function: Entry method name to start execution
+            task_max_steps: Max workflow steps before stopping
+            **kwargs: Arguments for the entry method
+
+        Returns:
+            WorkerTask instance (can be monitored for status)
+
+        Raises:
+            ValueError: If task_function not found in worker
+        """
+        if task_function not in self.worker._work_methods:
+            raise ValueError(
+                f"Method '{task_function}' not found. "
+                f"Available: {list(self.worker._work_methods.keys())}"
+            )
+
+        task = WorkerTask(
+            function=task_function,
+            kwargs=kwargs,
+            max_steps=task_max_steps,
+        )
+        self.tasks[task.id] = task
+        await self._task_queue.put(task.id)
+
+        return task
+
+    async def execute(self) -> None:
+        """Execute all queued tasks until queue is empty.
+
+        Processes tasks through their workflow graphs, following worklinks.
+        Returns when all tasks are completed or failed.
+        """
+        self._stopped = False
+        await self.worker.start()
+
+        while not self._stopped and not self._task_queue.empty():
+            await self._process_cycle()
+            await concurrency.sleep(self.refresh_time)
+
+    async def execute_lasting(self) -> None:
+        """Execute indefinitely until stop() is called.
+
+        Useful for long-running worker services that continuously
+        process incoming tasks.
+        """
+        self._stopped = False
+        await self.worker.start()
+
+        while not self._stopped:
+            await self._process_cycle()
+            await concurrency.sleep(self.refresh_time)
+
+    async def stop(self) -> None:
+        """Stop the execution loop."""
+        self._stopped = True
+        await self.worker.stop()
+
+    async def _process_cycle(self) -> None:
+        """Process one cycle of tasks."""
+        # Collect tasks to process this cycle
+        tasks_to_process: list[UUID] = []
+
+        while (
+            not self._task_queue.empty() and len(tasks_to_process) < self.max_concurrent
+        ):
+            try:
+                task_id = self._task_queue.get_nowait()
+                tasks_to_process.append(task_id)
+            except asyncio.QueueEmpty:
+                break
+
+        if not tasks_to_process:
+            return
+
+        # Process tasks concurrently
+        async with concurrency.create_task_group() as tg:
+            for task_id in tasks_to_process:
+                tg.start_soon(self._process_task, task_id)
+
+    async def _process_task(self, task_id: UUID) -> None:
+        """Process a single task through one workflow step."""
+        async with self._semaphore:
+            task = self.tasks.get(task_id)
+            if task is None or task.status in ("COMPLETED", "FAILED"):
+                return
+
+            # Check step limit
+            if task.current_step >= task.max_steps:
+                task.status = "COMPLETED"
+                return
+
+            task.status = "PROCESSING"
+            task.current_step += 1
+
+            try:
+                # Get the work method and config
+                method, config = self.worker._work_methods[task.function]
+
+                # Prepare kwargs with form binding
+                call_kwargs = dict(task.kwargs)
+                if config.form_param_key and config.assignment:
+                    form_id = call_kwargs.get(config.form_param_key)
+                    if form_id and form_id in self.worker.forms:
+                        form = self.worker.forms[form_id]
+                        # Bind input fields from form to kwargs
+                        for input_field in form.input_fields:
+                            if input_field in form.available_data:
+                                call_kwargs[input_field] = form.available_data[
+                                    input_field
+                                ]
+
+                # Execute with optional timeout
+                if config.timeout:
+                    result = await asyncio.wait_for(
+                        method(**call_kwargs),
+                        timeout=config.timeout,
+                    )
+                else:
+                    result = await method(**call_kwargs)
+
+                # Record history
+                task.history.append((task.function, result))
+                task.result = result
+
+                # Follow worklinks
+                next_tasks = await self._follow_links(task, result)
+
+                if next_tasks:
+                    # Continue with next step(s)
+                    for next_func, next_kwargs in next_tasks:
+                        task.function = next_func
+                        task.kwargs = next_kwargs
+                        task.status = "PENDING"
+                        await self._task_queue.put(task_id)
+                        break  # Only follow first matching link for now
+                else:
+                    # No more links - task complete
+                    task.status = "COMPLETED"
+
+            except Exception as e:
+                task.status = "FAILED"
+                task.error = e
+
+    async def _follow_links(
+        self, task: WorkerTask, result: Any
+    ) -> list[tuple[str, dict[str, Any]]]:
+        """Follow worklinks from current method.
+
+        Args:
+            task: Current task
+            result: Result from current method
+
+        Returns:
+            List of (next_function, kwargs) tuples for matching links
+        """
+        next_tasks: list[tuple[str, dict[str, Any]]] = []
+
+        for link in self.worker.get_links_from(task.function):
+            try:
+                # Get the handler method from worker and call it
+                handler = getattr(self.worker, link.handler_name)
+                next_kwargs = await handler(result)
+
+                # None means skip this edge
+                if next_kwargs is not None:
+                    next_tasks.append((link.to_, next_kwargs))
+
+            except Exception:
+                # Link handler failed - skip this edge
+                continue
+
+        return next_tasks
+
+    def get_task(self, task_id: UUID) -> WorkerTask | None:
+        """Get task by ID."""
+        return self.tasks.get(task_id)
+
+    def get_tasks_by_status(self, status: str) -> list[WorkerTask]:
+        """Get all tasks with given status."""
+        return [t for t in self.tasks.values() if t.status == status]
+
+    @property
+    def pending_tasks(self) -> list[WorkerTask]:
+        """Tasks waiting to be processed."""
+        return self.get_tasks_by_status("PENDING")
+
+    @property
+    def processing_tasks(self) -> list[WorkerTask]:
+        """Tasks currently being processed."""
+        return self.get_tasks_by_status("PROCESSING")
+
+    @property
+    def completed_tasks(self) -> list[WorkerTask]:
+        """Tasks that completed successfully."""
+        return self.get_tasks_by_status("COMPLETED")
+
+    @property
+    def failed_tasks(self) -> list[WorkerTask]:
+        """Tasks that failed with errors."""
+        return self.get_tasks_by_status("FAILED")
+
+    def status_counts(self) -> dict[str, int]:
+        """Count tasks by status."""
+        counts: dict[str, int] = {}
+        for task in self.tasks.values():
+            counts[task.status] = counts.get(task.status, 0) + 1
+        return counts
+
+    def __repr__(self) -> str:
+        counts = self.status_counts()
+        total = len(self.tasks)
+        return f"WorkerEngine(worker={self.worker.name}, tasks={total}, {counts})"
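The engine above only drives execution; the `@work`/`@worklink` decorators and the `Worker` base class live in `krons/work/worker.py`, which this diff lists (+266 lines) but does not reproduce in this section. A sketch of the intended wiring, based on the docstring examples above; the `FileCoder` worker, its entry method, and its no-argument construction are assumptions for illustration, not the package's actual API.

import asyncio

from krons.work import Worker, WorkerEngine, work


class FileCoder(Worker):
    # Hypothetical worker; the real @work options are defined in
    # krons/work/worker.py (not shown here).
    @work(assignment="instruction -> patch")
    async def start_task(self, instruction: str | None = None, **kwargs):
        return {"patch": f"proposed change for: {instruction}"}


async def main() -> None:
    engine = WorkerEngine(worker=FileCoder(), refresh_time=0.3, max_concurrent=4)

    # add_task validates the entry point against worker._work_methods and
    # enqueues a WorkerTask; extra kwargs are forwarded to the entry method.
    task = await engine.add_task(task_function="start_task", instruction="rename foo")

    await engine.execute()  # drains the queue, following worklinks until done
    print(task.status, task.result)


asyncio.run(main())
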
krons/work/form.py
ADDED
@@ -0,0 +1,242 @@
+# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
+# SPDX-License-Identifier: Apache-2.0
+
+"""Form - Data binding and scheduling for work units.
+
+A Form represents an instantiated work unit with:
+- Data binding (input values)
+- Execution state tracking (filled, workable)
+
+Forms are the stateful scheduling layer for Operations.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any
+
+from pydantic import Field
+
+from krons.core import Element
+
+__all__ = ("Form", "ParsedAssignment", "parse_assignment", "parse_full_assignment")
+
+
+@dataclass
+class ParsedAssignment:
+    """Parsed form assignment with all components.
+
+    Attributes:
+        branch: Branch/worker name (e.g., "classifier1")
+        inputs: Input field names
+        outputs: Output field names
+        resource: Resource hint (e.g., "api:fast")
+        raw: Original assignment string
+    """
+
+    branch: str | None
+    inputs: list[str]
+    outputs: list[str]
+    resource: str | None
+    raw: str
+
+
+def parse_assignment(assignment: str) -> tuple[list[str], list[str]]:
+    """Parse 'inputs -> outputs' assignment DSL (simple form).
+
+    Args:
+        assignment: DSL string like "a, b -> c, d"
+
+    Returns:
+        Tuple of (input_fields, output_fields)
+
+    Raises:
+        ValueError: If assignment format is invalid
+    """
+    parsed = parse_full_assignment(assignment)
+    return parsed.inputs, parsed.outputs
+
+
+def parse_full_assignment(assignment: str) -> ParsedAssignment:
+    """Parse full assignment DSL with branch and resource hints.
+
+    Format: "branch: inputs -> outputs | resource"
+
+    Examples:
+        "a, b -> c"                           # Simple
+        "classifier: job -> role | api:fast"  # Full
+        "writer: context -> summary"          # Branch, no resource
+
+    Args:
+        assignment: DSL string
+
+    Returns:
+        ParsedAssignment with all components
+
+    Raises:
+        ValueError: If format is invalid
+    """
+    raw = assignment.strip()
+    branch = None
+    resource = None
+
+    # Extract resource hint (after |)
+    if "|" in raw:
+        main_part, resource_part = raw.rsplit("|", 1)
+        resource = resource_part.strip()
+        raw = main_part.strip()
+
+    # Extract branch name (before :)
+    if ":" in raw:
+        # Check it's not just inside the field list
+        colon_idx = raw.find(":")
+        arrow_idx = raw.find("->")
+        if arrow_idx == -1 or colon_idx < arrow_idx:
+            branch_part, raw = raw.split(":", 1)
+            branch = branch_part.strip()
+            raw = raw.strip()
+
+    # Parse inputs -> outputs
+    if "->" not in raw:
+        raise ValueError(f"Invalid assignment syntax (missing '->'): {assignment}")
+
+    parts = raw.split("->")
+    if len(parts) != 2:
+        raise ValueError(f"Invalid assignment syntax: {assignment}")
+
+    inputs = [f.strip() for f in parts[0].split(",") if f.strip()]
+    outputs = [f.strip() for f in parts[1].split(",") if f.strip()]
+
+    return ParsedAssignment(
+        branch=branch,
+        inputs=inputs,
+        outputs=outputs,
+        resource=resource,
+        raw=assignment,
+    )
+
+
+class Form(Element):
+    """Data binding container for work units.
+
+    A Form binds input data and tracks execution state.
+
+    Assignment DSL supports full format:
+        "branch: inputs -> outputs | resource"
+
+    Examples:
+        "a, b -> c"                           # Simple
+        "classifier: job -> role | api:fast"  # Full with branch and resource
+        "writer: context -> summary"          # Branch, no resource
+
+    Attributes:
+        assignment: DSL string 'branch: inputs -> outputs | resource'
+        branch: Worker/branch name for routing
+        resource: Resource hint for capability matching
+        input_fields: Fields required as inputs
+        output_fields: Fields produced as outputs
+        available_data: Current data values
+        output: Execution result
+        filled: Whether form has been executed
+    """
+
+    assignment: str = Field(
+        default="",
+        description="Assignment DSL: 'branch: inputs -> outputs | resource'",
+    )
+    branch: str | None = Field(
+        default=None,
+        description="Worker/branch name for routing",
+    )
+    resource: str | None = Field(
+        default=None,
+        description="Resource hint (e.g., 'api:fast')",
+    )
+    input_fields: list[str] = Field(default_factory=list)
+    output_fields: list[str] = Field(default_factory=list)
+    available_data: dict[str, Any] = Field(default_factory=dict)
+    output: Any = Field(default=None)
+    filled: bool = Field(default=False)
+
+    def model_post_init(self, _: Any) -> None:
+        """Parse assignment to derive fields if not already set."""
+        if self.assignment and not self.input_fields and not self.output_fields:
+            parsed = parse_full_assignment(self.assignment)
+            self.input_fields = parsed.inputs
+            self.output_fields = parsed.outputs
+            if parsed.branch and self.branch is None:
+                self.branch = parsed.branch
+            if parsed.resource and self.resource is None:
+                self.resource = parsed.resource
+
+    def is_workable(self) -> bool:
+        """Check if form is ready for execution.
+
+        Returns:
+            True if all inputs available and not already filled
+        """
+        if self.filled:
+            return False
+
+        for field in self.input_fields:
+            if field not in self.available_data:
+                return False
+            if self.available_data[field] is None:
+                return False
+
+        return True
+
+    def get_inputs(self) -> dict[str, Any]:
+        """Extract input data for execution.
+
+        Returns:
+            Dict of input field values
+        """
+        return {
+            f: self.available_data[f]
+            for f in self.input_fields
+            if f in self.available_data
+        }
+
+    def fill(self, **data: Any) -> None:
+        """Add data to available_data.
+
+        Args:
+            **data: Field values to add
+        """
+        self.available_data.update(data)
+
+    def set_output(self, output: Any) -> None:
+        """Mark form as filled with output.
+
+        Args:
+            output: Execution result
+        """
+        self.output = output
+        self.filled = True
+
+        # Extract output field values from result
+        if output is not None:
+            for field in self.output_fields:
+                if hasattr(output, field):
+                    self.available_data[field] = getattr(output, field)
+                elif isinstance(output, dict) and field in output:
+                    self.available_data[field] = output[field]
+
+    def get_output_data(self) -> dict[str, Any]:
+        """Extract output field values.
+
+        Returns:
+            Dict mapping output field names to values
+        """
+        result = {}
+        for field in self.output_fields:
+            if field in self.available_data:
+                result[field] = self.available_data[field]
+        return result
+
+    def __repr__(self) -> str:
+        status = (
+            "filled" if self.filled else ("ready" if self.is_workable() else "pending")
+        )
+        return f"Form('{self.assignment}', {status})"
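A short lifecycle sketch for `Form`, using only the methods defined above (`fill`, `is_workable`, `get_inputs`, `set_output`, `get_output_data`); it assumes `Element` (from `krons.core`) is a pydantic model whose own fields all have defaults, so keyword construction works as shown, and the sample data values are invented for illustration.

from krons.work.form import Form

form = Form(assignment="classifier: job_input -> role_classification | api:fast")
print(form.branch, form.resource)  # "classifier" "api:fast" (parsed in model_post_init)
print(form.is_workable())          # False: "job_input" not yet in available_data

form.fill(job_input={"title": "Staff Engineer", "location": "remote"})
print(form.is_workable())          # True: every input field present and non-None
print(form.get_inputs())           # {"job_input": {...}}

# Stand-in for a worker/LLM result; any object or dict with matching
# output field names is copied into available_data by set_output().
form.set_output({"role_classification": "senior_ic"})
print(form.get_output_data())      # {"role_classification": "senior_ic"}
print(repr(form))                  # Form('classifier: job_input -> role_classification | api:fast', filled)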