krons 0.1.1__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- krons/__init__.py +49 -0
- krons/agent/__init__.py +144 -0
- krons/agent/mcps/__init__.py +14 -0
- krons/agent/mcps/loader.py +287 -0
- krons/agent/mcps/wrapper.py +799 -0
- krons/agent/message/__init__.py +20 -0
- krons/agent/message/action.py +69 -0
- krons/agent/message/assistant.py +52 -0
- krons/agent/message/common.py +49 -0
- krons/agent/message/instruction.py +130 -0
- krons/agent/message/prepare_msg.py +187 -0
- krons/agent/message/role.py +53 -0
- krons/agent/message/system.py +53 -0
- krons/agent/operations/__init__.py +82 -0
- krons/agent/operations/act.py +100 -0
- krons/agent/operations/generate.py +145 -0
- krons/agent/operations/llm_reparse.py +89 -0
- krons/agent/operations/operate.py +247 -0
- krons/agent/operations/parse.py +243 -0
- krons/agent/operations/react.py +286 -0
- krons/agent/operations/specs.py +235 -0
- krons/agent/operations/structure.py +151 -0
- krons/agent/operations/utils.py +79 -0
- krons/agent/providers/__init__.py +17 -0
- krons/agent/providers/anthropic_messages.py +146 -0
- krons/agent/providers/claude_code.py +276 -0
- krons/agent/providers/gemini.py +268 -0
- krons/agent/providers/match.py +75 -0
- krons/agent/providers/oai_chat.py +174 -0
- krons/agent/third_party/__init__.py +2 -0
- krons/agent/third_party/anthropic_models.py +154 -0
- krons/agent/third_party/claude_code.py +682 -0
- krons/agent/third_party/gemini_models.py +508 -0
- krons/agent/third_party/openai_models.py +295 -0
- krons/agent/tool.py +291 -0
- krons/core/__init__.py +56 -74
- krons/core/base/__init__.py +121 -0
- krons/core/{broadcaster.py → base/broadcaster.py} +7 -3
- krons/core/{element.py → base/element.py} +13 -5
- krons/core/{event.py → base/event.py} +39 -6
- krons/core/{eventbus.py → base/eventbus.py} +3 -1
- krons/core/{flow.py → base/flow.py} +11 -4
- krons/core/{graph.py → base/graph.py} +24 -8
- krons/core/{node.py → base/node.py} +44 -19
- krons/core/{pile.py → base/pile.py} +22 -8
- krons/core/{processor.py → base/processor.py} +21 -7
- krons/core/{progression.py → base/progression.py} +3 -1
- krons/{specs → core/specs}/__init__.py +0 -5
- krons/{specs → core/specs}/adapters/dataclass_field.py +16 -8
- krons/{specs → core/specs}/adapters/pydantic_adapter.py +11 -5
- krons/{specs → core/specs}/adapters/sql_ddl.py +14 -8
- krons/{specs → core/specs}/catalog/__init__.py +2 -2
- krons/{specs → core/specs}/catalog/_audit.py +2 -2
- krons/{specs → core/specs}/catalog/_common.py +2 -2
- krons/{specs → core/specs}/catalog/_content.py +4 -4
- krons/{specs → core/specs}/catalog/_enforcement.py +3 -3
- krons/{specs → core/specs}/factory.py +5 -5
- krons/{specs → core/specs}/operable.py +8 -2
- krons/{specs → core/specs}/protocol.py +4 -2
- krons/{specs → core/specs}/spec.py +23 -11
- krons/{types → core/types}/base.py +4 -2
- krons/{types → core/types}/db_types.py +2 -2
- krons/errors.py +13 -13
- krons/protocols.py +9 -4
- krons/resource/__init__.py +89 -0
- krons/{services → resource}/backend.py +48 -22
- krons/{services → resource}/endpoint.py +28 -14
- krons/{services → resource}/hook.py +20 -7
- krons/{services → resource}/imodel.py +46 -28
- krons/{services → resource}/registry.py +26 -24
- krons/{services → resource}/utilities/rate_limited_executor.py +7 -3
- krons/{services → resource}/utilities/rate_limiter.py +3 -1
- krons/{services → resource}/utilities/resilience.py +15 -5
- krons/resource/utilities/token_calculator.py +185 -0
- krons/session/__init__.py +12 -17
- krons/session/constraints.py +70 -0
- krons/session/exchange.py +11 -3
- krons/session/message.py +3 -1
- krons/session/registry.py +35 -0
- krons/session/session.py +165 -174
- krons/utils/__init__.py +45 -0
- krons/utils/_function_arg_parser.py +99 -0
- krons/utils/_pythonic_function_call.py +249 -0
- krons/utils/_to_list.py +9 -3
- krons/utils/_utils.py +6 -2
- krons/utils/concurrency/_async_call.py +4 -2
- krons/utils/concurrency/_errors.py +3 -1
- krons/utils/concurrency/_patterns.py +3 -1
- krons/utils/concurrency/_resource_tracker.py +6 -2
- krons/utils/display.py +257 -0
- krons/utils/fuzzy/__init__.py +6 -1
- krons/utils/fuzzy/_fuzzy_match.py +14 -8
- krons/utils/fuzzy/_string_similarity.py +3 -1
- krons/utils/fuzzy/_to_dict.py +3 -1
- krons/utils/schemas/__init__.py +26 -0
- krons/utils/schemas/_breakdown_pydantic_annotation.py +131 -0
- krons/utils/schemas/_formatter.py +72 -0
- krons/utils/schemas/_minimal_yaml.py +151 -0
- krons/utils/schemas/_typescript.py +153 -0
- krons/utils/validators/__init__.py +3 -0
- krons/utils/validators/_validate_image_url.py +56 -0
- krons/work/__init__.py +126 -0
- krons/work/engine.py +333 -0
- krons/work/form.py +305 -0
- krons/{operations → work/operations}/__init__.py +7 -4
- krons/{operations → work/operations}/builder.py +1 -1
- krons/{enforcement → work/operations}/context.py +36 -5
- krons/{operations → work/operations}/flow.py +13 -5
- krons/{operations → work/operations}/node.py +45 -43
- krons/work/operations/registry.py +103 -0
- krons/{specs → work}/phrase.py +130 -13
- krons/{enforcement → work}/policy.py +3 -3
- krons/work/report.py +268 -0
- krons/work/rules/__init__.py +47 -0
- krons/{enforcement → work/rules}/common/boolean.py +3 -1
- krons/{enforcement → work/rules}/common/choice.py +9 -3
- krons/{enforcement → work/rules}/common/number.py +3 -1
- krons/{enforcement → work/rules}/common/string.py +9 -3
- krons/{enforcement → work/rules}/rule.py +1 -1
- krons/{enforcement → work/rules}/validator.py +20 -5
- krons/{enforcement → work}/service.py +16 -7
- krons/work/worker.py +266 -0
- {krons-0.1.1.dist-info → krons-0.2.0.dist-info}/METADATA +15 -1
- krons-0.2.0.dist-info/RECORD +154 -0
- krons/enforcement/__init__.py +0 -57
- krons/operations/registry.py +0 -92
- krons/services/__init__.py +0 -81
- krons-0.1.1.dist-info/RECORD +0 -101
- /krons/{specs → core/specs}/adapters/__init__.py +0 -0
- /krons/{specs → core/specs}/adapters/_utils.py +0 -0
- /krons/{specs → core/specs}/adapters/factory.py +0 -0
- /krons/{types → core/types}/__init__.py +0 -0
- /krons/{types → core/types}/_sentinel.py +0 -0
- /krons/{types → core/types}/identity.py +0 -0
- /krons/{services → resource}/utilities/__init__.py +0 -0
- /krons/{services → resource}/utilities/header_factory.py +0 -0
- /krons/{enforcement → work/rules}/common/__init__.py +0 -0
- /krons/{enforcement → work/rules}/common/mapping.py +0 -0
- /krons/{enforcement → work/rules}/common/model.py +0 -0
- /krons/{enforcement → work/rules}/registry.py +0 -0
- {krons-0.1.1.dist-info → krons-0.2.0.dist-info}/WHEEL +0 -0
- {krons-0.1.1.dist-info → krons-0.2.0.dist-info}/licenses/LICENSE +0 -0
krons/work/__init__.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
"""Work system - Declarative workflow orchestration.
|
|
5
|
+
|
|
6
|
+
Two complementary patterns at different abstraction levels:
|
|
7
|
+
|
|
8
|
+
**Report** (artifact state):
|
|
9
|
+
Declarative workflow definition via form_assignments DSL.
|
|
10
|
+
Tracks one specific job's progress through the workflow.
|
|
11
|
+
Dependencies implicit from field names.
|
|
12
|
+
|
|
13
|
+
class HiringBriefReport(Report):
|
|
14
|
+
role_classification: RoleClassification | None = None
|
|
15
|
+
strategic_context: StrategicContext | None = None
|
|
16
|
+
|
|
17
|
+
assignment: str = "job_input -> executive_summary"
|
|
18
|
+
|
|
19
|
+
form_assignments: list[str] = [
|
|
20
|
+
"classifier: job_input -> role_classification | api:fast",
|
|
21
|
+
"strategist: job_input, role_classification -> strategic_context | api:synthesis",
|
|
22
|
+
]
|
|
23
|
+
|
|
24
|
+
**Worker** (execution capability):
|
|
25
|
+
Functional station that can execute forms.
|
|
26
|
+
Has internal DAG for retries/error handling.
|
|
27
|
+
Matches to forms via resource hints.
|
|
28
|
+
|
|
29
|
+
class ClassifierWorker(Worker):
|
|
30
|
+
@work(assignment="job_input -> role_classification")
|
|
31
|
+
async def classify(self, job_input, **kwargs):
|
|
32
|
+
return await self.llm.chat(**kwargs)
|
|
33
|
+
|
|
34
|
+
Core concepts:
|
|
35
|
+
- Phrase: Typed operation signature (inputs -> outputs)
|
|
36
|
+
- Form: Data binding + scheduling (stateful artifact)
|
|
37
|
+
- Report: Multi-step workflow declaration (stateful artifact)
|
|
38
|
+
- Worker: Execution capability (stateless station)
|
|
39
|
+
- WorkerEngine: Execution driver
|
|
40
|
+
"""
|
|
41
|
+
|
|
42
|
+
from __future__ import annotations
|
|
43
|
+
|
|
44
|
+
from typing import TYPE_CHECKING
|
|
45
|
+
|
|
46
|
+
# Lazy import mapping
# PEP 562 lazy-loading table consumed by this module's __getattr__:
# public attribute name -> (module path, attribute name inside that module).
# NOTE: in every entry the attribute name equals the key, so the table is
# effectively "which submodule exports this name".
_LAZY_IMPORTS: dict[str, tuple[str, str]] = {
    # engine
    "WorkerEngine": ("krons.work.engine", "WorkerEngine"),
    "WorkerTask": ("krons.work.engine", "WorkerTask"),
    # form
    "Form": ("krons.work.form", "Form"),
    "ParsedAssignment": ("krons.work.form", "ParsedAssignment"),
    "parse_assignment": ("krons.work.form", "parse_assignment"),
    "parse_full_assignment": ("krons.work.form", "parse_full_assignment"),
    # phrase
    "CrudOperation": ("krons.work.phrase", "CrudOperation"),
    "CrudPattern": ("krons.work.phrase", "CrudPattern"),
    "Phrase": ("krons.work.phrase", "Phrase"),
    "phrase": ("krons.work.phrase", "phrase"),
    # report
    "Report": ("krons.work.report", "Report"),
    # worker
    "Worker": ("krons.work.worker", "Worker"),
    "WorkConfig": ("krons.work.worker", "WorkConfig"),
    "WorkLink": ("krons.work.worker", "WorkLink"),
    "work": ("krons.work.worker", "work"),
    "worklink": ("krons.work.worker", "worklink"),
}

# Cache of already-resolved attributes: each submodule import happens at most
# once per process; later lookups are a plain dict hit in __getattr__.
_LOADED: dict[str, object] = {}
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def __getattr__(name: str) -> object:
    """Resolve a public attribute lazily (PEP 562), importing on first use.

    Cached results are served from ``_LOADED``; unknown names raise
    ``AttributeError`` exactly like a regular missing module attribute.
    """
    try:
        return _LOADED[name]
    except KeyError:
        pass

    if name not in _LAZY_IMPORTS:
        raise AttributeError(f"module 'krons.work' has no attribute {name!r}")

    from importlib import import_module

    mod_path, attr = _LAZY_IMPORTS[name]
    resolved = getattr(import_module(mod_path), attr)
    _LOADED[name] = resolved
    return resolved
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def __dir__() -> list[str]:
    """Expose the lazily-loaded public names to ``dir()`` and autocomplete."""
    return [*__all__]
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
# TYPE_CHECKING block for static analysis
# Mirrors _LAZY_IMPORTS so type checkers and IDEs see the real symbols
# without triggering the runtime imports.
if TYPE_CHECKING:
    from krons.work.engine import WorkerEngine, WorkerTask
    from krons.work.form import (
        Form,
        ParsedAssignment,
        parse_assignment,
        parse_full_assignment,
    )
    from krons.work.phrase import CrudOperation, CrudPattern, Phrase, phrase
    from krons.work.report import Report
    from krons.work.worker import WorkConfig, Worker, WorkLink, work, worklink

# Public API (ASCII-sorted); also the source of truth for __dir__().
__all__ = (
    "CrudOperation",
    "CrudPattern",
    "Form",
    "ParsedAssignment",
    "Phrase",
    "Report",
    "WorkConfig",
    "WorkLink",
    "Worker",
    "WorkerEngine",
    "WorkerTask",
    "parse_assignment",
    "parse_full_assignment",
    "phrase",
    "work",
    "worklink",
)
|
krons/work/engine.py
ADDED
|
@@ -0,0 +1,333 @@
|
|
|
1
|
+
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
"""WorkerEngine - Execution driver for Worker workflows.
|
|
5
|
+
|
|
6
|
+
The engine manages task execution, following worklinks to traverse the
|
|
7
|
+
workflow graph defined by @work and @worklink decorators.
|
|
8
|
+
|
|
9
|
+
Example:
|
|
10
|
+
worker = FileCoder()
|
|
11
|
+
engine = WorkerEngine(worker=worker, refresh_time=0.3)
|
|
12
|
+
|
|
13
|
+
# Add a task starting at a specific function
|
|
14
|
+
task = await engine.add_task(
|
|
15
|
+
form=my_form,
|
|
16
|
+
task_function="start_task",
|
|
17
|
+
task_max_steps=20,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
# Run until all tasks complete
|
|
21
|
+
await engine.execute()
|
|
22
|
+
|
|
23
|
+
# Or run indefinitely
|
|
24
|
+
await engine.execute_lasting()
|
|
25
|
+
"""
|
|
26
|
+
|
|
27
|
+
from __future__ import annotations
|
|
28
|
+
|
|
29
|
+
import asyncio
|
|
30
|
+
from dataclasses import dataclass, field
|
|
31
|
+
from typing import TYPE_CHECKING, Any
|
|
32
|
+
from uuid import UUID, uuid4
|
|
33
|
+
|
|
34
|
+
from krons.utils import concurrency
|
|
35
|
+
|
|
36
|
+
if TYPE_CHECKING:
|
|
37
|
+
from .worker import Worker
|
|
38
|
+
|
|
39
|
+
__all__ = ("WorkerEngine", "WorkerTask")
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@dataclass
class WorkerTask:
    """A task being executed by the engine.

    Attributes:
        id: Unique task identifier
        function: Current method name to execute
        kwargs: Arguments for the method
        status: PENDING, PROCESSING, COMPLETED, FAILED
        result: Final result when completed
        error: Exception if failed
        max_steps: Max workflow steps before stopping
        current_step: Current step count
        history: List of (function, result) tuples for debugging
    """

    # NOTE: field order defines the generated __init__ signature — do not reorder.
    id: UUID = field(default_factory=uuid4)
    # Name of the worker method to run on the next step; the engine rewrites
    # this in place when it follows a worklink.
    function: str = ""
    kwargs: dict[str, Any] = field(default_factory=dict)
    # Plain-string state machine (not an enum); values listed in the docstring.
    status: str = "PENDING"
    # Result of the most recent step; overwritten each step.
    result: Any = None
    error: Exception | None = None
    max_steps: int = 100
    current_step: int = 0
    # Grows by one (function, result) pair per executed step; unbounded.
    history: list[tuple[str, Any]] = field(default_factory=list)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class WorkerEngine:
    """Execution driver for Worker workflows.

    Manages a queue of tasks, executing them through the workflow graph
    defined by the worker's @work and @worklink decorators.

    Attributes:
        worker: The Worker instance to execute
        refresh_time: Seconds between processing cycles
        tasks: Dict of active tasks by ID
        _task_queue: Async queue for pending work
        _stopped: Stop flag

    Example:
        engine = WorkerEngine(worker=my_worker)
        task = await engine.add_task(
            form=my_form,
            task_function="entry_point",
        )
        await engine.execute()
    """

    def __init__(
        self,
        worker: Worker,
        refresh_time: float = 0.1,
        max_concurrent: int = 10,
    ) -> None:
        """Initialize the engine.

        Args:
            worker: Worker instance with @work/@worklink methods
            refresh_time: Seconds between processing cycles
            max_concurrent: Max concurrent task executions
        """
        self.worker = worker
        self.refresh_time = refresh_time
        self.max_concurrent = max_concurrent

        self.tasks: dict[UUID, WorkerTask] = {}
        # Queue holds task IDs, not tasks: a task is re-queued by ID after
        # each step, and its state lives in self.tasks.
        self._task_queue: asyncio.Queue[UUID] = asyncio.Queue()
        self._stopped = False
        # Caps in-flight _process_task bodies; doubles the per-cycle batch cap
        # in _process_cycle (belt and braces).
        self._semaphore = asyncio.Semaphore(max_concurrent)

    async def add_task(
        self,
        task_function: str,
        task_max_steps: int = 100,
        **kwargs: Any,
    ) -> WorkerTask:
        """Add a new task to the execution queue.

        Args:
            task_function: Entry method name to start execution
            task_max_steps: Max workflow steps before stopping
            **kwargs: Arguments for the entry method

        Returns:
            WorkerTask instance (can be monitored for status)

        Raises:
            ValueError: If task_function not found in worker
        """
        # NOTE(review): relies on Worker._work_methods, a private mapping of
        # method name -> (bound method, config) — confirm against worker.py.
        if task_function not in self.worker._work_methods:
            raise ValueError(
                f"Method '{task_function}' not found. "
                f"Available: {list(self.worker._work_methods.keys())}"
            )

        task = WorkerTask(
            function=task_function,
            kwargs=kwargs,
            max_steps=task_max_steps,
        )
        self.tasks[task.id] = task
        await self._task_queue.put(task.id)

        return task

    async def execute(self) -> None:
        """Execute all queued tasks until queue is empty.

        Processes tasks through their workflow graphs, following worklinks.
        Returns when all tasks are completed or failed.
        """
        self._stopped = False
        await self.worker.start()

        # The emptiness check is safe because _process_cycle awaits its task
        # group: any continuation is re-queued inside _process_task before the
        # cycle returns, so in-flight work cannot be lost here.
        while not self._stopped and not self._task_queue.empty():
            await self._process_cycle()
            await concurrency.sleep(self.refresh_time)

    async def execute_lasting(self) -> None:
        """Execute indefinitely until stop() is called.

        Useful for long-running worker services that continuously
        process incoming tasks.
        """
        self._stopped = False
        await self.worker.start()

        while not self._stopped:
            await self._process_cycle()
            await concurrency.sleep(self.refresh_time)

    async def stop(self) -> None:
        """Stop the execution loop."""
        # Loop exits at the next cycle boundary; the current cycle finishes.
        self._stopped = True
        await self.worker.stop()

    async def _process_cycle(self) -> None:
        """Process one cycle of tasks."""
        # Collect tasks to process this cycle
        tasks_to_process: list[UUID] = []

        # get_nowait is still wrapped in try/except: another consumer could
        # drain the queue between the empty() check and the get.
        while (
            not self._task_queue.empty() and len(tasks_to_process) < self.max_concurrent
        ):
            try:
                task_id = self._task_queue.get_nowait()
                tasks_to_process.append(task_id)
            except asyncio.QueueEmpty:
                break

        if not tasks_to_process:
            return

        # Process tasks concurrently; the task group joins before returning,
        # which execute() depends on for its termination check.
        async with concurrency.create_task_group() as tg:
            for task_id in tasks_to_process:
                tg.start_soon(self._process_task, task_id)

    async def _process_task(self, task_id: UUID) -> None:
        """Process a single task through one workflow step."""
        async with self._semaphore:
            task = self.tasks.get(task_id)
            if task is None or task.status in ("COMPLETED", "FAILED"):
                return

            # Check step limit
            # NOTE: exhausting max_steps is reported as COMPLETED, not FAILED —
            # callers cannot distinguish truncation from normal completion
            # except via task.current_step.
            if task.current_step >= task.max_steps:
                task.status = "COMPLETED"
                return

            task.status = "PROCESSING"
            task.current_step += 1

            try:
                # Get the work method and config
                method, config = self.worker._work_methods[task.function]

                # Prepare kwargs with form binding
                call_kwargs = dict(task.kwargs)
                if config.form_param_key and config.assignment:
                    # presumably kwargs carry a form ID keyed by
                    # config.form_param_key — TODO confirm against worker.py
                    form_id = call_kwargs.get(config.form_param_key)
                    if form_id and form_id in self.worker.forms:
                        form = self.worker.forms[form_id]
                        # Bind input fields from form to kwargs
                        for input_field in form.input_fields:
                            if input_field in form.available_data:
                                call_kwargs[input_field] = form.available_data[
                                    input_field
                                ]

                # Execute with optional timeout
                if config.timeout:
                    result = await asyncio.wait_for(
                        method(**call_kwargs),
                        timeout=config.timeout,
                    )
                else:
                    result = await method(**call_kwargs)

                # Record history
                task.history.append((task.function, result))
                task.result = result

                # Follow worklinks
                next_tasks = await self._follow_links(task, result)

                if next_tasks:
                    # Continue with next step(s)
                    # Mutates the task in place and re-queues the SAME task ID;
                    # any additional matching links are dropped (no fan-out).
                    for next_func, next_kwargs in next_tasks:
                        task.function = next_func
                        task.kwargs = next_kwargs
                        task.status = "PENDING"
                        await self._task_queue.put(task_id)
                        break  # Only follow first matching link for now
                else:
                    # No more links - task complete
                    task.status = "COMPLETED"

            except Exception as e:
                # Broad catch is the task failure boundary: the exception is
                # stored on the task rather than propagated into the cycle.
                task.status = "FAILED"
                task.error = e

    async def _follow_links(
        self, task: WorkerTask, result: Any
    ) -> list[tuple[str, dict[str, Any]]]:
        """Follow worklinks from current method.

        Args:
            task: Current task
            result: Result from current method

        Returns:
            List of (next_function, kwargs) tuples for matching links
        """
        next_tasks: list[tuple[str, dict[str, Any]]] = []

        for link in self.worker.get_links_from(task.function):
            try:
                # Get the handler method from worker and call it
                handler = getattr(self.worker, link.handler_name)
                next_kwargs = await handler(result)

                # None means skip this edge
                if next_kwargs is not None:
                    next_tasks.append((link.to_, next_kwargs))

            except Exception:
                # Link handler failed - skip this edge
                # Deliberate best-effort: a broken edge handler must not fail
                # the task; the error is silently dropped (not recorded).
                continue

        return next_tasks

    def get_task(self, task_id: UUID) -> WorkerTask | None:
        """Get task by ID."""
        return self.tasks.get(task_id)

    def get_tasks_by_status(self, status: str) -> list[WorkerTask]:
        """Get all tasks with given status."""
        return [t for t in self.tasks.values() if t.status == status]

    @property
    def pending_tasks(self) -> list[WorkerTask]:
        """Tasks waiting to be processed."""
        return self.get_tasks_by_status("PENDING")

    @property
    def processing_tasks(self) -> list[WorkerTask]:
        """Tasks currently being processed."""
        return self.get_tasks_by_status("PROCESSING")

    @property
    def completed_tasks(self) -> list[WorkerTask]:
        """Tasks that completed successfully."""
        return self.get_tasks_by_status("COMPLETED")

    @property
    def failed_tasks(self) -> list[WorkerTask]:
        """Tasks that failed with errors."""
        return self.get_tasks_by_status("FAILED")

    def status_counts(self) -> dict[str, int]:
        """Count tasks by status."""
        counts: dict[str, int] = {}
        for task in self.tasks.values():
            counts[task.status] = counts.get(task.status, 0) + 1
        return counts

    def __repr__(self) -> str:
        """Summarize worker name, task total, and per-status counts."""
        counts = self.status_counts()
        total = len(self.tasks)
        return f"WorkerEngine(worker={self.worker.name}, tasks={total}, {counts})"