aethergraph 0.1.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aethergraph/__init__.py +49 -0
- aethergraph/config/__init__.py +0 -0
- aethergraph/config/config.py +121 -0
- aethergraph/config/context.py +16 -0
- aethergraph/config/llm.py +26 -0
- aethergraph/config/loader.py +60 -0
- aethergraph/config/runtime.py +9 -0
- aethergraph/contracts/errors/errors.py +44 -0
- aethergraph/contracts/services/artifacts.py +142 -0
- aethergraph/contracts/services/channel.py +72 -0
- aethergraph/contracts/services/continuations.py +23 -0
- aethergraph/contracts/services/eventbus.py +12 -0
- aethergraph/contracts/services/kv.py +24 -0
- aethergraph/contracts/services/llm.py +17 -0
- aethergraph/contracts/services/mcp.py +22 -0
- aethergraph/contracts/services/memory.py +108 -0
- aethergraph/contracts/services/resume.py +28 -0
- aethergraph/contracts/services/state_stores.py +33 -0
- aethergraph/contracts/services/wakeup.py +28 -0
- aethergraph/core/execution/base_scheduler.py +77 -0
- aethergraph/core/execution/forward_scheduler.py +777 -0
- aethergraph/core/execution/global_scheduler.py +634 -0
- aethergraph/core/execution/retry_policy.py +22 -0
- aethergraph/core/execution/step_forward.py +411 -0
- aethergraph/core/execution/step_result.py +18 -0
- aethergraph/core/execution/wait_types.py +72 -0
- aethergraph/core/graph/graph_builder.py +192 -0
- aethergraph/core/graph/graph_fn.py +219 -0
- aethergraph/core/graph/graph_io.py +67 -0
- aethergraph/core/graph/graph_refs.py +154 -0
- aethergraph/core/graph/graph_spec.py +115 -0
- aethergraph/core/graph/graph_state.py +59 -0
- aethergraph/core/graph/graphify.py +128 -0
- aethergraph/core/graph/interpreter.py +145 -0
- aethergraph/core/graph/node_handle.py +33 -0
- aethergraph/core/graph/node_spec.py +46 -0
- aethergraph/core/graph/node_state.py +63 -0
- aethergraph/core/graph/task_graph.py +747 -0
- aethergraph/core/graph/task_node.py +82 -0
- aethergraph/core/graph/utils.py +37 -0
- aethergraph/core/graph/visualize.py +239 -0
- aethergraph/core/runtime/ad_hoc_context.py +61 -0
- aethergraph/core/runtime/base_service.py +153 -0
- aethergraph/core/runtime/bind_adapter.py +42 -0
- aethergraph/core/runtime/bound_memory.py +69 -0
- aethergraph/core/runtime/execution_context.py +220 -0
- aethergraph/core/runtime/graph_runner.py +349 -0
- aethergraph/core/runtime/lifecycle.py +26 -0
- aethergraph/core/runtime/node_context.py +203 -0
- aethergraph/core/runtime/node_services.py +30 -0
- aethergraph/core/runtime/recovery.py +159 -0
- aethergraph/core/runtime/run_registration.py +33 -0
- aethergraph/core/runtime/runtime_env.py +157 -0
- aethergraph/core/runtime/runtime_registry.py +32 -0
- aethergraph/core/runtime/runtime_services.py +224 -0
- aethergraph/core/runtime/wakeup_watcher.py +40 -0
- aethergraph/core/tools/__init__.py +10 -0
- aethergraph/core/tools/builtins/channel_tools.py +194 -0
- aethergraph/core/tools/builtins/toolset.py +134 -0
- aethergraph/core/tools/toolkit.py +510 -0
- aethergraph/core/tools/waitable.py +109 -0
- aethergraph/plugins/channel/__init__.py +0 -0
- aethergraph/plugins/channel/adapters/__init__.py +0 -0
- aethergraph/plugins/channel/adapters/console.py +106 -0
- aethergraph/plugins/channel/adapters/file.py +102 -0
- aethergraph/plugins/channel/adapters/slack.py +285 -0
- aethergraph/plugins/channel/adapters/telegram.py +302 -0
- aethergraph/plugins/channel/adapters/webhook.py +104 -0
- aethergraph/plugins/channel/adapters/webui.py +134 -0
- aethergraph/plugins/channel/routes/__init__.py +0 -0
- aethergraph/plugins/channel/routes/console_routes.py +86 -0
- aethergraph/plugins/channel/routes/slack_routes.py +49 -0
- aethergraph/plugins/channel/routes/telegram_routes.py +26 -0
- aethergraph/plugins/channel/routes/webui_routes.py +136 -0
- aethergraph/plugins/channel/utils/__init__.py +0 -0
- aethergraph/plugins/channel/utils/slack_utils.py +278 -0
- aethergraph/plugins/channel/utils/telegram_utils.py +324 -0
- aethergraph/plugins/channel/websockets/slack_ws.py +68 -0
- aethergraph/plugins/channel/websockets/telegram_polling.py +151 -0
- aethergraph/plugins/mcp/fs_server.py +128 -0
- aethergraph/plugins/mcp/http_server.py +101 -0
- aethergraph/plugins/mcp/ws_server.py +180 -0
- aethergraph/plugins/net/http.py +10 -0
- aethergraph/plugins/utils/data_io.py +359 -0
- aethergraph/runner/__init__.py +5 -0
- aethergraph/runtime/__init__.py +62 -0
- aethergraph/server/__init__.py +3 -0
- aethergraph/server/app_factory.py +84 -0
- aethergraph/server/start.py +122 -0
- aethergraph/services/__init__.py +10 -0
- aethergraph/services/artifacts/facade.py +284 -0
- aethergraph/services/artifacts/factory.py +35 -0
- aethergraph/services/artifacts/fs_store.py +656 -0
- aethergraph/services/artifacts/jsonl_index.py +123 -0
- aethergraph/services/artifacts/paths.py +23 -0
- aethergraph/services/artifacts/sqlite_index.py +209 -0
- aethergraph/services/artifacts/utils.py +124 -0
- aethergraph/services/auth/dev.py +16 -0
- aethergraph/services/channel/channel_bus.py +293 -0
- aethergraph/services/channel/factory.py +44 -0
- aethergraph/services/channel/session.py +511 -0
- aethergraph/services/channel/wait_helpers.py +57 -0
- aethergraph/services/clock/clock.py +9 -0
- aethergraph/services/container/default_container.py +320 -0
- aethergraph/services/continuations/continuation.py +56 -0
- aethergraph/services/continuations/factory.py +34 -0
- aethergraph/services/continuations/stores/fs_store.py +264 -0
- aethergraph/services/continuations/stores/inmem_store.py +95 -0
- aethergraph/services/eventbus/inmem.py +21 -0
- aethergraph/services/features/static.py +10 -0
- aethergraph/services/kv/ephemeral.py +90 -0
- aethergraph/services/kv/factory.py +27 -0
- aethergraph/services/kv/layered.py +41 -0
- aethergraph/services/kv/sqlite_kv.py +128 -0
- aethergraph/services/llm/factory.py +157 -0
- aethergraph/services/llm/generic_client.py +542 -0
- aethergraph/services/llm/providers.py +3 -0
- aethergraph/services/llm/service.py +105 -0
- aethergraph/services/logger/base.py +36 -0
- aethergraph/services/logger/compat.py +50 -0
- aethergraph/services/logger/formatters.py +106 -0
- aethergraph/services/logger/std.py +203 -0
- aethergraph/services/mcp/helpers.py +23 -0
- aethergraph/services/mcp/http_client.py +70 -0
- aethergraph/services/mcp/mcp_tools.py +21 -0
- aethergraph/services/mcp/registry.py +14 -0
- aethergraph/services/mcp/service.py +100 -0
- aethergraph/services/mcp/stdio_client.py +70 -0
- aethergraph/services/mcp/ws_client.py +115 -0
- aethergraph/services/memory/bound.py +106 -0
- aethergraph/services/memory/distillers/episode.py +116 -0
- aethergraph/services/memory/distillers/rolling.py +74 -0
- aethergraph/services/memory/facade.py +633 -0
- aethergraph/services/memory/factory.py +78 -0
- aethergraph/services/memory/hotlog_kv.py +27 -0
- aethergraph/services/memory/indices.py +74 -0
- aethergraph/services/memory/io_helpers.py +72 -0
- aethergraph/services/memory/persist_fs.py +40 -0
- aethergraph/services/memory/resolver.py +152 -0
- aethergraph/services/metering/noop.py +4 -0
- aethergraph/services/prompts/file_store.py +41 -0
- aethergraph/services/rag/chunker.py +29 -0
- aethergraph/services/rag/facade.py +593 -0
- aethergraph/services/rag/index/base.py +27 -0
- aethergraph/services/rag/index/faiss_index.py +121 -0
- aethergraph/services/rag/index/sqlite_index.py +134 -0
- aethergraph/services/rag/index_factory.py +52 -0
- aethergraph/services/rag/parsers/md.py +7 -0
- aethergraph/services/rag/parsers/pdf.py +14 -0
- aethergraph/services/rag/parsers/txt.py +7 -0
- aethergraph/services/rag/utils/hybrid.py +39 -0
- aethergraph/services/rag/utils/make_fs_key.py +62 -0
- aethergraph/services/redactor/simple.py +16 -0
- aethergraph/services/registry/key_parsing.py +44 -0
- aethergraph/services/registry/registry_key.py +19 -0
- aethergraph/services/registry/unified_registry.py +185 -0
- aethergraph/services/resume/multi_scheduler_resume_bus.py +65 -0
- aethergraph/services/resume/router.py +73 -0
- aethergraph/services/schedulers/registry.py +41 -0
- aethergraph/services/secrets/base.py +7 -0
- aethergraph/services/secrets/env.py +8 -0
- aethergraph/services/state_stores/externalize.py +135 -0
- aethergraph/services/state_stores/graph_observer.py +131 -0
- aethergraph/services/state_stores/json_store.py +67 -0
- aethergraph/services/state_stores/resume_policy.py +119 -0
- aethergraph/services/state_stores/serialize.py +249 -0
- aethergraph/services/state_stores/utils.py +91 -0
- aethergraph/services/state_stores/validate.py +78 -0
- aethergraph/services/tracing/noop.py +18 -0
- aethergraph/services/waits/wait_registry.py +91 -0
- aethergraph/services/wakeup/memory_queue.py +57 -0
- aethergraph/services/wakeup/scanner_producer.py +56 -0
- aethergraph/services/wakeup/worker.py +31 -0
- aethergraph/tools/__init__.py +25 -0
- aethergraph/utils/optdeps.py +8 -0
- aethergraph-0.1.0a1.dist-info/METADATA +410 -0
- aethergraph-0.1.0a1.dist-info/RECORD +182 -0
- aethergraph-0.1.0a1.dist-info/WHEEL +5 -0
- aethergraph-0.1.0a1.dist-info/entry_points.txt +2 -0
- aethergraph-0.1.0a1.dist-info/licenses/LICENSE +176 -0
- aethergraph-0.1.0a1.dist-info/licenses/NOTICE +31 -0
- aethergraph-0.1.0a1.dist-info/top_level.txt +1 -0
aethergraph/core/graph/graphify.py
@@ -0,0 +1,128 @@
+from __future__ import annotations
+
+import inspect
+
+from ..runtime.runtime_registry import current_registry
+from .task_graph import TaskGraph
+
+
+def graphify(
+    *, name="default_graph", inputs=(), outputs=None, version="0.1.0", agent: str | None = None
+):
+    """
+    Decorator that builds a TaskGraph from a function body using the builder context.
+    The function author writes sequential code with tool calls returning NodeHandles.
+
+    Usage:
+        @graphify(name="my_graph", inputs=["input1", "input2"], outputs=["output"])
+        def my_graph(input1, input2):
+            # function body using graph builder API
+            pass
+            return {"output": some_node_handle}
+
+    The decorated function returns a builder function that constructs the TaskGraph.
+
+    To build the graph, call the returned function:
+        graph_instance = my_graph.build()
+    """
+
+    def _wrap(fn):
+        fn_sig = inspect.signature(fn)
+        fn_params = list(fn_sig.parameters.keys())
+
+        # Normalize declared inputs into a list of names
+        required_inputs = list(inputs.keys()) if isinstance(inputs, dict) else list(inputs)
+
+        # Optional: validate the signature matches declared inputs
+        # (or keep permissive: inject only the overlap)
+        overlap = [p for p in fn_params if p in required_inputs]
+
+        def _build() -> TaskGraph:
+            from .graph_builder import graph
+            from .graph_refs import arg
+
+            with graph(name=name) as g:
+                # declarations unchanged...
+                if isinstance(inputs, dict):
+                    g.declare_inputs(required=[], optional=inputs)
+                else:
+                    g.declare_inputs(required=required_inputs, optional={})
+
+                # --- Inject args: map fn params -> arg("<name>")
+                injected_kwargs = {p: arg(p) for p in overlap}
+
+                # Run user body
+                ret = fn(**injected_kwargs)  # ← key line
+
+                # expose logic (fixed typo + single-output collapse)
+                def _is_ref(x):
+                    return (
+                        isinstance(x, dict)
+                        and x.get("_type") == "ref"
+                        and "from" in x
+                        and "key" in x
+                    )
+
+                def _expose_from_handle(prefix, handle):
+                    oks = list(getattr(handle, "output_keys", []))
+                    if prefix and len(oks) == 1:
+                        g.expose(prefix, getattr(handle, oks[0]))
+                    else:
+                        for k in oks:
+                            g.expose(f"{prefix}.{k}" if prefix else k, getattr(handle, k))
+
+                if isinstance(ret, dict):
+                    for k, v in ret.items():
+                        if _is_ref(v):
+                            g.expose(k, v)
+                        elif hasattr(v, "node_id"):
+                            _expose_from_handle(k, v)
+                        else:
+                            g.expose(k, v)
+                elif hasattr(ret, "node_id"):
+                    _expose_from_handle("", ret)
+                else:
+                    if outputs:
+                        if len(outputs) != 1:
+                            raise ValueError(
+                                "Returning a single literal but multiple outputs are declared."
+                            )
+                        g.expose(outputs[0], ret)
+                    else:
+                        raise ValueError(
+                            "Returning a single literal but no output name is declared."
+                        )
+            return g
+
+        _build.__name__ = fn.__name__
+        _build.build = _build  # alias
+        _build.graph_name = name
+        _build.version = version
+
+        def _spec():
+            g = _build()
+            return g.spec
+
+        _build.spec = _spec
+
+        def _io():
+            g = _build()
+            return g.io_signature()
+
+        _build.io = _io
+
+        # ---- Register graph + optional agent ----
+        hub = current_registry()
+        if hub is not None:
+            # Prefer registering the FACTORY, not a single built instance
+            # fallback: register a concrete instance now
+            hub.register(nspace="graph", name=name, version=version, obj=_build())
+
+            if agent:
+                # we will have agent API later, now just register a graph as agent
+                agent_id = agent
+                hub.register(nspace="agent", name=agent_id, version=version, obj=_build())
+
+        return _build
+
+    return _wrap
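For orientation, a minimal usage sketch of the decorator above (not shipped in the package; `fetch` and `summarize` are hypothetical tools assumed to be registered via the toolkit and to return NodeHandles, since tool invocation is not shown in this file):

    @graphify(name="summarize_url", inputs=["url"], outputs=["summary"])
    def summarize_url(url):
        page = fetch(url=url)               # hypothetical tool call -> NodeHandle
        result = summarize(text=page.text)  # handle.<key> binds a ref to that output
        return {"summary": result}          # exposed as graph output "summary"

    g = summarize_url.build()   # constructs a fresh TaskGraph (also registered under nspace="graph")
    sig = summarize_url.io()    # builds the graph and returns its io_signature()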
aethergraph/core/graph/interpreter.py
@@ -0,0 +1,145 @@
+from __future__ import annotations
+
+from collections.abc import Callable
+from contextvars import ContextVar
+from dataclasses import dataclass, field
+from typing import Any
+
+from ..execution.retry_policy import RetryPolicy
+from ..runtime.runtime_env import RuntimeEnv
+from .task_graph import TaskGraph
+from .task_node import TaskNodeRuntime
+
+# Public ContextVar for current runtime environment
+_INTERP_CTX: ContextVar[Interpreter | None] = ContextVar("_INTERP_CTX", default=None)
+
+
+class SimpleNS:
+    """
+    Lightweight attribute-access wrapper for a dict.
+    Used as a 'handle' for tool nodes during graph build and also as a thin
+    outputs view at runtime. Must carry node_id so the scheduler can target it.
+    """
+
+    __slots__ = ("_data", "_node_id")
+
+    def __init__(self, d: dict[str, Any] | None = None, *, node_id: str | None = None):
+        self._data = dict(d or {})
+        self._node_id = node_id  # may be None for plain dict-like use
+
+    # ---- Introspection ----
+    @property
+    def node_id(self) -> str | None:
+        return self._node_id
+
+    def has_node(self) -> bool:
+        return self._node_id is not None
+
+    # ---- Dict-ish API ----
+    def to_dict(self) -> dict[str, Any]:
+        # do NOT include node_id in the payload view
+        return dict(self._data)
+
+    def get(self, name, default=None):
+        return self._data.get(name, default)
+
+    def __getitem__(self, key: str):
+        return self._data[key]
+
+    def __getattr__(self, name: str):
+        try:
+            return self._data[name]
+        except KeyError:
+            raise AttributeError(f"No such attribute: {name}") from None
+
+    def __repr__(self):
+        nid = f" node_id={self._node_id}" if self._node_id else ""
+        return f"SimpleNS({self._data!r}{nid})"
+
+    # ---- Builder-time ref helpers ----
+    def ref(self, *path: str) -> dict[str, Any]:
+        """
+        Return a binding-ref dict usable as an input to another node during build:
+            handle.ref("result") -> {'_type':'ref','from':<node_id>,'path':['result']}
+        If no path is given, refers to the entire outputs dict.
+        """
+        if not self._node_id:
+            raise RuntimeError("Cannot create ref(): handle has no node_id (not a tool handle?)")
+        return {"_type": "ref", "from": self._node_id, "path": list(path or [])}
+
+    def on(self, key: str) -> dict[str, Any]:
+        """Alias for ref(key)."""
+        return self.ref(key)
+
+
+class AwaitableResult:
+    """Lightweight awaitable wrapper for a result value."""
+
+    def __init__(self, coro: Callable[[], Any], *, node_id: str | None = None):
+        self._coro = coro
+        self.node_id = node_id
+
+    def __await__(self):
+        return self._coro().__await__()
+
+
+@dataclass
+class Interpreter:
+    """Binds a TaskGraph to a scheduler, let tools add+run nodes on the fly."""
+
+    graph: TaskGraph
+    env: RuntimeEnv
+    retry: RetryPolicy = field(default_factory=RetryPolicy)
+    max_concurrency: int = 1
+
+    def __post_init__(self):
+        from ..execution.forward_scheduler import ForwardScheduler
+
+        # get logger from env's container
+        from ..runtime.runtime_services import current_logger_factory
+
+        logger = current_logger_factory().for_scheduler()
+
+        self.scheduler = ForwardScheduler(
+            self.graph, self.env, self.retry, max_concurrency=self.max_concurrency, logger=logger
+        )
+
+    # NEW: convenience pass-through
+    def add_listener(self, cb):
+        self.scheduler.add_listener(cb)
+
+    # NEW: run the whole plan (mirrors ForwardScheduler.run)
+    async def run(self):
+        """Run the entire graph to completion."""
+        return await self.scheduler.run()
+
+    def enter(self):
+        """Enter the interpreter context."""
+
+        class _Guard:
+            def __init__(_g, interp: Interpreter):
+                _g.interp = interp
+                _g.token = None
+
+            def __enter__(_g):
+                _g.token = _INTERP_CTX.set(_g.interp)
+                return _g.interp
+
+            def __exit__(_g, exc_type, exc_val, exc_tb):
+                """Exit the interpreter context.
+                Args:
+                    exc_type, exc_val, exc_tb: exception info if any
+                """
+                _INTERP_CTX.reset(_g.token)
+
+        return _Guard(self)
+
+    async def run_one(self, node: TaskNodeRuntime) -> dict[str, Any]:
+        """Run a single node by ID, return its outputs."""
+        return await self.scheduler.run_one(node)
+
+
+# Convenience helpers
+def current_interpreter() -> Interpreter | None:
+    """Get the current interpreter from context, or None if not in one."""
+    return _INTERP_CTX.get()
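For orientation, a minimal sketch of the intended call pattern, inferred only from this file (constructing a real TaskGraph and RuntimeEnv is out of scope here; `execute`, `my_graph`, and `my_env` are placeholder names):

    import asyncio

    async def execute(graph, env):
        interp = Interpreter(graph=graph, env=env, max_concurrency=2)
        interp.add_listener(print)     # receives scheduler events
        with interp.enter():           # binds _INTERP_CTX so current_interpreter() works inside tools
            return await interp.run()  # delegates to ForwardScheduler.run()

    # asyncio.run(execute(my_graph, my_env))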
aethergraph/core/graph/node_handle.py
@@ -0,0 +1,33 @@
+from dataclasses import dataclass
+
+from .graph_refs import ref as _ref
+
+
+@dataclass
+class NodeHandle:
+    """A handle to a node's outputs in the graph.
+    Allows access to outputs via attribute or key.
+    """
+
+    node_id: str
+    output_keys: list[str]
+
+    def __getattr__(self, name: str) -> dict[str, str]:
+        """Access output by attribute, e.g. node.output_key"""
+        # Allow handle.path or handle.analysis -> Ref
+        if name in self.output_keys:
+            return _ref(self.node_id, name)
+        raise AttributeError(f"NodeHandle has no output '{name}'")
+
+    def __getitem__(self, key: str) -> dict[str, str]:
+        """Access output by key, e.g. node["output_key"]"""
+        # Allow handle["path"] or handle["analysis"] -> Ref
+        if key in self.output_keys:
+            return _ref(self.node_id, key)
+        raise KeyError(f"NodeHandle has no output '{key}'")
+
+    def tuple(self, n: int):
+        return [_ref(self.node_id, f"out{i}") for i in range(n)]
+
+    def unpack(self, *names: str):
+        return [_ref(self.node_id, name) for name in names]
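For orientation, a short sketch of how a NodeHandle resolves to ref bindings during graph build (the exact ref dict shape comes from graph_refs.py, which is not included in this excerpt):

    h = NodeHandle(node_id="n1", output_keys=["path", "analysis"])
    h.path                               # ref to output "path" of node "n1"
    h["analysis"]                        # same thing, via key access
    a, b = h.unpack("path", "analysis")  # explicit multi-output unpacking
    first, second = h.tuple(2)           # positional refs "out0", "out1"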
aethergraph/core/graph/node_spec.py
@@ -0,0 +1,46 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import Any
+
+
+class NodeType(str, Enum):
+    TOOL = "tool"
+    LLM = "llm"
+    HUMAN = "human"
+    ROBOT = "robot"
+    CUSTOM = "custom"
+
+
+@dataclass
+class NodeEvent:
+    run_id: str
+    graph_id: str
+    node_id: str
+    status: str  # one of NodeStatus
+    outputs: dict[str, Any]
+    timestamp: float  # event time (time.time())
+
+
+@dataclass
+class TaskNodeSpec:
+    node_id: str
+    type: str | NodeType  # one of NodeType
+    logic: str | callable | None = None
+    dependencies: list[str] = field(default_factory=list)
+    inputs: dict[str, Any] = field(default_factory=dict)  # static inputs
+
+    expected_input_keys: list[str] = field(default_factory=list)
+    expected_output_keys: list[str] = field(default_factory=lambda: ["result"])
+    output_keys: list[str] = field(default_factory=lambda: ["result"])
+
+    # Allowed if it's *static* condition -- NOT IMPLEMENTED YET
+    condition: bool | dict[str, Any] | callable[[dict[str, Any]], bool] = True
+
+    metadata: dict[str, Any] = field(default_factory=dict)
+    reads: set[str] = field(default_factory=set)  # state keys to read
+    writes: set[str] = field(default_factory=set)  # state keys to write
+
+    tool_name: str | None = None  # used for logging/monitoring
+    tool_version: str | None = None  # used for logging/monitoring
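For orientation, a sketch of building a spec by hand using only the fields defined above (the node IDs and values are illustrative, not taken from the package):

    spec = TaskNodeSpec(
        node_id="summarize_1",
        type=NodeType.TOOL,
        logic="summarize",              # tool name or callable
        dependencies=["fetch_1"],
        inputs={"max_tokens": 256},     # static inputs; dynamic ones arrive as refs
        expected_input_keys=["text"],
        output_keys=["result"],
        tool_name="summarize",
        tool_version="0.1.0",
    )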
aethergraph/core/graph/node_state.py
@@ -0,0 +1,63 @@
+from dataclasses import dataclass, field
+from typing import Any, Literal
+
+NodeWaitingKind = Literal["human", "robot", "external", "time", "event"]
+
+
+class NodeStatus:
+    PENDING = "PENDING"
+    RUNNING = "RUNNING"
+    DONE = "DONE"
+    FAILED = "FAILED"
+    SKIPPED = "SKIPPED"
+    FAILED_TIMEOUT = "FAILED_TIMEOUT"
+    WAITING_HUMAN = "WAITING_HUMAN"
+    WAITING_ROBOT = "WAITING_ROBOT"
+    WAITING_EXTERNAL = "WAITING_EXTERNAL"
+    WAITING_TIME = "WAITING_TIME"
+    WAITING_EVENT = "WAITING_EVENT"
+
+    @classmethod
+    def from_kind(cls, kind: NodeWaitingKind) -> str:
+        """Map waiting kind to status."""
+        return {
+            "human": cls.WAITING_HUMAN,
+            "approval": cls.WAITING_HUMAN,
+            "user_approval": cls.WAITING_HUMAN,  # alias to keep backward compatibility
+            "user_input": cls.WAITING_HUMAN,
+            "user_files": cls.WAITING_HUMAN,
+            "robot": cls.WAITING_ROBOT,
+            "external": cls.WAITING_EXTERNAL,
+            "time": cls.WAITING_TIME,
+            "event": cls.WAITING_EVENT,
+        }[kind]
+
+    @classmethod
+    def is_waiting(cls, status: str) -> bool:
+        return status.startswith("WAITING_")
+
+
+TERMINAL_STATES = {NodeStatus.DONE, NodeStatus.FAILED, NodeStatus.SKIPPED}
+WAITING_STATES = {
+    NodeStatus.WAITING_HUMAN,
+    NodeStatus.WAITING_ROBOT,
+    NodeStatus.WAITING_EXTERNAL,
+    NodeStatus.WAITING_TIME,
+    NodeStatus.WAITING_EVENT,
+}
+
+
+@dataclass
+class TaskNodeState:
+    status: NodeStatus = NodeStatus.PENDING
+    outputs: dict[str, any] = field(default_factory=dict)
+    error: str | None = None
+    attempts: int = 0
+    next_wakeup_at: str | None = None  # ISO timestamp
+    wait_token: str | None = None  # for external wait/resume with Continuation
+    wait_spec: dict[str, Any] | None = None  # spec for waiting (kind, channel, meta, etc.)
+
+    @property
+    def output(self):
+        # convenience for single-output nodes
+        return self.outputs.get("result")
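For orientation, a short sketch of the status helpers above:

    state = TaskNodeState()
    state.status = NodeStatus.from_kind("human")  # -> "WAITING_HUMAN"
    NodeStatus.is_waiting(state.status)           # True
    state.status in WAITING_STATES                # True
    state.outputs["result"] = 42
    state.output                                  # 42 (single-output convenience)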