aethergraph-0.1.0a1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aethergraph/__init__.py +49 -0
- aethergraph/config/__init__.py +0 -0
- aethergraph/config/config.py +121 -0
- aethergraph/config/context.py +16 -0
- aethergraph/config/llm.py +26 -0
- aethergraph/config/loader.py +60 -0
- aethergraph/config/runtime.py +9 -0
- aethergraph/contracts/errors/errors.py +44 -0
- aethergraph/contracts/services/artifacts.py +142 -0
- aethergraph/contracts/services/channel.py +72 -0
- aethergraph/contracts/services/continuations.py +23 -0
- aethergraph/contracts/services/eventbus.py +12 -0
- aethergraph/contracts/services/kv.py +24 -0
- aethergraph/contracts/services/llm.py +17 -0
- aethergraph/contracts/services/mcp.py +22 -0
- aethergraph/contracts/services/memory.py +108 -0
- aethergraph/contracts/services/resume.py +28 -0
- aethergraph/contracts/services/state_stores.py +33 -0
- aethergraph/contracts/services/wakeup.py +28 -0
- aethergraph/core/execution/base_scheduler.py +77 -0
- aethergraph/core/execution/forward_scheduler.py +777 -0
- aethergraph/core/execution/global_scheduler.py +634 -0
- aethergraph/core/execution/retry_policy.py +22 -0
- aethergraph/core/execution/step_forward.py +411 -0
- aethergraph/core/execution/step_result.py +18 -0
- aethergraph/core/execution/wait_types.py +72 -0
- aethergraph/core/graph/graph_builder.py +192 -0
- aethergraph/core/graph/graph_fn.py +219 -0
- aethergraph/core/graph/graph_io.py +67 -0
- aethergraph/core/graph/graph_refs.py +154 -0
- aethergraph/core/graph/graph_spec.py +115 -0
- aethergraph/core/graph/graph_state.py +59 -0
- aethergraph/core/graph/graphify.py +128 -0
- aethergraph/core/graph/interpreter.py +145 -0
- aethergraph/core/graph/node_handle.py +33 -0
- aethergraph/core/graph/node_spec.py +46 -0
- aethergraph/core/graph/node_state.py +63 -0
- aethergraph/core/graph/task_graph.py +747 -0
- aethergraph/core/graph/task_node.py +82 -0
- aethergraph/core/graph/utils.py +37 -0
- aethergraph/core/graph/visualize.py +239 -0
- aethergraph/core/runtime/ad_hoc_context.py +61 -0
- aethergraph/core/runtime/base_service.py +153 -0
- aethergraph/core/runtime/bind_adapter.py +42 -0
- aethergraph/core/runtime/bound_memory.py +69 -0
- aethergraph/core/runtime/execution_context.py +220 -0
- aethergraph/core/runtime/graph_runner.py +349 -0
- aethergraph/core/runtime/lifecycle.py +26 -0
- aethergraph/core/runtime/node_context.py +203 -0
- aethergraph/core/runtime/node_services.py +30 -0
- aethergraph/core/runtime/recovery.py +159 -0
- aethergraph/core/runtime/run_registration.py +33 -0
- aethergraph/core/runtime/runtime_env.py +157 -0
- aethergraph/core/runtime/runtime_registry.py +32 -0
- aethergraph/core/runtime/runtime_services.py +224 -0
- aethergraph/core/runtime/wakeup_watcher.py +40 -0
- aethergraph/core/tools/__init__.py +10 -0
- aethergraph/core/tools/builtins/channel_tools.py +194 -0
- aethergraph/core/tools/builtins/toolset.py +134 -0
- aethergraph/core/tools/toolkit.py +510 -0
- aethergraph/core/tools/waitable.py +109 -0
- aethergraph/plugins/channel/__init__.py +0 -0
- aethergraph/plugins/channel/adapters/__init__.py +0 -0
- aethergraph/plugins/channel/adapters/console.py +106 -0
- aethergraph/plugins/channel/adapters/file.py +102 -0
- aethergraph/plugins/channel/adapters/slack.py +285 -0
- aethergraph/plugins/channel/adapters/telegram.py +302 -0
- aethergraph/plugins/channel/adapters/webhook.py +104 -0
- aethergraph/plugins/channel/adapters/webui.py +134 -0
- aethergraph/plugins/channel/routes/__init__.py +0 -0
- aethergraph/plugins/channel/routes/console_routes.py +86 -0
- aethergraph/plugins/channel/routes/slack_routes.py +49 -0
- aethergraph/plugins/channel/routes/telegram_routes.py +26 -0
- aethergraph/plugins/channel/routes/webui_routes.py +136 -0
- aethergraph/plugins/channel/utils/__init__.py +0 -0
- aethergraph/plugins/channel/utils/slack_utils.py +278 -0
- aethergraph/plugins/channel/utils/telegram_utils.py +324 -0
- aethergraph/plugins/channel/websockets/slack_ws.py +68 -0
- aethergraph/plugins/channel/websockets/telegram_polling.py +151 -0
- aethergraph/plugins/mcp/fs_server.py +128 -0
- aethergraph/plugins/mcp/http_server.py +101 -0
- aethergraph/plugins/mcp/ws_server.py +180 -0
- aethergraph/plugins/net/http.py +10 -0
- aethergraph/plugins/utils/data_io.py +359 -0
- aethergraph/runner/__init__.py +5 -0
- aethergraph/runtime/__init__.py +62 -0
- aethergraph/server/__init__.py +3 -0
- aethergraph/server/app_factory.py +84 -0
- aethergraph/server/start.py +122 -0
- aethergraph/services/__init__.py +10 -0
- aethergraph/services/artifacts/facade.py +284 -0
- aethergraph/services/artifacts/factory.py +35 -0
- aethergraph/services/artifacts/fs_store.py +656 -0
- aethergraph/services/artifacts/jsonl_index.py +123 -0
- aethergraph/services/artifacts/paths.py +23 -0
- aethergraph/services/artifacts/sqlite_index.py +209 -0
- aethergraph/services/artifacts/utils.py +124 -0
- aethergraph/services/auth/dev.py +16 -0
- aethergraph/services/channel/channel_bus.py +293 -0
- aethergraph/services/channel/factory.py +44 -0
- aethergraph/services/channel/session.py +511 -0
- aethergraph/services/channel/wait_helpers.py +57 -0
- aethergraph/services/clock/clock.py +9 -0
- aethergraph/services/container/default_container.py +320 -0
- aethergraph/services/continuations/continuation.py +56 -0
- aethergraph/services/continuations/factory.py +34 -0
- aethergraph/services/continuations/stores/fs_store.py +264 -0
- aethergraph/services/continuations/stores/inmem_store.py +95 -0
- aethergraph/services/eventbus/inmem.py +21 -0
- aethergraph/services/features/static.py +10 -0
- aethergraph/services/kv/ephemeral.py +90 -0
- aethergraph/services/kv/factory.py +27 -0
- aethergraph/services/kv/layered.py +41 -0
- aethergraph/services/kv/sqlite_kv.py +128 -0
- aethergraph/services/llm/factory.py +157 -0
- aethergraph/services/llm/generic_client.py +542 -0
- aethergraph/services/llm/providers.py +3 -0
- aethergraph/services/llm/service.py +105 -0
- aethergraph/services/logger/base.py +36 -0
- aethergraph/services/logger/compat.py +50 -0
- aethergraph/services/logger/formatters.py +106 -0
- aethergraph/services/logger/std.py +203 -0
- aethergraph/services/mcp/helpers.py +23 -0
- aethergraph/services/mcp/http_client.py +70 -0
- aethergraph/services/mcp/mcp_tools.py +21 -0
- aethergraph/services/mcp/registry.py +14 -0
- aethergraph/services/mcp/service.py +100 -0
- aethergraph/services/mcp/stdio_client.py +70 -0
- aethergraph/services/mcp/ws_client.py +115 -0
- aethergraph/services/memory/bound.py +106 -0
- aethergraph/services/memory/distillers/episode.py +116 -0
- aethergraph/services/memory/distillers/rolling.py +74 -0
- aethergraph/services/memory/facade.py +633 -0
- aethergraph/services/memory/factory.py +78 -0
- aethergraph/services/memory/hotlog_kv.py +27 -0
- aethergraph/services/memory/indices.py +74 -0
- aethergraph/services/memory/io_helpers.py +72 -0
- aethergraph/services/memory/persist_fs.py +40 -0
- aethergraph/services/memory/resolver.py +152 -0
- aethergraph/services/metering/noop.py +4 -0
- aethergraph/services/prompts/file_store.py +41 -0
- aethergraph/services/rag/chunker.py +29 -0
- aethergraph/services/rag/facade.py +593 -0
- aethergraph/services/rag/index/base.py +27 -0
- aethergraph/services/rag/index/faiss_index.py +121 -0
- aethergraph/services/rag/index/sqlite_index.py +134 -0
- aethergraph/services/rag/index_factory.py +52 -0
- aethergraph/services/rag/parsers/md.py +7 -0
- aethergraph/services/rag/parsers/pdf.py +14 -0
- aethergraph/services/rag/parsers/txt.py +7 -0
- aethergraph/services/rag/utils/hybrid.py +39 -0
- aethergraph/services/rag/utils/make_fs_key.py +62 -0
- aethergraph/services/redactor/simple.py +16 -0
- aethergraph/services/registry/key_parsing.py +44 -0
- aethergraph/services/registry/registry_key.py +19 -0
- aethergraph/services/registry/unified_registry.py +185 -0
- aethergraph/services/resume/multi_scheduler_resume_bus.py +65 -0
- aethergraph/services/resume/router.py +73 -0
- aethergraph/services/schedulers/registry.py +41 -0
- aethergraph/services/secrets/base.py +7 -0
- aethergraph/services/secrets/env.py +8 -0
- aethergraph/services/state_stores/externalize.py +135 -0
- aethergraph/services/state_stores/graph_observer.py +131 -0
- aethergraph/services/state_stores/json_store.py +67 -0
- aethergraph/services/state_stores/resume_policy.py +119 -0
- aethergraph/services/state_stores/serialize.py +249 -0
- aethergraph/services/state_stores/utils.py +91 -0
- aethergraph/services/state_stores/validate.py +78 -0
- aethergraph/services/tracing/noop.py +18 -0
- aethergraph/services/waits/wait_registry.py +91 -0
- aethergraph/services/wakeup/memory_queue.py +57 -0
- aethergraph/services/wakeup/scanner_producer.py +56 -0
- aethergraph/services/wakeup/worker.py +31 -0
- aethergraph/tools/__init__.py +25 -0
- aethergraph/utils/optdeps.py +8 -0
- aethergraph-0.1.0a1.dist-info/METADATA +410 -0
- aethergraph-0.1.0a1.dist-info/RECORD +182 -0
- aethergraph-0.1.0a1.dist-info/WHEEL +5 -0
- aethergraph-0.1.0a1.dist-info/entry_points.txt +2 -0
- aethergraph-0.1.0a1.dist-info/licenses/LICENSE +176 -0
- aethergraph-0.1.0a1.dist-info/licenses/NOTICE +31 -0
- aethergraph-0.1.0a1.dist-info/top_level.txt +1 -0

aethergraph/core/graph/graph_fn.py
@@ -0,0 +1,219 @@
+from __future__ import annotations
+
+from collections.abc import Callable
+import inspect
+
+from aethergraph.core.runtime.run_registration import RunRegistrationGuard
+
+from ..execution.retry_policy import RetryPolicy
+from ..runtime.runtime_env import RuntimeEnv
+from ..runtime.runtime_registry import current_registry  # ContextVar accessor
+from .graph_builder import graph  # context manager
+from .graph_refs import GRAPH_INPUTS_NODE_ID
+from .interpreter import Interpreter
+from .node_spec import TaskNodeSpec
+
+
+class GraphFunction:
+    def __init__(
+        self,
+        name: str,
+        fn: Callable,
+        inputs: list[str] | None = None,
+        outputs: list[str] | None = None,
+        version: str = "0.1.0",
+    ):
+        self.graph_id = name
+        self.name = name
+        self.fn = fn
+        self.inputs = inputs or []
+        self.outputs = outputs or []
+        self.version = version
+        self.registry_key: str | None = None
+        self.last_graph = None
+        self.last_context = None
+        self.last_memory_snapshot = None
+
+    async def run(
+        self,
+        *,
+        env: RuntimeEnv | None = None,
+        retry: RetryPolicy | None = None,
+        max_concurrency: int | None = None,
+        **inputs,
+    ):
+        """
+        Build a fresh TaskGraph and execute this function via the Interpreter.
+        If 'context' is declared in the fn signature, inject a NodeContext.
+        """
+        # Build env if not provided (use runner's builder for consistency)
+        if env is None:
+            from ..runtime.graph_runner import _build_env  # internal helper
+
+            env, retry, max_concurrency = await _build_env(self, inputs)
+        if retry is None:
+            retry = RetryPolicy()
+
+        node_spec = TaskNodeSpec(
+            node_id=GRAPH_INPUTS_NODE_ID, type="inputs", metadata={"synthetic": True}
+        )
+        runtime_ctx = env.make_ctx(
+            node=node_spec, resume_payload=getattr(env, "resume_payload", None)
+        )
+        node_ctx = runtime_ctx.create_node_context(node=node_spec)
+
+        with graph(name=self.graph_id) as G:
+            interp = Interpreter(G, env, retry=retry, max_concurrency=max_concurrency)
+            run_id = env.run_id
+
+            # Register the scheduler for this run_id
+            with RunRegistrationGuard(
+                run_id=run_id, scheduler=interp.scheduler, container=env.container
+            ):
+                sig = inspect.signature(self.fn)
+                call_kwargs = dict(inputs)
+                if "context" in sig.parameters:
+                    call_kwargs["context"] = node_ctx
+
+                with interp.enter():
+                    res = self.fn(**call_kwargs)
+                    if inspect.isawaitable(res):
+                        res = await res
+
+            self.last_graph = G
+
+            res = _normalize_and_expose(G, res, self.outputs)
+            return res
+
+    # --- Syntactic sugar ---
+    async def __call__(self, **inputs):
+        """Async call to run the graph function.
+        Usage:
+            result = await my_graph_fn(input1=value1, input2=value2)
+        """
+        from ..runtime.graph_runner import run_async
+
+        return await run_async(self, inputs)
+
+    def sync(self, **inputs):
+        """Synchronous wrapper around async run(). Useful for quick tests or scripts.
+        Usage:
+            result = my_graph_fn.sync(input1=value1, input2=value2)
+        """
+        from ..runtime.graph_runner import run
+
+        return run(self, inputs)
+
+
+def _is_ref(x: object) -> bool:
+    return isinstance(x, dict) and x.get("_type") == "ref" and "from" in x and "key" in x
+
+
+def _is_nodehandle(x: object) -> bool:
+    return hasattr(x, "node_id") and hasattr(x, "output_keys")
+
+
+def _expose_from_handle(G, prefix: str, handle) -> dict:
+    oks = list(getattr(handle, "output_keys", []))
+    if not oks:
+        raise ValueError(f"NodeHandle '{getattr(handle, 'node_id', '?')}' has no output_keys")
+    out = {}
+    if prefix and len(oks) == 1:
+        # collapse single output to the provided key
+        k = oks[0]
+        ref = getattr(handle, k)
+        G.expose(prefix, ref)
+        out[prefix] = ref
+    else:
+        # multi-output (or top-level handle)
+        for k in oks:
+            key = f"{prefix}.{k}" if prefix else k
+            ref = getattr(handle, k)
+            G.expose(key, ref)
+            out[key] = ref
+    return out
+
+
+def _normalize_and_expose(G, ret, declared_outputs: list[str] | None) -> dict:
+    """
+    Normalize user return into {key: Ref or literal}.
+    - Dict of NodeHandles/Refs/literals supported
+    - Single NodeHandle supported
+    - Single literal supported (needs 1 declared output)
+    Also exposes Refs on G as boundary outputs.
+
+    Examples:
+    - return {"result": ref(...), "summary": node_handle(...), "count": 42}
+    - return node_handle(...)
+    """
+    result = {}
+
+    if isinstance(ret, dict):
+        for k, v in ret.items():
+            if _is_ref(v):
+                G.expose(k, v)
+                result[k] = v
+            elif _is_nodehandle(v):
+                result.update(_expose_from_handle(G, k, v))
+            else:
+                # literal stays literal; no expose
+                result[k] = v
+
+    elif _is_nodehandle(ret):
+        result.update(_expose_from_handle(G, "", ret))
+
+    else:
+        # single literal/ref case
+        if declared_outputs and len(declared_outputs) == 1:
+            key = declared_outputs[0]
+            if _is_ref(ret):
+                G.expose(key, ret)
+            result[key] = ret
+        else:
+            raise ValueError(
+                "Returning a single literal but outputs are not declared or >1. "
+                "Declare exactly one output or return a dict."
+            )
+
+    # If outputs were declared, restrict to those keys (keep order)
+    if declared_outputs:
+        result = {k: result[k] for k in declared_outputs if k in result}
+
+        # Validate presence
+        missing = [k for k in declared_outputs if k not in result]
+        if missing:
+            raise ValueError(f"Missing declared outputs: {missing}")
+
+    return result
+
+
+def graph_fn(
+    name: str,
+    inputs: list[str] | None = None,
+    outputs: list[str] | None = None,
+    version: str = "0.1.0",
+    agent: str | None = None,  # if agent is set, register this graph fn as an agent with given name
+) -> Callable[[Callable], GraphFunction]:
+    """Decorator to define a graph function."""
+
+    def decorator(fn: Callable):
+        gf = GraphFunction(name=name, fn=fn, inputs=inputs, outputs=outputs, version=version)
+        # Register in registry if given
+        registry = current_registry()
+
+        if registry is not None:
+            registry.register(
+                nspace="graphfn",
+                name=name,
+                version=version,
+                obj=gf,  # we register GraphFunction directly without spec -- graph function is already a runtime object
+            )
+
+        if agent:
+            assert (
+                registry is not None
+            ), "No registry available to register agent, make sure to have a current_registry() set up."
+            registry.register(nspace="agent", name=agent, version=version, obj=gf)
+        return gf
+
+    return decorator
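
The snippet below is a minimal usage sketch of the `graph_fn` decorator and the `__call__`/`sync` entry points shown above. The import path is inferred from the wheel layout in the file listing; whether the package re-exports `graph_fn` under a shorter path, and what environment `_build_env` provisions by default, are not shown in this diff.

    # Hypothetical usage; import path inferred from the wheel layout above.
    from aethergraph.core.graph.graph_fn import graph_fn


    @graph_fn(name="greet", inputs=["who"], outputs=["message"])
    async def greet(who: str):
        # Real graph functions would add nodes and return Refs/NodeHandles;
        # per _normalize_and_expose, a dict of literals is also accepted.
        return {"message": f"hello, {who}"}


    # Async entry point (routes through run_async):
    #     result = await greet(who="world")
    # Synchronous wrapper for scripts and tests:
    #     result = greet.sync(who="world")
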
aethergraph/core/graph/graph_io.py
@@ -0,0 +1,67 @@
+# ParamSpec, IOSpec, IOBindings, validators
+
+from dataclasses import asdict, dataclass, field
+from typing import Any, Literal
+
+from .graph_refs import normalize_binding
+
+
+@dataclass
+class ParamSpec:
+    """ParamSpec defines a single parameter's specification."""
+
+    schema: dict[str, Any] = field(default_factory=dict)  # JSON schema or empty
+    default: Any = None  # default value or None
+    source: Literal["arg", "ctx", "memory", "env", "secret", "kv"] | None = (
+        None  # where to bind from
+    )
+    doc: str | None = None  # optional description or docstring
+
+
+@dataclass
+class IOSpec:
+    required: dict[str, "ParamSpec"] = field(default_factory=dict)
+    optional: dict[str, "ParamSpec"] = field(default_factory=dict)
+    outputs: dict[str, "ParamSpec"] = field(default_factory=dict)
+
+    # Existing field (keep for back-compat)
+    expose: list[str] = field(default_factory=list)
+
+    # NEW: canonical bindings for exposed outputs (name -> Ref|literal)
+    expose_bindings: dict[str, Any] = field(default_factory=dict)
+
+    # ---- Convenience API (non-breaking) ----
+    def set_expose(self, name: str, binding: Any) -> None:
+        """Canonical way to record a public output and its binding."""
+        if name not in self.expose:
+            self.expose.append(name)
+        self.expose_bindings[name] = normalize_binding(binding)
+
+    def get_expose_names(self) -> list[str]:
+        # Use dict keys if present; otherwise fall back to list
+        if self.expose_bindings:
+            # ensure order is stable: preserve original list order if possible
+            ordered = [n for n in self.expose if n in self.expose_bindings]
+            # include any names defined only in bindings (edge cases)
+            ordered += [n for n in self.expose_bindings if n not in ordered]
+            return ordered
+        return list(self.expose)
+
+    def get_expose_bindings(self) -> dict[str, Any]:
+        # If only a list exists (legacy), return empty; caller can use heuristics if desired
+        return dict(self.expose_bindings)
+
+    def to_dict(self) -> dict[str, Any]:
+        return asdict(self)
+
+
+@dataclass
+class IOBindings:
+    """IO bindings are used to bind actual values to the inputs/outputs defined in IOSpec."""
+
+    inbound: dict[str, str] = field(
+        default_factory=dict
+    )  # name -> source (arg, ctx, memory, env, secret, kv)
+    outbound: dict[str, str] = field(
+        default_factory=dict
+    )  # name -> destination (ctx, memory, kv, output)
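
A short sketch of how `ParamSpec` and `IOSpec` compose. The import path mirrors the listing above; the expected values in the comments follow directly from `set_expose`/`get_expose_names` here and `normalize_binding` in graph_refs.py.

    from aethergraph.core.graph.graph_io import IOSpec, ParamSpec

    io = IOSpec(
        required={"query": ParamSpec(schema={"type": "string"}, doc="search text")},
        optional={"limit": ParamSpec(default=10, doc="max results")},
        outputs={"hits": ParamSpec(doc="ranked results")},
    )

    # Record a public output together with its binding; the (node_id, key) tuple
    # is normalized into a Ref dict by normalize_binding() from graph_refs.py.
    io.set_expose("hits", ("search", "hits"))

    print(io.get_expose_names())     # ['hits']
    print(io.get_expose_bindings())  # {'hits': {'_type': 'ref', 'from': 'search', 'key': 'hits'}}
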
aethergraph/core/graph/graph_refs.py
@@ -0,0 +1,154 @@
+from __future__ import annotations
+
+from collections.abc import Mapping
+from typing import Any, TypedDict
+
+GRAPH_INPUTS_NODE_ID = "__graph_inputs__"  # special node_id for graph inputs
+RESERVED_INJECTABLES = {"resume", "context", "self"}
+
+
+REF_TYPE = "ref"
+
+
+class RefDict(TypedDict):
+    _type: str
+    from_: str  # 'from' is a reserved keyword in Python; the payload keeps the key as 'from'
+    key: str
+
+
+Ref = dict[str, str]  # {"_type": "ref", "from": "<node_id>", "key": "<output_key>"}
+
+# ---------- Constructors ----------
+
+
+def ref(node_id: str, key: str) -> Ref:
+    return {"_type": "ref", "from": node_id, "key": key}
+
+
+def arg(name: str) -> Ref:
+    return ref(GRAPH_INPUTS_NODE_ID, name)
+
+
+# ---------- Type checks / Normalizations ----------
+def is_ref(x: Any) -> bool:
+    """True if x is a dict that looks like a Ref."""
+    return isinstance(x, Mapping) and x.get("_type") == REF_TYPE and "from" in x and "key" in x
+
+
+def is_arg_ref(x: Any) -> bool:
+    """True if x is a ref pointing to __graph_inputs__."""
+    return is_ref(x) and x.get("from") == GRAPH_INPUTS_NODE_ID
+
+
+def to_tuple(x: Ref | tuple[str, str]) -> tuple[str, str] | None:
+    """Return (node_id, key) if x is a Ref/tuple; else None."""
+    if isinstance(x, tuple) and len(x) == 2 and all(isinstance(s, str) for s in x):
+        return x  # already canonical enough
+    if is_ref(x):
+        return x["from"], x["key"]
+    return None
+
+
+def from_tuple(node_key: tuple[str, str]) -> Ref:
+    """Build a Ref from (node_id, key)."""
+    node_id, key = node_key
+    return ref(node_id, key)
+
+
+def normalize_binding(x: Any) -> Any:
+    """
+    Normalize a binding value (Ref | (node, key) | literal) into Ref|literal.
+    - If tuple -> Ref
+    - If Ref -> ensure minimal canonical shape
+    - Else literal passthrough
+    """
+    t = to_tuple(x)
+    if t is not None:
+        return from_tuple(t)
+    if is_ref(x):
+        # keep only the keys we care about (defensive)
+        return {"_type": REF_TYPE, "from": x["from"], "key": x["key"]}
+    return x  # literal
+
+
+# ---------- Resolution ----------
+def resolve_ref(reference: Ref, node_outputs: Mapping[str, Mapping[str, Any]]) -> Any:
+    """
+    Resolve a Ref against the current node_outputs: {node_id: {output_key: value}}.
+    Raises KeyError if the upstream node or output key is missing.
+    """
+    src = reference["from"]
+    key = reference["key"]
+
+    if src is None or key is None:
+        raise KeyError(f"Bad Ref: {reference}")
+    if src not in node_outputs:
+        raise KeyError(f"Upstream node '{src}' has no outputs yet")
+    if key not in node_outputs[src]:
+        raise KeyError(f"Output '{key}' not found on node '{src}'")
+
+    outs = node_outputs.get(src)
+    return outs.get(key) if isinstance(outs, Mapping) else None
+
+
+def resolve_any(val, *, graph_inputs: dict[str, Any], outputs_by_node: dict[str, dict[str, Any]]):
+    """Recursively resolve any value that may contain Refs or Args. This function is used
+    to resolve inputs for a node before execution.
+    Args:
+        val: The value to resolve. Can be a literal, dict, list, or Ref/Arg.
+        graph_inputs: The dict of graph inputs for Arg resolution.
+        outputs_by_node: The dict of node_id to outputs for Ref resolution.
+    Returns:
+        The fully resolved value.
+    """
+    # Arg shape: {"_type":"arg","key":"<input_key>"}
+    if isinstance(val, dict):
+        t = val.get("_type")
+        if t == "arg":
+            k = val.get("key")
+            if k not in graph_inputs:
+                raise KeyError(f"Graph input '{k}' was not provided")
+            return graph_inputs[k]
+        if t == "ref":
+            return resolve_ref(val, outputs_by_node)
+        # regular dict → recurse
+        return {
+            k: resolve_any(v, graph_inputs=graph_inputs, outputs_by_node=outputs_by_node)
+            for k, v in val.items()
+        }
+    if isinstance(val, list | tuple):
+        cast = list if isinstance(val, list) else tuple
+        return cast(
+            resolve_any(v, graph_inputs=graph_inputs, outputs_by_node=outputs_by_node) for v in val
+        )
+    return val  # literal
+
+
+def resolve_binding(binding: Any, node_outputs: Mapping[str, Mapping[str, Any]]) -> Any:
+    """
+    Resolve a binding that can be Ref or literal. Literals pass through unchanged.
+    """
+    if is_ref(binding):
+        return resolve_ref(binding, node_outputs)
+    return binding
+
+
+# ---------- Pretty helpers ----------
+
+
+def ref_str(x: Ref | tuple[str, str] | Any) -> str:
+    """Human-friendly string for logs."""
+    t = to_tuple(x)
+    if t is None:
+        return repr(x)
+    node_id, key = t
+    return f"{node_id}.{key}"
+
+
+# --------- Marker checks ----------
+def is_arg_marker(x: Any) -> bool:
+    return isinstance(x, Mapping) and x.get("_type") == "arg" and "key" in x
+
+
+def is_context_marker(x: Any) -> bool:
+    return isinstance(x, Mapping) and x.get("_type") == "context" and "key" in x
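
The sketch below exercises the Ref/Arg machinery from graph_refs.py against a hand-built outputs map; it assumes only what the module above defines plus the import path from the listing. Note that `arg()` produces a Ref pointing at the synthetic `__graph_inputs__` node, while a raw `{"_type": "arg", ...}` marker is resolved directly from `graph_inputs`.

    from aethergraph.core.graph.graph_refs import (
        GRAPH_INPUTS_NODE_ID,
        arg,
        ref,
        ref_str,
        resolve_any,
    )

    graph_inputs = {"query": "hello"}
    outputs_by_node = {
        GRAPH_INPUTS_NODE_ID: graph_inputs,  # arg(...) refs resolve against this entry
        "search": {"hits": ["a", "b"]},
    }

    payload = {
        "q": arg("query"),                             # Ref to the synthetic inputs node
        "top_hits": ref("search", "hits"),             # Ref to a real node output
        "limit": 5,                                    # literal, passes through unchanged
        "nested": [{"_type": "arg", "key": "query"}],  # raw arg marker
    }

    resolved = resolve_any(payload, graph_inputs=graph_inputs, outputs_by_node=outputs_by_node)
    # {'q': 'hello', 'top_hits': ['a', 'b'], 'limit': 5, 'nested': ['hello']}
    print(resolved)
    print(ref_str(ref("search", "hits")))  # search.hits
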
aethergraph/core/graph/graph_spec.py
@@ -0,0 +1,115 @@
+from dataclasses import dataclass, field
+import inspect
+from typing import Any
+
+from .graph_io import IOBindings, IOSpec
+from .node_spec import TaskNodeSpec
+
+
+@dataclass
+class TaskGraphSpec:
+    graph_id: str
+    version: str = "0.1.0"
+    nodes: dict[str, TaskNodeSpec] = field(default_factory=dict)  # node_id -> TaskNodeSpec
+    io: IOSpec = field(default_factory=IOSpec)  # inputs/outputs
+    bindings: IOBindings | None = None  # input/output bindings
+    meta: dict[str, Any] = field(default_factory=dict)  # additional metadata
+
+    def canonical(self) -> str:
+        return f"graph:{self.graph_id}@{self.version}"
+
+    @property
+    def inputs_required(self) -> set[str]:
+        return set(self.io.required.keys())
+
+    @property
+    def inputs_optional(self) -> dict[str, Any]:
+        return {k: p.default for k, p in self.io.optional.items()}
+
+    @property
+    def outputs(self) -> dict[str, Any]:
+        return {k: p.default for k, p in self.io.outputs.items()}
+
+    def io_summary_lines(self) -> list[str]:
+        return [
+            f"required: {_fmt_set(self.inputs_required)}",
+            f"optional: {_fmt_opt_map(self.inputs_optional, show_values=False)}",
+            f"outputs: {_fmt_outputs_map(self.outputs)}",
+        ]
+
+
+@dataclass
+class GraphView:
+    """A read-only view of the graph's spec and state."""
+
+    graph_id: str
+    nodes: dict[str, Any]  # node_id -> TaskNodeRuntime
+    metadata: dict[str, Any] = field(default_factory=dict)  # Optional metadata
+
+    # helpers, no mutation
+    def get_dependents(self, nid: str) -> list[str]:
+        """Get list of node_ids that depend on the given node_id."""
+        return [x.node_id for x in self.nodes.values() if nid in x.dependencies]
+
+    def get_root_nodes(self) -> list[str]:
+        """Get list of root node_ids (no dependencies)."""
+        return [x.node_id for x in self.nodes.values() if not x.dependencies]
+
+
+# ---------- helpers for printing and debugging ----------
+
+
+def _short(x: Any, maxlen: int = 42) -> str:
+    s = str(x)
+    return s if len(s) <= maxlen else s[: maxlen - 1] + "…"
+
+
+def _status_label(s: Any) -> str:
+    # Accept Enum-like (with .name), strings, or None
+    if s is None:
+        return "-"
+    return getattr(s, "name", str(s))
+
+
+def _logic_label(logic: Any) -> str:
+    # Show a dotted path when possible; fall back to repr/str
+    if isinstance(logic, str):
+        return logic
+    # Unwrap @tool proxies if present
+    impl = getattr(logic, "__aether_impl__", logic)
+    if inspect.isfunction(impl) or inspect.ismethod(impl):
+        mod = getattr(impl, "__module__", None) or ""
+        name = getattr(impl, "__name__", None) or "tool"
+        return f"{mod}.{name}".strip(".")
+    return _short(repr(logic), 80)
+
+
+def _fmt_set(xs: set | None) -> str:
+    return ", ".join(sorted(map(str, xs))) if xs else "—"
+
+
+def _fmt_opt_map(d: dict | None, *, show_values: bool = False, maxval: int = 26) -> str:
+    if not d:
+        return "—"
+    if show_values:
+        items = [f"{k}={_short(v, maxval)}" for k, v in d.items()]
+    else:
+        items = list(map(str, d.keys()))
+    return ", ".join(sorted(items)) if items else "—"
+
+
+def _fmt_outputs_map(d: dict | None) -> str:
+    """
+    Show graph outputs mapping; if a value looks like a Ref(node_id, key),
+    render as 'out_key ← node_id.key'. Otherwise, just list keys.
+    """
+    if not d:
+        return "—"
+    parts = []
+    for out_k, v in d.items():
+        # duck-typed Ref
+        if hasattr(v, "node_id") and hasattr(v, "key"):
+            parts.append(f"{out_k} ← {v.node_id}.{v.key}")
+        else:
+            parts.append(str(out_k))
+    return ", ".join(sorted(parts))
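
A sketch of `TaskGraphSpec`'s summary helpers, reusing `IOSpec`/`ParamSpec` from graph_io.py. Import paths follow the wheel layout, and the commented output is what the formatting helpers above produce for this input.

    from aethergraph.core.graph.graph_io import IOSpec, ParamSpec
    from aethergraph.core.graph.graph_spec import TaskGraphSpec

    spec = TaskGraphSpec(
        graph_id="search_demo",
        io=IOSpec(
            required={"query": ParamSpec()},
            optional={"limit": ParamSpec(default=10)},
            outputs={"hits": ParamSpec()},
        ),
    )

    print(spec.canonical())            # graph:search_demo@0.1.0
    for line in spec.io_summary_lines():
        print(line)                    # required: query / optional: limit / outputs: hits
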
aethergraph/core/graph/graph_state.py
@@ -0,0 +1,59 @@
+from dataclasses import dataclass, field
+from typing import Any
+
+from .node_spec import TaskNodeSpec
+from .node_state import TaskNodeState
+
+
+@dataclass
+class GraphPatch:
+    # Used for mutation of a graph's topology; NOT USED YET
+    op: str  # "add_node", "remove_node", "update_node", "add_edge", "remove_edge"
+    payload: dict[str, Any]  # details depend on op type -> To be defined later
+
+
+@dataclass
+class TaskGraphState:
+    run_id: str | None = (
+        None  # unique run identifier; set when running under an agent/program that already knows the run_id
+    )
+    nodes: dict[str, TaskNodeState] = field(default_factory=dict)  # node_id -> TaskNodeState
+    # node_status: Dict[str, str] = field(default_factory=dict) # node_id -> status ("pending", "running", "completed", "failed", etc.)
+    # node_outputs: Dict[str, Any] = field(default_factory=dict) # node_id -> output data
+    _bound_inputs: dict[str, Any] | None = field(
+        default=None, repr=False
+    )  # inputs bound at runtime
+    rev: int = 0  # revision number, incremented on each mutation
+    patches: list[GraphPatch] = field(
+        default_factory=list, repr=False
+    )  # list of patches applied to the graph
+
+    def default_node_states(self, spec: "TaskGraphSpec"):
+        # Initialize node states for every node_id declared in the graph spec
+        for nid in spec.nodes:
+            if nid not in self.nodes:
+                self.nodes[nid] = TaskNodeState()
+                self.nodes[nid].status = "PENDING"
+
+    def summary_line(self) -> str:
+        from collections import Counter
+
+        sc = Counter(self.node_status.values())
+        counts = ", ".join(f"{k}={v}" for k, v in sorted(sc.items())) or "—"
+        bound = list(self._bound_inputs.keys()) if self._bound_inputs else "—"
+        return (
+            f"bound_inputs={bound}, node_outputs={len(self.node_outputs)}, status_counts: {counts}"
+        )
+
+    @property
+    def node_statuses(self) -> dict[str, str]:
+        return {nid: ns.status for nid, ns in self.nodes.items()}
+
+    # alias to node_statuses
+    @property
+    def node_status(self) -> dict[str, str]:
+        return {nid: ns.status for nid, ns in self.nodes.items()}
+
+    @property
+    def node_outputs(self) -> dict[str, Any]:
+        return {nid: ns.outputs for nid, ns in self.nodes.items() if ns.outputs}
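
Finally, a sketch of how `TaskGraphState` aggregates per-node state. `TaskNodeState` lives in node_state.py, which is not part of this excerpt; the sketch assumes it is a no-argument dataclass with mutable `status` and `outputs` attributes, which is what the properties above rely on.

    from aethergraph.core.graph.graph_state import TaskGraphState
    from aethergraph.core.graph.node_state import TaskNodeState

    state = TaskGraphState(run_id="run-123")
    state.nodes["fetch"] = TaskNodeState()       # no-arg constructor, as used above
    state.nodes["fetch"].status = "RUNNING"      # assumed mutable attribute
    state.nodes["fetch"].outputs = {"html": "<!doctype html>"}

    print(state.node_statuses)   # {'fetch': 'RUNNING'}
    print(state.node_outputs)    # {'fetch': {'html': '<!doctype html>'}}
    print(state.summary_line())  # bound_inputs=—, node_outputs=1, status_counts: RUNNING=1
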