AbstractRuntime 0.4.0__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstractruntime/__init__.py +76 -1
- abstractruntime/core/config.py +68 -1
- abstractruntime/core/models.py +5 -0
- abstractruntime/core/policy.py +74 -3
- abstractruntime/core/runtime.py +1002 -126
- abstractruntime/core/vars.py +8 -2
- abstractruntime/evidence/recorder.py +1 -1
- abstractruntime/history_bundle.py +772 -0
- abstractruntime/integrations/abstractcore/__init__.py +3 -0
- abstractruntime/integrations/abstractcore/default_tools.py +127 -3
- abstractruntime/integrations/abstractcore/effect_handlers.py +2440 -99
- abstractruntime/integrations/abstractcore/embeddings_client.py +69 -0
- abstractruntime/integrations/abstractcore/factory.py +68 -20
- abstractruntime/integrations/abstractcore/llm_client.py +447 -15
- abstractruntime/integrations/abstractcore/mcp_worker.py +1 -0
- abstractruntime/integrations/abstractcore/session_attachments.py +946 -0
- abstractruntime/integrations/abstractcore/tool_executor.py +31 -10
- abstractruntime/integrations/abstractcore/workspace_scoped_tools.py +561 -0
- abstractruntime/integrations/abstractmemory/__init__.py +3 -0
- abstractruntime/integrations/abstractmemory/effect_handlers.py +946 -0
- abstractruntime/memory/active_context.py +6 -1
- abstractruntime/memory/kg_packets.py +164 -0
- abstractruntime/memory/memact_composer.py +175 -0
- abstractruntime/memory/recall_levels.py +163 -0
- abstractruntime/memory/token_budget.py +86 -0
- abstractruntime/storage/__init__.py +4 -1
- abstractruntime/storage/artifacts.py +158 -30
- abstractruntime/storage/base.py +17 -1
- abstractruntime/storage/commands.py +339 -0
- abstractruntime/storage/in_memory.py +41 -1
- abstractruntime/storage/json_files.py +195 -12
- abstractruntime/storage/observable.py +38 -1
- abstractruntime/storage/offloading.py +433 -0
- abstractruntime/storage/sqlite.py +836 -0
- abstractruntime/visualflow_compiler/__init__.py +29 -0
- abstractruntime/visualflow_compiler/adapters/__init__.py +11 -0
- abstractruntime/visualflow_compiler/adapters/agent_adapter.py +126 -0
- abstractruntime/visualflow_compiler/adapters/context_adapter.py +109 -0
- abstractruntime/visualflow_compiler/adapters/control_adapter.py +615 -0
- abstractruntime/visualflow_compiler/adapters/effect_adapter.py +1051 -0
- abstractruntime/visualflow_compiler/adapters/event_adapter.py +307 -0
- abstractruntime/visualflow_compiler/adapters/function_adapter.py +97 -0
- abstractruntime/visualflow_compiler/adapters/memact_adapter.py +114 -0
- abstractruntime/visualflow_compiler/adapters/subflow_adapter.py +74 -0
- abstractruntime/visualflow_compiler/adapters/variable_adapter.py +316 -0
- abstractruntime/visualflow_compiler/compiler.py +3832 -0
- abstractruntime/visualflow_compiler/flow.py +247 -0
- abstractruntime/visualflow_compiler/visual/__init__.py +13 -0
- abstractruntime/visualflow_compiler/visual/agent_ids.py +29 -0
- abstractruntime/visualflow_compiler/visual/builtins.py +1376 -0
- abstractruntime/visualflow_compiler/visual/code_executor.py +214 -0
- abstractruntime/visualflow_compiler/visual/executor.py +2804 -0
- abstractruntime/visualflow_compiler/visual/models.py +211 -0
- abstractruntime/workflow_bundle/__init__.py +52 -0
- abstractruntime/workflow_bundle/models.py +236 -0
- abstractruntime/workflow_bundle/packer.py +317 -0
- abstractruntime/workflow_bundle/reader.py +87 -0
- abstractruntime/workflow_bundle/registry.py +587 -0
- abstractruntime-0.4.1.dist-info/METADATA +177 -0
- abstractruntime-0.4.1.dist-info/RECORD +86 -0
- abstractruntime-0.4.0.dist-info/METADATA +0 -167
- abstractruntime-0.4.0.dist-info/RECORD +0 -49
- {abstractruntime-0.4.0.dist-info → abstractruntime-0.4.1.dist-info}/WHEEL +0 -0
- {abstractruntime-0.4.0.dist-info → abstractruntime-0.4.1.dist-info}/entry_points.txt +0 -0
- {abstractruntime-0.4.0.dist-info → abstractruntime-0.4.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,317 @@
|
|
|
1
|
+
"""WorkflowBundle pack/unpack tooling (stdlib-only).
|
|
2
|
+
|
|
3
|
+
This module is intentionally host-agnostic:
|
|
4
|
+
- packing bundles is a pure filesystem/content operation
|
|
5
|
+
- hosts decide where bundles live and how they're distributed (disk, gateway upload, etc.)
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import json
|
|
11
|
+
import zipfile
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from datetime import datetime, timezone
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Any, Dict, Iterable, List, Optional, Tuple
|
|
16
|
+
|
|
17
|
+
from .models import WORKFLOW_BUNDLE_FORMAT_VERSION_V1, WorkflowBundleEntrypoint, WorkflowBundleError, WorkflowBundleManifest, workflow_bundle_manifest_to_dict
|
|
18
|
+
from .reader import open_workflow_bundle
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@dataclass(frozen=True)
class PackedWorkflowBundle:
    """Result of a successful pack: the written bundle and its manifest."""

    # Absolute path of the `.flow` zip file that was written.
    path: Path
    # The validated manifest embedded in the bundle as manifest.json.
    manifest: WorkflowBundleManifest
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _now_iso() -> str:
|
|
28
|
+
return datetime.now(timezone.utc).isoformat()
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _read_json_bytes(path: Path) -> bytes:
|
|
32
|
+
return path.read_bytes()
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _load_visualflow_dict_from_bytes(raw: bytes) -> Dict[str, Any]:
|
|
36
|
+
data = json.loads(raw.decode("utf-8"))
|
|
37
|
+
if not isinstance(data, dict):
|
|
38
|
+
raise WorkflowBundleError("VisualFlow JSON must be an object")
|
|
39
|
+
return data
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def _node_type(node: Any) -> str:
|
|
43
|
+
if isinstance(node, dict):
|
|
44
|
+
t = node.get("type")
|
|
45
|
+
if isinstance(t, str) and t.strip():
|
|
46
|
+
return t.strip()
|
|
47
|
+
data = node.get("data") if isinstance(node.get("data"), dict) else {}
|
|
48
|
+
t2 = data.get("nodeType")
|
|
49
|
+
return str(t2 or "").strip()
|
|
50
|
+
return ""
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _pins_from_node(node: Dict[str, Any]) -> Iterable[Dict[str, Any]]:
|
|
54
|
+
data = node.get("data") if isinstance(node.get("data"), dict) else {}
|
|
55
|
+
pins_in = data.get("inputs") if isinstance(data.get("inputs"), list) else node.get("inputs") if isinstance(node.get("inputs"), list) else []
|
|
56
|
+
pins_out = data.get("outputs") if isinstance(data.get("outputs"), list) else node.get("outputs") if isinstance(node.get("outputs"), list) else []
|
|
57
|
+
for p in list(pins_in) + list(pins_out):
|
|
58
|
+
if isinstance(p, dict):
|
|
59
|
+
yield p
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _reachable_exec_node_ids(flow: Dict[str, Any]) -> set[str]:
    """Return exec-reachable node ids (Blueprint-style; ignores disconnected exec nodes).

    A node participates in the execution graph when any of its pins has
    type == "execution". Roots are, in priority order: the flow's declared
    'entryNode', every 'on_event' node, the first exec node with no incoming
    exec edge, and finally an arbitrary exec node. The returned set is the
    set of exec nodes reachable from those roots via 'exec-in' edges.
    """
    nodes = flow.get("nodes")
    if not isinstance(nodes, list):
        return set()

    # Step 1: collect all nodes that carry at least one execution pin.
    exec_ids: set[str] = set()
    for n in nodes:
        if not isinstance(n, dict):
            continue
        node_id = str(n.get("id") or "").strip()
        if not node_id:
            continue
        for p in _pins_from_node(n):
            if p.get("type") == "execution":
                exec_ids.add(node_id)
                break

    if not exec_ids:
        return set()

    edges = flow.get("edges")
    edges_list = edges if isinstance(edges, list) else []
    # Targets that already receive execution flow; used for root detection.
    incoming_exec = {str(e.get("target") or "").strip() for e in edges_list if isinstance(e, dict) and e.get("targetHandle") == "exec-in"}

    # Step 2: choose traversal roots (see docstring for the priority order).
    roots: list[str] = []
    entry = flow.get("entryNode")
    if isinstance(entry, str) and entry in exec_ids:
        roots.append(entry)
    for n in nodes:
        if not isinstance(n, dict):
            continue
        node_id = str(n.get("id") or "").strip()
        if not node_id or node_id not in exec_ids:
            continue
        if _node_type(n) == "on_event":
            roots.append(node_id)
    if not roots:
        # Fallback: the first exec node that nothing feeds into (only one —
        # disconnected secondary chains are deliberately left unreachable).
        for node_id in exec_ids:
            if node_id not in incoming_exec:
                roots.append(node_id)
                break
    if not roots:
        # Fully cyclic graph: pick an arbitrary exec node so we return something.
        roots.append(next(iter(exec_ids)))

    # Step 3: build the exec adjacency (only edges between exec nodes count).
    adj: Dict[str, list[str]] = {}
    for e in edges_list:
        if not isinstance(e, dict):
            continue
        if e.get("targetHandle") != "exec-in":
            continue
        src = str(e.get("source") or "").strip()
        tgt = str(e.get("target") or "").strip()
        if not src or not tgt:
            continue
        if src not in exec_ids or tgt not in exec_ids:
            continue
        adj.setdefault(src, []).append(tgt)

    # Step 4: iterative DFS from the (de-duplicated, order-preserving) roots.
    reachable: set[str] = set()
    stack = list(dict.fromkeys([r for r in roots if isinstance(r, str) and r]))
    while stack:
        cur = stack.pop()
        if cur in reachable:
            continue
        reachable.add(cur)
        for nxt in adj.get(cur, []):
            if nxt not in reachable:
                stack.append(nxt)
    return reachable
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def _collect_reachable_flows(
    *,
    root_flow: Dict[str, Any],
    root_bytes: bytes,
    flows_dir: Path,
) -> Tuple[List[Tuple[str, Dict[str, Any], bytes]], List[str]]:
    """Return [(flow_id, flow_dict, raw_bytes)] in discovery order + list of missing subflow ids.

    Starting from *root_flow*, follow every exec-reachable 'subflow' node and
    load the referenced flow from ``flows_dir/<id>.json``. Flows that cannot
    be resolved are reported in the second return value (with sentinel ids
    like ``<missing-flow-id>`` for structurally broken references) rather
    than raised here; the caller decides whether that is fatal.
    """
    ordered: list[Tuple[str, Dict[str, Any], bytes]] = []
    visited: set[str] = set()
    missing: list[str] = []

    root_id = str(root_flow.get("id") or "").strip()
    if not root_id:
        raise WorkflowBundleError("Root flow is missing 'id'")

    # Cache keyed by flow id so each file is read/parsed at most once.
    cache: Dict[str, Tuple[Dict[str, Any], bytes]] = {root_id: (root_flow, root_bytes)}

    def _load_by_id(flow_id: str) -> Optional[Tuple[Dict[str, Any], bytes]]:
        # Resolve a flow id to (parsed dict, raw bytes); None when unknown.
        fid = str(flow_id or "").strip()
        if not fid:
            return None
        if fid in cache:
            return cache[fid]
        p = (flows_dir / f"{fid}.json").resolve()
        if not p.exists():
            return None
        raw = _read_json_bytes(p)
        vf = _load_visualflow_dict_from_bytes(raw)
        cache[fid] = (vf, raw)
        return cache[fid]

    def _dfs(vf: Dict[str, Any], raw: bytes) -> None:
        # Depth-first walk over subflow references; `visited` breaks cycles.
        fid = str(vf.get("id") or "").strip()
        if not fid:
            missing.append("<missing-flow-id>")
            return
        if fid in visited:
            return
        visited.add(fid)
        ordered.append((fid, vf, raw))

        nodes = vf.get("nodes")
        if not isinstance(nodes, list):
            return
        # Only follow subflow nodes that are actually exec-reachable
        # (Blueprint semantics: disconnected nodes don't pull in children).
        reachable = _reachable_exec_node_ids(vf)
        for n in nodes:
            if not isinstance(n, dict):
                continue
            if _node_type(n) != "subflow":
                continue
            nid = str(n.get("id") or "").strip()
            if reachable and nid and nid not in reachable:
                continue
            data = n.get("data") if isinstance(n.get("data"), dict) else {}
            sub_id = data.get("subflowId") or data.get("flowId")
            if not isinstance(sub_id, str) or not sub_id.strip():
                missing.append(f"<missing-subflow-id:{fid}:{nid or '?'}>")
                continue
            sub_id2 = sub_id.strip()
            child = _load_by_id(sub_id2)
            if child is None:
                if sub_id2 == fid:
                    # Self-reference with no backing file: re-enter with the
                    # in-memory flow (a no-op once `fid` is in `visited`).
                    _dfs(vf, raw)
                    continue
                missing.append(sub_id2)
                continue
            _dfs(child[0], child[1])

    _dfs(root_flow, root_bytes)
    return ordered, missing
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def pack_workflow_bundle(
    *,
    root_flow_json: str | Path,
    out_path: str | Path,
    bundle_id: Optional[str] = None,
    bundle_version: str = "0.0.0",
    flows_dir: Optional[str | Path] = None,
    entrypoints: Optional[List[str]] = None,
    default_entrypoint: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
) -> PackedWorkflowBundle:
    """Pack a `.flow` bundle from a root VisualFlow JSON file.

    Args:
        root_flow_json: Path to the root flow's JSON document.
        out_path: Where to write the zip bundle (parents created as needed).
        bundle_id: Bundle identifier; defaults to the root flow's id.
        bundle_version: Version string stored in the manifest.
        flows_dir: Directory containing ``<flow_id>.json`` subflows;
            defaults to the root flow's directory.
        entrypoints: Flow ids exposed as entrypoints; defaults to the root.
        default_entrypoint: Must be one of *entrypoints* when given.
        metadata: Free-form manifest metadata.

    Raises:
        FileNotFoundError: root flow or flows_dir missing.
        WorkflowBundleError: missing subflows, missing ids, or bad entrypoints.
    """
    root_path = Path(root_flow_json).expanduser().resolve()
    if not root_path.exists():
        raise FileNotFoundError(f"root flow not found: {root_path}")
    root_bytes = _read_json_bytes(root_path)
    root_flow = _load_visualflow_dict_from_bytes(root_bytes)

    # Subflows are resolved next to the root flow unless told otherwise.
    flows_base = Path(flows_dir).expanduser().resolve() if flows_dir is not None else root_path.parent
    if not flows_base.exists() or not flows_base.is_dir():
        raise FileNotFoundError(f"flows_dir does not exist: {flows_base}")

    ordered, missing = _collect_reachable_flows(root_flow=root_flow, root_bytes=root_bytes, flows_dir=flows_base)
    if missing:
        uniq = sorted(set(missing))
        raise WorkflowBundleError(f"Missing referenced subflows in flows_dir: {uniq}")

    root_id = str(root_flow.get("id") or "").strip()
    if not root_id:
        raise WorkflowBundleError("Root flow is missing 'id'")

    # Normalize entrypoints: caller-provided list or just the root flow.
    entry_ids = list(entrypoints) if isinstance(entrypoints, list) and entrypoints else [root_id]
    entry_ids = [str(x).strip() for x in entry_ids if isinstance(x, str) and str(x).strip()]
    if not entry_ids:
        raise WorkflowBundleError("No valid entrypoints specified")

    de_param = str(default_entrypoint).strip() if isinstance(default_entrypoint, str) and str(default_entrypoint).strip() else ""
    if de_param and de_param not in entry_ids:
        raise WorkflowBundleError(f"default_entrypoint '{de_param}' must be one of: {entry_ids}")
    # Prefer the explicit default, then the root, then the first entrypoint.
    default_ep = de_param or (root_id if root_id in entry_ids else entry_ids[0])

    # Per-flow lookups used to build the manifest entrypoints below.
    flows_json: Dict[str, bytes] = {}
    interfaces_by_flow: Dict[str, list[str]] = {}
    name_by_flow: Dict[str, str] = {}
    desc_by_flow: Dict[str, str] = {}

    for fid, vf, raw in ordered:
        flows_json[fid] = raw
        name_by_flow[fid] = str(vf.get("name") or "")
        desc_by_flow[fid] = str(vf.get("description") or "")
        ifaces = vf.get("interfaces")
        interfaces_by_flow[fid] = [str(x).strip() for x in list(ifaces) if isinstance(x, str) and x.strip()] if isinstance(ifaces, list) else []

    bid = str(bundle_id or "").strip() or root_id
    created_at = _now_iso()

    eps: list[WorkflowBundleEntrypoint] = []
    for fid in entry_ids:
        fid2 = str(fid or "").strip()
        if not fid2:
            continue
        eps.append(
            WorkflowBundleEntrypoint(
                flow_id=fid2,
                name=name_by_flow.get(fid2) or fid2,
                description=desc_by_flow.get(fid2, ""),
                interfaces=list(interfaces_by_flow.get(fid2, [])),
            )
        )
    if not eps:
        raise WorkflowBundleError("No valid entrypoints specified")

    manifest = WorkflowBundleManifest(
        bundle_format_version=WORKFLOW_BUNDLE_FORMAT_VERSION_V1,
        bundle_id=bid,
        bundle_version=str(bundle_version or "0.0.0"),
        created_at=created_at,
        entrypoints=eps,
        default_entrypoint=default_ep,
        # Flow files are laid out deterministically under flows/ in the zip.
        flows={fid: f"flows/{fid}.json" for fid in sorted(flows_json.keys())},
        artifacts={},
        assets={},
        metadata=dict(metadata) if isinstance(metadata, dict) else {},
    )
    # Validate before writing anything to disk.
    manifest.validate()

    out = Path(out_path).expanduser().resolve()
    out.parent.mkdir(parents=True, exist_ok=True)

    with zipfile.ZipFile(out, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        zf.writestr("manifest.json", json.dumps(workflow_bundle_manifest_to_dict(manifest), indent=2, ensure_ascii=False))
        # Flow bytes are written verbatim so repacking is reproducible.
        for fid in sorted(flows_json.keys()):
            zf.writestr(f"flows/{fid}.json", flows_json[fid])

    return PackedWorkflowBundle(path=out, manifest=manifest)
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
def inspect_workflow_bundle(*, bundle_path: str | Path) -> WorkflowBundleManifest:
    """Open *bundle_path* just long enough to return its parsed manifest."""
    return open_workflow_bundle(bundle_path).manifest
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
def unpack_workflow_bundle(*, bundle_path: str | Path, out_dir: str | Path) -> Path:
    """Extract a `.flow` zip bundle into *out_dir* (created if needed).

    Returns the resolved output directory. Relies on ZipFile.extractall's
    member-name sanitization to keep entries inside *out_dir*.
    """
    bundle = Path(bundle_path).expanduser().resolve()
    if not (bundle.exists() and bundle.is_file()):
        raise FileNotFoundError(f"Bundle not found: {bundle}")
    target = Path(out_dir).expanduser().resolve()
    target.mkdir(parents=True, exist_ok=True)
    with zipfile.ZipFile(bundle, "r") as archive:
        archive.extractall(target)
    return target
|
|
317
|
+
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
"""WorkflowBundle reader (directory or .flow zip).
|
|
2
|
+
|
|
3
|
+
This module focuses on *reading* bundles. Writing/packing bundles is expected to
|
|
4
|
+
be performed by authoring tooling (e.g., AbstractFlow).
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
import zipfile
|
|
11
|
+
from dataclasses import dataclass
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Any, Optional
|
|
14
|
+
|
|
15
|
+
from .models import WorkflowBundleError, WorkflowBundleManifest, workflow_bundle_manifest_from_dict
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass(frozen=True)
class WorkflowBundle:
    """An opened WorkflowBundle source.

    Wraps either an unpacked bundle directory or a `.flow` zip file and
    exposes uniform read access to member files.
    """

    # Bundle directory or `.flow` zip file.
    source: Path
    # Manifest parsed from manifest.json when the bundle was opened.
    manifest: WorkflowBundleManifest

    def _is_zip(self) -> bool:
        # A file source is treated as a zip archive; a directory is read in place.
        return self.source.is_file()

    def read_bytes(self, relpath: str) -> bytes:
        """Return the raw bytes of the bundle member at *relpath*.

        Raises WorkflowBundleError for an empty or escaping path and
        FileNotFoundError when the member does not exist.
        """
        member = str(relpath or "").strip()
        if not member:
            raise WorkflowBundleError("read_bytes requires a non-empty relpath")

        if not self._is_zip():
            resolved = (self.source / member).resolve()
            # Ensure relpath cannot escape the bundle directory.
            try:
                resolved.relative_to(self.source.resolve())
            except Exception as exc:
                raise WorkflowBundleError(f"Unsafe relpath outside bundle dir: {member}") from exc
            if not resolved.exists():
                raise FileNotFoundError(f"Bundle file not found: {member}")
            return resolved.read_bytes()

        with zipfile.ZipFile(self.source, "r") as archive:
            try:
                return archive.read(member)
            except KeyError as exc:
                raise FileNotFoundError(f"Bundle file not found: {member}") from exc

    def read_text(self, relpath: str, *, encoding: str = "utf-8") -> str:
        """Return a bundle member decoded as text (UTF-8 by default)."""
        return self.read_bytes(relpath).decode(encoding)

    def read_json(self, relpath: str) -> Any:
        """Return a bundle member parsed as JSON."""
        return json.loads(self.read_text(relpath))
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def _read_manifest_from_dir(dir_path: Path) -> WorkflowBundleManifest:
    """Load and parse manifest.json from an unpacked bundle directory."""
    manifest_path = (dir_path / "manifest.json").resolve()
    if not manifest_path.exists():
        raise FileNotFoundError(f"manifest.json not found in bundle dir: {dir_path}")
    payload = json.loads(manifest_path.read_text(encoding="utf-8"))
    return workflow_bundle_manifest_from_dict(payload)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def _read_manifest_from_zip(zip_path: Path) -> WorkflowBundleManifest:
    """Load and parse manifest.json from a `.flow` zip archive."""
    with zipfile.ZipFile(zip_path, "r") as archive:
        try:
            blob = archive.read("manifest.json")
        except KeyError as exc:
            raise FileNotFoundError(f"manifest.json not found in bundle: {zip_path}") from exc
    payload = json.loads(blob.decode("utf-8"))
    return workflow_bundle_manifest_from_dict(payload)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def open_workflow_bundle(source: str | Path) -> WorkflowBundle:
    """Open a WorkflowBundle from a directory or a `.flow` zip file."""
    path = Path(source).expanduser().resolve()
    if not path.exists():
        raise FileNotFoundError(f"Bundle source not found: {path}")
    # Directories are read in place; any other file is treated as a zip bundle.
    loader = _read_manifest_from_dir if path.is_dir() else _read_manifest_from_zip
    return WorkflowBundle(source=path, manifest=loader(path))
|
|
86
|
+
|
|
87
|
+
|