abstractflow-0.3.0-py3-none-any.whl → abstractflow-0.3.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstractflow/__init__.py +2 -2
- abstractflow/adapters/agent_adapter.py +2 -121
- abstractflow/adapters/control_adapter.py +2 -612
- abstractflow/adapters/effect_adapter.py +2 -642
- abstractflow/adapters/event_adapter.py +2 -304
- abstractflow/adapters/function_adapter.py +2 -94
- abstractflow/adapters/subflow_adapter.py +2 -71
- abstractflow/adapters/variable_adapter.py +2 -314
- abstractflow/cli.py +73 -28
- abstractflow/compiler.py +18 -2022
- abstractflow/core/flow.py +4 -240
- abstractflow/runner.py +59 -5
- abstractflow/visual/agent_ids.py +2 -26
- abstractflow/visual/builtins.py +2 -786
- abstractflow/visual/code_executor.py +2 -211
- abstractflow/visual/executor.py +319 -2140
- abstractflow/visual/interfaces.py +103 -10
- abstractflow/visual/models.py +26 -1
- abstractflow/visual/session_runner.py +23 -9
- abstractflow/visual/workspace_scoped_tools.py +11 -243
- abstractflow/workflow_bundle.py +290 -0
- abstractflow-0.3.1.dist-info/METADATA +186 -0
- abstractflow-0.3.1.dist-info/RECORD +33 -0
- {abstractflow-0.3.0.dist-info → abstractflow-0.3.1.dist-info}/WHEEL +1 -1
- abstractflow-0.3.0.dist-info/METADATA +0 -413
- abstractflow-0.3.0.dist-info/RECORD +0 -32
- {abstractflow-0.3.0.dist-info → abstractflow-0.3.1.dist-info}/entry_points.txt +0 -0
- {abstractflow-0.3.0.dist-info → abstractflow-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {abstractflow-0.3.0.dist-info → abstractflow-0.3.1.dist-info}/top_level.txt +0 -0
@@ -1,317 +1,5 @@
-"""
-
-Design goals:
-- Variables are stored durably in `run.vars` (so pause/resume works).
-- `Set Variable` must not clobber the visual pipeline `_last_output` (pass-through),
-  otherwise inserting it into a chain would destroy downstream inputs.
-"""
+"""Re-export: AbstractRuntime VisualFlow compiler adapter."""
 
 from __future__ import annotations
 
-import json
-from typing import Any, Callable, Dict, Optional
-
-
-def _set_by_path(target: Dict[str, Any], dotted_key: str, value: Any) -> None:
-    """Set a dotted path on a dict, creating intermediate dicts as needed."""
-    parts = [p for p in dotted_key.split(".") if p]
-    if not parts:
-        raise ValueError("Variable name must be non-empty")
-    cur: Dict[str, Any] = target
-    for part in parts[:-1]:
-        nxt = cur.get(part)
-        if not isinstance(nxt, dict):
-            nxt = {}
-            cur[part] = nxt
-        cur = nxt
-    cur[parts[-1]] = value
-
-
-def _get_by_path(source: Dict[str, Any], dotted_key: str) -> Any:
-    """Best-effort dotted-path lookup supporting dicts (and nested dicts).
-
-    This is intentionally conservative: workflow variables (`run.vars`) are dict-like state.
-    """
-    parts = [p for p in str(dotted_key or "").split(".") if p]
-    if not parts:
-        return None
-    current: Any = source
-    for part in parts:
-        if not isinstance(current, dict):
-            return None
-        current = current.get(part)
-    return current
-
-
-def _set_on_object(obj: Dict[str, Any], dotted_key: str, value: Any) -> Dict[str, Any]:
-    """Set a nested key on an object dict (mutates the given dict) and return it."""
-    parts = [p for p in str(dotted_key or "").split(".") if p]
-    if not parts:
-        return obj
-    cur: Dict[str, Any] = obj
-    for part in parts[:-1]:
-        nxt = cur.get(part)
-        if not isinstance(nxt, dict):
-            nxt = {}
-            cur[part] = nxt
-        cur = nxt
-    cur[parts[-1]] = value
-    return obj
-
-
-def _persist_node_output(run_vars: Dict[str, Any], node_id: str, value: Dict[str, Any]) -> None:
-    temp = run_vars.get("_temp")
-    if not isinstance(temp, dict):
-        temp = {}
-        run_vars["_temp"] = temp
-    persisted = temp.get("node_outputs")
-    if not isinstance(persisted, dict):
-        persisted = {}
-        temp["node_outputs"] = persisted
-    persisted[node_id] = value
-
-
-def create_set_var_node_handler(
-    *,
-    node_id: str,
-    next_node: Optional[str],
-    data_aware_handler: Optional[Callable[[Any], Any]],
-    flow: Any,
-) -> Callable:
-    """Create a handler for `set_var` visual nodes."""
-    from abstractruntime.core.models import StepPlan
-    from abstractflow.compiler import _sync_effect_results_to_node_outputs
-
-    def handler(run: Any, ctx: Any) -> "StepPlan":
-        del ctx
-        if flow is not None and hasattr(flow, "_node_outputs") and hasattr(flow, "_data_edge_map"):
-            _sync_effect_results_to_node_outputs(run, flow)
-
-        last_output = run.vars.get("_last_output", {})
-        resolved = data_aware_handler(last_output) if callable(data_aware_handler) else {}
-        payload = resolved if isinstance(resolved, dict) else {}
-
-        raw_name = payload.get("name")
-        name = (raw_name if isinstance(raw_name, str) else str(raw_name or "")).strip()
-        if not name:
-            run.vars["_flow_error"] = "Set Variable requires a non-empty variable name."
-            run.vars["_flow_error_node"] = node_id
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-            )
-        if name.startswith("_"):
-            run.vars["_flow_error"] = f"Invalid variable name '{name}': names starting with '_' are reserved."
-            run.vars["_flow_error_node"] = node_id
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-            )
-
-        value = payload.get("value")
-
-        try:
-            if not isinstance(run.vars, dict):
-                raise ValueError("run.vars is not a dict")
-            _set_by_path(run.vars, name, value)
-        except Exception as e:
-            run.vars["_flow_error"] = f"Failed to set variable '{name}': {e}"
-            run.vars["_flow_error_node"] = node_id
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-            )
-
-        # Persist this node's outputs for pause/resume (data edges may depend on them).
-        _persist_node_output(run.vars, node_id, {"value": value})
-
-        # IMPORTANT: pass-through semantics (do NOT clobber the pipeline output).
-        # `_last_output` stays as-is.
-
-        if next_node:
-            return StepPlan(node_id=node_id, next_node=next_node)
-        return StepPlan(node_id=node_id, complete_output={"success": True, "result": run.vars.get("_last_output")})
-
-    return handler
-
-
-def create_set_var_property_node_handler(
-    *,
-    node_id: str,
-    next_node: Optional[str],
-    data_aware_handler: Optional[Callable[[Any], Any]],
-    flow: Any,
-) -> Callable:
-    """Create a handler for `set_var_property` visual nodes.
-
-    Contract:
-    - Inputs:
-      - `name`: base variable path (e.g. "state" or "state.player")
-      - `key`: nested key path inside that variable's object (e.g. "hp" or "stats.hp")
-      - `value`: value to set at `key`
-    - Behavior:
-      - reads current object at `name` (defaults to `{}` if missing/not an object)
-      - applies the update to a copy
-      - writes the updated object back into `run.vars[name]` (durable)
-      - persists node outputs for pause/resume
-      - does NOT clobber `_last_output` (pass-through)
-    """
-    from abstractruntime.core.models import StepPlan
-    from abstractflow.compiler import _sync_effect_results_to_node_outputs
-
-    def handler(run: Any, ctx: Any) -> "StepPlan":
-        del ctx
-        if flow is not None and hasattr(flow, "_node_outputs") and hasattr(flow, "_data_edge_map"):
-            _sync_effect_results_to_node_outputs(run, flow)
-
-        last_output = run.vars.get("_last_output", {})
-        resolved = data_aware_handler(last_output) if callable(data_aware_handler) else {}
-        payload = resolved if isinstance(resolved, dict) else {}
-
-        raw_name = payload.get("name")
-        name = (raw_name if isinstance(raw_name, str) else str(raw_name or "")).strip()
-        if not name:
-            run.vars["_flow_error"] = "Set Variable Property requires a non-empty variable name."
-            run.vars["_flow_error_node"] = node_id
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-            )
-        if name.startswith("_"):
-            run.vars["_flow_error"] = f"Invalid variable name '{name}': names starting with '_' are reserved."
-            run.vars["_flow_error_node"] = node_id
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-            )
-
-        raw_key = payload.get("key")
-        key = (raw_key if isinstance(raw_key, str) else str(raw_key or "")).strip()
-        if not key:
-            run.vars["_flow_error"] = "Set Variable Property requires a non-empty key."
-            run.vars["_flow_error_node"] = node_id
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-            )
-
-        value = payload.get("value")
-
-        try:
-            if not isinstance(run.vars, dict):
-                raise ValueError("run.vars is not a dict")
-
-            current = _get_by_path(run.vars, name)
-            base_obj: Dict[str, Any] = dict(current) if isinstance(current, dict) else {}
-            _set_on_object(base_obj, key, value)
-            _set_by_path(run.vars, name, base_obj)
-        except Exception as e:
-            run.vars["_flow_error"] = f"Failed to set variable property '{name}.{key}': {e}"
-            run.vars["_flow_error_node"] = node_id
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-            )
-
-        # Persist this node's outputs for pause/resume (data edges may depend on them).
-        _persist_node_output(run.vars, node_id, {"value": base_obj})
-
-        # IMPORTANT: pass-through semantics (do NOT clobber the pipeline output).
-        # `_last_output` stays as-is.
-        if next_node:
-            return StepPlan(node_id=node_id, next_node=next_node)
-        return StepPlan(node_id=node_id, complete_output={"success": True, "value": base_obj, "result": run.vars.get("_last_output")})
-
-    return handler
-
-
-def create_set_vars_node_handler(
-    *,
-    node_id: str,
-    next_node: Optional[str],
-    data_aware_handler: Optional[Callable[[Any], Any]],
-    flow: Any,
-) -> Callable:
-    """Create a handler for `set_vars` visual nodes.
-
-    Contract:
-    - Input pin: `updates` (object or JSON string), where keys are dotted paths and values are JSON-safe values.
-    - Output pin: `updates` (echoed), for observability/debugging.
-    - Pass-through: must NOT clobber `_last_output` (same as `set_var`).
-    """
-    from abstractruntime.core.models import StepPlan
-    from abstractflow.compiler import _sync_effect_results_to_node_outputs
-
-    def _coerce_updates(raw: Any) -> Dict[str, Any]:
-        if isinstance(raw, dict):
-            return dict(raw)
-        if isinstance(raw, str) and raw.strip():
-            try:
-                parsed = json.loads(raw)
-            except Exception:
-                return {}
-            return dict(parsed) if isinstance(parsed, dict) else {}
-        return {}
-
-    def handler(run: Any, ctx: Any) -> "StepPlan":
-        del ctx
-        if flow is not None and hasattr(flow, "_node_outputs") and hasattr(flow, "_data_edge_map"):
-            _sync_effect_results_to_node_outputs(run, flow)
-
-        last_output = run.vars.get("_last_output", {})
-        resolved = data_aware_handler(last_output) if callable(data_aware_handler) else {}
-        payload = resolved if isinstance(resolved, dict) else {}
-
-        updates = _coerce_updates(payload.get("updates"))
-        if not updates:
-            # Deterministic no-op (still counts as a step, but doesn't pollute `_flow_error`).
-            _persist_node_output(run.vars, node_id, {"updates": {}})
-            if next_node:
-                return StepPlan(node_id=node_id, next_node=next_node)
-            return StepPlan(node_id=node_id, complete_output={"success": True, "updates": {}, "result": run.vars.get("_last_output")})
-
-        # Validate all keys first so we don't partially apply.
-        normalized: Dict[str, Any] = {}
-        for k, v in updates.items():
-            name = (k if isinstance(k, str) else str(k or "")).strip()
-            if not name:
-                run.vars["_flow_error"] = "Set Variables requires non-empty variable names in updates."
-                run.vars["_flow_error_node"] = node_id
-                return StepPlan(
-                    node_id=node_id,
-                    complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-                )
-            if name.startswith("_"):
-                run.vars["_flow_error"] = f"Invalid variable name '{name}': names starting with '_' are reserved."
-                run.vars["_flow_error_node"] = node_id
-                return StepPlan(
-                    node_id=node_id,
-                    complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-                )
-            normalized[name] = v
-
-        try:
-            if not isinstance(run.vars, dict):
-                raise ValueError("run.vars is not a dict")
-            for name, value in normalized.items():
-                _set_by_path(run.vars, name, value)
-        except Exception as e:
-            run.vars["_flow_error"] = f"Failed to set variables: {e}"
-            run.vars["_flow_error_node"] = node_id
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"success": False, "error": run.vars["_flow_error"], "node": node_id},
-            )
-
-        # Persist this node's outputs for pause/resume (data edges may depend on them).
-        _persist_node_output(run.vars, node_id, {"updates": normalized})
-
-        # IMPORTANT: pass-through semantics (do NOT clobber the pipeline output).
-        # `_last_output` stays as-is.
-        if next_node:
-            return StepPlan(node_id=node_id, next_node=next_node)
-        return StepPlan(node_id=node_id, complete_output={"success": True, "updates": normalized, "result": run.vars.get("_last_output")})
-
-    return handler
-
-
+from abstractruntime.visualflow_compiler.adapters.variable_adapter import * # noqa: F401,F403
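The handlers removed above store workflow variables durably in `run.vars` via dotted paths while leaving `_last_output` untouched (pass-through). A minimal standalone sketch of that dotted-path write behavior, mirroring the removed `_set_by_path` helper (the function name here is illustrative; in 0.3.1 the actual implementation is re-exported from `abstractruntime`):

```python
from typing import Any, Dict

def set_by_path(target: Dict[str, Any], dotted_key: str, value: Any) -> None:
    # "state.player.hp" -> target["state"]["player"]["hp"] = value,
    # creating intermediate dicts as needed (same logic as the removed _set_by_path).
    parts = [p for p in dotted_key.split(".") if p]
    if not parts:
        raise ValueError("Variable name must be non-empty")
    cur: Dict[str, Any] = target
    for part in parts[:-1]:
        nxt = cur.get(part)
        if not isinstance(nxt, dict):
            nxt = {}
            cur[part] = nxt
        cur = nxt
    cur[parts[-1]] = value

run_vars: Dict[str, Any] = {}
set_by_path(run_vars, "state.player.hp", 10)
set_by_path(run_vars, "state.player.name", "Ada")
assert run_vars == {"state": {"player": {"hp": 10, "name": "Ada"}}}
```

These nested-dict semantics are what the `set_var`, `set_var_property`, and `set_vars` visual nodes rely on when writing names like `state.player.hp`.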
abstractflow/cli.py CHANGED
@@ -1,44 +1,89 @@
-"""
-
+"""Command-line interface for AbstractFlow.
+
+Current implemented features:
+- WorkflowBundle (.flow) pack/inspect/unpack (backlog 314)
 
-
+Other commands are intentionally kept minimal for now.
 """
 
+from __future__ import annotations
+
+import argparse
+import json
 import sys
 from typing import List, Optional
 
+from .workflow_bundle import inspect_workflow_bundle, pack_workflow_bundle, unpack_workflow_bundle
+from abstractruntime.workflow_bundle import workflow_bundle_manifest_to_dict
+
+
+def _build_parser() -> argparse.ArgumentParser:
+    p = argparse.ArgumentParser(prog="abstractflow", add_help=True)
+    sub = p.add_subparsers(dest="command")
+
+    bundle = sub.add_parser("bundle", help="WorkflowBundle (.flow) tools")
+    bundle_sub = bundle.add_subparsers(dest="bundle_cmd")
+
+    pack = bundle_sub.add_parser("pack", help="Pack a .flow bundle from a root VisualFlow JSON file")
+    pack.add_argument("root", help="Path to root VisualFlow JSON (e.g., ./flows/<id>.json)")
+    pack.add_argument("--out", required=True, help="Output .flow path")
+    pack.add_argument("--bundle-id", default=None, help="Bundle id (default: root flow id)")
+    pack.add_argument("--bundle-version", default="0.0.0", help="Bundle version (default: 0.0.0)")
+    pack.add_argument("--flows-dir", default=None, help="Directory containing flow JSON files (default: root's directory)")
+    pack.add_argument(
+        "--entrypoint",
+        action="append",
+        default=None,
+        help="Entrypoint flow id (repeatable). Default: root flow id",
+    )
+
+    insp = bundle_sub.add_parser("inspect", help="Print bundle manifest (JSON)")
+    insp.add_argument("bundle", help="Path to .flow (zip) or extracted directory")
+
+    unpack = bundle_sub.add_parser("unpack", help="Extract a .flow bundle to a directory")
+    unpack.add_argument("bundle", help="Path to .flow (zip) or extracted directory")
+    unpack.add_argument("--dir", required=True, help="Output directory")
+
+    return p
+
 
 def main(args: Optional[List[str]] = None) -> int:
-    """
-    Main entry point for the AbstractFlow CLI.
-
-    Args:
-        args: Command-line arguments (defaults to sys.argv[1:])
-
-    Returns:
-        Exit code (0 for success, non-zero for error)
-    """
     if args is None:
         args = sys.argv[1:]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+    parser = _build_parser()
+    ns = parser.parse_args(args)
+
+    if ns.command == "bundle":
+        if ns.bundle_cmd == "pack":
+            packed = pack_workflow_bundle(
+                root_flow_json=ns.root,
+                out_path=ns.out,
+                bundle_id=ns.bundle_id,
+                bundle_version=ns.bundle_version,
+                flows_dir=ns.flows_dir,
+                entrypoints=list(ns.entrypoint) if isinstance(ns.entrypoint, list) and ns.entrypoint else None,
+            )
+            sys.stdout.write(str(packed.path) + "\n")
+            return 0
+
+        if ns.bundle_cmd == "inspect":
+            man = inspect_workflow_bundle(bundle_path=ns.bundle)
+            sys.stdout.write(json.dumps(workflow_bundle_manifest_to_dict(man), indent=2, ensure_ascii=False) + "\n")
+            return 0
+
+        if ns.bundle_cmd == "unpack":
+            out = unpack_workflow_bundle(bundle_path=ns.bundle, out_dir=ns.dir)
+            sys.stdout.write(str(out) + "\n")
+            return 0
+
+        parser.error("Missing bundle subcommand (pack|inspect|unpack)")
+
+    parser.print_help()
     return 0
 
 
 if __name__ == "__main__":
-
+    raise SystemExit(main())
 
 
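The new CLI routes everything through `main(args)`, so the bundle commands can be exercised programmatically as well as from a shell. A usage sketch based on the parser shown above (the flow id and file paths are placeholders, not files shipped with the package):

```python
# Hypothetical invocation of the new bundle subcommands via main();
# ./flows/demo.json, ./demo.flow and ./demo_unpacked are placeholder paths.
from abstractflow.cli import main

# Pack a root VisualFlow JSON into a .flow bundle.
main(["bundle", "pack", "./flows/demo.json", "--out", "./demo.flow",
      "--bundle-version", "0.1.0", "--entrypoint", "demo"])

# Print the bundle manifest as JSON.
main(["bundle", "inspect", "./demo.flow"])

# Extract the bundle into a directory.
main(["bundle", "unpack", "./demo.flow", "--dir", "./demo_unpacked"])
```

Each call returns 0 on success; `pack` writes the bundle path to stdout, `inspect` prints the manifest as indented JSON, and `unpack` prints the output directory.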