abstractgateway 0.1.0__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. abstractgateway/__init__.py +1 -2
  2. abstractgateway/__main__.py +7 -0
  3. abstractgateway/app.py +4 -4
  4. abstractgateway/cli.py +568 -8
  5. abstractgateway/config.py +15 -5
  6. abstractgateway/embeddings_config.py +45 -0
  7. abstractgateway/host_metrics.py +274 -0
  8. abstractgateway/hosts/bundle_host.py +528 -55
  9. abstractgateway/hosts/visualflow_host.py +30 -3
  10. abstractgateway/integrations/__init__.py +2 -0
  11. abstractgateway/integrations/email_bridge.py +782 -0
  12. abstractgateway/integrations/telegram_bridge.py +534 -0
  13. abstractgateway/maintenance/__init__.py +5 -0
  14. abstractgateway/maintenance/action_tokens.py +100 -0
  15. abstractgateway/maintenance/backlog_exec_runner.py +1592 -0
  16. abstractgateway/maintenance/backlog_parser.py +184 -0
  17. abstractgateway/maintenance/draft_generator.py +451 -0
  18. abstractgateway/maintenance/llm_assist.py +212 -0
  19. abstractgateway/maintenance/notifier.py +109 -0
  20. abstractgateway/maintenance/process_manager.py +1064 -0
  21. abstractgateway/maintenance/report_models.py +81 -0
  22. abstractgateway/maintenance/report_parser.py +219 -0
  23. abstractgateway/maintenance/text_similarity.py +123 -0
  24. abstractgateway/maintenance/triage.py +507 -0
  25. abstractgateway/maintenance/triage_queue.py +142 -0
  26. abstractgateway/migrate.py +155 -0
  27. abstractgateway/routes/__init__.py +2 -2
  28. abstractgateway/routes/gateway.py +10817 -179
  29. abstractgateway/routes/triage.py +118 -0
  30. abstractgateway/runner.py +689 -14
  31. abstractgateway/security/gateway_security.py +425 -110
  32. abstractgateway/service.py +213 -6
  33. abstractgateway/stores.py +64 -4
  34. abstractgateway/workflow_deprecations.py +225 -0
  35. abstractgateway-0.1.1.dist-info/METADATA +135 -0
  36. abstractgateway-0.1.1.dist-info/RECORD +40 -0
  37. abstractgateway-0.1.0.dist-info/METADATA +0 -101
  38. abstractgateway-0.1.0.dist-info/RECORD +0 -18
  39. {abstractgateway-0.1.0.dist-info → abstractgateway-0.1.1.dist-info}/WHEEL +0 -0
  40. {abstractgateway-0.1.0.dist-info → abstractgateway-0.1.1.dist-info}/entry_points.txt +0 -0
@@ -1,15 +1,20 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import json
3
4
  import logging
4
5
  import os
5
- from dataclasses import dataclass
6
+ import re
7
+ import threading
8
+ from dataclasses import dataclass, field
6
9
  from pathlib import Path
7
10
  from typing import Any, Dict, Optional, Tuple
8
11
 
9
- from abstractruntime import Runtime, WorkflowRegistry, WorkflowSpec
12
+ from abstractruntime import Runtime, WorkflowRegistry, WorkflowSpec, persist_workflow_snapshot
10
13
  from abstractruntime.visualflow_compiler import compile_visualflow
11
14
  from abstractruntime.workflow_bundle import WorkflowBundle, WorkflowBundleError, open_workflow_bundle
12
15
 
16
+ from ..workflow_deprecations import WorkflowDeprecatedError, WorkflowDeprecationStore
17
+
13
18
 
14
19
  logger = logging.getLogger(__name__)
15
20
 
@@ -18,6 +23,66 @@ def _namespace(bundle_id: str, flow_id: str) -> str:
18
23
  return f"{bundle_id}:{flow_id}"
19
24
 
20
25
 
26
+ def _bundle_ref(bundle_id: str, bundle_version: str) -> str:
27
+ bid = str(bundle_id or "").strip()
28
+ bv = str(bundle_version or "").strip()
29
+ if not bid:
30
+ raise ValueError("bundle_id is required")
31
+ if not bv:
32
+ raise ValueError("bundle_version is required")
33
+ return f"{bid}@{bv}"
34
+
35
+
36
+ def _split_bundle_ref(raw: str) -> tuple[str, Optional[str]]:
37
+ s = str(raw or "").strip()
38
+ if not s:
39
+ return ("", None)
40
+ if "@" not in s:
41
+ return (s, None)
42
+ a, b = s.split("@", 1)
43
+ a = a.strip()
44
+ b = b.strip()
45
+ if not a:
46
+ return ("", None)
47
+ if not b:
48
+ return (a, None)
49
+ return (a, b)
50
+
51
+
52
+ def _try_parse_semver(v: str) -> Optional[tuple[int, int, int]]:
53
+ s = str(v or "").strip()
54
+ if not s:
55
+ return None
56
+ parts = [p.strip() for p in s.split(".")]
57
+ if not parts or any(not p for p in parts):
58
+ return None
59
+ nums: list[int] = []
60
+ for p in parts:
61
+ if not p.isdigit():
62
+ return None
63
+ nums.append(int(p))
64
+ while len(nums) < 3:
65
+ nums.append(0)
66
+ return (nums[0], nums[1], nums[2])
67
+
68
+
69
+ def _pick_latest_version(versions: Dict[str, WorkflowBundle]) -> str:
70
+ items = [(str(k), v) for k, v in (versions or {}).items() if isinstance(k, str)]
71
+ if not items:
72
+ return "0.0.0"
73
+
74
+ if all(_try_parse_semver(ver) is not None for ver, _ in items):
75
+ return max(items, key=lambda x: _try_parse_semver(x[0]) or (0, 0, 0))[0]
76
+
77
+ # Fallback when versions are not semver-like: prefer newest created_at.
78
+ def _key(x: tuple[str, WorkflowBundle]) -> tuple[str, str]:
79
+ ver, b = x
80
+ created = str(getattr(getattr(b, "manifest", None), "created_at", "") or "")
81
+ return (created, ver)
82
+
83
+ return max(items, key=_key)[0]
84
+
85
+
21
86
  def _coerce_namespaced_id(*, bundle_id: Optional[str], flow_id: str, default_bundle_id: Optional[str]) -> str:
22
87
  fid = str(flow_id or "").strip()
23
88
  if not fid:
@@ -192,6 +257,17 @@ def _flow_uses_tools(raw: Dict[str, Any]) -> bool:
192
257
  return False
193
258
 
194
259
 
260
+ def _flow_uses_memory_kg(raw: Dict[str, Any]) -> bool:
261
+ nodes = raw.get("nodes")
262
+ if not isinstance(nodes, list):
263
+ return False
264
+ for n in nodes:
265
+ t = _node_type_from_raw(n)
266
+ if t in {"memory_kg_assert", "memory_kg_query"}:
267
+ return True
268
+ return False
269
+
270
+
195
271
  def _collect_agent_nodes(raw: Dict[str, Any]) -> list[tuple[str, Dict[str, Any]]]:
196
272
  nodes = raw.get("nodes")
197
273
  if not isinstance(nodes, list):
@@ -229,7 +305,7 @@ def _visual_event_listener_workflow_id(*, flow_id: str, node_id: str) -> str:
229
305
  return f"visual_event_listener_{_sanitize(flow_id)}_{_sanitize(node_id)}"
230
306
 
231
307
 
232
- @dataclass(frozen=True)
308
+ @dataclass
233
309
  class WorkflowBundleGatewayHost:
234
310
  """Gateway host that starts/ticks runs from WorkflowBundles (no AbstractFlow import).
235
311
 
@@ -238,17 +314,139 @@ class WorkflowBundleGatewayHost:
238
314
  """
239
315
 
240
316
  bundles_dir: Path
241
- bundles: Dict[str, WorkflowBundle]
317
+ data_dir: Path
318
+ dynamic_flows_dir: Path
319
+ deprecation_store: WorkflowDeprecationStore
320
+ # bundle_id -> bundle_version -> WorkflowBundle
321
+ bundles: Dict[str, Dict[str, WorkflowBundle]]
322
+ # bundle_id -> latest bundle_version
323
+ latest_bundle_versions: Dict[str, str]
242
324
  runtime: Runtime
243
325
  workflow_registry: WorkflowRegistry
244
326
  specs: Dict[str, WorkflowSpec]
245
327
  event_listener_specs_by_root: Dict[str, list[str]]
246
328
  _default_bundle_id: Optional[str]
329
+ _lock: Any = field(default_factory=threading.RLock, repr=False, compare=False)
330
+
331
+ @staticmethod
332
+ def _dynamic_flow_filename(workflow_id: str) -> str:
333
+ safe_re = re.compile(r"[^a-zA-Z0-9._-]+")
334
+ s = safe_re.sub("_", str(workflow_id or "").strip())
335
+ if not s or s in {".", ".."}:
336
+ s = "workflow"
337
+ return f"{s}.json"
338
+
339
+ def _dynamic_flow_path(self, workflow_id: str) -> Path:
340
+ return Path(self.dynamic_flows_dir) / self._dynamic_flow_filename(workflow_id)
341
+
342
+ def load_dynamic_visualflow(self, workflow_id: str) -> Optional[Dict[str, Any]]:
343
+ """Load a persisted dynamic VisualFlow JSON from disk (best-effort)."""
344
+ wid = str(workflow_id or "").strip()
345
+ if not wid:
346
+ return None
347
+ p = self._dynamic_flow_path(wid)
348
+ if not p.exists() or not p.is_file():
349
+ return None
350
+ try:
351
+ raw = json.loads(p.read_text(encoding="utf-8"))
352
+ except Exception:
353
+ return None
354
+ return raw if isinstance(raw, dict) else None
355
+
356
+ def register_dynamic_visualflow(self, raw: Dict[str, Any], *, persist: bool = True) -> str:
357
+ """Register a VisualFlow JSON object as a dynamic workflow (durable in data_dir).
358
+
359
+ This is used for gateway-generated wrapper workflows (e.g. scheduled runs).
360
+ """
361
+ if not isinstance(raw, dict):
362
+ raise TypeError("Dynamic VisualFlow must be an object")
363
+ wid = str(raw.get("id") or "").strip()
364
+ if not wid:
365
+ raise ValueError("Dynamic VisualFlow missing required 'id'")
366
+
367
+ spec = compile_visualflow(raw)
368
+ self.workflow_registry.register(spec)
369
+ self.specs[str(spec.workflow_id)] = spec
370
+
371
+ if persist:
372
+ try:
373
+ Path(self.dynamic_flows_dir).mkdir(parents=True, exist_ok=True)
374
+ p = self._dynamic_flow_path(str(spec.workflow_id))
375
+ p.write_text(json.dumps(raw, ensure_ascii=False, indent=2), encoding="utf-8")
376
+ except Exception as e:
377
+ raise RuntimeError(f"Failed to persist dynamic VisualFlow: {e}") from e
378
+
379
+ return str(spec.workflow_id)
380
+
381
+ def upsert_dynamic_visualflow(self, raw: Dict[str, Any], *, persist: bool = True) -> str:
382
+ """Register or replace a dynamic VisualFlow JSON object (durable in data_dir).
383
+
384
+ This is used for gateway-generated wrapper workflows that must be edited in-place
385
+ (e.g. rescheduling a recurrent job without killing the parent run).
386
+ """
387
+ if not isinstance(raw, dict):
388
+ raise TypeError("Dynamic VisualFlow must be an object")
389
+ wid = str(raw.get("id") or "").strip()
390
+ if not wid:
391
+ raise ValueError("Dynamic VisualFlow missing required 'id'")
392
+
393
+ spec = compile_visualflow(raw)
394
+ with self._lock:
395
+ try:
396
+ existing = self.workflow_registry.get(spec.workflow_id)
397
+ except Exception:
398
+ existing = None
399
+ if existing is not None:
400
+ try:
401
+ self.workflow_registry.unregister(spec.workflow_id)
402
+ except Exception:
403
+ pass
404
+ try:
405
+ self.workflow_registry.register(spec)
406
+ except Exception as e:
407
+ raise RuntimeError(f"Failed to register workflow '{spec.workflow_id}': {e}") from e
408
+
409
+ self.specs[str(spec.workflow_id)] = spec
410
+
411
+ if persist:
412
+ try:
413
+ Path(self.dynamic_flows_dir).mkdir(parents=True, exist_ok=True)
414
+ p = self._dynamic_flow_path(str(spec.workflow_id))
415
+ p.write_text(json.dumps(raw, ensure_ascii=False, indent=2), encoding="utf-8")
416
+ except Exception as e:
417
+ raise RuntimeError(f"Failed to persist dynamic VisualFlow: {e}") from e
418
+
419
+ return str(spec.workflow_id)
420
+
421
+ def _try_register_dynamic_workflow_from_disk(self, workflow_id: str) -> Optional[WorkflowSpec]:
422
+ wid = str(workflow_id or "").strip()
423
+ if not wid:
424
+ return None
425
+ p = self._dynamic_flow_path(wid)
426
+ if not p.exists() or not p.is_file():
427
+ return None
428
+ try:
429
+ raw = json.loads(p.read_text(encoding="utf-8"))
430
+ except Exception:
431
+ return None
432
+ if not isinstance(raw, dict):
433
+ return None
434
+ try:
435
+ spec = compile_visualflow(raw)
436
+ except Exception:
437
+ return None
438
+ try:
439
+ self.workflow_registry.register(spec)
440
+ self.specs[str(spec.workflow_id)] = spec
441
+ except Exception:
442
+ return None
443
+ return spec
247
444
 
248
445
  @staticmethod
249
446
  def load_from_dir(
250
447
  *,
251
448
  bundles_dir: Path,
449
+ data_dir: Path,
252
450
  run_store: Any,
253
451
  ledger_store: Any,
254
452
  artifact_store: Any,
@@ -263,52 +461,150 @@ class WorkflowBundleGatewayHost:
263
461
  else:
264
462
  bundle_paths = sorted([p for p in base.glob("*.flow") if p.is_file()])
265
463
 
266
- bundles: Dict[str, WorkflowBundle] = {}
464
+ bundles_by_id: Dict[str, Dict[str, WorkflowBundle]] = {}
267
465
  for p in bundle_paths:
268
466
  try:
269
467
  b = open_workflow_bundle(p)
270
- bundles[str(b.manifest.bundle_id)] = b
468
+ bid = str(getattr(getattr(b, "manifest", None), "bundle_id", "") or "").strip()
469
+ bver = str(getattr(getattr(b, "manifest", None), "bundle_version", "0.0.0") or "0.0.0").strip() or "0.0.0"
470
+ if not bid:
471
+ raise WorkflowBundleError(f"Bundle '{p}' has empty bundle_id")
472
+ versions = bundles_by_id.setdefault(bid, {})
473
+ if bver in versions:
474
+ logger.warning("Duplicate bundle version '%s@%s' at %s; keeping first", bid, bver, p)
475
+ continue
476
+ versions[bver] = b
271
477
  except Exception as e:
272
478
  logger.warning("Failed to load bundle %s: %s", p, e)
273
479
 
274
- if not bundles:
275
- raise FileNotFoundError(f"No bundles found in {base} (expected *.flow)")
480
+ if not bundles_by_id:
481
+ logger.warning("No bundles found in %s (expected *.flow). Starting gateway with zero loaded bundles.", base)
482
+
483
+ default_bundle_id = next(iter(bundles_by_id.keys())) if len(bundles_by_id) == 1 else None
484
+ latest_versions: Dict[str, str] = {bid: _pick_latest_version(versions) for bid, versions in bundles_by_id.items()}
276
485
 
277
- default_bundle_id = next(iter(bundles.keys())) if len(bundles) == 1 else None
486
+ data_root = Path(data_dir).expanduser().resolve()
487
+ dep_store = WorkflowDeprecationStore(path=data_root / "workflow_deprecations.json")
488
+ dynamic_dir = data_root / "dynamic_flows"
489
+ try:
490
+ dynamic_dir.mkdir(parents=True, exist_ok=True)
491
+ except Exception:
492
+ # Best-effort: dynamic workflows are optional.
493
+ pass
278
494
 
279
495
  # Build runtime + registry and register all workflow specs.
280
496
  wf_reg: WorkflowRegistry = WorkflowRegistry()
281
497
  specs: Dict[str, WorkflowSpec] = {}
282
498
  flows_by_namespaced_id: Dict[str, Dict[str, Any]] = {}
283
499
 
284
- for bid, b in bundles.items():
285
- man = b.manifest
286
- if not man.flows:
287
- raise WorkflowBundleError(f"Bundle '{bid}' has no flows (manifest.flows is empty)")
288
-
289
- flow_ids = set(man.flows.keys())
290
- id_map = {flow_id: _namespace(bid, flow_id) for flow_id in flow_ids}
291
-
292
- for flow_id, rel in man.flows.items():
293
- raw = b.read_json(rel)
500
+ for bid, versions in bundles_by_id.items():
501
+ for bver, b in versions.items():
502
+ bundle_ref = _bundle_ref(bid, bver)
503
+ man = b.manifest
504
+ if not man.flows:
505
+ raise WorkflowBundleError(f"Bundle '{bid}@{bver}' has no flows (manifest.flows is empty)")
506
+
507
+ flow_ids = set(man.flows.keys())
508
+ id_map = {flow_id: _namespace(bundle_ref, flow_id) for flow_id in flow_ids}
509
+
510
+ for flow_id, rel in man.flows.items():
511
+ raw = b.read_json(rel)
512
+ if not isinstance(raw, dict):
513
+ raise WorkflowBundleError(f"VisualFlow JSON for '{bid}@{bver}:{flow_id}' must be an object")
514
+ namespaced_raw = _namespace_visualflow_raw(
515
+ raw=raw,
516
+ bundle_id=bundle_ref,
517
+ flow_id=flow_id,
518
+ id_map=id_map,
519
+ )
520
+ flows_by_namespaced_id[str(namespaced_raw.get("id") or _namespace(bundle_ref, flow_id))] = namespaced_raw
521
+ try:
522
+ spec = compile_visualflow(namespaced_raw)
523
+ except Exception as e:
524
+ raise WorkflowBundleError(f"Failed compiling VisualFlow '{bid}@{bver}:{flow_id}': {e}") from e
525
+ wf_reg.register(spec)
526
+ specs[str(spec.workflow_id)] = spec
527
+
528
+ # Load dynamic flows persisted in data_dir (e.g. scheduled wrapper flows).
529
+ try:
530
+ for p in sorted(dynamic_dir.glob("*.json")):
531
+ if not p.is_file():
532
+ continue
533
+ try:
534
+ raw = json.loads(p.read_text(encoding="utf-8"))
535
+ except Exception as e:
536
+ logger.warning("Failed to read dynamic flow %s: %s", p, e)
537
+ continue
294
538
  if not isinstance(raw, dict):
295
- raise WorkflowBundleError(f"VisualFlow JSON for '{bid}:{flow_id}' must be an object")
296
- namespaced_raw = _namespace_visualflow_raw(
297
- raw=raw,
298
- bundle_id=bid,
299
- flow_id=flow_id,
300
- id_map=id_map,
301
- )
302
- flows_by_namespaced_id[str(namespaced_raw.get("id") or _namespace(bid, flow_id))] = namespaced_raw
539
+ continue
303
540
  try:
304
- spec = compile_visualflow(namespaced_raw)
541
+ spec = compile_visualflow(raw)
305
542
  except Exception as e:
306
- raise WorkflowBundleError(f"Failed compiling VisualFlow '{bid}:{flow_id}': {e}") from e
307
- wf_reg.register(spec)
308
- specs[str(spec.workflow_id)] = spec
543
+ logger.warning("Failed compiling dynamic flow %s: %s", p, e)
544
+ continue
545
+ try:
546
+ wf_reg.register(spec)
547
+ specs[str(spec.workflow_id)] = spec
548
+ except Exception as e:
549
+ logger.warning("Failed registering dynamic flow %s: %s", p, e)
550
+ continue
551
+ except Exception:
552
+ pass
309
553
 
310
554
  needs_llm = any(_flow_uses_llm(raw) for raw in flows_by_namespaced_id.values())
311
555
  needs_tools = any(_flow_uses_tools(raw) for raw in flows_by_namespaced_id.values())
556
+ needs_memory_kg = any(_flow_uses_memory_kg(raw) for raw in flows_by_namespaced_id.values())
557
+
558
+ extra_effect_handlers: Dict[Any, Any] = {}
559
+ if needs_memory_kg:
560
+ try:
561
+ from abstractmemory import LanceDBTripleStore
562
+ from abstractruntime.integrations.abstractmemory.effect_handlers import build_memory_kg_effect_handlers
563
+ from abstractruntime.storage.artifacts import utc_now_iso
564
+ except Exception as e: # pragma: no cover
565
+ raise WorkflowBundleError(
566
+ "Bundle uses memory_kg_* nodes but AbstractMemory integration is not available. "
567
+ "Install `abstractmemory` (and optionally `abstractmemory[lancedb]`)."
568
+ ) from e
569
+
570
+ embedder = None
571
+ try:
572
+ from abstractruntime.integrations.abstractcore.embeddings_client import AbstractCoreEmbeddingsClient
573
+ from abstractgateway.embeddings_config import resolve_embedding_config
574
+
575
+ emb_provider, emb_model = resolve_embedding_config(base_dir=Path(data_root))
576
+ emb_client = AbstractCoreEmbeddingsClient(
577
+ provider=str(emb_provider).strip().lower(),
578
+ model=str(emb_model).strip(),
579
+ manager_kwargs={
580
+ "cache_dir": Path(data_root) / "abstractcore" / "embeddings",
581
+ # Embeddings must be trustworthy for semantic retrieval; do not return zero vectors on failure.
582
+ "strict": True,
583
+ },
584
+ )
585
+
586
+ class _Embedder:
587
+ def __init__(self, client: Any) -> None:
588
+ self._client = client
589
+
590
+ def embed_texts(self, texts):
591
+ return self._client.embed_texts(texts).embeddings
592
+
593
+ embedder = _Embedder(emb_client)
594
+ except Exception:
595
+ embedder = None
596
+
597
+ try:
598
+ store_path = Path(data_root) / "abstractmemory" / "kg"
599
+ store_path.parent.mkdir(parents=True, exist_ok=True)
600
+ store_obj = LanceDBTripleStore(store_path, embedder=embedder)
601
+ except Exception as e:
602
+ raise WorkflowBundleError(
603
+ "Bundle uses memory_kg_* nodes, which require a LanceDB-backed store. "
604
+ "Install `lancedb` and ensure the gateway runs under the same environment."
605
+ ) from e
606
+
607
+ extra_effect_handlers = build_memory_kg_effect_handlers(store=store_obj, run_store=run_store, now_iso=utc_now_iso)
312
608
 
313
609
  # Optional AbstractCore integration for LLM_CALL + TOOL_CALLS.
314
610
  if needs_llm or needs_tools:
@@ -368,6 +664,7 @@ class WorkflowBundleGatewayHost:
368
664
  ledger_store=ledger_store,
369
665
  artifact_store=artifact_store,
370
666
  tool_executor=tool_executor,
667
+ extra_effect_handlers=extra_effect_handlers,
371
668
  )
372
669
  runtime.set_workflow_registry(wf_reg)
373
670
  else:
@@ -381,7 +678,12 @@ class WorkflowBundleGatewayHost:
381
678
  workflow_registry=wf_reg,
382
679
  artifact_store=artifact_store,
383
680
  effect_handlers={
384
- EffectType.TOOL_CALLS: make_tool_calls_handler(tools=tool_executor),
681
+ EffectType.TOOL_CALLS: make_tool_calls_handler(
682
+ tools=tool_executor,
683
+ artifact_store=artifact_store,
684
+ run_store=run_store,
685
+ ),
686
+ **extra_effect_handlers,
385
687
  },
386
688
  )
387
689
  else:
@@ -455,12 +757,20 @@ class WorkflowBundleGatewayHost:
455
757
  from abstractagent.logic.builtins import ( # type: ignore
456
758
  ASK_USER_TOOL,
457
759
  COMPACT_MEMORY_TOOL,
760
+ DELEGATE_AGENT_TOOL,
458
761
  INSPECT_VARS_TOOL,
459
762
  RECALL_MEMORY_TOOL,
460
763
  REMEMBER_TOOL,
461
764
  )
462
765
 
463
- builtin_defs = [ASK_USER_TOOL, RECALL_MEMORY_TOOL, INSPECT_VARS_TOOL, REMEMBER_TOOL, COMPACT_MEMORY_TOOL]
766
+ builtin_defs = [
767
+ ASK_USER_TOOL,
768
+ RECALL_MEMORY_TOOL,
769
+ INSPECT_VARS_TOOL,
770
+ REMEMBER_TOOL,
771
+ COMPACT_MEMORY_TOOL,
772
+ DELEGATE_AGENT_TOOL,
773
+ ]
464
774
  seen_names = {t.name for t in all_tool_defs if getattr(t, "name", None)}
465
775
  for t in builtin_defs:
466
776
  if getattr(t, "name", None) and t.name not in seen_names:
@@ -523,7 +833,11 @@ class WorkflowBundleGatewayHost:
523
833
 
524
834
  return WorkflowBundleGatewayHost(
525
835
  bundles_dir=base,
526
- bundles=bundles,
836
+ data_dir=data_root,
837
+ dynamic_flows_dir=dynamic_dir,
838
+ deprecation_store=dep_store,
839
+ bundles=bundles_by_id,
840
+ latest_bundle_versions=latest_versions,
527
841
  runtime=runtime,
528
842
  workflow_registry=wf_reg,
529
843
  specs=specs,
@@ -543,6 +857,34 @@ class WorkflowBundleGatewayHost:
543
857
  def artifact_store(self) -> Any:
544
858
  return self.runtime.artifact_store
545
859
 
860
+ def reload_bundles_from_disk(self) -> Dict[str, Any]:
861
+ """Reload bundles/specs from bundles_dir (best-effort, intended for dev).
862
+
863
+ Notes:
864
+ - This rebuilds the in-memory registry and swaps host internals in-place so the
865
+ runner can keep using the same host object.
866
+ - Dynamic flows persisted in `data_dir/dynamic_flows` are reloaded as part of
867
+ the rebuild.
868
+ """
869
+ new_host = WorkflowBundleGatewayHost.load_from_dir(
870
+ bundles_dir=self.bundles_dir,
871
+ data_dir=self.data_dir,
872
+ run_store=self.run_store,
873
+ ledger_store=self.ledger_store,
874
+ artifact_store=self.artifact_store,
875
+ )
876
+ with self._lock:
877
+ self.bundles = new_host.bundles
878
+ self.latest_bundle_versions = new_host.latest_bundle_versions
879
+ self.runtime = new_host.runtime
880
+ self.workflow_registry = new_host.workflow_registry
881
+ self.specs = new_host.specs
882
+ self.event_listener_specs_by_root = new_host.event_listener_specs_by_root
883
+ self._default_bundle_id = new_host._default_bundle_id
884
+ self.deprecation_store = new_host.deprecation_store
885
+ bundle_ids = sorted([str(k) for k in (self.bundles or {}).keys() if isinstance(k, str)])
886
+ return {"ok": True, "bundle_ids": bundle_ids, "count": len(bundle_ids)}
887
+
546
888
  def start_run(
547
889
  self,
548
890
  *,
@@ -550,47 +892,166 @@ class WorkflowBundleGatewayHost:
550
892
  input_data: Dict[str, Any],
551
893
  actor_id: str = "gateway",
552
894
  bundle_id: Optional[str] = None,
895
+ bundle_version: Optional[str] = None,
896
+ session_id: Optional[str] = None,
553
897
  ) -> str:
554
- fid = str(flow_id or "").strip()
555
- bid = str(bundle_id or "").strip() if isinstance(bundle_id, str) else ""
556
- bundle_id_clean = bid or None
557
-
558
- if not fid:
559
- # Default entrypoint selection for the common case:
560
- # start {bundle_id, input_data} without needing flow_id.
561
- selected_bundle_id = bundle_id_clean or self._default_bundle_id
898
+ fid_raw = str(flow_id or "").strip()
899
+
900
+ bid_raw = str(bundle_id or "").strip() if isinstance(bundle_id, str) else ""
901
+ bid_base, bid_ver = _split_bundle_ref(bid_raw)
902
+
903
+ bver_raw = str(bundle_version or "").strip() if isinstance(bundle_version, str) else ""
904
+ if bid_ver and bver_raw and bid_ver != bver_raw:
905
+ raise ValueError("bundle_version conflicts with bundle_id (bundle_id already includes '@version')")
906
+
907
+ # Effective requested version (may still be None; default to latest per bundle_id).
908
+ requested_ver = bver_raw or bid_ver
909
+
910
+ def _get_bundle(*, bundle_id2: str, bundle_version2: Optional[str]) -> tuple[str, WorkflowBundle]:
911
+ bid2 = str(bundle_id2 or "").strip()
912
+ if not bid2:
913
+ raise ValueError("bundle_id is required")
914
+ versions = self.bundles.get(bid2)
915
+ if not isinstance(versions, dict) or not versions:
916
+ raise KeyError(f"Bundle '{bid2}' not found")
917
+ ver2 = str(bundle_version2 or "").strip() if isinstance(bundle_version2, str) and str(bundle_version2).strip() else ""
918
+ if not ver2:
919
+ ver2 = str(self.latest_bundle_versions.get(bid2) or "").strip()
920
+ if not ver2:
921
+ raise KeyError(f"Bundle '{bid2}' has no versions loaded")
922
+ bundle2 = versions.get(ver2)
923
+ if bundle2 is None:
924
+ raise KeyError(f"Bundle '{bid2}@{ver2}' not found")
925
+ return (ver2, bundle2)
926
+
927
+ # Default entrypoint selection for the common case:
928
+ # start {bundle_id, input_data} without needing flow_id.
929
+ if not fid_raw:
930
+ selected_bundle_id = bid_base or (str(self._default_bundle_id or "").strip() if self._default_bundle_id else "")
562
931
  if not selected_bundle_id:
563
932
  raise ValueError(
564
933
  "flow_id is required when multiple bundles are loaded; "
565
934
  "provide bundle_id (or pass flow_id as 'bundle:flow')"
566
935
  )
567
- bundle = self.bundles.get(str(selected_bundle_id))
568
- if bundle is None:
569
- raise KeyError(f"Bundle '{selected_bundle_id}' not found")
570
- entrypoints = list(getattr(bundle.manifest, "entrypoints", None) or [])
571
- default_ep = str(getattr(bundle.manifest, "default_entrypoint", "") or "").strip()
936
+ selected_ver, bundle2 = _get_bundle(bundle_id2=selected_bundle_id, bundle_version2=requested_ver)
937
+ entrypoints = list(getattr(bundle2.manifest, "entrypoints", None) or [])
938
+ default_ep = str(getattr(bundle2.manifest, "default_entrypoint", "") or "").strip()
572
939
  if len(entrypoints) == 1:
573
940
  ep_fid = str(getattr(entrypoints[0], "flow_id", "") or "").strip()
574
941
  elif default_ep:
575
942
  ep_fid = default_ep
576
943
  else:
577
944
  raise ValueError(
578
- f"Bundle '{selected_bundle_id}' has {len(entrypoints)} entrypoints; "
945
+ f"Bundle '{selected_bundle_id}@{selected_ver}' has {len(entrypoints)} entrypoints; "
579
946
  "specify flow_id to select which entrypoint to start "
580
947
  "(or set manifest.default_entrypoint)"
581
948
  )
582
949
  if not ep_fid:
583
- raise ValueError(f"Bundle '{selected_bundle_id}' entrypoint flow_id is empty")
584
- workflow_id = _namespace(str(selected_bundle_id), ep_fid)
950
+ raise ValueError(f"Bundle '{selected_bundle_id}@{selected_ver}' entrypoint flow_id is empty")
951
+ workflow_id = _namespace(_bundle_ref(selected_bundle_id, selected_ver), ep_fid)
585
952
  else:
586
- workflow_id = _coerce_namespaced_id(
587
- bundle_id=bundle_id_clean, flow_id=fid, default_bundle_id=self._default_bundle_id
588
- )
953
+ # Allow passing fully qualified flow_id:
954
+ # - bundle:flow (defaults to latest version)
955
+ # - bundle@ver:flow (exact)
956
+ if ":" in fid_raw:
957
+ prefix, inner = fid_raw.split(":", 1)
958
+ prefix_base, prefix_ver = _split_bundle_ref(prefix)
959
+ # Dynamic workflows often use ':' in their ids (e.g. scheduled:uuid).
960
+ # Only treat this as a bundle namespace when the prefix matches a loaded bundle id.
961
+ if prefix_base and prefix_base in self.bundles:
962
+ if bid_base and prefix_base and bid_base != prefix_base:
963
+ raise ValueError("flow_id bundle prefix does not match bundle_id")
964
+ if prefix_ver and requested_ver and prefix_ver != requested_ver:
965
+ raise ValueError("flow_id version does not match bundle_version")
966
+ selected_bundle_id = prefix_base or bid_base
967
+ if not selected_bundle_id:
968
+ raise ValueError("bundle_id is required")
969
+ selected_ver, _bundle2 = _get_bundle(
970
+ bundle_id2=selected_bundle_id,
971
+ bundle_version2=prefix_ver or requested_ver,
972
+ )
973
+ workflow_id = _namespace(_bundle_ref(selected_bundle_id, selected_ver), inner.strip())
974
+ else:
975
+ if bid_base or requested_ver:
976
+ raise ValueError(
977
+ f"flow_id '{fid_raw}' is already namespaced, but bundle_id/bundle_version was also provided"
978
+ )
979
+ workflow_id = fid_raw
980
+ else:
981
+ selected_bundle_id = bid_base or (str(self._default_bundle_id or "").strip() if self._default_bundle_id else "")
982
+ if not selected_bundle_id:
983
+ raise ValueError("bundle_id is required when multiple bundles are loaded (or pass flow_id as 'bundle:flow')")
984
+ selected_ver, _bundle2 = _get_bundle(bundle_id2=selected_bundle_id, bundle_version2=requested_ver)
985
+ workflow_id = _namespace(_bundle_ref(selected_bundle_id, selected_ver), fid_raw)
986
+
987
+ # Enforce workflow deprecations (bundle-owned entry workflows).
988
+ # This must live in the host so scheduled child launches are also blocked.
989
+ if ":" in workflow_id:
990
+ prefix, inner = workflow_id.split(":", 1)
991
+ dep_bid, _dep_ver = _split_bundle_ref(prefix)
992
+ dep_flow = inner.strip()
993
+ if dep_bid and dep_flow and dep_bid in self.bundles:
994
+ rec = self.deprecation_store.get_record(bundle_id=dep_bid, flow_id=dep_flow)
995
+ if rec is not None:
996
+ raise WorkflowDeprecatedError(bundle_id=dep_bid, flow_id=dep_flow, record=rec)
589
997
 
590
998
  spec = self.specs.get(workflow_id)
591
999
  if spec is None:
592
1000
  raise KeyError(f"Workflow '{workflow_id}' not found")
593
- run_id = str(self.runtime.start(workflow=spec, vars=dict(input_data or {}), actor_id=actor_id))
1001
+ sid = str(session_id).strip() if isinstance(session_id, str) and session_id.strip() else None
1002
+ run_id = str(self.runtime.start(workflow=spec, vars=dict(input_data or {}), actor_id=actor_id, session_id=sid))
1003
+
1004
+ # Default session_id to the root run_id for durable session-scoped behavior
1005
+ # (matches VisualSessionRunner semantics).
1006
+ effective_session_id = sid
1007
+ if effective_session_id is None:
1008
+ try:
1009
+ state = self.runtime.get_state(run_id)
1010
+ if not getattr(state, "session_id", None):
1011
+ state.session_id = run_id # type: ignore[attr-defined]
1012
+ self.runtime.run_store.save(state)
1013
+ effective_session_id = str(getattr(state, "session_id", None) or run_id).strip() or run_id
1014
+ except Exception:
1015
+ effective_session_id = run_id
1016
+
1017
+ # Persist a workflow snapshot for reproducible replay (best-effort).
1018
+ try:
1019
+ if ":" in workflow_id:
1020
+ prefix, inner = workflow_id.split(":", 1)
1021
+ bid_base, bid_ver2 = _split_bundle_ref(prefix)
1022
+ if bid_base and bid_ver2:
1023
+ versions = self.bundles.get(bid_base)
1024
+ bundle2 = versions.get(bid_ver2) if isinstance(versions, dict) else None
1025
+ if bundle2 is not None:
1026
+ bundle_ref = _bundle_ref(bid_base, bid_ver2)
1027
+ man = bundle2.manifest
1028
+ flow_ids = set(man.flows.keys()) if isinstance(getattr(man, "flows", None), dict) else set()
1029
+ id_map = {fid: _namespace(bundle_ref, fid) for fid in flow_ids if isinstance(fid, str) and fid.strip()}
1030
+ rel = man.flow_path_for(inner) if hasattr(man, "flow_path_for") else None
1031
+ raw = bundle2.read_json(rel) if isinstance(rel, str) and rel.strip() else None
1032
+ if isinstance(raw, dict):
1033
+ namespaced_raw = _namespace_visualflow_raw(
1034
+ raw=raw,
1035
+ bundle_id=bundle_ref,
1036
+ flow_id=inner,
1037
+ id_map=id_map,
1038
+ )
1039
+ snapshot = {
1040
+ "kind": "visualflow_json",
1041
+ "bundle_ref": bundle_ref,
1042
+ "flow_id": str(inner),
1043
+ "visualflow": namespaced_raw,
1044
+ }
1045
+ persist_workflow_snapshot(
1046
+ run_store=self.run_store,
1047
+ artifact_store=self.artifact_store,
1048
+ run_id=str(run_id),
1049
+ workflow_id=str(workflow_id),
1050
+ snapshot=snapshot,
1051
+ format="visualflow_json",
1052
+ )
1053
+ except Exception:
1054
+ pass
594
1055
 
595
1056
  # Start session-scoped event listener workflows (best-effort).
596
1057
  listener_ids = self.event_listener_specs_by_root.get(workflow_id) or []
@@ -602,7 +1063,7 @@ class WorkflowBundleGatewayHost:
602
1063
  child_run_id = self.runtime.start(
603
1064
  workflow=listener_spec,
604
1065
  vars={},
605
- session_id=run_id,
1066
+ session_id=effective_session_id,
606
1067
  parent_run_id=run_id,
607
1068
  actor_id=actor_id,
608
1069
  )
@@ -621,6 +1082,18 @@ class WorkflowBundleGatewayHost:
621
1082
  if not isinstance(workflow_id, str) or not workflow_id:
622
1083
  raise ValueError(f"Run '{run_id}' missing workflow_id")
623
1084
  spec = self.specs.get(workflow_id)
1085
+ if spec is None and ":" in workflow_id:
1086
+ # Backward compatibility: older runs may store workflow_id as "bundle:flow"
1087
+ # (without bundle_version). Best-effort map to the latest loaded version.
1088
+ prefix, fid = workflow_id.split(":", 1)
1089
+ prefix_base, prefix_ver = _split_bundle_ref(prefix)
1090
+ if prefix_base and not prefix_ver:
1091
+ latest = str(self.latest_bundle_versions.get(prefix_base) or "").strip()
1092
+ if latest:
1093
+ workflow_id2 = _namespace(_bundle_ref(prefix_base, latest), fid.strip())
1094
+ spec = self.specs.get(workflow_id2)
1095
+ if spec is None:
1096
+ spec = self._try_register_dynamic_workflow_from_disk(workflow_id)
624
1097
  if spec is None:
625
1098
  raise KeyError(f"Workflow '{workflow_id}' not registered")
626
1099
  return (self.runtime, spec)