AbstractRuntime 0.2.0-py3-none-any.whl → 0.4.1-py3-none-any.whl

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
Files changed (77)
  1. abstractruntime/__init__.py +83 -3
  2. abstractruntime/core/config.py +82 -2
  3. abstractruntime/core/event_keys.py +62 -0
  4. abstractruntime/core/models.py +17 -1
  5. abstractruntime/core/policy.py +74 -3
  6. abstractruntime/core/runtime.py +3334 -28
  7. abstractruntime/core/vars.py +103 -2
  8. abstractruntime/evidence/__init__.py +10 -0
  9. abstractruntime/evidence/recorder.py +325 -0
  10. abstractruntime/history_bundle.py +772 -0
  11. abstractruntime/integrations/abstractcore/__init__.py +6 -0
  12. abstractruntime/integrations/abstractcore/constants.py +19 -0
  13. abstractruntime/integrations/abstractcore/default_tools.py +258 -0
  14. abstractruntime/integrations/abstractcore/effect_handlers.py +2622 -32
  15. abstractruntime/integrations/abstractcore/embeddings_client.py +69 -0
  16. abstractruntime/integrations/abstractcore/factory.py +149 -16
  17. abstractruntime/integrations/abstractcore/llm_client.py +891 -55
  18. abstractruntime/integrations/abstractcore/mcp_worker.py +587 -0
  19. abstractruntime/integrations/abstractcore/observability.py +80 -0
  20. abstractruntime/integrations/abstractcore/session_attachments.py +946 -0
  21. abstractruntime/integrations/abstractcore/summarizer.py +154 -0
  22. abstractruntime/integrations/abstractcore/tool_executor.py +509 -31
  23. abstractruntime/integrations/abstractcore/workspace_scoped_tools.py +561 -0
  24. abstractruntime/integrations/abstractmemory/__init__.py +3 -0
  25. abstractruntime/integrations/abstractmemory/effect_handlers.py +946 -0
  26. abstractruntime/memory/__init__.py +21 -0
  27. abstractruntime/memory/active_context.py +751 -0
  28. abstractruntime/memory/active_memory.py +452 -0
  29. abstractruntime/memory/compaction.py +105 -0
  30. abstractruntime/memory/kg_packets.py +164 -0
  31. abstractruntime/memory/memact_composer.py +175 -0
  32. abstractruntime/memory/recall_levels.py +163 -0
  33. abstractruntime/memory/token_budget.py +86 -0
  34. abstractruntime/rendering/__init__.py +17 -0
  35. abstractruntime/rendering/agent_trace_report.py +256 -0
  36. abstractruntime/rendering/json_stringify.py +136 -0
  37. abstractruntime/scheduler/scheduler.py +93 -2
  38. abstractruntime/storage/__init__.py +7 -2
  39. abstractruntime/storage/artifacts.py +175 -32
  40. abstractruntime/storage/base.py +17 -1
  41. abstractruntime/storage/commands.py +339 -0
  42. abstractruntime/storage/in_memory.py +41 -1
  43. abstractruntime/storage/json_files.py +210 -14
  44. abstractruntime/storage/observable.py +136 -0
  45. abstractruntime/storage/offloading.py +433 -0
  46. abstractruntime/storage/sqlite.py +836 -0
  47. abstractruntime/visualflow_compiler/__init__.py +29 -0
  48. abstractruntime/visualflow_compiler/adapters/__init__.py +11 -0
  49. abstractruntime/visualflow_compiler/adapters/agent_adapter.py +126 -0
  50. abstractruntime/visualflow_compiler/adapters/context_adapter.py +109 -0
  51. abstractruntime/visualflow_compiler/adapters/control_adapter.py +615 -0
  52. abstractruntime/visualflow_compiler/adapters/effect_adapter.py +1051 -0
  53. abstractruntime/visualflow_compiler/adapters/event_adapter.py +307 -0
  54. abstractruntime/visualflow_compiler/adapters/function_adapter.py +97 -0
  55. abstractruntime/visualflow_compiler/adapters/memact_adapter.py +114 -0
  56. abstractruntime/visualflow_compiler/adapters/subflow_adapter.py +74 -0
  57. abstractruntime/visualflow_compiler/adapters/variable_adapter.py +316 -0
  58. abstractruntime/visualflow_compiler/compiler.py +3832 -0
  59. abstractruntime/visualflow_compiler/flow.py +247 -0
  60. abstractruntime/visualflow_compiler/visual/__init__.py +13 -0
  61. abstractruntime/visualflow_compiler/visual/agent_ids.py +29 -0
  62. abstractruntime/visualflow_compiler/visual/builtins.py +1376 -0
  63. abstractruntime/visualflow_compiler/visual/code_executor.py +214 -0
  64. abstractruntime/visualflow_compiler/visual/executor.py +2804 -0
  65. abstractruntime/visualflow_compiler/visual/models.py +211 -0
  66. abstractruntime/workflow_bundle/__init__.py +52 -0
  67. abstractruntime/workflow_bundle/models.py +236 -0
  68. abstractruntime/workflow_bundle/packer.py +317 -0
  69. abstractruntime/workflow_bundle/reader.py +87 -0
  70. abstractruntime/workflow_bundle/registry.py +587 -0
  71. abstractruntime-0.4.1.dist-info/METADATA +177 -0
  72. abstractruntime-0.4.1.dist-info/RECORD +86 -0
  73. abstractruntime-0.4.1.dist-info/entry_points.txt +2 -0
  74. abstractruntime-0.2.0.dist-info/METADATA +0 -163
  75. abstractruntime-0.2.0.dist-info/RECORD +0 -32
  76. {abstractruntime-0.2.0.dist-info → abstractruntime-0.4.1.dist-info}/WHEEL +0 -0
  77. {abstractruntime-0.2.0.dist-info → abstractruntime-0.4.1.dist-info}/licenses/LICENSE +0 -0
abstractruntime/visualflow_compiler/__init__.py
@@ -0,0 +1,29 @@
+ """abstractruntime.visualflow_compiler
+
+ VisualFlow JSON → WorkflowSpec compiler (single semantics engine).
+
+ This module compiles the VisualFlow authoring DSL (JSON) into an in-memory
+ `WorkflowSpec` (Python callables) that AbstractRuntime can execute durably.
+
+ Note: `WorkflowSpec` is not a portable artifact format (it contains callables).
+ """
+
+ from .flow import Flow, FlowEdge, FlowNode
+ from .compiler import compile_flow, compile_visualflow, compile_visualflow_tree
+ from .visual.models import VisualEdge, VisualFlow, VisualNode, load_visualflow_json
+ from .visual.executor import visual_to_flow
+
+ __all__ = [
+     "Flow",
+     "FlowNode",
+     "FlowEdge",
+     "VisualFlow",
+     "VisualNode",
+     "VisualEdge",
+     "load_visualflow_json",
+     "visual_to_flow",
+     "compile_flow",
+     "compile_visualflow",
+     "compile_visualflow_tree",
+ ]
+
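For orientation, a minimal usage sketch of the public surface exported above, assuming `load_visualflow_json` accepts a path to a VisualFlow JSON file and `compile_visualflow` returns the in-memory `WorkflowSpec`; neither signature is shown in this diff, so treat the argument shapes as assumptions:

    # Hypothetical usage; argument shapes are assumptions, not taken from this diff.
    from abstractruntime.visualflow_compiler import load_visualflow_json, compile_visualflow

    visual = load_visualflow_json("my_flow.json")  # parse the VisualFlow authoring DSL
    spec = compile_visualflow(visual)              # compile to an in-memory WorkflowSpec
    # The resulting spec contains Python callables, so it is executed by AbstractRuntime
    # directly rather than serialized as a portable artifact.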
abstractruntime/visualflow_compiler/adapters/__init__.py
@@ -0,0 +1,11 @@
+ """AbstractFlow adapters for converting handlers to workflow nodes."""
+
+ from .function_adapter import create_function_node_handler
+ from .agent_adapter import create_agent_node_handler
+ from .subflow_adapter import create_subflow_node_handler
+
+ __all__ = [
+     "create_function_node_handler",
+     "create_agent_node_handler",
+     "create_subflow_node_handler",
+ ]
abstractruntime/visualflow_compiler/adapters/agent_adapter.py
@@ -0,0 +1,126 @@
+ """Adapter for using AbstractAgent agents as flow nodes."""
+
+ from __future__ import annotations
+
+ from typing import Any, Callable, Optional, TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from abstractruntime.core.models import RunState, StepPlan
+     from abstractagent.agents.base import BaseAgent
+
+
+ def create_agent_node_handler(
+     node_id: str,
+     agent: "BaseAgent",
+     next_node: Optional[str],
+     input_key: Optional[str] = None,
+     output_key: Optional[str] = None,
+ ) -> Callable:
+     """Create a node handler that runs an agent as a subworkflow.
+
+     Agent nodes execute the full agent workflow (ReAct loop, CodeAct, etc.)
+     as a subworkflow. The agent runs to completion before transitioning.
+
+     Args:
+         node_id: Unique identifier for this node
+         agent: The agent instance to run
+         next_node: ID of the next node to transition to (None for terminal)
+         input_key: Key in run.vars to read task/input from
+         output_key: Key in run.vars to write agent output to
+
+     Returns:
+         A node handler function compatible with AbstractRuntime
+
+     Example:
+         >>> from abstractagent import create_react_agent
+         >>> planner = create_react_agent(provider="ollama", model="qwen3:4b")
+         >>> handler = create_agent_node_handler("plan", planner, "search")
+     """
+     from abstractruntime.core.models import Effect, EffectType, StepPlan
+
+     def handler(run: "RunState", ctx: Any) -> "StepPlan":
+         """Start the agent as a subworkflow."""
+         # Determine task for the agent
+         task = ""
+
+         if input_key:
+             input_data = run.vars.get(input_key, {})
+             if isinstance(input_data, dict):
+                 task = input_data.get("task", "") or input_data.get("query", "")
+                 if not task:
+                     # Use the whole input as context
+                     task = str(input_data)
+             else:
+                 task = str(input_data)
+
+         # Fallback to flow's main task
+         if not task:
+             context = run.vars.get("context", {})
+             if isinstance(context, dict):
+                 task = context.get("task", "")
+
+         if not task:
+             task = f"Execute {node_id} step"
+
+         # Build initial vars for the agent subworkflow
+         max_iterations = getattr(agent, "_max_iterations", 25)
+         max_history_messages = getattr(agent, "_max_history_messages", -1)
+         max_tokens = getattr(agent, "_max_tokens", None)
+         if not isinstance(max_tokens, int) or max_tokens <= 0:
+             from abstractruntime.core.vars import DEFAULT_MAX_TOKENS
+
+             try:
+                 runtime = getattr(agent, "runtime", None)
+                 config = getattr(runtime, "config", None)
+                 base = config.to_limits_dict() if config is not None else {}
+                 max_tokens = int(base.get("max_tokens", DEFAULT_MAX_TOKENS) or DEFAULT_MAX_TOKENS)
+             except Exception:
+                 max_tokens = DEFAULT_MAX_TOKENS
+
+         agent_vars = {
+             "context": {
+                 "task": task,
+                 "messages": [],
+             },
+             "scratchpad": {
+                 "iteration": 0,
+                 "max_iterations": max_iterations,
+                 "max_history_messages": max_history_messages,
+             },
+             "_runtime": {"inbox": []},
+             "_temp": {},
+             # Canonical _limits namespace for runtime awareness
+             "_limits": {
+                 "max_iterations": max_iterations,
+                 "current_iteration": 0,
+                 "max_tokens": max_tokens,
+                 "max_history_messages": max_history_messages,
+                 "estimated_tokens_used": 0,
+                 "warn_iterations_pct": 80,
+                 "warn_tokens_pct": 80,
+             },
+         }
+
+         # Inject any additional context from the flow
+         if input_key and isinstance(run.vars.get(input_key), dict):
+             # Merge additional context
+             for k, v in run.vars.get(input_key, {}).items():
+                 if k not in ("task", "query"):
+                     agent_vars["context"][k] = v
+
+         # Use START_SUBWORKFLOW effect to run agent durably
+         return StepPlan(
+             node_id=node_id,
+             effect=Effect(
+                 type=EffectType.START_SUBWORKFLOW,
+                 payload={
+                     "workflow_id": agent.workflow.workflow_id,
+                     "vars": agent_vars,
+                     "async": False,  # Sync: wait for completion
+                 },
+                 result_key=output_key or f"_flow.{node_id}.result",
+             ),
+             next_node=next_node,
+         )
+
+     return handler
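The handler returned by `create_agent_node_handler` is a plain `(run, ctx) -> StepPlan` callable, so wiring several agent nodes together only requires choosing node ids and `next_node` links. A hedged sketch follows; only `create_agent_node_handler` and the docstring's `create_react_agent` call come from this diff, while the dict-of-handlers shape and the key names `request`, `plan_result`, `search_result` are illustrative:

    # Illustrative wiring only; how handlers are registered with a compiled flow is not shown here.
    from abstractagent import create_react_agent
    from abstractruntime.visualflow_compiler.adapters import create_agent_node_handler

    planner = create_react_agent(provider="ollama", model="qwen3:4b")
    searcher = create_react_agent(provider="ollama", model="qwen3:4b")

    handlers = {
        # "plan" hands off to "search"; "search" is terminal (next_node=None).
        "plan": create_agent_node_handler("plan", planner, "search",
                                          input_key="request", output_key="plan_result"),
        "search": create_agent_node_handler("search", searcher, None,
                                            output_key="search_result"),
    }
    # Each handler emits a StepPlan whose effect is START_SUBWORKFLOW, so the agent runs
    # durably as a subworkflow and its result lands at the configured output_key.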
abstractruntime/visualflow_compiler/adapters/context_adapter.py
@@ -0,0 +1,109 @@
+ """Context node adapters (active context helpers).
+
+ These nodes mutate `run.vars["context"]` durably so pause/resume works and other
+ nodes (Agent/Subflow/LLM_CALL with use_context) can read the updated history.
+ """
+
+ from __future__ import annotations
+
+ from typing import Any, Callable, Dict, Optional
+
+
+ def _persist_node_output(run_vars: Dict[str, Any], node_id: str, value: Dict[str, Any]) -> None:
+     temp = run_vars.get("_temp")
+     if not isinstance(temp, dict):
+         temp = {}
+         run_vars["_temp"] = temp
+     persisted = temp.get("node_outputs")
+     if not isinstance(persisted, dict):
+         persisted = {}
+         temp["node_outputs"] = persisted
+     persisted[node_id] = value
+
+
+ def create_add_message_node_handler(
+     *,
+     node_id: str,
+     next_node: Optional[str],
+     data_aware_handler: Optional[Callable[[Any], Any]],
+     flow: Any,
+ ) -> Callable:
+     """Create a handler for `add_message` visual nodes.
+
+     Contract:
+     - Inputs:
+         - `role`: message role
+         - `content`: message content
+     - Behavior:
+         - builds a canonical message object (timestamp + metadata.message_id)
+         - appends it to `run.vars.context.messages` (durable)
+         - does NOT clobber `_last_output` (pass-through)
+     """
+     from abstractruntime.core.models import StepPlan
+     from ..compiler import _sync_effect_results_to_node_outputs
+
+     def handler(run: Any, ctx: Any) -> "StepPlan":
+         del ctx
+         if flow is not None and hasattr(flow, "_node_outputs") and hasattr(flow, "_data_edge_map"):
+             _sync_effect_results_to_node_outputs(run, flow)
+
+         last_output = run.vars.get("_last_output", {})
+         resolved = data_aware_handler(last_output) if callable(data_aware_handler) else {}
+         payload = resolved if isinstance(resolved, dict) else {}
+
+         message = payload.get("message")
+         if not isinstance(message, dict):
+             # Fallback: build a minimal message object if the visual handler did not.
+             role = payload.get("role")
+             role_str = role if isinstance(role, str) else str(role or "user")
+             content = payload.get("content")
+             content_str = content if isinstance(content, str) else str(content or "")
+
+             try:
+                 from datetime import datetime, timezone
+
+                 timestamp = datetime.now(timezone.utc).isoformat()
+             except Exception:
+                 from datetime import datetime
+
+                 timestamp = datetime.utcnow().isoformat() + "Z"
+
+             import uuid
+
+             message = {
+                 "role": role_str,
+                 "content": content_str,
+                 "timestamp": timestamp,
+                 "metadata": {"message_id": f"msg_{uuid.uuid4().hex}"},
+             }
+
+         ctx_ns = run.vars.get("context")
+         if not isinstance(ctx_ns, dict):
+             ctx_ns = {}
+             run.vars["context"] = ctx_ns
+
+         msgs = ctx_ns.get("messages")
+         if not isinstance(msgs, list):
+             msgs = []
+             ctx_ns["messages"] = msgs
+
+         msgs.append(message)
+
+         # Persist node outputs for pause/resume and data-edge reads.
+         _persist_node_output(
+             run.vars,
+             node_id,
+             {
+                 "message": message,
+                 "context": ctx_ns,
+                 "task": ctx_ns.get("task") if isinstance(ctx_ns.get("task"), str) else str(ctx_ns.get("task") or ""),
+                 "messages": list(msgs),
+             },
+         )
+
+         if next_node:
+             return StepPlan(node_id=node_id, next_node=next_node)
+         return StepPlan(node_id=node_id, complete_output={"success": True, "message": message})
+
+     return handler
+
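For reference, after an `add_message` node runs, `run.vars` carries both the durable conversation history and the persisted node output. A sketch of the resulting shape, assuming a node id of `add_msg_1`; the key layout follows the handler above, while every concrete value is an invented example:

    # Illustrative shape of run.vars after one add_message node ("add_msg_1") appends a message.
    run_vars = {
        "context": {
            "task": "Summarize the report",
            "messages": [
                {
                    "role": "user",
                    "content": "Summarize the report",
                    "timestamp": "2025-01-01T00:00:00+00:00",
                    "metadata": {"message_id": "msg_3f2a..."},  # msg_<uuid4 hex>
                },
            ],
        },
        "_temp": {
            "node_outputs": {
                # Written by _persist_node_output for pause/resume and data-edge reads.
                # The real entry also keeps a "context" reference, omitted here for brevity.
                "add_msg_1": {
                    "message": {"role": "user", "content": "Summarize the report"},
                    "task": "Summarize the report",
                    "messages": [{"role": "user", "content": "Summarize the report"}],
                },
            },
        },
    }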