AbstractRuntime 0.2.0-py3-none-any.whl → 0.4.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstractruntime/__init__.py +83 -3
- abstractruntime/core/config.py +82 -2
- abstractruntime/core/event_keys.py +62 -0
- abstractruntime/core/models.py +17 -1
- abstractruntime/core/policy.py +74 -3
- abstractruntime/core/runtime.py +3334 -28
- abstractruntime/core/vars.py +103 -2
- abstractruntime/evidence/__init__.py +10 -0
- abstractruntime/evidence/recorder.py +325 -0
- abstractruntime/history_bundle.py +772 -0
- abstractruntime/integrations/abstractcore/__init__.py +6 -0
- abstractruntime/integrations/abstractcore/constants.py +19 -0
- abstractruntime/integrations/abstractcore/default_tools.py +258 -0
- abstractruntime/integrations/abstractcore/effect_handlers.py +2622 -32
- abstractruntime/integrations/abstractcore/embeddings_client.py +69 -0
- abstractruntime/integrations/abstractcore/factory.py +149 -16
- abstractruntime/integrations/abstractcore/llm_client.py +891 -55
- abstractruntime/integrations/abstractcore/mcp_worker.py +587 -0
- abstractruntime/integrations/abstractcore/observability.py +80 -0
- abstractruntime/integrations/abstractcore/session_attachments.py +946 -0
- abstractruntime/integrations/abstractcore/summarizer.py +154 -0
- abstractruntime/integrations/abstractcore/tool_executor.py +509 -31
- abstractruntime/integrations/abstractcore/workspace_scoped_tools.py +561 -0
- abstractruntime/integrations/abstractmemory/__init__.py +3 -0
- abstractruntime/integrations/abstractmemory/effect_handlers.py +946 -0
- abstractruntime/memory/__init__.py +21 -0
- abstractruntime/memory/active_context.py +751 -0
- abstractruntime/memory/active_memory.py +452 -0
- abstractruntime/memory/compaction.py +105 -0
- abstractruntime/memory/kg_packets.py +164 -0
- abstractruntime/memory/memact_composer.py +175 -0
- abstractruntime/memory/recall_levels.py +163 -0
- abstractruntime/memory/token_budget.py +86 -0
- abstractruntime/rendering/__init__.py +17 -0
- abstractruntime/rendering/agent_trace_report.py +256 -0
- abstractruntime/rendering/json_stringify.py +136 -0
- abstractruntime/scheduler/scheduler.py +93 -2
- abstractruntime/storage/__init__.py +7 -2
- abstractruntime/storage/artifacts.py +175 -32
- abstractruntime/storage/base.py +17 -1
- abstractruntime/storage/commands.py +339 -0
- abstractruntime/storage/in_memory.py +41 -1
- abstractruntime/storage/json_files.py +210 -14
- abstractruntime/storage/observable.py +136 -0
- abstractruntime/storage/offloading.py +433 -0
- abstractruntime/storage/sqlite.py +836 -0
- abstractruntime/visualflow_compiler/__init__.py +29 -0
- abstractruntime/visualflow_compiler/adapters/__init__.py +11 -0
- abstractruntime/visualflow_compiler/adapters/agent_adapter.py +126 -0
- abstractruntime/visualflow_compiler/adapters/context_adapter.py +109 -0
- abstractruntime/visualflow_compiler/adapters/control_adapter.py +615 -0
- abstractruntime/visualflow_compiler/adapters/effect_adapter.py +1051 -0
- abstractruntime/visualflow_compiler/adapters/event_adapter.py +307 -0
- abstractruntime/visualflow_compiler/adapters/function_adapter.py +97 -0
- abstractruntime/visualflow_compiler/adapters/memact_adapter.py +114 -0
- abstractruntime/visualflow_compiler/adapters/subflow_adapter.py +74 -0
- abstractruntime/visualflow_compiler/adapters/variable_adapter.py +316 -0
- abstractruntime/visualflow_compiler/compiler.py +3832 -0
- abstractruntime/visualflow_compiler/flow.py +247 -0
- abstractruntime/visualflow_compiler/visual/__init__.py +13 -0
- abstractruntime/visualflow_compiler/visual/agent_ids.py +29 -0
- abstractruntime/visualflow_compiler/visual/builtins.py +1376 -0
- abstractruntime/visualflow_compiler/visual/code_executor.py +214 -0
- abstractruntime/visualflow_compiler/visual/executor.py +2804 -0
- abstractruntime/visualflow_compiler/visual/models.py +211 -0
- abstractruntime/workflow_bundle/__init__.py +52 -0
- abstractruntime/workflow_bundle/models.py +236 -0
- abstractruntime/workflow_bundle/packer.py +317 -0
- abstractruntime/workflow_bundle/reader.py +87 -0
- abstractruntime/workflow_bundle/registry.py +587 -0
- abstractruntime-0.4.1.dist-info/METADATA +177 -0
- abstractruntime-0.4.1.dist-info/RECORD +86 -0
- abstractruntime-0.4.1.dist-info/entry_points.txt +2 -0
- abstractruntime-0.2.0.dist-info/METADATA +0 -163
- abstractruntime-0.2.0.dist-info/RECORD +0 -32
- {abstractruntime-0.2.0.dist-info → abstractruntime-0.4.1.dist-info}/WHEEL +0 -0
- {abstractruntime-0.2.0.dist-info → abstractruntime-0.4.1.dist-info}/licenses/LICENSE +0 -0
abstractruntime/integrations/abstractcore/workspace_scoped_tools.py (new file)
@@ -0,0 +1,561 @@
"""Workspace-scoped tool execution helpers.

This module provides utilities to scope filesystem-ish tool calls (files + shell)
to a workspace policy, driven by run `vars` / `input_data`.

Key concepts:
- `workspace_root`: base directory for resolving relative paths (and default cwd for `execute_command`).
- `workspace_access_mode`:
  - `workspace_only` (default): absolute paths must remain under `workspace_root`
  - `all_except_ignored`: absolute paths may escape `workspace_root` unless blocked by `workspace_ignored_paths`
  - `workspace_or_allowed`: absolute paths may escape `workspace_root` only when under `workspace_allowed_paths`
- `workspace_ignored_paths`: denylist of directories (absolute or relative-to-workspace_root).
- `workspace_allowed_paths`: allowlist of directories (absolute or relative-to-workspace_root).

Important limitations:
- `execute_command` is not a sandbox; commands can still write outside via absolute paths / `cd ..`.
"""

from __future__ import annotations

from dataclasses import dataclass
import os
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Tuple

WorkspaceAccessMode = str  # "workspace_only" | "all_except_ignored" | "workspace_or_allowed"

_VALID_ACCESS_MODES: set[str] = {"workspace_only", "all_except_ignored", "workspace_or_allowed"}
_MOUNT_NAME_RE: set[str] = set("abcdefghijklmnopqrstuvwxyz0123456789_-")

def _resolve_no_strict(path: Path) -> Path:
    """Resolve without requiring the path to exist (best-effort across py versions)."""
    try:
        return path.resolve(strict=False)
    except TypeError:  # pragma: no cover (older python)
        return path.resolve()


def _find_repo_root_from_here(*, start: Path, max_hops: int = 10) -> Optional[Path]:
    """Best-effort monorepo root detection for local/dev runs."""
    cur = _resolve_no_strict(start)
    for _ in range(max_hops):
        docs = cur / "docs" / "KnowledgeBase.md"
        if docs.exists():
            return cur
        if (cur / "abstractflow").exists() and (cur / "abstractcore").exists() and (cur / "abstractruntime").exists():
            return cur
        nxt = cur.parent
        if nxt == cur:
            break
        cur = nxt
    return None

def resolve_workspace_base_dir() -> Path:
    """Base directory against which relative workspace roots are resolved.

    Priority:
    - `ABSTRACT_WORKSPACE_BASE_DIR` env var, if set.
    - `ABSTRACTFLOW_WORKSPACE_BASE_DIR` env var, if set (backward compat).
    - Best-effort monorepo root detection from this file location.
    - Current working directory.
    """
    env = os.getenv("ABSTRACT_WORKSPACE_BASE_DIR") or os.getenv("ABSTRACTFLOW_WORKSPACE_BASE_DIR")
    if isinstance(env, str) and env.strip():
        return _resolve_no_strict(Path(env.strip()).expanduser())

    here_dir = Path(__file__).resolve().parent
    guessed = _find_repo_root_from_here(start=here_dir)
    if guessed is not None:
        return guessed

    return _resolve_no_strict(Path.cwd())


def _normalize_access_mode(raw: Any) -> WorkspaceAccessMode:
    text = str(raw or "").strip().lower()
    if not text:
        return "workspace_only"
    if text in _VALID_ACCESS_MODES:
        return text
    raise ValueError(f"Invalid workspace_access_mode: '{raw}' (expected one of: {sorted(_VALID_ACCESS_MODES)})")

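Aside: a minimal sketch of the base-directory lookup above, not part of the diff itself. It assumes the module is importable as abstractruntime.integrations.abstractcore.workspace_scoped_tools (the path shown in the file list); the directory name is hypothetical.

# --- illustrative sketch, not part of workspace_scoped_tools.py ---
import os
from abstractruntime.integrations.abstractcore.workspace_scoped_tools import resolve_workspace_base_dir

os.environ["ABSTRACT_WORKSPACE_BASE_DIR"] = "/srv/agent-workspaces"  # hypothetical directory
print(resolve_workspace_base_dir())  # -> /srv/agent-workspaces (env var wins over auto-detection)

del os.environ["ABSTRACT_WORKSPACE_BASE_DIR"]
print(resolve_workspace_base_dir())  # falls back to monorepo detection, then the current working directory
# --- end sketch ---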
def _parse_ignored_paths(raw: Any) -> list[str]:
    if raw is None:
        return []
    if isinstance(raw, list):
        out: list[str] = []
        for x in raw:
            if isinstance(x, str) and x.strip():
                out.append(x.strip())
        return out
    if isinstance(raw, str):
        text = raw.strip()
        if not text:
            return []
        # Tolerate users pasting a JSON array into a text field.
        if text.startswith("["):
            try:
                import json

                parsed = json.loads(text)
                if isinstance(parsed, list):
                    return [str(x).strip() for x in parsed if isinstance(x, str) and str(x).strip()]
            except Exception:
                pass
        # Newline-separated entries (UI-friendly).
        lines = [ln.strip() for ln in text.splitlines()]
        return [ln for ln in lines if ln]
    return []


def _parse_allowed_paths(raw: Any) -> list[str]:
    if raw is None:
        return []
    if isinstance(raw, list):
        out: list[str] = []
        for x in raw:
            if isinstance(x, str) and x.strip():
                out.append(x.strip())
        return out
    if isinstance(raw, str):
        text = raw.strip()
        if not text:
            return []
        # Tolerate users pasting a JSON array into a text field.
        if text.startswith("["):
            try:
                import json

                parsed = json.loads(text)
                if isinstance(parsed, list):
                    return [str(x).strip() for x in parsed if isinstance(x, str) and str(x).strip()]
            except Exception:
                pass
        # Newline-separated entries (UI-friendly).
        lines = [ln.strip() for ln in text.splitlines()]
        return [ln for ln in lines if ln]
    return []

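Aside: the two parsers above accept the same three input shapes for `workspace_ignored_paths` / `workspace_allowed_paths`. A small sketch, for illustration only (the functions are module-internal):

# --- illustrative sketch, not part of workspace_scoped_tools.py ---
from abstractruntime.integrations.abstractcore.workspace_scoped_tools import _parse_ignored_paths

print(_parse_ignored_paths(["node_modules", " .git "]))   # real list         -> ['node_modules', '.git']
print(_parse_ignored_paths('["node_modules", ".git"]'))   # pasted JSON text  -> ['node_modules', '.git']
print(_parse_ignored_paths("node_modules\n\n.git\n"))     # newline-separated -> ['node_modules', '.git']
print(_parse_ignored_paths(None))                         # anything else     -> []
# --- end sketch ---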
def _resolve_ignored_paths(*, root: Path, ignored: Iterable[str]) -> Tuple[Path, ...]:
    out: list[Path] = []
    for raw in ignored:
        s = str(raw or "").strip()
        if not s:
            continue
        p = Path(s).expanduser()
        if not p.is_absolute():
            p = root / p
        out.append(_resolve_no_strict(p))
    # Stable ordering for deterministic error messages/tests.
    return tuple(dict.fromkeys(out))


def _resolve_allowed_paths(*, root: Path, allowed: Iterable[str]) -> Tuple[Path, ...]:
    out: list[Path] = []
    for raw in allowed:
        s = str(raw or "").strip()
        if not s:
            continue
        p = Path(s).expanduser()
        if not p.is_absolute():
            p = root / p
        out.append(_resolve_no_strict(p))
    return tuple(dict.fromkeys(out))


def _is_under(child: Path, parent: Path) -> bool:
    try:
        _resolve_no_strict(child).relative_to(_resolve_no_strict(parent))
        return True
    except Exception:
        return False

def _slug_mount_name(name: str) -> str:
    """Return a stable mount name (<= 32 chars, lower-case, [a-z0-9_-])."""
    s = str(name or "").strip().lower()
    if not s:
        return "mount"
    out: list[str] = []
    for ch in s:
        if ch in _MOUNT_NAME_RE:
            out.append(ch)
        else:
            out.append("-")
    slug = "".join(out).strip("-")
    if not slug:
        return "mount"
    return slug[:32]


def _mounts_from_allowed_paths(*, allowed_dirs: Iterable[Path], used_names: set[str]) -> Dict[str, Path]:
    """Build a deterministic {mount_name -> root} map for allowed roots outside workspace_root."""
    import hashlib

    out: Dict[str, Path] = {}
    for p in allowed_dirs:
        try:
            resolved = p.resolve()
        except Exception:
            resolved = p
        base = _slug_mount_name(resolved.name)
        name = base
        if name in used_names:
            digest = hashlib.sha256(str(resolved).encode("utf-8")).hexdigest()[:8]
            trim = max(1, 32 - (1 + len(digest)))
            name = f"{base[:trim]}_{digest}"
            i = 2
            while name in used_names:
                suffix = f"_{i}"
                trim = max(1, 32 - len(suffix))
                name = f"{base[:trim]}{suffix}"
                i += 1
        used_names.add(name)
        out[name] = resolved
    return out

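Aside: a sketch of the mount-name map built above; basenames are slugged and collisions get a short sha256 suffix. The directories are hypothetical and the function is module-internal.

# --- illustrative sketch, not part of workspace_scoped_tools.py ---
from pathlib import Path
from abstractruntime.integrations.abstractcore.workspace_scoped_tools import _mounts_from_allowed_paths

used: set[str] = set()
mounts = _mounts_from_allowed_paths(
    allowed_dirs=[Path("/srv/data"), Path("/mnt/backup/data")],  # both basenames slug to "data"
    used_names=used,
)
print(sorted(mounts))  # ['data', 'data_xxxxxxxx'], xxxxxxxx = first 8 hex chars of sha256('/mnt/backup/data')
# --- end sketch ---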
def _resolve_virtual_mount_relative_path(*, scope: "WorkspaceScope", raw: str) -> tuple[Path, str]:
    """Resolve a relative path that may be a virtual mount path.

    Supported forms:
    - "rel/path.txt" (workspace_root)
    - "mount/rel/path.txt" (allowed root mount; only when access_mode==workspace_or_allowed)
    - "<workspace_root_name>/rel/path.txt" (best-effort redundant prefix stripping)
    - Optional leading "@", tolerated for UX across clients ("@mount/rel/path.txt")

    Returns:
        (root_used, rel_part) where rel_part is a relative path to join under root_used.
    """
    text = str(raw or "").strip().replace("\\", "/")
    if text.startswith("@"):
        text = text[1:].lstrip()
    while text.startswith("./"):
        text = text[2:]

    parts = [seg for seg in text.split("/") if seg not in ("", ".")]
    if len(parts) < 2:
        return (scope.root, text)

    first = parts[0]

    # Mounts: allow a "mount/..." prefix for allowed roots outside workspace_root.
    if scope.access_mode == "workspace_or_allowed" and scope.allowed_paths:
        used: set[str] = set()
        allowed_outside = [p for p in scope.allowed_paths if isinstance(p, Path) and not _is_under(p, scope.root)]
        mounts = _mounts_from_allowed_paths(allowed_dirs=allowed_outside, used_names=used)
        if first in mounts:
            root = mounts[first]
            rel = "/".join(parts[1:])
            return (root, rel)

    # Convenience: if the path redundantly begins with the workspace directory name, strip it
    # when it does not exist as a real child directory (common with "repo-name/..." patterns).
    try:
        if first == scope.root.name and not (scope.root / first).exists():
            rel = "/".join(parts[1:])
            return (scope.root, rel)
    except Exception:
        pass

    return (scope.root, text)

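Aside: a sketch of the "mount/..." convention handled above, using the WorkspaceScope dataclass defined later in this file. Paths are hypothetical.

# --- illustrative sketch, not part of workspace_scoped_tools.py ---
from pathlib import Path
from abstractruntime.integrations.abstractcore.workspace_scoped_tools import (
    WorkspaceScope,
    _resolve_virtual_mount_relative_path,
)

scope = WorkspaceScope(
    root=Path("/srv/workspace"),
    access_mode="workspace_or_allowed",
    allowed_paths=(Path("/srv/data"),),  # exposed to relative paths as the "data" mount
)
print(_resolve_virtual_mount_relative_path(scope=scope, raw="@data/reports/q1.csv"))
# -> (PosixPath('/srv/data'), 'reports/q1.csv')
print(_resolve_virtual_mount_relative_path(scope=scope, raw="notes/todo.md"))
# -> (PosixPath('/srv/workspace'), 'notes/todo.md')
# --- end sketch ---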
def _ensure_allowed(*, path: Path, scope: "WorkspaceScope") -> None:
    for blocked in scope.ignored_paths:
        if _is_under(path, blocked) or _resolve_no_strict(path) == _resolve_no_strict(blocked):
            raise ValueError(f"Path is blocked by workspace_ignored_paths: '{path}'")


def _resolve_under_root_strict(*, root: Path, user_path: str) -> Path:
    """Resolve under root and ensure it doesn't escape (used for relative paths always)."""
    p = Path(str(user_path or "").strip()).expanduser()
    if p.is_absolute():
        raise ValueError("Internal error: strict under-root resolver received absolute path")
    resolved = _resolve_no_strict(root / p)
    if not _is_under(resolved, root):
        raise ValueError(f"Path escapes workspace_root: '{user_path}'")
    return resolved

def resolve_user_path(*, scope: "WorkspaceScope", user_path: str) -> Path:
    """Resolve a user path according to workspace policy."""
    raw = str(user_path or "").strip()
    if not raw:
        raise ValueError("Empty path")

    # Tolerate "@path" handles (used by attachments and some UIs) for filesystem-ish tools.
    if raw.startswith("@"):
        raw = raw[1:].lstrip()

    p = Path(raw).expanduser()
    if p.is_absolute():
        resolved = _resolve_no_strict(p)
        if scope.access_mode == "workspace_only":
            if not _is_under(resolved, scope.root):
                raise ValueError(f"Path escapes workspace_root: '{user_path}'")
        elif scope.access_mode == "workspace_or_allowed":
            if not _is_under(resolved, scope.root) and not any(_is_under(resolved, p) for p in scope.allowed_paths):
                raise ValueError(f"Path is outside workspace roots: '{user_path}'")
        _ensure_allowed(path=resolved, scope=scope)
        return resolved

    # Relative paths normally resolve under workspace_root, but we also support a
    # conservative "mount/..." convention for allowed roots (mirrors gateway file endpoints).
    root_used, rel_part = _resolve_virtual_mount_relative_path(scope=scope, raw=raw)
    resolved = _resolve_under_root_strict(root=root_used, user_path=rel_part)
    _ensure_allowed(path=resolved, scope=scope)
    return resolved

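Aside: a sketch of how resolve_user_path enforces the access modes described in the module docstring. Paths are hypothetical; nothing is created on disk.

# --- illustrative sketch, not part of workspace_scoped_tools.py ---
from pathlib import Path
from abstractruntime.integrations.abstractcore.workspace_scoped_tools import WorkspaceScope, resolve_user_path

strict = WorkspaceScope(root=Path("/srv/workspace"))  # access_mode defaults to "workspace_only"
print(resolve_user_path(scope=strict, user_path="notes/todo.md"))  # -> /srv/workspace/notes/todo.md
try:
    resolve_user_path(scope=strict, user_path="/etc/passwd")
except ValueError as exc:
    print(exc)  # Path escapes workspace_root: '/etc/passwd'

relaxed = WorkspaceScope(
    root=Path("/srv/workspace"),
    access_mode="workspace_or_allowed",
    allowed_paths=(Path("/srv/data"),),
)
print(resolve_user_path(scope=relaxed, user_path="/srv/data/reports/q1.csv"))  # under an allowed root -> passes
# --- end sketch ---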
def _normalize_arguments(raw: Any) -> Dict[str, Any]:
    if raw is None:
        return {}
    if isinstance(raw, dict):
        return dict(raw)
    # Some models emit JSON strings for args.
    if isinstance(raw, str) and raw.strip():
        import json

        try:
            parsed = json.loads(raw)
        except Exception:
            return {}
        return dict(parsed) if isinstance(parsed, dict) else {}
    return {}

@dataclass(frozen=True)
class WorkspaceScope:
    root: Path
    access_mode: WorkspaceAccessMode = "workspace_only"
    ignored_paths: Tuple[Path, ...] = ()
    allowed_paths: Tuple[Path, ...] = ()

    @classmethod
    def from_input_data(
        cls,
        input_data: Dict[str, Any],
        *,
        key: str = "workspace_root",
        base_dir: Optional[Path] = None,
    ) -> Optional["WorkspaceScope"]:
        raw = input_data.get(key)
        if not isinstance(raw, str) or not raw.strip():
            return None

        base = base_dir or resolve_workspace_base_dir()
        root = Path(raw.strip()).expanduser()
        if not root.is_absolute():
            root = base / root
        root = _resolve_no_strict(root)
        if root.exists() and not root.is_dir():
            raise ValueError(f"workspace_root must be a directory (got file): {raw}")
        root.mkdir(parents=True, exist_ok=True)

        access_mode = _normalize_access_mode(input_data.get("workspace_access_mode") or input_data.get("workspaceAccessMode"))
        ignored = _parse_ignored_paths(input_data.get("workspace_ignored_paths") or input_data.get("workspaceIgnoredPaths"))
        ignored_paths = _resolve_ignored_paths(root=root, ignored=ignored)
        allowed = _parse_allowed_paths(input_data.get("workspace_allowed_paths") or input_data.get("workspaceAllowedPaths"))
        allowed_paths = _resolve_allowed_paths(root=root, allowed=allowed)

        return cls(root=root, access_mode=access_mode, ignored_paths=ignored_paths, allowed_paths=allowed_paths)

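Aside: a sketch of the run `vars` / `input_data` keys that from_input_data reads (both snake_case and camelCase spellings, as in the code above). The values are hypothetical; note that the call creates workspace_root on disk if it is missing.

# --- illustrative sketch, not part of workspace_scoped_tools.py ---
from abstractruntime.integrations.abstractcore.workspace_scoped_tools import WorkspaceScope

scope = WorkspaceScope.from_input_data(
    {
        "workspace_root": "runs/demo",                        # relative -> joined onto resolve_workspace_base_dir()
        "workspace_access_mode": "workspace_or_allowed",
        "workspace_allowed_paths": "/srv/data\n/srv/shared",  # newline-separated text is accepted
        "workspaceIgnoredPaths": '[".git", "node_modules"]',  # pasted JSON array is accepted
    }
)
print(scope.root, scope.access_mode)
print(scope.allowed_paths)  # roughly (Path('/srv/data'), Path('/srv/shared'))
print(scope.ignored_paths)  # roughly (<root>/.git, <root>/node_modules)
# --- end sketch ---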
class WorkspaceScopedToolExecutor:
    """Wrap another ToolExecutor and scope filesystem-ish tool calls to a workspace policy."""

    def __init__(self, *, scope: WorkspaceScope, delegate: Any):
        self._scope = scope
        self._delegate = delegate

    def set_timeout_s(self, timeout_s: Optional[float]) -> None:  # pragma: no cover (depends on delegate)
        setter = getattr(self._delegate, "set_timeout_s", None)
        if callable(setter):
            setter(timeout_s)

    def execute(self, *, tool_calls: List[Dict[str, Any]]) -> Dict[str, Any]:
        # Preprocess: rewrite and pre-block invalid calls so we don't crash the whole run.
        blocked: Dict[Tuple[int, str], Dict[str, Any]] = {}
        to_execute: List[Dict[str, Any]] = []

        for i, tc in enumerate(tool_calls or []):
            name = str(tc.get("name", "") or "")
            call_id = str(tc.get("call_id") or tc.get("id") or f"call_{i}")
            args = _normalize_arguments(tc.get("arguments"))

            try:
                rewritten_args = self._rewrite_args(tool_name=name, args=args)
            except Exception as e:
                blocked[(i, call_id)] = {
                    "call_id": call_id,
                    "name": name,
                    "success": False,
                    "output": None,
                    "error": str(e),
                }
                continue

            rewritten = dict(tc)
            rewritten["name"] = name
            rewritten["call_id"] = call_id
            rewritten["arguments"] = rewritten_args
            to_execute.append(rewritten)

        delegate_result = self._delegate.execute(tool_calls=to_execute)

        # If the delegate didn't execute tools, we can't merge blocked results meaningfully.
        if not isinstance(delegate_result, dict) or delegate_result.get("mode") != "executed":
            return delegate_result

        results = delegate_result.get("results")
        if not isinstance(results, list):
            results = []

        by_id: Dict[str, Dict[str, Any]] = {}
        for r in results:
            if not isinstance(r, dict):
                continue
            rid = str(r.get("call_id") or "")
            if rid:
                by_id[rid] = r

        merged: List[Dict[str, Any]] = []
        for i, tc in enumerate(tool_calls or []):
            call_id = str(tc.get("call_id") or tc.get("id") or f"call_{i}")
            key = (i, call_id)
            if key in blocked:
                merged.append(blocked[key])
                continue
            r = by_id.get(call_id)
            if r is None:
                merged.append(
                    {
                        "call_id": call_id,
                        "name": str(tc.get("name", "") or ""),
                        "success": False,
                        "output": None,
                        "error": "Tool result missing (internal error)",
                    }
                )
                continue
            merged.append(r)

        return {"mode": "executed", "results": merged}

    def _rewrite_args(self, *, tool_name: str, args: Dict[str, Any]) -> Dict[str, Any]:
        return rewrite_tool_arguments(tool_name=tool_name, args=args, scope=self._scope)

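Aside: a sketch of the wrapper in use, showing the pre-blocking and result-merging behaviour implemented above. The delegate here is a stand-in that echoes arguments back in the "executed" result shape; the real delegate would be whatever ToolExecutor the runtime already uses.

# --- illustrative sketch, not part of workspace_scoped_tools.py ---
from pathlib import Path
from abstractruntime.integrations.abstractcore.workspace_scoped_tools import (
    WorkspaceScope,
    WorkspaceScopedToolExecutor,
)


class EchoExecutor:
    """Hypothetical delegate: echoes each call back in the 'executed' result shape."""

    def execute(self, *, tool_calls):
        return {
            "mode": "executed",
            "results": [
                {"call_id": tc["call_id"], "name": tc["name"], "success": True,
                 "output": tc["arguments"], "error": None}
                for tc in tool_calls
            ],
        }


scope = WorkspaceScope(root=Path("/srv/workspace"))  # hypothetical workspace root
executor = WorkspaceScopedToolExecutor(scope=scope, delegate=EchoExecutor())
result = executor.execute(
    tool_calls=[
        {"call_id": "c1", "name": "read_file", "arguments": {"path": "notes/todo.md"}},
        {"call_id": "c2", "name": "read_file", "arguments": {"file_path": "/etc/passwd"}},
    ]
)
# c1 reaches the delegate with file_path rewritten under /srv/workspace;
# c2 is pre-blocked ("Path escapes workspace_root") and merged back as a failed result.
# --- end sketch ---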
def rewrite_tool_arguments(*, tool_name: str, args: Dict[str, Any], scope: WorkspaceScope) -> Dict[str, Any]:
    """Rewrite tool args so file operations follow the workspace policy."""
    root = scope.root
    out = dict(args or {})

    def _alias_field(preferred: str, aliases: Iterable[str]) -> None:
        if preferred in out and out.get(preferred) is not None:
            return
        for a in aliases:
            if a in out and out.get(a) is not None:
                out[preferred] = out.get(a)
                return

    def _rewrite_path_field(field: str, *, default_to_root: bool = False) -> None:
        raw = out.get(field)
        if (raw is None or (isinstance(raw, str) and not raw.strip())) and default_to_root:
            out[field] = str(_resolve_no_strict(root))
            return
        if raw is None:
            return
        if not isinstance(raw, str):
            raw = str(raw)
        resolved = resolve_user_path(scope=scope, user_path=raw)
        out[field] = str(resolved)

    def _rewrite_path_list_field(field: str) -> None:
        raw = out.get(field)
        if raw is None:
            return

        items: list[Any]
        if isinstance(raw, list):
            items = list(raw)
        elif isinstance(raw, tuple):
            items = list(raw)
        else:
            # Accept a single string (or scalar) and let the underlying tool parse it.
            items = [raw]

        rewritten: list[str] = []
        for it in items:
            s = str(it or "").strip()
            if not s:
                continue
            resolved = resolve_user_path(scope=scope, user_path=s)
            rewritten.append(str(resolved))

        out[field] = rewritten

    # Filesystem-ish tools (AbstractCore common tools)
    if tool_name == "list_files":
        _rewrite_path_field("directory_path", default_to_root=True)
        return out
    if tool_name == "search_files":
        _rewrite_path_field("path", default_to_root=True)
        return out
    if tool_name == "analyze_code":
        _alias_field("file_path", ["path", "filename", "file"])
        _rewrite_path_field("file_path")
        if "file_path" not in out:
            raise ValueError("analyze_code requires file_path")
        return out
    if tool_name == "read_file":
        _alias_field("file_path", ["path", "filename", "file"])
        _rewrite_path_field("file_path")
        if "file_path" not in out:
            raise ValueError("read_file requires file_path")
        return out
    if tool_name == "write_file":
        _alias_field("file_path", ["path", "filename", "file"])
        _rewrite_path_field("file_path")
        if "file_path" not in out:
            raise ValueError("write_file requires file_path")
        return out
    if tool_name == "edit_file":
        _alias_field("file_path", ["path", "filename", "file"])
        _rewrite_path_field("file_path")
        if "file_path" not in out:
            raise ValueError("edit_file requires file_path")
        return out
    if tool_name == "execute_command":
        _rewrite_path_field("working_directory", default_to_root=True)
        return out
    if tool_name == "skim_files":
        _alias_field("paths", ["path", "file_path", "filename", "file"])
        _rewrite_path_list_field("paths")
        if "paths" not in out:
            raise ValueError("skim_files requires paths")
        return out
    if tool_name == "skim_folders":
        _alias_field("paths", ["path", "directory_path", "folder"])
        _rewrite_path_list_field("paths")
        if "paths" not in out:
            raise ValueError("skim_folders requires paths")
        return out

    return out

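Aside: a sketch of the per-tool rewriting above, called directly. The tool names are the AbstractCore common tools the function special-cases; paths are hypothetical.

# --- illustrative sketch, not part of workspace_scoped_tools.py ---
from pathlib import Path
from abstractruntime.integrations.abstractcore.workspace_scoped_tools import WorkspaceScope, rewrite_tool_arguments

scope = WorkspaceScope(root=Path("/srv/workspace"))  # hypothetical workspace root

# "path" is aliased to "file_path", then resolved under workspace_root.
print(rewrite_tool_arguments(tool_name="read_file", args={"path": "notes/todo.md"}, scope=scope))

# execute_command gets workspace_root as its default working_directory.
print(rewrite_tool_arguments(tool_name="execute_command", args={"command": "ls"}, scope=scope))

# Tools without a special case pass through untouched ("some_other_tool" is a made-up name).
print(rewrite_tool_arguments(tool_name="some_other_tool", args={"query": "abstractruntime"}, scope=scope))
# --- end sketch ---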
__all__ = [
    "WorkspaceAccessMode",
    "WorkspaceScope",
    "WorkspaceScopedToolExecutor",
    "rewrite_tool_arguments",
    "resolve_workspace_base_dir",
    "resolve_user_path",
]