weakincentives 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- weakincentives/__init__.py +67 -0
- weakincentives/adapters/__init__.py +37 -0
- weakincentives/adapters/_names.py +32 -0
- weakincentives/adapters/_provider_protocols.py +69 -0
- weakincentives/adapters/_tool_messages.py +80 -0
- weakincentives/adapters/core.py +102 -0
- weakincentives/adapters/litellm.py +254 -0
- weakincentives/adapters/openai.py +254 -0
- weakincentives/adapters/shared.py +1021 -0
- weakincentives/cli/__init__.py +23 -0
- weakincentives/cli/wink.py +58 -0
- weakincentives/dbc/__init__.py +412 -0
- weakincentives/deadlines.py +58 -0
- weakincentives/prompt/__init__.py +105 -0
- weakincentives/prompt/_generic_params_specializer.py +64 -0
- weakincentives/prompt/_normalization.py +48 -0
- weakincentives/prompt/_overrides_protocols.py +33 -0
- weakincentives/prompt/_types.py +34 -0
- weakincentives/prompt/chapter.py +146 -0
- weakincentives/prompt/composition.py +281 -0
- weakincentives/prompt/errors.py +57 -0
- weakincentives/prompt/markdown.py +108 -0
- weakincentives/prompt/overrides/__init__.py +59 -0
- weakincentives/prompt/overrides/_fs.py +164 -0
- weakincentives/prompt/overrides/inspection.py +141 -0
- weakincentives/prompt/overrides/local_store.py +275 -0
- weakincentives/prompt/overrides/validation.py +534 -0
- weakincentives/prompt/overrides/versioning.py +269 -0
- weakincentives/prompt/prompt.py +353 -0
- weakincentives/prompt/protocols.py +103 -0
- weakincentives/prompt/registry.py +375 -0
- weakincentives/prompt/rendering.py +288 -0
- weakincentives/prompt/response_format.py +60 -0
- weakincentives/prompt/section.py +166 -0
- weakincentives/prompt/structured_output.py +179 -0
- weakincentives/prompt/tool.py +397 -0
- weakincentives/prompt/tool_result.py +30 -0
- weakincentives/py.typed +0 -0
- weakincentives/runtime/__init__.py +82 -0
- weakincentives/runtime/events/__init__.py +126 -0
- weakincentives/runtime/events/_types.py +110 -0
- weakincentives/runtime/logging.py +284 -0
- weakincentives/runtime/session/__init__.py +46 -0
- weakincentives/runtime/session/_slice_types.py +24 -0
- weakincentives/runtime/session/_types.py +55 -0
- weakincentives/runtime/session/dataclasses.py +29 -0
- weakincentives/runtime/session/protocols.py +34 -0
- weakincentives/runtime/session/reducer_context.py +40 -0
- weakincentives/runtime/session/reducers.py +82 -0
- weakincentives/runtime/session/selectors.py +56 -0
- weakincentives/runtime/session/session.py +387 -0
- weakincentives/runtime/session/snapshots.py +310 -0
- weakincentives/serde/__init__.py +19 -0
- weakincentives/serde/_utils.py +240 -0
- weakincentives/serde/dataclass_serde.py +55 -0
- weakincentives/serde/dump.py +189 -0
- weakincentives/serde/parse.py +417 -0
- weakincentives/serde/schema.py +260 -0
- weakincentives/tools/__init__.py +154 -0
- weakincentives/tools/_context.py +38 -0
- weakincentives/tools/asteval.py +853 -0
- weakincentives/tools/errors.py +26 -0
- weakincentives/tools/planning.py +831 -0
- weakincentives/tools/podman.py +1655 -0
- weakincentives/tools/subagents.py +346 -0
- weakincentives/tools/vfs.py +1390 -0
- weakincentives/types/__init__.py +35 -0
- weakincentives/types/json.py +45 -0
- weakincentives-0.9.0.dist-info/METADATA +775 -0
- weakincentives-0.9.0.dist-info/RECORD +73 -0
- weakincentives-0.9.0.dist-info/WHEEL +4 -0
- weakincentives-0.9.0.dist-info/entry_points.txt +2 -0
- weakincentives-0.9.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,1390 @@
|
|
|
1
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
2
|
+
# you may not use this file except in compliance with the License.
|
|
3
|
+
# You may obtain a copy of the License at
|
|
4
|
+
#
|
|
5
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
6
|
+
#
|
|
7
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
8
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
9
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
10
|
+
# See the License for the specific language governing permissions and
|
|
11
|
+
# limitations under the License.
|
|
12
|
+
|
|
13
|
+
"""Virtual filesystem tool suite."""
|
|
14
|
+
|
|
15
|
+
from __future__ import annotations
|
|
16
|
+
|
|
17
|
+
import fnmatch
|
|
18
|
+
import os
|
|
19
|
+
import re
|
|
20
|
+
from collections.abc import Iterable, Sequence
|
|
21
|
+
from dataclasses import dataclass, field
|
|
22
|
+
from datetime import UTC, datetime
|
|
23
|
+
from pathlib import Path
|
|
24
|
+
from typing import Final, Literal, cast
|
|
25
|
+
|
|
26
|
+
from ..prompt import SupportsDataclass, SupportsToolResult
|
|
27
|
+
from ..prompt.markdown import MarkdownSection
|
|
28
|
+
from ..prompt.tool import Tool, ToolContext, ToolResult
|
|
29
|
+
from ..runtime.session import (
|
|
30
|
+
ReducerContextProtocol,
|
|
31
|
+
ReducerEvent,
|
|
32
|
+
Session,
|
|
33
|
+
TypedReducer,
|
|
34
|
+
replace_latest,
|
|
35
|
+
select_latest,
|
|
36
|
+
)
|
|
37
|
+
from ._context import ensure_context_uses_session
|
|
38
|
+
from .errors import ToolValidationError
|
|
39
|
+
|
|
40
|
+
# Type aliases constraining the encodings and write strategies the VFS accepts.
FileEncoding = Literal["utf-8"]
WriteMode = Literal["create", "overwrite", "append"]

# Codec name used when validating that paths/segments are ASCII-only.
_ASCII: Final[str] = "ascii"
# Only UTF-8 text files are supported by the suite.
_DEFAULT_ENCODING: Final[FileEncoding] = "utf-8"
# Hard cap on characters accepted by write_file/edit_file payloads.
_MAX_WRITE_LENGTH: Final[int] = 48_000
# Limits on path shape: at most 16 segments, each at most 80 characters.
_MAX_PATH_DEPTH: Final[int] = 16
_MAX_SEGMENT_LENGTH: Final[int] = 80
# read_file returns at most this many lines per request (pagination window).
_MAX_READ_LIMIT: Final[int] = 2_000
# Number of sample entries shown when previewing a host mount.
_MAX_MOUNT_PREVIEW_ENTRIES: Final[int] = 20
# Prompt text rendered into the VfsToolsSection describing tool usage rules.
_VFS_SECTION_TEMPLATE: Final[str] = (
    "The virtual filesystem starts empty unless host mounts are configured."
    " It is the only filesystem available and will contain files relevant to the task;"
    " use it as scratch space when necessary.\n"
    "1. Remember the snapshot begins empty aside from configured host mounts.\n"
    "2. Explore with `ls` or `glob` before reading or modifying files.\n"
    "3. Fetch file content with `read_file`; pagination keeps responses focused.\n"
    "4. Create files via `write_file` (create-only) and edit them with `edit_file`.\n"
    "5. Remove files or directories recursively with `rm` when they are no longer needed.\n"
    "6. Host mounts are fixed at session start; additional directories cannot be mounted later.\n"
    "7. Avoid mirroring large repositories or binary assets—only UTF-8 text up to 48k characters is accepted.\n"
    "8. Use `grep` to search for patterns across files when the workspace grows."
)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@dataclass(slots=True, frozen=True)
class VfsPath:
    """Relative POSIX-style path representation."""

    # An empty tuple addresses the VFS root (e.g. `ls` with no path argument).
    segments: tuple[str, ...] = field(
        metadata={
            "description": (
                "Ordered path segments. Values must be relative, ASCII-only, and "
                "free of '.' or '..'."
            )
        }
    )
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
@dataclass(slots=True, frozen=True)
class VfsFile:
    """Snapshot of a single file stored in the virtual filesystem."""

    path: VfsPath = field(
        metadata={"description": "Location of the file within the virtual filesystem."}
    )
    content: str = field(
        metadata={
            "description": (
                "UTF-8 text content of the file. Binary data is not supported."
            )
        }
    )
    encoding: FileEncoding = field(
        metadata={"description": "Name of the codec used to decode the file contents."}
    )
    # size_bytes reflects the encoded (not character) length of `content`.
    size_bytes: int = field(
        metadata={"description": "Size of the encoded file on disk, in bytes."}
    )
    version: int = field(
        metadata={
            "description": (
                "Monotonic version counter that increments after each write."
            )
        }
    )
    created_at: datetime = field(
        metadata={
            "description": "Timestamp indicating when the file was first created."
        }
    )
    updated_at: datetime = field(
        metadata={"description": "Timestamp of the most recent write operation."}
    )
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
@dataclass(slots=True, frozen=True)
class VirtualFileSystem:
    """Immutable snapshot of the virtual filesystem state."""

    # Flat collection; directory structure is implied by each file's path
    # segments rather than stored explicitly (see list_directory).
    files: tuple[VfsFile, ...] = field(
        default_factory=tuple,
        metadata={
            "description": (
                "Collection of tracked files. Each entry captures file metadata "
                "and contents."
            )
        },
    )
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
@dataclass(slots=True, frozen=True)
class FileInfo:
    """Metadata describing a directory entry."""

    path: VfsPath = field(
        metadata={"description": "Normalized VFS path referencing the directory entry."}
    )
    kind: Literal["file", "directory"] = field(
        metadata={
            "description": (
                "Entry type; directories surface nested paths while files carry metadata."
            )
        }
    )
    # The three optional fields below are populated for files only; directory
    # entries are constructed with all of them left as None (see list_directory).
    size_bytes: int | None = field(
        default=None,
        metadata={
            "description": (
                "On-disk size for files. Directories omit sizes to avoid redundant traversal."
            )
        },
    )
    version: int | None = field(
        default=None,
        metadata={
            "description": (
                "Monotonic file version propagated from the VFS snapshot. Directories omit versions."
            )
        },
    )
    updated_at: datetime | None = field(
        default=None,
        metadata={
            "description": (
                "Timestamp describing the most recent mutation for files; directories omit the value."
            )
        },
    )
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
@dataclass(slots=True, frozen=True)
class ReadFileResult:
    """Payload returned from :func:`read_file`."""

    path: VfsPath = field(
        metadata={"description": "Path of the file that was read inside the VFS."}
    )
    content: str = field(
        metadata={
            "description": (
                "Formatted slice of the file contents with line numbers applied for clarity."
            )
        }
    )
    # offset/limit echo the *effective* window after clamping, which may differ
    # from the requested parameters when the request exceeds the file length.
    offset: int = field(
        metadata={
            "description": (
                "Zero-based line offset applied to the read window after normalization."
            )
        }
    )
    limit: int = field(
        metadata={
            "description": (
                "Maximum number of lines returned in this response after clamping to file length."
            )
        }
    )
    total_lines: int = field(
        metadata={
            "description": "Total line count of the file so callers can paginate follow-up reads."
        }
    )
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
@dataclass(slots=True, frozen=True)
class GlobMatch:
    """Match returned by the :func:`glob` tool."""

    path: VfsPath = field(
        metadata={"description": "Path of the file or directory that matched the glob."}
    )
    size_bytes: int = field(
        metadata={
            "description": (
                "File size in bytes captured at snapshot time to help prioritize large assets."
            )
        }
    )
    version: int = field(
        metadata={
            "description": "Monotonic VFS version counter reflecting the latest write to the entry."
        }
    )
    updated_at: datetime = field(
        metadata={
            "description": "Timestamp from the snapshot identifying when the entry last changed."
        }
    )
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
@dataclass(slots=True, frozen=True)
class GrepMatch:
    """Regex match returned by :func:`grep`."""

    path: VfsPath = field(
        metadata={
            "description": "Path of the file containing the regex hit, normalized to the VFS."
        }
    )
    # One-based to match conventional grep/editor numbering.
    line_number: int = field(
        metadata={"description": "One-based line number where the regex matched."}
    )
    line: str = field(
        metadata={
            "description": "Full line content containing the match so callers can review context."
        }
    )
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
@dataclass(slots=True, frozen=True)
class ListDirectoryParams:
    """Input parameters for the `ls` tool."""

    path: str | None = field(
        default=None,
        metadata={
            "description": (
                "Directory path to list. Provide a relative VFS path or omit to list the root."
            )
        },
    )
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
@dataclass(slots=True, frozen=True)
class ReadFileParams:
    """Input parameters for the `read_file` tool."""

    file_path: str = field(
        metadata={
            "description": "Relative VFS path of the file to read (leading slashes are optional)."
        }
    )
    offset: int = field(
        default=0,
        metadata={
            "description": (
                "Zero-based line offset where reading should begin. Must be non-negative."
            )
        },
    )
    limit: int = field(
        default=_MAX_READ_LIMIT,
        metadata={
            "description": (
                "Maximum number of lines to return. Values are capped at 2,000 lines per request."
            )
        },
    )
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
@dataclass(slots=True, frozen=True)
class WriteFileParams:
    """Input parameters for the create-only `write_file` tool."""

    file_path: str = field(
        metadata={
            "description": "Destination VFS path for the new file. Must not already exist."
        }
    )
    content: str = field(
        metadata={
            "description": (
                "UTF-8 text that will be written to the file. Content is limited to 48,000 characters."
            )
        }
    )
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
@dataclass(slots=True, frozen=True)
class EditFileParams:
    """Input parameters for the string-replacement `edit_file` tool."""

    file_path: str = field(
        metadata={
            "description": "Path to the file that should be edited inside the VFS."
        }
    )
    old_string: str = field(
        metadata={
            "description": (
                "Exact text to search for within the file. At least one occurrence must be present."
            )
        }
    )
    new_string: str = field(
        metadata={
            "description": "Replacement text that will substitute the matched content."
        }
    )
    replace_all: bool = field(
        default=False,
        metadata={
            "description": (
                "When true, replace every occurrence of `old_string`. Otherwise require a single match."
            )
        },
    )
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
@dataclass(slots=True, frozen=True)
class GlobParams:
    """Input parameters for the `glob` tool."""

    pattern: str = field(
        metadata={
            "description": ("Shell-style pattern used to match files (e.g. `**/*.py`).")
        }
    )
    path: str = field(
        default="/",
        metadata={
            "description": (
                "Directory to treat as the search root. Defaults to the VFS root (`/`)."
            )
        },
    )
|
|
348
|
+
|
|
349
|
+
|
|
350
|
+
@dataclass(slots=True, frozen=True)
class GrepParams:
    """Input parameters for the `grep` tool."""

    pattern: str = field(
        metadata={
            "description": "Regular expression pattern to search for in matching files."
        }
    )
    path: str | None = field(
        default=None,
        metadata={
            "description": (
                "Optional directory path that scopes the search. Defaults to the entire VFS snapshot."
            )
        },
    )
    # NOTE: the field name intentionally mirrors the tool-facing parameter;
    # it shadows nothing at module scope because it is a class attribute.
    glob: str | None = field(
        default=None,
        metadata={
            "description": (
                "Optional glob pattern that filters files before applying the regex search."
            )
        },
    )
|
|
373
|
+
|
|
374
|
+
|
|
375
|
+
@dataclass(slots=True, frozen=True)
class RemoveParams:
    """Input parameters for the recursive `rm` tool."""

    path: str = field(
        metadata={
            "description": "Relative VFS path targeting the file or directory that should be removed."
        }
    )
|
|
382
|
+
|
|
383
|
+
|
|
384
|
+
@dataclass(slots=True, frozen=True)
class ListDirectory:
    """Normalized directory-listing request. Presumably the session-facing
    counterpart of :class:`ListDirectoryParams` — TODO confirm against callers."""

    path: VfsPath | None = field(
        default=None,
        metadata={
            "description": (
                "Directory path to enumerate. When omitted the VFS root is listed."
            )
        },
    )
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
@dataclass(slots=True, frozen=True)
class ListDirectoryResult:
    """Structured result of a directory listing."""

    path: VfsPath = field(
        metadata={"description": "Directory that was listed after normalization."}
    )
    directories: tuple[str, ...] = field(
        metadata={
            "description": (
                "Immediate child directories contained within the listed path, sorted lexicographically."
            )
        }
    )
    files: tuple[str, ...] = field(
        metadata={
            "description": (
                "Immediate child files contained within the listed path, sorted lexicographically."
            )
        }
    )
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
@dataclass(slots=True, frozen=True)
class ReadFile:
    """Record of a file read. NOTE(review): no reducer is registered for this
    type in the visible code — confirm how it is consumed."""

    path: VfsPath = field(
        metadata={
            "description": (
                "Normalized path referencing the file read via :func:`read_file`."
            )
        }
    )
|
|
426
|
+
|
|
427
|
+
|
|
428
|
+
@dataclass(slots=True, frozen=True)
class WriteFile:
    """Write event applied to the VFS slice via the reducer registered in
    ``VfsToolsSection._initialize_session``."""

    path: VfsPath = field(
        metadata={"description": "Destination file path being written inside the VFS."}
    )
    content: str = field(
        metadata={
            "description": (
                "UTF-8 payload that will be persisted to the target file after validation."
            )
        }
    )
    mode: WriteMode = field(
        default="create",
        metadata={
            "description": (
                "Write strategy describing whether the file is newly created, overwritten, or appended."
            )
        },
    )
    encoding: FileEncoding = field(
        default=_DEFAULT_ENCODING,
        metadata={
            "description": (
                "Codec used to encode the content when persisting it to the virtual filesystem."
            )
        },
    )
|
|
456
|
+
|
|
457
|
+
|
|
458
|
+
@dataclass(slots=True, frozen=True)
class DeleteEntry:
    """Deletion event applied to the VFS slice via the reducer registered in
    ``VfsToolsSection._initialize_session``."""

    path: VfsPath = field(
        metadata={
            "description": "Path of the file or directory slated for removal from the VFS."
        }
    )
|
|
465
|
+
|
|
466
|
+
|
|
467
|
+
@dataclass(slots=True, frozen=True)
class HostMount:
    """Configuration for mirroring a host path into the VFS at session start."""

    host_path: str = field(
        metadata={
            "description": (
                "Relative path (within an allowed host root) that should be mirrored into the VFS snapshot."
            )
        }
    )
    mount_path: VfsPath | None = field(
        default=None,
        metadata={
            "description": (
                "Optional target path inside the VFS. Defaults to the host-relative path when omitted."
            )
        },
    )
    # Filtering order: include_glob whitelist first, then exclude_glob removals.
    include_glob: tuple[str, ...] = field(
        default_factory=tuple,
        metadata={
            "description": (
                "Whitelist of glob patterns applied to host files before mounting. Empty means include all files."
            )
        },
    )
    exclude_glob: tuple[str, ...] = field(
        default_factory=tuple,
        metadata={
            "description": (
                "Blacklist of glob patterns that remove host files from the mount after inclusion filtering."
            )
        },
    )
    max_bytes: int | None = field(
        default=None,
        metadata={
            "description": (
                "Optional limit on the total number of bytes that may be imported from the host directory."
            )
        },
    )
    follow_symlinks: bool = field(
        default=False,
        metadata={
            "description": (
                "Whether to follow symbolic links when traversing the host directory tree during the mount."
            )
        },
    )
|
|
516
|
+
|
|
517
|
+
|
|
518
|
+
@dataclass(slots=True, frozen=True)
class HostMountPreview:
    """Resolved summary of a host mount, used to render the section template."""

    host_path: str = field(
        metadata={
            "description": (
                "User-specified relative path identifying the host directory or file to mount."
            )
        }
    )
    resolved_host: Path = field(
        metadata={
            "description": (
                "Absolute host filesystem path derived from the allowed mount roots."
            )
        }
    )
    mount_path: VfsPath = field(
        metadata={
            "description": "Destination path inside the VFS where the host content will appear."
        }
    )
    # Bounded elsewhere by _MAX_MOUNT_PREVIEW_ENTRIES — TODO confirm in
    # materialize_host_mounts (not visible in this chunk).
    entries: tuple[str, ...] = field(
        metadata={
            "description": (
                "Sample of files or directories that will be imported for previewing the mount."
            )
        }
    )
    is_directory: bool = field(
        metadata={
            "description": (
                "Indicates whether the host path resolves to a directory (True) or a file (False)."
            )
        }
    )
|
|
553
|
+
|
|
554
|
+
|
|
555
|
+
@dataclass(slots=True, frozen=True)
class _VfsSectionParams:
    """Empty parameter object: the VFS section template takes no variables."""

    pass
|
|
558
|
+
|
|
559
|
+
|
|
560
|
+
class VfsToolsSection(MarkdownSection[_VfsSectionParams]):
    """Prompt section exposing the virtual filesystem tool suite.

    Construction materializes any configured host mounts into an initial
    :class:`VirtualFileSystem` snapshot, wires the session reducers that apply
    write/delete events, and registers the seven VFS tools on the section.
    """

    def __init__(
        self,
        *,
        session: Session,
        mounts: Sequence[HostMount] = (),
        allowed_host_roots: Sequence[os.PathLike[str] | str] = (),
        accepts_overrides: bool = False,
    ) -> None:
        # Host roots are normalized first so mounts can be validated against them.
        allowed_roots = tuple(normalize_host_root(path) for path in allowed_host_roots)
        self._mount_snapshot, mount_previews = materialize_host_mounts(
            mounts, allowed_roots
        )
        self._session = session
        # Reducers must be registered before any tool can emit events.
        self._initialize_session(session)

        tools = _build_tools(section=self, accepts_overrides=accepts_overrides)
        super().__init__(
            title="Virtual Filesystem Tools",
            key="vfs.tools",
            template=_render_section_template(mount_previews),
            default_params=_VfsSectionParams(),
            tools=tools,
            accepts_overrides=accepts_overrides,
        )

    @property
    def session(self) -> Session:
        """Session this section's tools are bound to."""
        return self._session

    def _initialize_session(self, session: Session) -> None:
        # The VirtualFileSystem slice keeps only the latest snapshot; WriteFile
        # and DeleteEntry events are folded into that slice by dedicated reducers.
        session.register_reducer(VirtualFileSystem, replace_latest)
        session.seed_slice(VirtualFileSystem, (self._mount_snapshot,))
        session.register_reducer(
            WriteFile,
            _make_write_reducer(),
            slice_type=VirtualFileSystem,
        )
        session.register_reducer(
            DeleteEntry,
            _make_delete_reducer(),
            slice_type=VirtualFileSystem,
        )

    def latest_snapshot(self) -> VirtualFileSystem:
        """Return the most recent VFS snapshot, or an empty one if none exists."""
        snapshot = select_latest(self._session, VirtualFileSystem)
        return snapshot or VirtualFileSystem()
|
|
609
|
+
|
|
610
|
+
|
|
611
|
+
def _build_tools(
    *,
    section: VfsToolsSection,
    accepts_overrides: bool,
) -> tuple[Tool[SupportsDataclass, SupportsToolResult], ...]:
    """Construct the seven VFS tools, with handlers bound to *section*.

    The cast erases the per-tool parameter/result types down to the common
    ``Tool[SupportsDataclass, SupportsToolResult]`` signature expected by
    :class:`MarkdownSection`.
    """
    suite = _VfsToolSuite(section=section)
    return cast(
        tuple[Tool[SupportsDataclass, SupportsToolResult], ...],
        (
            Tool[ListDirectoryParams, tuple[FileInfo, ...]](
                name="ls",
                description="List directory entries under a relative path.",
                handler=suite.list_directory,
                accepts_overrides=accepts_overrides,
            ),
            Tool[ReadFileParams, ReadFileResult](
                name="read_file",
                description="Read UTF-8 file contents with pagination support.",
                handler=suite.read_file,
                accepts_overrides=accepts_overrides,
            ),
            Tool[WriteFileParams, WriteFile](
                name="write_file",
                description="Create a new UTF-8 text file.",
                handler=suite.write_file,
                accepts_overrides=accepts_overrides,
            ),
            Tool[EditFileParams, WriteFile](
                name="edit_file",
                description="Replace occurrences of a string within a file.",
                handler=suite.edit_file,
                accepts_overrides=accepts_overrides,
            ),
            Tool[GlobParams, tuple[GlobMatch, ...]](
                name="glob",
                description="Match files beneath a directory using shell patterns.",
                handler=suite.glob,
                accepts_overrides=accepts_overrides,
            ),
            Tool[GrepParams, tuple[GrepMatch, ...]](
                name="grep",
                description="Search files for a regular expression pattern.",
                handler=suite.grep,
                accepts_overrides=accepts_overrides,
            ),
            Tool[RemoveParams, DeleteEntry](
                name="rm",
                description="Remove files or directories recursively.",
                handler=suite.remove,
                accepts_overrides=accepts_overrides,
            ),
        ),
    )
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
class _VfsToolSuite:
|
|
667
|
+
"""Collection of VFS handlers bound to a section instance."""
|
|
668
|
+
|
|
669
|
+
def __init__(self, *, section: VfsToolsSection) -> None:
    """Bind the handler suite to the owning section (and its session)."""
    super().__init__()
    self._section = section
|
|
672
|
+
|
|
673
|
+
def list_directory(
    self, params: ListDirectoryParams, *, context: ToolContext
) -> ToolResult[tuple[FileInfo, ...]]:
    """List the immediate children (files and directories) of a VFS path.

    Directories are inferred from file paths: any file nested more than one
    level below the listed path contributes its next path segment as a
    directory entry. Raises :class:`ToolValidationError` when the target path
    is itself a file.
    """
    ensure_context_uses_session(context=context, session=self._section.session)
    del context
    path = _normalize_string_path(params.path, allow_empty=True, field="path")
    snapshot = self._section.latest_snapshot()
    if _find_file(snapshot.files, path) is not None:
        raise ToolValidationError("Cannot list a file path; provide a directory.")

    directories: set[tuple[str, ...]] = set()
    files: list[VfsFile] = []
    for file in snapshot.files:
        segments = file.path.segments
        if not _is_path_prefix(segments, path.segments):
            continue
        prefix_length = len(path.segments)
        next_segment = segments[prefix_length]
        subpath = (*path.segments, next_segment)
        # Exactly one segment beyond the prefix => direct child file;
        # deeper => the next segment names an intermediate directory.
        if len(segments) == prefix_length + 1:
            files.append(file)
        else:
            directories.add(subpath)

    entries: list[FileInfo] = [
        FileInfo(
            path=file.path,
            kind="file",
            size_bytes=file.size_bytes,
            version=file.version,
            updated_at=file.updated_at,
        )
        for file in files
        # NOTE(review): this filter is always true — `files` only ever holds
        # direct children by construction above. Harmless but redundant.
        if len(file.path.segments) == len(path.segments) + 1
    ]
    entries.extend(
        FileInfo(path=VfsPath(directory), kind="directory")
        for directory in directories
    )

    # Sort files and directories together by path for a stable listing.
    entries.sort(key=lambda entry: entry.path.segments)
    message = _format_directory_message(path, entries)
    return ToolResult(message=message, value=tuple(entries))
|
|
716
|
+
|
|
717
|
+
def read_file(
    self, params: ReadFileParams, *, context: ToolContext
) -> ToolResult[ReadFileResult]:
    """Return a paginated, line-numbered window of a file's contents."""
    ensure_context_uses_session(context=context, session=self._section.session)
    del context
    target = _normalize_string_path(params.file_path, field="file_path")
    requested_offset = _normalize_offset(params.offset)
    requested_limit = _normalize_limit(params.limit)

    record = _require_file(self._section.latest_snapshot().files, target)
    all_lines = record.content.splitlines()
    line_count = len(all_lines)
    # Clamp the window to the file so out-of-range requests degrade gracefully.
    begin = min(requested_offset, line_count)
    stop = min(begin + requested_limit, line_count)

    rendered: list[str] = []
    for row, text in enumerate(all_lines[begin:stop], start=begin):
        # One-based, right-aligned line numbers for readability.
        rendered.append(f"{row + 1:>4} | {text}")

    payload = ReadFileResult(
        path=record.path,
        content="\n".join(rendered),
        offset=begin,
        limit=stop - begin,
        total_lines=line_count,
    )
    return ToolResult(
        message=_format_read_file_message(record, begin, stop), value=payload
    )
|
|
747
|
+
|
|
748
|
+
def write_file(
    self, params: WriteFileParams, *, context: ToolContext
) -> ToolResult[WriteFile]:
    """Stage creation of a brand-new file; existing paths are rejected."""
    ensure_context_uses_session(context=context, session=self._section.session)
    del context
    destination = _normalize_string_path(params.file_path, field="file_path")
    body = _normalize_content(params.content)

    existing = _find_file(self._section.latest_snapshot().files, destination)
    if existing is not None:
        # write_file is create-only; modifications must go through edit_file.
        raise ToolValidationError(
            "File already exists; use edit_file to modify existing content."
        )

    event = WriteFile(path=destination, content=body, mode="create")
    return ToolResult(
        message=_format_write_file_message(destination, body, mode="create"),
        value=event,
    )
|
|
765
|
+
|
|
766
|
+
def edit_file(
    self, params: EditFileParams, *, context: ToolContext
) -> ToolResult[WriteFile]:
    """Replace ``old_string`` with ``new_string`` inside an existing VFS file.

    Validation order: the file must exist, ``old_string`` must be non-empty,
    both strings must fit the write budget, the pattern must occur at least
    once, and it must occur exactly once unless ``replace_all`` is set.

    Returns:
        A ``ToolResult`` whose value is a full-content ``WriteFile`` in
        "overwrite" mode — the write reducer applies it as a replacement.

    Raises:
        ToolValidationError: on any of the validation failures above.
    """
    ensure_context_uses_session(context=context, session=self._section.session)
    # The context is only needed for the session check above.
    del context
    path = _normalize_string_path(params.file_path, field="file_path")
    snapshot = self._section.latest_snapshot()
    file = _require_file(snapshot.files, path)

    old = params.old_string
    new = params.new_string
    if not old:
        raise ToolValidationError("old_string must not be empty.")
    if len(old) > _MAX_WRITE_LENGTH or len(new) > _MAX_WRITE_LENGTH:
        raise ToolValidationError(
            "Replacement strings must be 48,000 characters or fewer."
        )

    occurrences = file.content.count(old)
    if occurrences == 0:
        raise ToolValidationError("old_string not found in the target file.")
    if not params.replace_all and occurrences != 1:
        raise ToolValidationError(
            "old_string must match exactly once unless replace_all is true."
        )

    if params.replace_all:
        replacements = occurrences
        updated = file.content.replace(old, new)
    else:
        # Exactly one occurrence was verified above; replace only it.
        replacements = 1
        updated = file.content.replace(old, new, 1)

    # The edit is modeled as a full overwrite of the file's content.
    normalized_content = _normalize_content(updated)
    normalized = WriteFile(
        path=path,
        content=normalized_content,
        mode="overwrite",
    )
    message = _format_edit_message(path, replacements)
    return ToolResult(message=message, value=normalized)
|
|
807
|
+
|
|
808
|
+
def glob(
    self, params: GlobParams, *, context: ToolContext
) -> ToolResult[tuple[GlobMatch, ...]]:
    """Match files beneath an optional base path against a glob pattern.

    The pattern is matched case-sensitively against each file's path
    relative to the base; results are sorted by path.
    """
    ensure_context_uses_session(context=context, session=self._section.session)
    # The context is only needed for the session check above.
    del context
    base = _normalize_string_path(params.path, allow_empty=True, field="path")
    pattern = params.pattern.strip()
    if not pattern:
        raise ToolValidationError("Pattern must not be empty.")
    _ensure_ascii(pattern, "pattern")

    snapshot = self._section.latest_snapshot()
    base_len = len(base.segments)
    hits = [
        GlobMatch(
            path=file.path,
            size_bytes=file.size_bytes,
            version=file.version,
            updated_at=file.updated_at,
        )
        for file in snapshot.files
        if _is_path_prefix(file.path.segments, base.segments)
        and fnmatch.fnmatchcase("/".join(file.path.segments[base_len:]), pattern)
    ]
    hits.sort(key=lambda hit: hit.path.segments)
    message = _format_glob_message(base, pattern, hits)
    return ToolResult(message=message, value=tuple(hits))
|
|
838
|
+
|
|
839
|
+
def grep(
    self, params: GrepParams, *, context: ToolContext
) -> ToolResult[tuple[GrepMatch, ...]]:
    """Search VFS file contents line-by-line with a regular expression.

    Optional filters: ``params.path`` restricts the search to a subtree
    and ``params.glob`` filters candidate files by their path relative to
    that subtree (or the root when no subtree is given). An invalid regex
    is reported as a failed ``ToolResult`` rather than raised, so the
    caller can surface the message to the model.
    """
    ensure_context_uses_session(context=context, session=self._section.session)
    # The context is only needed for the session check above.
    del context
    try:
        pattern = re.compile(params.pattern)
    except re.error as error:
        # Malformed patterns come from the caller; soft-fail with a message.
        return ToolResult(
            message=f"Invalid regular expression: {error}",
            value=None,
            success=False,
        )

    base_path: VfsPath | None = None
    if params.path is not None:
        base_path = _normalize_string_path(
            params.path, allow_empty=True, field="path"
        )
    glob_pattern = params.glob.strip() if params.glob is not None else None
    if glob_pattern:
        _ensure_ascii(glob_pattern, "glob")

    snapshot = self._section.latest_snapshot()
    matches: list[GrepMatch] = []
    for file in snapshot.files:
        # Subtree filter: skip files outside the requested base path.
        if base_path is not None and not _is_path_prefix(
            file.path.segments, base_path.segments
        ):
            continue
        if glob_pattern:
            # Glob is matched against the base-relative path (or the
            # full path when no base was supplied).
            relative = (
                "/".join(file.path.segments[len(base_path.segments) :])
                if base_path is not None
                else "/".join(file.path.segments)
            )
            if not fnmatch.fnmatchcase(relative, glob_pattern):
                continue
        # Line numbers are 1-based, matching editor conventions.
        for index, line in enumerate(file.content.splitlines(), start=1):
            if pattern.search(line):
                matches.append(
                    GrepMatch(
                        path=file.path,
                        line_number=index,
                        line=line,
                    )
                )
    matches.sort(key=lambda match: (match.path.segments, match.line_number))
    message = _format_grep_message(params.pattern, matches)
    return ToolResult(message=message, value=tuple(matches))
|
|
889
|
+
|
|
890
|
+
def remove(
    self, params: RemoveParams, *, context: ToolContext
) -> ToolResult[DeleteEntry]:
    """Delete a single file or an entire subtree from the VFS.

    Raises ``ToolValidationError`` when nothing lives under the path.
    """
    ensure_context_uses_session(context=context, session=self._section.session)
    # The context is only needed for the session check above.
    del context
    target = _normalize_string_path(params.path, field="path")
    snapshot = self._section.latest_snapshot()
    doomed = [
        entry
        for entry in snapshot.files
        if _is_path_prefix(entry.path.segments, target.segments)
    ]
    if not doomed:
        raise ToolValidationError("No files matched the provided path.")
    value = DeleteEntry(path=target)
    return ToolResult(message=_format_delete_message(target, doomed), value=value)
|
|
907
|
+
|
|
908
|
+
|
|
909
|
+
def _normalize_content(content: str) -> str:
    """Validate that *content* fits within the write budget.

    Raises:
        ToolValidationError: if the text exceeds ``_MAX_WRITE_LENGTH``
            characters.
    """
    if len(content) > _MAX_WRITE_LENGTH:
        # Derive the limit from the constant so the message cannot drift
        # out of sync if the budget ever changes (it previously hard-coded
        # "48,000").
        raise ToolValidationError(
            f"Content exceeds maximum length of {_MAX_WRITE_LENGTH:,} characters."
        )
    return content
|
|
915
|
+
|
|
916
|
+
|
|
917
|
+
def _normalize_offset(offset: int) -> int:
|
|
918
|
+
if offset < 0:
|
|
919
|
+
raise ToolValidationError("offset must be non-negative.")
|
|
920
|
+
return offset
|
|
921
|
+
|
|
922
|
+
|
|
923
|
+
def _normalize_limit(limit: int) -> int:
    """Validate *limit* is positive and clamp it to the maximum read window."""
    if limit <= 0:
        raise ToolValidationError("limit must be a positive integer.")
    return limit if limit < _MAX_READ_LIMIT else _MAX_READ_LIMIT
|
|
927
|
+
|
|
928
|
+
|
|
929
|
+
def _normalize_string_path(
    raw: str | None, *, allow_empty: bool = False, field: str
) -> VfsPath:
    """Parse a user-supplied string path into a validated ``VfsPath``.

    ``None`` or blank input maps to the VFS root when *allow_empty* is set,
    otherwise raises. A leading "/" is tolerated and treated as the root.

    Raises:
        ToolValidationError: for missing/blank input (when not allowed),
            excessive depth, or an empty result when a target is required.
    """
    if raw is None:
        if allow_empty:
            return VfsPath(())
        raise ToolValidationError(f"{field} is required.")

    text = raw.strip()
    if not text:
        if allow_empty:
            return VfsPath(())
        raise ToolValidationError(f"{field} must not be empty.")

    # lstrip is a no-op for relative paths, so no guard is needed.
    segments = _normalize_segments(text.lstrip("/").split("/"))
    if len(segments) > _MAX_PATH_DEPTH:
        raise ToolValidationError("Path depth exceeds the allowed limit (16 segments).")
    if not segments and not allow_empty:
        raise ToolValidationError(f"{field} must reference a file or directory.")
    return VfsPath(segments)
|
|
952
|
+
|
|
953
|
+
|
|
954
|
+
def _normalize_optional_path(path: VfsPath | None) -> VfsPath:
    """Coerce ``None`` to the VFS root; otherwise re-validate the path."""
    return VfsPath(()) if path is None else _normalize_path(path)
|
|
958
|
+
|
|
959
|
+
|
|
960
|
+
def _normalize_path(path: VfsPath) -> VfsPath:
    """Re-validate an existing ``VfsPath`` and enforce the depth limit."""
    cleaned = _normalize_segments(path.segments)
    if len(cleaned) > _MAX_PATH_DEPTH:
        raise ToolValidationError("Path depth exceeds the allowed limit (16 segments).")
    return VfsPath(cleaned)
|
|
965
|
+
|
|
966
|
+
|
|
967
|
+
def _normalize_segments(raw_segments: Sequence[str]) -> tuple[str, ...]:
    """Clean and validate raw path segments into a canonical tuple.

    Blank segments are dropped, embedded "/" separators are expanded,
    and each resulting piece must be ASCII, at most 80 characters, and
    neither "." nor "..".

    Raises:
        ToolValidationError: on absolute paths, dot segments, non-ASCII
            text, or over-long segments.
    """
    segments: list[str] = []
    for raw_segment in raw_segments:
        cleaned_segment = raw_segment.strip()
        if not cleaned_segment:
            continue
        if cleaned_segment.startswith("/"):
            raise ToolValidationError("Absolute paths are not allowed in the VFS.")
        # A single segment may still contain "/" (e.g. "a/b"); split it.
        for piece in cleaned_segment.split("/"):
            if not piece:
                continue
            if piece in {".", ".."}:
                raise ToolValidationError("Path segments may not include '.' or '..'.")
            _ensure_ascii(piece, "path segment")
            if len(piece) > _MAX_SEGMENT_LENGTH:
                raise ToolValidationError(
                    "Path segments must be 80 characters or fewer."
                )
            segments.append(piece)
    return tuple(segments)
|
|
987
|
+
|
|
988
|
+
|
|
989
|
+
def _ensure_ascii(value: str, field: str) -> None:
    """Reject *value* unless it is pure ASCII text."""
    try:
        _ = value.encode(_ASCII)
    except UnicodeEncodeError as error:  # pragma: no cover - defensive guard
        message = f"{field.capitalize()} must be ASCII text."
        raise ToolValidationError(message) from error
|
|
996
|
+
|
|
997
|
+
|
|
998
|
+
def _find_file(files: Iterable[VfsFile], path: VfsPath) -> VfsFile | None:
|
|
999
|
+
target = path.segments
|
|
1000
|
+
for file in files:
|
|
1001
|
+
if file.path.segments == target:
|
|
1002
|
+
return file
|
|
1003
|
+
return None
|
|
1004
|
+
|
|
1005
|
+
|
|
1006
|
+
def _require_file(files: Iterable[VfsFile], path: VfsPath) -> VfsFile:
    """Look up *path* in *files*, raising when it does not exist."""
    found = _find_file(files, path)
    if found is None:
        raise ToolValidationError("File does not exist in the virtual filesystem.")
    return found
|
|
1011
|
+
|
|
1012
|
+
|
|
1013
|
+
def _is_path_prefix(path: Sequence[str], prefix: Sequence[str]) -> bool:
|
|
1014
|
+
if len(path) < len(prefix):
|
|
1015
|
+
return False
|
|
1016
|
+
return all(path[index] == prefix[index] for index in range(len(prefix)))
|
|
1017
|
+
|
|
1018
|
+
|
|
1019
|
+
def _format_directory_message(path: VfsPath, entries: Sequence[FileInfo]) -> str:
    """Summarize a directory listing as a one-line human message."""
    kinds = [entry.kind for entry in entries]
    dirs = kinds.count("directory")
    files = kinds.count("file")
    subdir_word = "subdir" if dirs == 1 else "subdirs"
    file_word = "file" if files == 1 else "files"
    where = _format_path(path)
    return f"Listed directory {where} ({dirs} {subdir_word}, {files} {file_word})."
|
|
1029
|
+
|
|
1030
|
+
|
|
1031
|
+
def _format_read_file_message(file: VfsFile, start: int, end: int) -> str:
    """Describe which 1-based line range of *file* was returned."""
    label = _format_path(file.path)
    if end == start:
        return f"Read file {label} (no lines returned)."
    return f"Read file {label} (lines {start + 1}-{end})."
|
|
1036
|
+
|
|
1037
|
+
|
|
1038
|
+
def _format_write_file_message(path: VfsPath, content: str, mode: WriteMode) -> str:
    """Describe a write in terms of its mode and encoded byte size."""
    verbs = {"create": "Created", "overwrite": "Updated", "append": "Appended to"}
    verb = verbs[mode]
    byte_count = len(content.encode(_DEFAULT_ENCODING))
    return f"{verb} {_format_path(path)} ({byte_count} bytes)."
|
|
1047
|
+
|
|
1048
|
+
|
|
1049
|
+
def _format_edit_message(path: VfsPath, replacements: int) -> str:
    """Describe how many occurrences were replaced in *path*."""
    noun = "occurrence" if replacements == 1 else "occurrences"
    return f"Replaced {replacements} {noun} in {_format_path(path)}."
|
|
1053
|
+
|
|
1054
|
+
|
|
1055
|
+
def _format_glob_message(
    base: VfsPath, pattern: str, matches: Sequence[GlobMatch]
) -> str:
    """Summarize a glob run: match count, base directory, and pattern."""
    count = len(matches)
    noun = "match" if count == 1 else "matches"
    return f"Found {count} {noun} under {_format_path(base)} for pattern '{pattern}'."
|
|
1061
|
+
|
|
1062
|
+
|
|
1063
|
+
def _format_grep_message(pattern: str, matches: Sequence[GrepMatch]) -> str:
|
|
1064
|
+
match_label = "match" if len(matches) == 1 else "matches"
|
|
1065
|
+
return f"Found {len(matches)} {match_label} for pattern '{pattern}'."
|
|
1066
|
+
|
|
1067
|
+
|
|
1068
|
+
def _format_delete_message(path: VfsPath, files: Sequence[VfsFile]) -> str:
    """Describe how many entries were removed beneath *path*."""
    count = len(files)
    noun = "entry" if count == 1 else "entries"
    return f"Deleted {count} {noun} under {_format_path(path)}."
|
|
1072
|
+
|
|
1073
|
+
|
|
1074
|
+
def _format_path(path: VfsPath) -> str:
|
|
1075
|
+
return "/".join(path.segments) or "/"
|
|
1076
|
+
|
|
1077
|
+
|
|
1078
|
+
def normalize_host_root(path: os.PathLike[str] | str) -> Path:
    """Expand and resolve *path*, requiring that it exist on the host."""
    resolved = Path(path).expanduser().resolve()
    if resolved.exists():
        return resolved
    raise ToolValidationError("Allowed host root does not exist.")
|
|
1083
|
+
|
|
1084
|
+
|
|
1085
|
+
def materialize_host_mounts(
    mounts: Sequence[HostMount], allowed_roots: Sequence[Path]
) -> tuple[VirtualFileSystem, tuple[HostMountPreview, ...]]:
    """Load every mount into one merged, path-sorted virtual filesystem.

    Later mounts win when two mounts map a file to the same VFS path.
    Returns the merged filesystem and one preview per mount.
    """
    if not mounts:
        return VirtualFileSystem(), ()

    merged: dict[tuple[str, ...], VfsFile] = {}
    previews: list[HostMountPreview] = []
    for mount in mounts:
        mount_files, preview = _load_mount(mount, allowed_roots)
        previews.append(preview)
        merged.update((item.path.segments, item) for item in mount_files)
    ordered = tuple(sorted(merged.values(), key=lambda item: item.path.segments))
    return VirtualFileSystem(files=ordered), tuple(previews)
|
|
1100
|
+
|
|
1101
|
+
|
|
1102
|
+
def render_host_mounts_block(previews: Sequence[HostMountPreview]) -> str:
    """Render the configured host mounts as a human-readable text block.

    Returns an empty string when no mounts are configured.
    """
    if not previews:
        return ""

    out: list[str] = ["Configured host mounts:"]
    for item in previews:
        mounted_at = _format_path(item.mount_path)
        resolved_label = str(item.resolved_host)
        out.append(
            f"- Host `{resolved_label}` (configured as `{item.host_path}`) mounted at `{mounted_at}`."
        )
        out.append(f" Contents: {_format_mount_entries(item.entries)}")
    return "\n".join(out)
|
|
1116
|
+
|
|
1117
|
+
|
|
1118
|
+
def _render_section_template(previews: Sequence[HostMountPreview]) -> str:
    """Append the mounts block to the base section template when present."""
    mounts_block = render_host_mounts_block(previews)
    if not mounts_block:
        return _VFS_SECTION_TEMPLATE
    return f"{_VFS_SECTION_TEMPLATE}\n\n{mounts_block}"
|
|
1123
|
+
|
|
1124
|
+
|
|
1125
|
+
def _format_mount_entries(entries: Sequence[str]) -> str:
    """Render up to the preview cap of entries, noting how many were elided."""
    if not entries:
        return "<empty>"
    shown = entries[:_MAX_MOUNT_PREVIEW_ENTRIES]
    rendered = " ".join(f"`{name}`" for name in shown)
    hidden = len(entries) - len(shown)
    return rendered if hidden <= 0 else f"{rendered} … (+{hidden} more)"
|
|
1134
|
+
|
|
1135
|
+
|
|
1136
|
+
def _load_mount(
    mount: HostMount, allowed_roots: Sequence[Path]
) -> tuple[tuple[VfsFile, ...], HostMountPreview]:
    """Read one host mount into VFS files plus a preview of its contents.

    Files are filtered by the mount's include/exclude globs (matched on
    the POSIX path relative to the mount root), decoded as UTF-8, and
    charged against ``mount.max_bytes`` when a budget is configured.

    Raises:
        ToolValidationError: on an empty or non-ASCII host path, a path
            outside the allowed roots, an unreadable or non-UTF-8 file,
            or when the byte budget is exceeded.
    """
    host_path = mount.host_path.strip()
    if not host_path:
        raise ToolValidationError("Host mount path must not be empty.")
    _ensure_ascii(host_path, "host path")
    resolved_host = _resolve_mount_path(host_path, allowed_roots)
    include_patterns = _normalize_globs(mount.include_glob, "include_glob")
    exclude_patterns = _normalize_globs(mount.exclude_glob, "exclude_glob")
    mount_prefix = _normalize_optional_path(mount.mount_path)
    preview_entries = _list_mount_entries(resolved_host)
    preview = HostMountPreview(
        host_path=host_path,
        resolved_host=resolved_host,
        mount_path=mount_prefix,
        entries=preview_entries,
        is_directory=resolved_host.is_dir(),
    )

    files: list[VfsFile] = []
    consumed_bytes = 0
    # One timestamp for the whole mount so all files share created/updated.
    timestamp = _now()
    for path in _iter_mount_files(resolved_host, mount.follow_symlinks):
        # A single-file mount has no directory to be relative to; use its name.
        relative = (
            Path(path.name)
            if resolved_host.is_file()
            else path.relative_to(resolved_host)
        )
        relative_posix = relative.as_posix()
        # An empty include list means "include everything".
        if include_patterns and not any(
            fnmatch.fnmatchcase(relative_posix, pattern) for pattern in include_patterns
        ):
            continue
        if any(
            fnmatch.fnmatchcase(relative_posix, pattern) for pattern in exclude_patterns
        ):
            continue

        try:
            content = path.read_text(encoding=_DEFAULT_ENCODING)
        except UnicodeDecodeError as error:  # pragma: no cover - defensive guard
            raise ToolValidationError("Mounted file must be valid UTF-8.") from error
        except OSError as error:
            raise ToolValidationError(f"Failed to read mounted file {path}.") from error
        size = len(content.encode(_DEFAULT_ENCODING))
        if mount.max_bytes is not None and consumed_bytes + size > mount.max_bytes:
            raise ToolValidationError("Host mount exceeded the configured byte budget.")
        consumed_bytes += size

        # Place the file beneath the configured mount prefix in the VFS.
        segments = mount_prefix.segments + relative.parts
        normalized_path = _normalize_path(VfsPath(segments))
        file = VfsFile(
            path=normalized_path,
            content=content,
            encoding=_DEFAULT_ENCODING,
            size_bytes=size,
            version=1,
            created_at=timestamp,
            updated_at=timestamp,
        )
        files.append(file)
    return tuple(files), preview
|
|
1199
|
+
|
|
1200
|
+
|
|
1201
|
+
def _list_mount_entries(root: Path) -> tuple[str, ...]:
|
|
1202
|
+
if root.is_file():
|
|
1203
|
+
return (root.name,)
|
|
1204
|
+
try:
|
|
1205
|
+
children = sorted(root.iterdir(), key=lambda path: path.name.lower())
|
|
1206
|
+
except OSError as error: # pragma: no cover - defensive guard
|
|
1207
|
+
raise ToolValidationError(f"Failed to inspect host mount {root}.") from error
|
|
1208
|
+
labels: list[str] = []
|
|
1209
|
+
for child in children:
|
|
1210
|
+
suffix = "/" if child.is_dir() else ""
|
|
1211
|
+
labels.append(f"{child.name}{suffix}")
|
|
1212
|
+
return tuple(labels)
|
|
1213
|
+
|
|
1214
|
+
|
|
1215
|
+
def _resolve_mount_path(host_path: str, allowed_roots: Sequence[Path]) -> Path:
|
|
1216
|
+
if not allowed_roots:
|
|
1217
|
+
raise ToolValidationError("No allowed host roots configured for mounts.")
|
|
1218
|
+
for root in allowed_roots:
|
|
1219
|
+
candidate = (root / host_path).resolve()
|
|
1220
|
+
try:
|
|
1221
|
+
_ = candidate.relative_to(root)
|
|
1222
|
+
except ValueError:
|
|
1223
|
+
continue
|
|
1224
|
+
if candidate.exists():
|
|
1225
|
+
return candidate
|
|
1226
|
+
raise ToolValidationError("Host path is outside the allowed roots or missing.")
|
|
1227
|
+
|
|
1228
|
+
|
|
1229
|
+
def _normalize_globs(patterns: Sequence[str], field: str) -> tuple[str, ...]:
|
|
1230
|
+
normalized: list[str] = []
|
|
1231
|
+
for pattern in patterns:
|
|
1232
|
+
stripped = pattern.strip()
|
|
1233
|
+
if not stripped:
|
|
1234
|
+
continue
|
|
1235
|
+
_ensure_ascii(stripped, field)
|
|
1236
|
+
normalized.append(stripped)
|
|
1237
|
+
return tuple(normalized)
|
|
1238
|
+
|
|
1239
|
+
|
|
1240
|
+
def _iter_mount_files(root: Path, follow_symlinks: bool) -> Iterable[Path]:
|
|
1241
|
+
if root.is_file():
|
|
1242
|
+
yield root
|
|
1243
|
+
return
|
|
1244
|
+
for dirpath, _dirnames, filenames in os.walk(root, followlinks=follow_symlinks):
|
|
1245
|
+
current = Path(dirpath)
|
|
1246
|
+
for name in filenames:
|
|
1247
|
+
yield current / name
|
|
1248
|
+
|
|
1249
|
+
|
|
1250
|
+
def _make_write_reducer() -> TypedReducer[VirtualFileSystem]:
    """Build a reducer that applies a ``WriteFile`` event to the latest VFS.

    Semantics: "append" extends an existing file's content; any other mode
    replaces it. An existing target keeps its creation time and has its
    version bumped; a missing target is created at version 1. The reducer
    returns a single-element tuple holding the new snapshot.
    """

    def reducer(
        slice_values: tuple[VirtualFileSystem, ...],
        event: ReducerEvent,
        *,
        context: ReducerContextProtocol,
    ) -> tuple[VirtualFileSystem, ...]:
        del context
        # Start from the most recent snapshot, or an empty VFS.
        previous = slice_values[-1] if slice_values else VirtualFileSystem()
        params = cast(WriteFile, event.value)
        timestamp = _now()
        files = list(previous.files)
        existing_index = _index_of(files, params.path)
        existing = files[existing_index] if existing_index is not None else None
        if params.mode == "append" and existing is not None:
            content = existing.content + params.content
            created_at = existing.created_at
            version = existing.version + 1
        elif existing is not None:
            # "create"/"overwrite" on an existing file replaces content but
            # preserves the original creation time and bumps the version.
            content = params.content
            created_at = existing.created_at
            version = existing.version + 1
        else:
            content = params.content
            created_at = timestamp
            version = 1
        size = len(content.encode(_DEFAULT_ENCODING))
        updated_file = VfsFile(
            path=params.path,
            content=content,
            encoding=_DEFAULT_ENCODING,
            size_bytes=size,
            version=version,
            created_at=_truncate_to_milliseconds(created_at),
            updated_at=_truncate_to_milliseconds(timestamp),
        )
        if existing_index is not None:
            del files[existing_index]
        files.append(updated_file)
        # Keep snapshots deterministic: files sorted by path.
        files.sort(key=lambda file: file.path.segments)
        snapshot = VirtualFileSystem(files=tuple(files))
        return (snapshot,)

    return reducer
|
|
1294
|
+
|
|
1295
|
+
|
|
1296
|
+
def _make_delete_reducer() -> TypedReducer[VirtualFileSystem]:
    """Build a reducer that removes a path (and its subtree) from the VFS."""

    def reducer(
        slice_values: tuple[VirtualFileSystem, ...],
        event: ReducerEvent,
        *,
        context: ReducerContextProtocol,
    ) -> tuple[VirtualFileSystem, ...]:
        del context
        # Start from the most recent snapshot, or an empty VFS.
        current = slice_values[-1] if slice_values else VirtualFileSystem()
        entry = cast(DeleteEntry, event.value)
        doomed = entry.path.segments
        survivors = sorted(
            (
                item
                for item in current.files
                if not _is_path_prefix(item.path.segments, doomed)
            ),
            key=lambda item: item.path.segments,
        )
        return (VirtualFileSystem(files=tuple(survivors)),)

    return reducer
|
|
1317
|
+
|
|
1318
|
+
|
|
1319
|
+
def _index_of(files: list[VfsFile], path: VfsPath) -> int | None:
|
|
1320
|
+
for index, file in enumerate(files):
|
|
1321
|
+
if file.path.segments == path.segments:
|
|
1322
|
+
return index
|
|
1323
|
+
return None
|
|
1324
|
+
|
|
1325
|
+
|
|
1326
|
+
def _now() -> datetime:
    """Return the current UTC time truncated to millisecond precision."""
    return _truncate_to_milliseconds(datetime.now(UTC))
|
|
1328
|
+
|
|
1329
|
+
|
|
1330
|
+
def _truncate_to_milliseconds(value: datetime) -> datetime:
|
|
1331
|
+
microsecond = value.microsecond - (value.microsecond % 1000)
|
|
1332
|
+
return value.replace(microsecond=microsecond, tzinfo=UTC)
|
|
1333
|
+
|
|
1334
|
+
|
|
1335
|
+
# Public helper exports reused by Podman tooling.
# These aliases re-export the private helpers under stable public names so
# sibling tooling shares the exact same validation and formatting behavior.
MAX_WRITE_LENGTH: Final[int] = _MAX_WRITE_LENGTH
normalize_string_path = _normalize_string_path
normalize_path = _normalize_path
normalize_content = _normalize_content
normalize_offset = _normalize_offset
normalize_limit = _normalize_limit
ensure_ascii = _ensure_ascii
format_directory_message = _format_directory_message
format_write_file_message = _format_write_file_message
format_edit_message = _format_edit_message
format_glob_message = _format_glob_message
format_grep_message = _format_grep_message
find_file = _find_file
make_write_reducer = _make_write_reducer
make_delete_reducer = _make_delete_reducer


# Explicit public API of this module.
__all__ = [
    "MAX_WRITE_LENGTH",
    "DeleteEntry",
    "EditFileParams",
    "FileInfo",
    "GlobMatch",
    "GlobParams",
    "GrepMatch",
    "GrepParams",
    "HostMount",
    "ListDirectory",
    "ListDirectoryParams",
    "ListDirectoryResult",
    "ReadFile",
    "ReadFileParams",
    "ReadFileResult",
    "RemoveParams",
    "VfsFile",
    "VfsPath",
    "VfsToolsSection",
    "VirtualFileSystem",
    "WriteFile",
    "WriteFileParams",
    "ensure_ascii",
    "find_file",
    "format_directory_message",
    "format_edit_message",
    "format_glob_message",
    "format_grep_message",
    "format_write_file_message",
    "make_delete_reducer",
    "make_write_reducer",
    "normalize_content",
    "normalize_limit",
    "normalize_offset",
    "normalize_path",
    "normalize_string_path",
]
|