deepagents 1.9.0-alpha.1 → 1.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -707
- package/dist/index.cjs +1592 -1338
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +191 -20
- package/dist/index.d.ts +191 -20
- package/dist/index.js +1592 -1338
- package/dist/index.js.map +1 -1
- package/package.json +6 -6
package/dist/index.js
CHANGED
@@ -1,5 +1,4 @@
 import { AIMessage, HumanMessage, SystemMessage, ToolMessage, anthropicPromptCachingMiddleware, context, countTokensApproximately, createAgent, createMiddleware, humanInTheLoopMiddleware, todoListMiddleware, tool } from "langchain";
-import { ChatAnthropic } from "@langchain/anthropic";
 import { Command, REMOVE_ALL_MESSAGES, ReducedValue, StateSchema, getConfig, getCurrentTaskInput, getStore, isCommand } from "@langchain/langgraph";
 import { z } from "zod/v4";
 import micromatch from "micromatch";
@@ -247,7 +246,7 @@ function truncateIfTooLong(result) {
 * validatePath("C:\\Users\\file") // Throws: Windows absolute paths not supported
 * ```
 */
-function validatePath(path) {
+function validatePath$1(path) {
 const pathStr = path || "/";
 if (!pathStr || pathStr.trim() === "") throw new Error("Path cannot be empty");
 let normalized = pathStr.startsWith("/") ? pathStr : "/" + pathStr;
@@ -273,7 +272,7 @@ function validatePath(path) {
 function globSearchFiles(files, pattern, path = "/") {
 let normalizedPath;
 try {
-normalizedPath = validatePath(path);
+normalizedPath = validatePath$1(path);
 } catch {
 return "No files found";
 }
@@ -305,7 +304,7 @@ function globSearchFiles(files, pattern, path = "/") {
 function grepMatchesFromFiles(files, pattern, path = null, glob = null) {
 let normalizedPath;
 try {
-normalizedPath = validatePath(path);
+normalizedPath = validatePath$1(path);
 } catch {
 return [];
 }
@@ -540,6 +539,7 @@ async function resolveBackend(backend, runtime) {
 //#endregion
 //#region src/backends/state.ts
 const PREGEL_SEND_KEY = "__pregel_send";
+const PREGEL_READ_KEY = "__pregel_read";
 /**
 * Backend that stores files in agent state (ephemeral).
 *
@@ -577,12 +577,13 @@ var StateBackend = class {
 * Get files from current state.
 *
 * In legacy mode, reads from the injected {@link BackendRuntime}.
-* In zero-arg mode, reads
-*
+* In zero-arg mode, reads via {@link PREGEL_READ_KEY} with fresh=true,
+* which applies any pending task writes through the reducer before returning.
 */
-
-if (this.runtime) return this.runtime.state.files
-
+get files() {
+if (this.runtime) return this.runtime.state.files ?? {};
+const read = getConfig().configurable?.[PREGEL_READ_KEY];
+return read?.("files", true) ?? {};
 }
 /**
 * Push a files state update through LangGraph's internal send channel.
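Every file operation in the hunks that follow (ls, read, readRaw, write, edit, grep, glob, downloadFiles) is rewired to go through this `files` getter. A conceptual restatement of its two branches — PREGEL_READ_KEY is internal to LangGraph, and the shape of the injected read function is inferred from the getter itself:

// Legacy mode: a runtime was injected, so files are read straight off its state.
// Zero-arg mode: ask LangGraph's configurable channel reader for "files" with
// fresh=true, which folds the current task's pending writes through the files
// reducer first, so a write earlier in the same task is visible to a later read.
const read = getConfig().configurable?.[PREGEL_READ_KEY];
const files = read?.("files", true) ?? {};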
@@ -607,7 +608,7 @@ var StateBackend = class {
 * Directories have a trailing / in their path and is_dir=true.
 */
 ls(path) {
-const files = this.
+const files = this.files;
 const infos = [];
 const subdirs = /* @__PURE__ */ new Set();
 const normalizedPath = path.endsWith("/") ? path : path + "/";
@@ -648,7 +649,7 @@ var StateBackend = class {
 * @returns ReadResult with content on success or error on failure
 */
 read(filePath, offset = 0, limit = 500) {
-const fileData = this.
+const fileData = this.files[filePath];
 if (!fileData) return { error: `File '${filePath}' not found` };
 const fileDataV2 = migrateToFileDataV2(fileData, filePath);
 if (!isTextMimeType(fileDataV2.mimeType)) return {
@@ -668,7 +669,7 @@ var StateBackend = class {
 * @returns ReadRawResult with raw file data on success or error on failure
 */
 readRaw(filePath) {
-const fileData = this.
+const fileData = this.files[filePath];
 if (!fileData) return { error: `File '${filePath}' not found` };
 return { data: fileData };
 }
@@ -677,7 +678,7 @@ var StateBackend = class {
 * Returns WriteResult with filesUpdate to update LangGraph state.
 */
 write(filePath, content) {
-if (filePath in this.
+if (filePath in this.files) return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
 const mimeType = getMimeType(filePath);
 const newFileData = createFileData(content, void 0, this.fileFormat, mimeType);
 const update = { [filePath]: newFileData };
@@ -695,7 +696,7 @@ var StateBackend = class {
 * Returns EditResult with filesUpdate and occurrences.
 */
 edit(filePath, oldString, newString, replaceAll = false) {
-const fileData = this.
+const fileData = this.files[filePath];
 if (!fileData) return { error: `Error: File '${filePath}' not found` };
 const result = performStringReplacement(fileDataToString(fileData), oldString, newString, replaceAll);
 if (typeof result === "string") return { error: result };
@@ -720,13 +721,14 @@ var StateBackend = class {
 * Binary files are skipped.
 */
 grep(pattern, path = "/", glob = null) {
-
+const files = this.files;
+return { matches: grepMatchesFromFiles(files, pattern, path, glob) };
 }
 /**
 * Structured glob matching returning FileInfo objects.
 */
 glob(pattern, path = "/") {
-const files = this.
+const files = this.files;
 const result = globSearchFiles(files, pattern, path);
 if (result === "No files found") return { files: [] };
 const paths = result.split("\n");
@@ -784,7 +786,7 @@ var StateBackend = class {
 * @returns List of FileDownloadResponse objects, one per input path
 */
 downloadFiles(paths) {
-const files = this.
+const files = this.files;
 const responses = [];
 for (const path of paths) {
 const fileData = files[path];
@@ -814,277 +816,633 @@ var StateBackend = class {
 }
 };
 //#endregion
-//#region src/
-/**
-* Middleware for providing filesystem tools to an agent.
-*
-* Provides ls, read_file, write_file, edit_file, glob, and grep tools with support for:
-* - Pluggable backends (StateBackend, StoreBackend, FilesystemBackend, CompositeBackend)
-* - Tool result eviction for large outputs
-*/
-const INT_FORMATTER = new Intl.NumberFormat("en-US");
-/**
-* Tools that should be excluded from the large result eviction logic.
-*
-* This array contains tools that should NOT have their results evicted to the filesystem
-* when they exceed token limits. Tools are excluded for different reasons:
-*
-* 1. Tools with built-in truncation (ls, glob, grep):
-* These tools truncate their own output when it becomes too large. When these tools
-* produce truncated output due to many matches, it typically indicates the query
-* needs refinement rather than full result preservation. In such cases, the truncated
-* matches are potentially more like noise and the LLM should be prompted to narrow
-* its search criteria instead.
-*
-* 2. Tools with problematic truncation behavior (read_file):
-* read_file is tricky to handle as the failure mode here is single long lines
-* (e.g., imagine a jsonl file with very long payloads on each line). If we try to
-* truncate the result of read_file, the agent may then attempt to re-read the
-* truncated file using read_file again, which won't help.
-*
-* 3. Tools that never exceed limits (edit_file, write_file):
-* These tools return minimal confirmation messages and are never expected to produce
-* output large enough to exceed token limits, so checking them would be unnecessary.
-*/
-/**
-* All tool names registered by FilesystemMiddleware.
-* This is the single source of truth — used by createDeepAgent to detect
-* collisions with user-supplied tools at construction time.
-*/
-const FILESYSTEM_TOOL_NAMES = [
-"ls",
-"read_file",
-"write_file",
-"edit_file",
-"glob",
-"grep",
-"execute"
-];
-const TOOLS_EXCLUDED_FROM_EVICTION = [
-"ls",
-"glob",
-"grep",
-"read_file",
-"edit_file",
-"write_file"
-];
-/**
-* Maximum size for binary (non-text) files read via read_file, in bytes.
-* Base64-encoded content is ~33% larger, so 10MB raw ≈ 13.3MB in context.
-* This keeps inline multimodal payloads within all major provider limits.
-*/
-const MAX_BINARY_READ_SIZE_BYTES = 10 * 1024 * 1024;
-/**
-* Template for truncation message in read_file.
-* {file_path} will be filled in at runtime.
-*/
-const READ_FILE_TRUNCATION_MSG = `
-
-[Output was truncated due to size limits. The file content is very large. Consider reformatting the file to make it easier to navigate. For example, if this is JSON, use execute(command='jq . {file_path}') to pretty-print it with line breaks. For other formats, you can use appropriate formatting tools to split long lines.]`;
-/**
-* Message template for evicted tool results.
-*/
-const TOO_LARGE_TOOL_MSG = context`
-Tool result too large, the result of this tool call {tool_call_id} was saved in the filesystem at this path: {file_path}
-You can read the result from the filesystem by using the read_file tool, but make sure to only read part of the result at a time.
-You can do this by specifying an offset and limit in the read_file tool call.
-For example, to read the first 100 lines, you can use the read_file tool with offset=0 and limit=100.
-
-Here is a preview showing the head and tail of the result (lines of the form
-... [N lines truncated] ...
-indicate omitted lines in the middle of the content):
-
-{content_sample}
-`;
-/**
-* Message template for evicted HumanMessages.
-*/
-const TOO_LARGE_HUMAN_MSG = `Message content too large and was saved to the filesystem at: {file_path}
-
-You can read the full content using the read_file tool with pagination (offset and limit parameters).
-
-Here is a preview showing the head and tail of the content:
-
-{content_sample}`;
+//#region src/permissions/enforce.ts
 /**
-*
-*
-* For string content, returns it directly. For array content (mixed block types
-* like text + image), joins all text blocks. Returns empty string if no text found.
+* Validate permission rule paths at setup time. Throws if any path is
+* relative, contains `..`, or contains `~`.
 */
-function
-
-if (Array.isArray(message.content)) return message.content.filter((block) => block.type === "text" && typeof block.text === "string").map((block) => block.text).join("\n");
-return String(message.content);
+function validatePermissionPaths(permissions) {
+for (const permission of permissions) for (const path of permission.paths) validatePath(path);
 }
 /**
-*
-*
-*
-*
-*
-
-
-
-
-
-
-
-
-
-
-}
-return replacementText;
+* Canonicalize and validate an absolute path before permission checking.
+*
+* Throws for:
+* - Empty or non-string input
+* - Non-absolute paths (must start with `/`)
+* - Paths containing `..`
+* - Paths containing `~`
+*/
+function validatePath(raw) {
+if (typeof raw !== "string" || raw.length === 0) throw new Error("path must be a non-empty string");
+if (!raw.startsWith("/")) throw new Error(`path must be absolute: ${JSON.stringify(raw)}`);
+const segments = raw.split("/").filter((s) => s.length > 0);
+if (segments.includes("..")) throw new Error(`path must not contain "..": ${JSON.stringify(raw)}`);
+if (segments.includes("~")) throw new Error(`path must not contain "~": ${JSON.stringify(raw)}`);
+return `/${segments.join("/")}`;
 }
 /**
-*
+* Test whether `path` matches a glob `pattern`.
 *
-*
-*
-*
-
-function buildTruncatedHumanMessage(message, filePath) {
-const contentSample = createContentPreview(extractTextFromMessage(message));
-return new HumanMessage({
-content: buildEvictedHumanContent(message, TOO_LARGE_HUMAN_MSG.replace("{file_path}", filePath).replace("{content_sample}", contentSample)),
-id: message.id,
-additional_kwargs: { ...message.additional_kwargs },
-response_metadata: { ...message.response_metadata }
-});
-}
-/**
-* Create a preview of content showing head and tail with truncation marker.
+* Supports:
+* - `**` — any number of directory levels
+* - `*` — within a single path segment
+* - `{a,b}` — brace expansion
 *
-*
-* @param headLines - Number of lines to show from the start (default: 5).
-* @param tailLines - Number of lines to show from the end (default: 5).
-* @returns Formatted preview string with line numbers.
+* Uses `micromatch` with `dot: true` so dotfiles are matched by default.
 */
-function
-
-if (lines.length <= headLines + tailLines) return formatContentWithLineNumbers(lines.map((line) => line.substring(0, 1e3)), 1);
-const head = lines.slice(0, headLines).map((line) => line.substring(0, 1e3));
-const tail = lines.slice(-tailLines).map((line) => line.substring(0, 1e3));
-const headSample = formatContentWithLineNumbers(head, 1);
-const truncationNotice = `\n... [${lines.length - headLines - tailLines} lines truncated] ...\n`;
-const tailSample = formatContentWithLineNumbers(tail, lines.length - tailLines + 1);
-return headSample + truncationNotice + tailSample;
+function globMatch(path, pattern) {
+return micromatch.isMatch(path, pattern, { dot: true });
 }
 /**
-*
-
-const FileDataV1Schema = z.object({
-content: z.array(z.string()),
-created_at: z.string(),
-modified_at: z.string()
-});
-/**
-* Zod schema for FileDataV2 (content as string for text or Uint8Array for binary).
-*/
-const FileDataV2Schema = z.object({
-content: z.union([z.string(), z.instanceof(Uint8Array)]),
-mimeType: z.string(),
-created_at: z.string(),
-modified_at: z.string()
-});
-/**
-* Zod v3 schema for FileData (re-export from backends)
-*/
-const FileDataSchema = z.union([FileDataV1Schema, FileDataV2Schema]);
-/**
-* Reducer for files state that merges file updates with support for deletions.
-* When a file value is null, the file is deleted from state.
-* When a file value is non-null, it is added or updated in state.
+* Evaluate permission rules against an operation + path and return the
+* access decision.
 *
-*
-* merging their file changes instead of requiring LastValue semantics.
+* First-match-wins; permissive default.
 *
-* @
-* @param update - The new files record (from a subagent update), with null values for deletions
-* @returns Merged files record with deletions applied
+* @returns `"allow"` if the operation is permitted, `"deny"` otherwise.
 */
-function
-
-
-
-for (const [key, value] of Object.entries(update)) if (value !== null) result[key] = value;
-return result;
+function decidePathAccess(rules, operation, path) {
+for (const rule of rules) {
+if (!rule.operations.includes(operation)) continue;
+if (rule.paths.some((pattern) => globMatch(path, pattern))) return rule.mode ?? "allow";
 }
-
-for (const [key, value] of Object.entries(update)) if (value === null) delete result[key];
-else result[key] = value;
-return result;
+return "allow";
 }
+//#endregion
+//#region src/backends/composite.ts
 /**
-*
-* Defined at module level to ensure the same object identity is used across all agents,
-* preventing "Channel already exists with different type" errors when multiple agents
-* use createFilesystemMiddleware.
+* Backend that routes file operations to different backends based on path prefix.
 *
-*
+* This enables hybrid storage strategies like:
+* - `/memories/` → StoreBackend (persistent, cross-thread)
+* - Everything else → StateBackend (ephemeral, per-thread)
+*
+* The CompositeBackend handles path prefix stripping/re-adding transparently.
 */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-const
-
-
-
-
-
-
-
-
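Rule evaluation in decidePathAccess is first-match-wins with a permissive fallback: rules are walked in order, a rule is skipped unless its operations include the requested operation, and the first rule whose path glob matches decides the outcome (its mode, defaulting to "allow"). A small illustration; the rule objects are hypothetical, but their shape (operations, paths, optional mode) is exactly what the function reads:

// Deny writes under /memories/, allow everything else via a catch-all rule.
const rules = [
  { operations: ["write"], paths: ["/memories/**"], mode: "deny" },
  { operations: ["read", "write"], paths: ["/**"] } // mode omitted => "allow"
];
decidePathAccess(rules, "write", "/memories/notes.txt"); // "deny" (first match wins)
decidePathAccess(rules, "read", "/memories/notes.txt");  // "allow" (second rule matches)
decidePathAccess([], "read", "/anything");               // "allow" (permissive default)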
+var CompositeBackend = class {
+default;
+routes;
+sortedRoutes;
+constructor(defaultBackend, routes) {
+this.default = isSandboxProtocol(defaultBackend) ? adaptSandboxProtocol(defaultBackend) : adaptBackendProtocol(defaultBackend);
+this.routes = Object.fromEntries(Object.entries(routes).map(([k, v]) => [k, isSandboxProtocol(v) ? adaptSandboxProtocol(v) : adaptBackendProtocol(v)]));
+this.sortedRoutes = Object.entries(this.routes).sort((a, b) => b[0].length - a[0].length);
+}
+/** Delegates to default backend's id if it is a sandbox, otherwise empty string. */
+get id() {
+return isSandboxBackend(this.default) ? this.default.id : "";
+}
+/** Route prefixes registered on this backend (e.g. `["/workspace"]`). */
+get routePrefixes() {
+return Object.keys(this.routes);
+}
+/**
+* Type guard — returns true if `backend` is a {@link CompositeBackend}.
+*
+* Uses duck-typing on `routePrefixes` so it works across module boundaries
+* where `instanceof` may fail.
+*/
+static isInstance(backend) {
+return typeof backend === "object" && backend !== null && Array.isArray(backend.routePrefixes);
+}
+/**
+* Determine which backend handles this key and strip prefix.
+*
+* @param key - Original file path
+* @returns Tuple of [backend, stripped_key] where stripped_key has the route
+* prefix removed (but keeps leading slash).
+*/
+getBackendAndKey(key) {
+for (const [prefix, backend] of this.sortedRoutes) if (key.startsWith(prefix)) {
+const suffix = key.substring(prefix.length);
+return [backend, suffix ? "/" + suffix : "/"];
+}
+return [this.default, key];
+}
+/**
+* List files and directories in the specified directory (non-recursive).
+*
+* @param path - Absolute path to directory
+* @returns LsResult with list of FileInfo objects (with route prefixes added) on success or error on failure.
+* Directories have a trailing / in their path and is_dir=true.
+*/
+async ls(path) {
+for (const [routePrefix, backend] of this.sortedRoutes) if (path.startsWith(routePrefix.replace(/\/$/, ""))) {
+const suffix = path.substring(routePrefix.length);
+const searchPath = suffix ? "/" + suffix : "/";
+const result = await backend.ls(searchPath);
+if (result.error) return result;
+const prefixed = [];
+for (const fi of result.files || []) prefixed.push({
+...fi,
+path: routePrefix.slice(0, -1) + fi.path
+});
+return { files: prefixed };
+}
+if (path === "/") {
+const results = [];
+const defaultResult = await this.default.ls(path);
+if (defaultResult.error) return defaultResult;
+results.push(...defaultResult.files || []);
+for (const [routePrefix] of this.sortedRoutes) results.push({
+path: routePrefix,
+is_dir: true,
+size: 0,
+modified_at: ""
+});
+results.sort((a, b) => a.path.localeCompare(b.path));
+return { files: results };
+}
+return await this.default.ls(path);
+}
+/**
+* Read file content, routing to appropriate backend.
+*
+* @param filePath - Absolute file path
+* @param offset - Line offset to start reading from (0-indexed)
+* @param limit - Maximum number of lines to read
+* @returns Formatted file content with line numbers, or error message
+*/
+async read(filePath, offset = 0, limit = 500) {
+const [backend, strippedKey] = this.getBackendAndKey(filePath);
+return await backend.read(strippedKey, offset, limit);
+}
+/**
+* Read file content as raw FileData.
+*
+* @param filePath - Absolute file path
+* @returns ReadRawResult with raw file data on success or error on failure
+*/
+async readRaw(filePath) {
+const [backend, strippedKey] = this.getBackendAndKey(filePath);
+return await backend.readRaw(strippedKey);
+}
+/**
+* Structured search results or error string for invalid input.
+*/
+async grep(pattern, path = "/", glob = null) {
+for (const [routePrefix, backend] of this.sortedRoutes) if (path.startsWith(routePrefix.replace(/\/$/, ""))) {
+const searchPath = path.substring(routePrefix.length - 1);
+const raw = await backend.grep(pattern, searchPath || "/", glob);
+if (raw.error) return raw;
+return { matches: (raw.matches || []).map((m) => ({
+...m,
+path: routePrefix.slice(0, -1) + m.path
+})) };
+}
+const allMatches = [];
+const rawDefault = await this.default.grep(pattern, path, glob);
+if (rawDefault.error) return rawDefault;
+allMatches.push(...rawDefault.matches || []);
+for (const [routePrefix, backend] of Object.entries(this.routes)) {
+const raw = await backend.grep(pattern, "/", glob);
+if (raw.error) return raw;
+const matches = (raw.matches || []).map((m) => ({
+...m,
+path: routePrefix.slice(0, -1) + m.path
+}));
+allMatches.push(...matches);
+}
+return { matches: allMatches };
+}
+/**
+* Structured glob matching returning FileInfo objects.
+*/
+async glob(pattern, path = "/") {
+const results = [];
+for (const [routePrefix, backend] of this.sortedRoutes) if (path.startsWith(routePrefix.replace(/\/$/, ""))) {
+const searchPath = path.substring(routePrefix.length - 1);
+const result = await backend.glob(pattern, searchPath || "/");
+if (result.error) return result;
+return { files: (result.files || []).map((fi) => ({
+...fi,
+path: routePrefix.slice(0, -1) + fi.path
+})) };
+}
+const defaultResult = await this.default.glob(pattern, path);
+if (defaultResult.error) return defaultResult;
+results.push(...defaultResult.files || []);
+for (const [routePrefix, backend] of Object.entries(this.routes)) {
+const result = await backend.glob(pattern, "/");
+if (result.error) continue;
+const files = (result.files || []).map((fi) => ({
+...fi,
+path: routePrefix.slice(0, -1) + fi.path
+}));
+results.push(...files);
+}
+results.sort((a, b) => a.path.localeCompare(b.path));
+return { files: results };
+}
+/**
+* Create a new file, routing to appropriate backend.
+*
+* @param filePath - Absolute file path
+* @param content - File content as string
+* @returns WriteResult with path or error
+*/
+async write(filePath, content) {
+const [backend, strippedKey] = this.getBackendAndKey(filePath);
+return await backend.write(strippedKey, content);
+}
+/**
+* Edit a file, routing to appropriate backend.
+*
+* @param filePath - Absolute file path
+* @param oldString - String to find and replace
+* @param newString - Replacement string
+* @param replaceAll - If true, replace all occurrences
+* @returns EditResult with path, occurrences, or error
+*/
+async edit(filePath, oldString, newString, replaceAll = false) {
+const [backend, strippedKey] = this.getBackendAndKey(filePath);
+return await backend.edit(strippedKey, oldString, newString, replaceAll);
+}
+/**
+* Execute a command via the default backend.
+* Execution is not path-specific, so it always delegates to the default backend.
+*
+* @param command - Full shell command string to execute
+* @returns ExecuteResponse with combined output, exit code, and truncation flag
+* @throws Error if the default backend doesn't support command execution
+*/
+execute(command) {
+if (!isSandboxBackend(this.default)) throw new Error("Default backend doesn't support command execution (SandboxBackendProtocol). To enable execution, provide a default backend that implements SandboxBackendProtocol.");
+return Promise.resolve(this.default.execute(command));
+}
+/**
+* Upload multiple files, batching by backend for efficiency.
+*
+* @param files - List of [path, content] tuples to upload
+* @returns List of FileUploadResponse objects, one per input file
+*/
+async uploadFiles(files) {
+const results = Array.from({ length: files.length }, () => null);
+const batchesByBackend = /* @__PURE__ */ new Map();
+for (let idx = 0; idx < files.length; idx++) {
+const [path, content] = files[idx];
+const [backend, strippedPath] = this.getBackendAndKey(path);
+if (!batchesByBackend.has(backend)) batchesByBackend.set(backend, []);
+batchesByBackend.get(backend).push({
+idx,
+path: strippedPath,
+content
+});
+}
+for (const [backend, batch] of batchesByBackend) {
+if (!backend.uploadFiles) throw new Error("Backend does not support uploadFiles");
+const batchFiles = batch.map((b) => [b.path, b.content]);
+const batchResponses = await backend.uploadFiles(batchFiles);
+for (let i = 0; i < batch.length; i++) {
+const originalIdx = batch[i].idx;
+results[originalIdx] = {
+path: files[originalIdx][0],
+error: batchResponses[i]?.error ?? null
+};
+}
+}
+return results;
+}
+/**
+* Download multiple files, batching by backend for efficiency.
+*
+* @param paths - List of file paths to download
+* @returns List of FileDownloadResponse objects, one per input path
+*/
+async downloadFiles(paths) {
+const results = Array.from({ length: paths.length }, () => null);
+const batchesByBackend = /* @__PURE__ */ new Map();
+for (let idx = 0; idx < paths.length; idx++) {
+const path = paths[idx];
+const [backend, strippedPath] = this.getBackendAndKey(path);
+if (!batchesByBackend.has(backend)) batchesByBackend.set(backend, []);
+batchesByBackend.get(backend).push({
+idx,
+path: strippedPath
+});
+}
+for (const [backend, batch] of batchesByBackend) {
+if (!backend.downloadFiles) throw new Error("Backend does not support downloadFiles");
+const batchPaths = batch.map((b) => b.path);
+const batchResponses = await backend.downloadFiles(batchPaths);
+for (let i = 0; i < batch.length; i++) {
+const originalIdx = batch[i].idx;
+results[originalIdx] = {
+path: paths[originalIdx],
+content: batchResponses[i]?.content ?? null,
+error: batchResponses[i]?.error ?? null
+};
+}
+}
+return results;
+}
+};
+//#endregion
|
+
//#region src/middleware/fs.ts
|
|
1143
|
+
/**
|
|
1144
|
+
* Middleware for providing filesystem tools to an agent.
|
|
1145
|
+
*
|
|
1146
|
+
* Provides ls, read_file, write_file, edit_file, glob, and grep tools with support for:
|
|
1147
|
+
* - Pluggable backends (StateBackend, StoreBackend, FilesystemBackend, CompositeBackend)
|
|
1148
|
+
* - Tool result eviction for large outputs
|
|
1149
|
+
*/
|
|
1150
|
+
const INT_FORMATTER = new Intl.NumberFormat("en-US");
|
|
1151
|
+
/**
|
|
1152
|
+
* Tools that should be excluded from the large result eviction logic.
|
|
1153
|
+
*
|
|
1154
|
+
* This array contains tools that should NOT have their results evicted to the filesystem
|
|
1155
|
+
* when they exceed token limits. Tools are excluded for different reasons:
|
|
1156
|
+
*
|
|
1157
|
+
* 1. Tools with built-in truncation (ls, glob, grep):
|
|
1158
|
+
* These tools truncate their own output when it becomes too large. When these tools
|
|
1159
|
+
* produce truncated output due to many matches, it typically indicates the query
|
|
1160
|
+
* needs refinement rather than full result preservation. In such cases, the truncated
|
|
1161
|
+
* matches are potentially more like noise and the LLM should be prompted to narrow
|
|
1162
|
+
* its search criteria instead.
|
|
1163
|
+
*
|
|
1164
|
+
* 2. Tools with problematic truncation behavior (read_file):
|
|
1165
|
+
* read_file is tricky to handle as the failure mode here is single long lines
|
|
1166
|
+
* (e.g., imagine a jsonl file with very long payloads on each line). If we try to
|
|
1167
|
+
* truncate the result of read_file, the agent may then attempt to re-read the
|
|
1168
|
+
* truncated file using read_file again, which won't help.
|
|
1169
|
+
*
|
|
1170
|
+
* 3. Tools that never exceed limits (edit_file, write_file):
|
|
1171
|
+
* These tools return minimal confirmation messages and are never expected to produce
|
|
1172
|
+
* output large enough to exceed token limits, so checking them would be unnecessary.
|
|
1173
|
+
*/
|
|
1174
|
+
/**
|
|
1175
|
+
* All tool names registered by FilesystemMiddleware.
|
|
1176
|
+
* This is the single source of truth — used by createDeepAgent to detect
|
|
1177
|
+
* collisions with user-supplied tools at construction time.
|
|
1178
|
+
*/
|
|
1179
|
+
const FILESYSTEM_TOOL_NAMES = [
|
|
1180
|
+
"ls",
|
|
1181
|
+
"read_file",
|
|
1182
|
+
"write_file",
|
|
1183
|
+
"edit_file",
|
|
1184
|
+
"glob",
|
|
1185
|
+
"grep",
|
|
1186
|
+
"execute"
|
|
1187
|
+
];
|
|
1188
|
+
const TOOLS_EXCLUDED_FROM_EVICTION = [
|
|
1189
|
+
"ls",
|
|
1190
|
+
"glob",
|
|
1191
|
+
"grep",
|
|
1192
|
+
"read_file",
|
|
1193
|
+
"edit_file",
|
|
1194
|
+
"write_file"
|
|
1195
|
+
];
|
|
1196
|
+
/**
|
|
1197
|
+
* Maximum size for binary (non-text) files read via read_file, in bytes.
|
|
1198
|
+
* Base64-encoded content is ~33% larger, so 10MB raw ≈ 13.3MB in context.
|
|
1199
|
+
* This keeps inline multimodal payloads within all major provider limits.
|
|
1200
|
+
*/
|
|
1201
|
+
const MAX_BINARY_READ_SIZE_BYTES = 10 * 1024 * 1024;
|
|
1202
|
+
/**
|
|
1203
|
+
* Template for truncation message in read_file.
|
|
1204
|
+
* {file_path} will be filled in at runtime.
|
|
1205
|
+
*/
|
|
1206
|
+
const READ_FILE_TRUNCATION_MSG = `
|
|
1207
|
+
|
|
1208
|
+
[Output was truncated due to size limits. The file content is very large. Consider reformatting the file to make it easier to navigate. For example, if this is JSON, use execute(command='jq . {file_path}') to pretty-print it with line breaks. For other formats, you can use appropriate formatting tools to split long lines.]`;
|
|
1209
|
+
/**
|
|
1210
|
+
* Message template for evicted tool results.
|
|
1211
|
+
*/
|
|
1212
|
+
const TOO_LARGE_TOOL_MSG = context`
|
|
1213
|
+
Tool result too large, the result of this tool call {tool_call_id} was saved in the filesystem at this path: {file_path}
|
|
1214
|
+
You can read the result from the filesystem by using the read_file tool, but make sure to only read part of the result at a time.
|
|
1215
|
+
You can do this by specifying an offset and limit in the read_file tool call.
|
|
1216
|
+
For example, to read the first ${100} lines, you can use the read_file tool with offset=0 and limit=${100}.
|
|
1217
|
+
|
|
1218
|
+
Here is a preview showing the head and tail of the result (lines of the form
|
|
1219
|
+
... [N lines truncated] ...
|
|
1220
|
+
indicate omitted lines in the middle of the content):
|
|
1221
|
+
|
|
1222
|
+
{content_sample}
|
|
1223
|
+
`;
|
|
1224
|
+
/**
|
|
1225
|
+
* Message template for evicted HumanMessages.
|
|
1226
|
+
*/
|
|
1227
|
+
const TOO_LARGE_HUMAN_MSG = `Message content too large and was saved to the filesystem at: {file_path}
|
|
1228
|
+
|
|
1229
|
+
You can read the full content using the read_file tool with pagination (offset and limit parameters).
|
|
1230
|
+
|
|
1231
|
+
Here is a preview showing the head and tail of the content:
|
|
1232
|
+
|
|
1233
|
+
{content_sample}`;
|
|
1234
|
+
/**
|
|
1235
|
+
* Extract text content from a message.
|
|
1236
|
+
*
|
|
1237
|
+
* For string content, returns it directly. For array content (mixed block types
|
|
1238
|
+
* like text + image), joins all text blocks. Returns empty string if no text found.
|
|
1239
|
+
*/
|
|
1240
|
+
function extractTextFromMessage(message) {
|
|
1241
|
+
if (typeof message.content === "string") return message.content;
|
|
1242
|
+
if (Array.isArray(message.content)) return message.content.filter((block) => block.type === "text" && typeof block.text === "string").map((block) => block.text).join("\n");
|
|
1243
|
+
return String(message.content);
|
|
1244
|
+
}
|
|
1245
|
+
/**
|
|
1246
|
+
* Build replacement content for an evicted HumanMessage, preserving non-text blocks.
|
|
1247
|
+
*
|
|
1248
|
+
* For plain string content, returns the replacement text directly. For list content
|
|
1249
|
+
* with mixed block types (e.g., text + image), replaces all text blocks with a single
|
|
1250
|
+
* text block containing the replacement text while keeping non-text blocks intact.
|
|
1251
|
+
*/
|
|
1252
|
+
function buildEvictedHumanContent(message, replacementText) {
|
|
1253
|
+
if (typeof message.content === "string") return replacementText;
|
|
1254
|
+
if (Array.isArray(message.content)) {
|
|
1255
|
+
const mediaBlocks = message.content.filter((block) => typeof block === "object" && block !== null && block.type !== "text");
|
|
1256
|
+
if (mediaBlocks.length === 0) return replacementText;
|
|
1257
|
+
return [{
|
|
1258
|
+
type: "text",
|
|
1259
|
+
text: replacementText
|
|
1260
|
+
}, ...mediaBlocks];
|
|
1261
|
+
}
|
|
1262
|
+
return replacementText;
|
|
1263
|
+
}
|
|
1264
|
+
/**
|
|
1265
|
+
* Build a truncated HumanMessage for the model request.
|
|
1266
|
+
*
|
|
1267
|
+
* Computes a preview from the full content still in state and returns a
|
|
1268
|
+
* lightweight replacement the model will see. Pure string computation — no
|
|
1269
|
+
* backend I/O.
|
|
1270
|
+
*/
|
|
1271
|
+
function buildTruncatedHumanMessage(message, filePath) {
|
|
1272
|
+
const contentSample = createContentPreview(extractTextFromMessage(message));
|
|
1273
|
+
return new HumanMessage({
|
|
1274
|
+
content: buildEvictedHumanContent(message, TOO_LARGE_HUMAN_MSG.replace("{file_path}", filePath).replace("{content_sample}", contentSample)),
|
|
1275
|
+
id: message.id,
|
|
1276
|
+
additional_kwargs: { ...message.additional_kwargs },
|
|
1277
|
+
response_metadata: { ...message.response_metadata }
|
|
1278
|
+
});
|
|
1279
|
+
}
|
|
1280
|
+
/**
|
|
1281
|
+
* Create a preview of content showing head and tail with truncation marker.
|
|
1282
|
+
*
|
|
1283
|
+
* @param contentStr - The full content string to preview.
|
|
1284
|
+
* @param headLines - Number of lines to show from the start (default: 5).
|
|
1285
|
+
* @param tailLines - Number of lines to show from the end (default: 5).
|
|
1286
|
+
* @returns Formatted preview string with line numbers.
|
|
1287
|
+
*/
|
|
1288
|
+
function createContentPreview(contentStr, headLines = 5, tailLines = 5) {
|
|
1289
|
+
const lines = contentStr.split("\n");
|
|
1290
|
+
if (lines.length <= headLines + tailLines) return formatContentWithLineNumbers(lines.map((line) => line.substring(0, 1e3)), 1);
|
|
1291
|
+
const head = lines.slice(0, headLines).map((line) => line.substring(0, 1e3));
|
|
1292
|
+
const tail = lines.slice(-tailLines).map((line) => line.substring(0, 1e3));
|
|
1293
|
+
const headSample = formatContentWithLineNumbers(head, 1);
|
|
1294
|
+
const truncationNotice = `\n... [${lines.length - headLines - tailLines} lines truncated] ...\n`;
|
|
1295
|
+
const tailSample = formatContentWithLineNumbers(tail, lines.length - tailLines + 1);
|
|
1296
|
+
return headSample + truncationNotice + tailSample;
|
|
1297
|
+
}
|
|
1298
|
+
/**
|
|
1299
|
+
* Zod schema for legacy FileDataV1 (content as line array).
|
|
1300
|
+
*/
|
|
1301
|
+
const FileDataV1Schema = z.object({
|
|
1302
|
+
content: z.array(z.string()),
|
|
1303
|
+
created_at: z.string(),
|
|
1304
|
+
modified_at: z.string()
|
|
1305
|
+
});
|
|
1306
|
+
/**
|
|
1307
|
+
* Zod schema for FileDataV2 (content as string for text or Uint8Array for binary).
|
|
1308
|
+
*/
|
|
1309
|
+
const FileDataV2Schema = z.object({
|
|
1310
|
+
content: z.union([z.string(), z.instanceof(Uint8Array)]),
|
|
1311
|
+
mimeType: z.string(),
|
|
1312
|
+
created_at: z.string(),
|
|
1313
|
+
modified_at: z.string()
|
|
1314
|
+
});
|
|
1315
|
+
/**
|
|
1316
|
+
* Zod v3 schema for FileData (re-export from backends)
|
|
1317
|
+
*/
|
|
1318
|
+
const FileDataSchema = z.union([FileDataV1Schema, FileDataV2Schema]);
|
|
1319
|
+
/**
|
|
1320
|
+
* Reducer for files state that merges file updates with support for deletions.
|
|
1321
|
+
* When a file value is null, the file is deleted from state.
|
|
1322
|
+
* When a file value is non-null, it is added or updated in state.
|
|
1323
|
+
*
|
|
1324
|
+
* This reducer enables concurrent updates from parallel subagents by properly
|
|
1325
|
+
* merging their file changes instead of requiring LastValue semantics.
|
|
1326
|
+
*
|
|
1327
|
+
* @param current - The current files record (from state)
|
|
1328
|
+
* @param update - The new files record (from a subagent update), with null values for deletions
|
|
1329
|
+
* @returns Merged files record with deletions applied
|
|
1330
|
+
*/
|
|
1331
|
+
function fileDataReducer(current, update) {
|
|
1332
|
+
if (update === void 0) return current || {};
|
|
1333
|
+
if (current === void 0) {
|
|
1334
|
+
const result = {};
|
|
1335
|
+
for (const [key, value] of Object.entries(update)) if (value !== null) result[key] = value;
|
|
1336
|
+
return result;
|
|
1337
|
+
}
|
|
1338
|
+
const result = { ...current };
|
|
1339
|
+
for (const [key, value] of Object.entries(update)) if (value === null) delete result[key];
|
|
1340
|
+
else result[key] = value;
|
|
1341
|
+
return result;
|
|
1342
|
+
}
|
|
1343
|
+
/**
|
|
1344
|
+
* Shared filesystem state schema.
|
|
1345
|
+
* Defined at module level to ensure the same object identity is used across all agents,
|
|
1346
|
+
* preventing "Channel already exists with different type" errors when multiple agents
|
|
1347
|
+
* use createFilesystemMiddleware.
|
|
1348
|
+
*
|
|
1349
|
+
* Uses ReducedValue for files to allow concurrent updates from parallel subagents.
|
|
1350
|
+
*/
|
|
1351
|
+
const FilesystemStateSchema = new StateSchema({ files: new ReducedValue(z.record(z.string(), FileDataSchema).default(() => ({})), {
|
|
1352
|
+
inputSchema: z.record(z.string(), FileDataSchema.nullable()).optional(),
|
|
1353
|
+
reducer: fileDataReducer
|
|
1354
|
+
}) });
|
|
1355
|
+
/**
|
|
1356
|
+
* Throw a permission-denied error if `path` is denied under `rules`.
|
|
1357
|
+
*
|
|
1358
|
+
* No-op when `rules` is empty (permissive default). Paths that fail
|
|
1359
|
+
* `validatePath` are silently skipped — the tool's own input validation
|
|
1360
|
+
* will surface a better error.
|
|
1361
|
+
*
|
|
1362
|
+
* @internal
|
|
1363
|
+
*/
|
|
1364
|
+
function enforcePermission(rules, operation, path) {
|
|
1365
|
+
if (rules.length === 0) return;
|
|
1366
|
+
const canonical = validatePath(path);
|
|
1367
|
+
if (decidePathAccess(rules, operation, canonical) === "deny") throw new Error(`Error: permission denied for ${operation} on ${canonical}`);
|
|
1368
|
+
}
|
|
1369
|
+
/**
|
|
1370
|
+
* Filter a list of filesystem entries to those the rules permit.
|
|
1371
|
+
*
|
|
1372
|
+
* `getPath` extracts the absolute path from each entry. Entries with
|
|
1373
|
+
* unparsable paths are included (not silently dropped). Returns the
|
|
1374
|
+
* original array unchanged when `rules` is empty.
|
|
1375
|
+
*
|
|
1376
|
+
* @internal
|
|
1377
|
+
*/
|
|
1378
|
+
function filterByPermissions(entries, rules, operation, getPath) {
|
|
1379
|
+
if (rules.length === 0) return entries;
|
|
1380
|
+
return entries.filter((entry) => {
|
|
1381
|
+
try {
|
|
1382
|
+
return decidePathAccess(rules, operation, validatePath(getPath(entry))) !== "deny";
|
|
1383
|
+
} catch {
|
|
1384
|
+
return true;
|
|
1385
|
+
}
|
|
1386
|
+
});
|
|
1387
|
+
}
|
|
1388
|
+
const FILESYSTEM_SYSTEM_PROMPT = context`
|
|
1389
|
+
## Following Conventions
|
|
1390
|
+
|
|
1391
|
+
- Read files before editing — understand existing content before making changes
|
|
1392
|
+
- Mimic existing style, naming conventions, and patterns
|
|
1393
|
+
|
|
1394
|
+
## Filesystem Tools \`ls\`, \`read_file\`, \`write_file\`, \`edit_file\`, \`glob\`, \`grep\`
|
|
1395
|
+
|
|
1396
|
+
You have access to a filesystem which you can interact with using these tools.
|
|
1397
|
+
All file paths must start with a /.
|
|
1398
|
+
|
|
1399
|
+
- ls: list files in a directory (requires absolute path)
|
|
1400
|
+
- read_file: read a file from the filesystem
|
|
1401
|
+
- write_file: write to a file in the filesystem
|
|
1402
|
+
- edit_file: edit a file in the filesystem
|
|
1403
|
+
- glob: find files matching a pattern (e.g., "**/*.py")
|
|
1404
|
+
- grep: search for text within files
|
|
1405
|
+
`;
|
|
1406
|
+
const LS_TOOL_DESCRIPTION = context`
|
|
1407
|
+
Lists all files in a directory.
|
|
1408
|
+
|
|
1409
|
+
This is useful for exploring the filesystem and finding the right file to read or edit.
|
|
1410
|
+
You should almost ALWAYS use this tool before using the read_file or edit_file tools.
|
|
1411
|
+
`;
|
|
1412
|
+
const READ_FILE_TOOL_DESCRIPTION = context`
|
|
1413
|
+
Reads a file from the filesystem.
|
|
1414
|
+
|
|
1415
|
+
Assume this tool is able to read all files. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned.
|
|
1416
|
+
|
|
1417
|
+
Usage:
|
|
1418
|
+
- By default, it reads up to ${100} lines starting from the beginning of the file
|
|
1419
|
+
- **IMPORTANT for large files and codebase exploration**: Use pagination with offset and limit parameters to avoid context overflow
|
|
1420
|
+
- First scan: read_file(path, limit=${100}) to see file structure
|
|
1421
|
+
- Read more sections: read_file(path, offset=${100}, limit=200) for next 200 lines
|
|
1422
|
+
- Only omit limit (read full file) when necessary for editing
|
|
1423
|
+
- Specify offset and limit: read_file(path, offset=0, limit=${100}) reads first ${100} lines
|
|
1424
|
+
- Results are returned using cat -n format, with line numbers starting at 1
|
|
1425
|
+
- Lines longer than ${INT_FORMATTER.format(MAX_LINE_LENGTH)} characters will be split into multiple lines with continuation markers (e.g., 5.1, 5.2, etc.). When you specify a limit, these continuation lines count towards the limit.
|
|
1426
|
+
- You have the capability to call multiple tools in a single response. It is always better to speculatively read multiple files as a batch that are potentially useful.
|
|
1427
|
+
- If you read a file that exists but has empty contents you will receive a system reminder warning in place of file contents.
|
|
1428
|
+
- You should ALWAYS make sure a file has been read before editing it.
|
|
1429
|
+
`;
|
|
1430
|
+
const WRITE_FILE_TOOL_DESCRIPTION = context`
|
|
1431
|
+
Writes to a new file in the filesystem.
|
|
1432
|
+
|
|
1433
|
+
Usage:
|
|
1434
|
+
- The write_file tool will create a new file.
|
|
1435
|
+
- Prefer to edit existing files (with the edit_file tool) over creating new ones when possible.
|
|
1436
|
+
`;
|
|
1437
|
+
const EDIT_FILE_TOOL_DESCRIPTION = context`
|
|
1438
|
+
Performs exact string replacements in files.
|
|
1439
|
+
|
|
1440
|
+
Usage:
|
|
1441
|
+
- You must read the file before editing. This tool will error if you attempt an edit without reading the file first.
|
|
1442
|
+
- When editing, preserve the exact indentation (tabs/spaces) from the read output. Never include line number prefixes in old_string or new_string.
|
|
1443
|
+
- ALWAYS prefer editing existing files over creating new ones.
|
|
1444
|
+
- Only use emojis if the user explicitly requests it.
|
|
1445
|
+
`;
|
|
1088
1446
|
const GLOB_TOOL_DESCRIPTION = context`
|
|
1089
1447
|
Find files matching a glob pattern.
|
|
1090
1448
|
|
|
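The null-means-delete convention in fileDataReducer above is what lets parallel subagents merge file changes instead of clobbering each other. A quick illustration of the merge semantics, with fileA, fileB, and fileC standing in for FileData values:

const current = { "/a.txt": fileA, "/b.txt": fileB };
const update = { "/b.txt": null, "/c.txt": fileC }; // null deletes, non-null upserts
fileDataReducer(current, update); // => { "/a.txt": fileA, "/c.txt": fileC }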
@@ -1166,13 +1524,14 @@ const EXECUTION_SYSTEM_PROMPT = context`
 * Create ls tool using backend.
 */
 function createLsTool(backend, options) {
-const { customDescription } = options;
+const { customDescription, permissions } = options;
 return tool(async (input, runtime) => {
+enforcePermission(permissions, "read", input.path ?? "/");
 const resolvedBackend = await resolveBackend(backend, runtime);
 const path = input.path || "/";
 const lsResult = await resolvedBackend.ls(path);
 if (lsResult.error) return `Error listing files: ${lsResult.error}`;
-const infos = lsResult.files
+const infos = filterByPermissions(lsResult.files ?? [], permissions, "read", (info) => info.path);
 if (infos.length === 0) return `No files found in ${path}`;
 const lines = [];
 for (const info of infos) if (info.is_dir) lines.push(`${info.path} (directory)`);
|
|
|
1193
1552
|
* Create read_file tool using backend.
|
|
1194
1553
|
*/
|
|
1195
1554
|
function createReadFileTool(backend, options) {
|
|
1196
|
-
const { customDescription, toolTokenLimitBeforeEvict } = options;
|
|
1555
|
+
const { customDescription, toolTokenLimitBeforeEvict, permissions } = options;
|
|
1197
1556
|
return tool(async (input, runtime) => {
|
|
1557
|
+
enforcePermission(permissions, "read", input.file_path);
|
|
1198
1558
|
const resolvedBackend = await resolveBackend(backend, runtime);
|
|
1199
1559
|
const { file_path, offset = 0, limit = 100 } = input;
|
|
1200
1560
|
const readResult = await resolvedBackend.read(file_path, offset, limit);
|
|
@@ -1269,8 +1629,9 @@ function createReadFileTool(backend, options) {
|
|
|
1269
1629
|
* Create write_file tool using backend.
|
|
1270
1630
|
*/
|
|
1271
1631
|
function createWriteFileTool(backend, options) {
|
|
1272
|
-
const { customDescription } = options;
|
|
1632
|
+
const { customDescription, permissions } = options;
|
|
1273
1633
|
return tool(async (input, runtime) => {
|
|
1634
|
+
enforcePermission(permissions, "write", input.file_path);
|
|
1274
1635
|
const resolvedBackend = await resolveBackend(backend, runtime);
|
|
1275
1636
|
const { file_path, content } = input;
|
|
1276
1637
|
const result = await resolvedBackend.write(file_path, content);
|
|
@@ -1299,8 +1660,9 @@ function createWriteFileTool(backend, options) {
|
|
|
1299
1660
|
* Create edit_file tool using backend.
|
|
1300
1661
|
*/
|
|
1301
1662
|
function createEditFileTool(backend, options) {
|
|
1302
|
-
const { customDescription } = options;
|
|
1663
|
+
const { customDescription, permissions } = options;
|
|
1303
1664
|
return tool(async (input, runtime) => {
|
|
1665
|
+
enforcePermission(permissions, "write", input.file_path);
|
|
1304
1666
|
const resolvedBackend = await resolveBackend(backend, runtime);
|
|
1305
1667
|
const { file_path, old_string, new_string, replace_all = false } = input;
|
|
1306
1668
|
const result = await resolvedBackend.edit(file_path, old_string, new_string, replace_all);
|
|
@@ -1331,13 +1693,14 @@ function createEditFileTool(backend, options) {
|
|
|
1331
1693
|
* Create glob tool using backend.
|
|
1332
1694
|
*/
|
|
1333
1695
|
function createGlobTool(backend, options) {
|
|
1334
|
-
const { customDescription } = options;
|
|
1696
|
+
const { customDescription, permissions } = options;
|
|
1335
1697
|
return tool(async (input, runtime) => {
|
|
1698
|
+
enforcePermission(permissions, "read", input.path ?? "/");
|
|
1336
1699
|
const resolvedBackend = await resolveBackend(backend, runtime);
|
|
1337
1700
|
const { pattern, path = "/" } = input;
|
|
1338
1701
|
const globResult = await resolvedBackend.glob(pattern, path);
|
|
1339
1702
|
if (globResult.error) return `Error finding files: ${globResult.error}`;
|
|
1340
|
-
const infos = globResult.files
|
|
1703
|
+
const infos = filterByPermissions(globResult.files ?? [], permissions, "read", (info) => info.path);
|
|
1341
1704
|
if (infos.length === 0) return `No files found matching pattern '${pattern}'`;
|
|
1342
1705
|
const result = truncateIfTooLong(infos.map((info) => info.path));
|
|
1343
1706
|
if (Array.isArray(result)) return result.join("\n");
|
|
@@ -1355,13 +1718,14 @@ function createGlobTool(backend, options) {
|
|
|
1355
1718
|
* Create grep tool using backend.
|
|
1356
1719
|
*/
|
|
1357
1720
|
function createGrepTool(backend, options) {
|
|
1358
|
-
const { customDescription } = options;
|
|
1721
|
+
const { customDescription, permissions } = options;
|
|
1359
1722
|
return tool(async (input, runtime) => {
|
|
1723
|
+
enforcePermission(permissions, "read", input.path ?? "/");
|
|
1360
1724
|
const resolvedBackend = await resolveBackend(backend, runtime);
|
|
1361
1725
|
const { pattern, path = "/", glob = null } = input;
|
|
1362
1726
|
const result = await resolvedBackend.grep(pattern, path, glob);
|
|
1363
1727
|
if (result.error) return result.error;
|
|
1364
|
-
const matches = result.matches ?? [];
|
|
1728
|
+
const matches = filterByPermissions(result.matches ?? [], permissions, "read", (m) => m.path);
|
|
1365
1729
|
if (matches.length === 0) return `No matches found for pattern '${pattern}'`;
|
|
1366
1730
|
const lines = [];
|
|
1367
1731
|
let currentFile = null;
|
|
@@ -1389,10 +1753,11 @@ function createGrepTool(backend, options) {
|
|
|
1389
1753
|
* Create execute tool using backend.
|
|
1390
1754
|
*/
|
|
1391
1755
|
function createExecuteTool(backend, options) {
|
|
1392
|
-
const { customDescription } = options;
|
|
1756
|
+
const { customDescription, permissions } = options;
|
|
1393
1757
|
return tool(async (input, runtime) => {
|
|
1394
1758
|
const resolvedBackend = await resolveBackend(backend, runtime);
|
|
1395
1759
|
if (!isSandboxBackend(resolvedBackend)) return "Error: Execution not available. This agent's backend does not support command execution (SandboxBackendProtocol). To use the execute tool, provide a backend that implements SandboxBackendProtocol.";
|
|
1760
|
+
if (permissions.length > 0 && !allPathsScopedToRoutes(permissions, resolvedBackend)) return "Error: Execution not available. Filesystem permissions cannot be used with a backend that supports command execution because shell commands can access any path, making path-based rules ineffective.";
|
|
1396
1761
|
const result = await resolvedBackend.execute(input.command);
|
|
1397
1762
|
const parts = [result.output];
|
|
1398
1763
|
if (result.exitCode !== null) {
|
|
@@ -1408,22 +1773,53 @@ function createExecuteTool(backend, options) {
|
|
|
1408
1773
|
});
|
|
1409
1774
|
}
|
|
1410
1775
|
/**
|
|
1776
|
+
* Returns true only when backend exposes route prefixes (CompositeBackend) and
|
|
1777
|
+
* every permission path is scoped under one of them.
|
|
1778
|
+
*/
|
|
1779
|
+
function allPathsScopedToRoutes(permissions, backend) {
|
|
1780
|
+
if (!CompositeBackend.isInstance(backend)) return false;
|
|
1781
|
+
const prefixes = backend.routePrefixes;
|
|
1782
|
+
if (prefixes.length === 0) return false;
|
|
1783
|
+
return permissions.every((rule) => rule.paths.every((path) => prefixes.some((prefix) => path.startsWith(prefix.endsWith("/") ? prefix : `${prefix}/`))));
|
|
1784
|
+
}
|
|
1785
|
+
/**
|
|
1411
1786
|
* Create filesystem middleware with all tools and features.
|
|
1412
1787
|
*/
|
|
1413
1788
|
function createFilesystemMiddleware(options = {}) {
|
|
1414
|
-
const { backend = (runtime) => new StateBackend(runtime), systemPrompt: customSystemPrompt = null, customToolDescriptions = null, toolTokenLimitBeforeEvict = 2e4, humanMessageTokenLimitBeforeEvict = 5e4 } = options;
|
|
1789
|
+
const { backend = (runtime) => new StateBackend(runtime), systemPrompt: customSystemPrompt = null, customToolDescriptions = null, toolTokenLimitBeforeEvict = 2e4, humanMessageTokenLimitBeforeEvict = 5e4, permissions = [] } = options;
|
|
1790
|
+
if (permissions.length > 0) validatePermissionPaths(permissions);
|
|
1791
|
+
if (permissions.length > 0 && typeof backend !== "function" && isSandboxBackend(backend) && !allPathsScopedToRoutes(permissions, backend)) throw new Error("Filesystem permissions cannot be used with a backend that supports command execution. Shell commands can access any path, making path-based rules ineffective. Either remove permissions, use a backend without execution support, or use a CompositeBackend with all permission paths scoped to a route prefix.");
|
|
1415
1792
|
const baseSystemPrompt = customSystemPrompt || FILESYSTEM_SYSTEM_PROMPT;
|
|
1416
1793
|
const allToolsByName = {
|
|
1417
|
-
ls: createLsTool(backend, {
|
|
1794
|
+
ls: createLsTool(backend, {
|
|
1795
|
+
customDescription: customToolDescriptions?.ls,
|
|
1796
|
+
permissions
|
|
1797
|
+
}),
|
|
1418
1798
|
read_file: createReadFileTool(backend, {
|
|
1419
1799
|
customDescription: customToolDescriptions?.read_file,
|
|
1420
|
-
toolTokenLimitBeforeEvict
|
|
1800
|
+
toolTokenLimitBeforeEvict,
|
|
1801
|
+
permissions
|
|
1802
|
+
}),
|
|
1803
|
+
write_file: createWriteFileTool(backend, {
|
|
1804
|
+
customDescription: customToolDescriptions?.write_file,
|
|
1805
|
+
permissions
|
|
1806
|
+
}),
|
|
1807
|
+
edit_file: createEditFileTool(backend, {
|
|
1808
|
+
customDescription: customToolDescriptions?.edit_file,
|
|
1809
|
+
permissions
|
|
1810
|
+
}),
|
|
1811
|
+
glob: createGlobTool(backend, {
|
|
1812
|
+
customDescription: customToolDescriptions?.glob,
|
|
1813
|
+
permissions
|
|
1814
|
+
}),
|
|
1815
|
+
grep: createGrepTool(backend, {
|
|
1816
|
+
customDescription: customToolDescriptions?.grep,
|
|
1817
|
+
permissions
|
|
1421
1818
|
}),
|
|
1422
|
-
|
|
1423
|
-
|
|
1424
|
-
|
|
1425
|
-
|
|
1426
|
-
execute: createExecuteTool(backend, { customDescription: customToolDescriptions?.execute })
|
|
1819
|
+
execute: createExecuteTool(backend, {
|
|
1820
|
+
customDescription: customToolDescriptions?.execute,
|
|
1821
|
+
permissions
|
|
1822
|
+
})
|
|
1427
1823
|
};
|
|
1428
1824
|
return createMiddleware({
|
|
1429
1825
|
name: "FilesystemMiddleware",
|
|
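For callers, the visible surface of this hunk is the new `permissions` option on `createFilesystemMiddleware`, threaded into every filesystem tool. A hedged usage sketch follows: the option name and the sandbox restriction come from this diff, while the rule shape and the `"deepagents"` export path are assumptions.

```js
import { createFilesystemMiddleware } from "deepagents";

// Every tool (ls, read_file, write_file, edit_file, glob, grep, execute)
// receives the same rules; the { action, paths } shape is assumed.
const middleware = createFilesystemMiddleware({
	permissions: [
		{ action: "read", paths: ["/docs/"] },
		{ action: "write", paths: ["/scratch/"] }
	]
});
```

Note the guard above: passing `permissions` together with a sandbox-capable backend now throws at construction time unless every permission path is scoped under a `CompositeBackend` route prefix, since shell commands could otherwise bypass the path rules entirely.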
@@ -1883,7 +2279,14 @@ function createTaskTool(options) {
 	const subagent = subagentGraphs[subagent_type];
 	const subagentState = filterStateForSubagent(getCurrentTaskInput());
 	subagentState.messages = [new HumanMessage$1({ content: description })];
-	const
+	const subagentConfig = {
+		...config,
+		configurable: {
+			...config.configurable,
+			ls_agent_type: "subagent"
+		}
+	};
+	const result = await subagent.invoke(subagentState, subagentConfig);
 	if (!config.toolCall?.id) {
 		if (result.structuredResponse != null) return JSON.stringify(result.structuredResponse);
 		const messages = result.messages;
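The point of building `subagentConfig` by spreading rather than replacing `configurable` is that the subagent keeps every inherited key (thread id, checkpoint metadata, and so on) and only gains the `ls_agent_type` marker. A minimal illustration:

```js
const config = { configurable: { thread_id: "t-1" }, tags: ["parent"] };
const subagentConfig = {
	...config,
	configurable: {
		...config.configurable,
		ls_agent_type: "subagent"
	}
};
console.log(subagentConfig.configurable);
// -> { thread_id: "t-1", ls_agent_type: "subagent" }
```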
@@ -2339,9 +2742,23 @@ function createMemoryMiddleware(options) {
  * ```
  */
 const MAX_SKILL_FILE_SIZE = 10 * 1024 * 1024;
+const DEFAULT_SKILL_READ_LINE_LIMIT = 1e3;
 const MAX_SKILL_NAME_LENGTH = 64;
 const MAX_SKILL_DESCRIPTION_LENGTH = 1024;
 /**
+ * File extensions a skill module entrypoint may use.
+ */
+const SKILL_MODULE_EXTENSIONS = [
+	".js",
+	".mjs",
+	".cjs",
+	".ts",
+	".mts",
+	".cts",
+	".jsx",
+	".tsx"
+];
+/**
  * Zod schema for a single skill metadata entry.
  */
 const SkillMetadataEntrySchema = z$1.object({
@@ -2351,7 +2768,8 @@ const SkillMetadataEntrySchema = z$1.object({
 	license: z$1.string().nullable().optional(),
 	compatibility: z$1.string().nullable().optional(),
 	metadata: z$1.record(z$1.string(), z$1.string()).optional(),
-	allowedTools: z$1.array(z$1.string()).optional()
+	allowedTools: z$1.array(z$1.string()).optional(),
+	module: z$1.string().optional()
 });
 /**
  * Reducer for skillsMetadata that merges arrays from parallel subagents.
@@ -2383,48 +2801,49 @@ const SkillsStateSchema = new StateSchema({
 /**
  * Skills System Documentation prompt template.
  */
-const SKILLS_SYSTEM_PROMPT = `
-## Skills System
+const SKILLS_SYSTEM_PROMPT = context`
+## Skills System
 
-You have access to a skills library that provides specialized capabilities and domain knowledge.
+You have access to a skills library that provides specialized capabilities and domain knowledge.
 
-{skills_locations}
+{skills_locations}
 
-**Available Skills:**
+**Available Skills:**
 
-{skills_list}
+{skills_list}
 
-**How to Use Skills (Progressive Disclosure):**
+**How to Use Skills (Progressive Disclosure):**
 
-Skills follow a **progressive disclosure** pattern - you know they exist (name + description above), but you only read the full instructions when needed:
+Skills follow a **progressive disclosure** pattern - you know they exist (name + description above), but you only read the full instructions when needed:
 
-1. **Recognize when a skill applies**: Check if the user's task matches any skill's description
-2. **Read the skill's full instructions**:
-
-
+1. **Recognize when a skill applies**: Check if the user's task matches any skill's description
+2. **Read the skill's full instructions**: Use \`read_file\` on the path shown in the skill list above.
+Pass \`limit=${DEFAULT_SKILL_READ_LINE_LIMIT}\` since the default of ${100} lines is too small for most skill files.
+3. **Follow the skill's instructions**: SKILL.md contains step-by-step workflows, best practices, and examples
+4. **Access supporting files**: Skills may include scripts, configs, or reference docs - use absolute paths
 
-**When to Use Skills:**
-- When the user's request matches a skill's domain (e.g., "research X" → web-research skill)
-- When you need specialized knowledge or structured workflows
-- When a skill provides proven patterns for complex tasks
+**When to Use Skills:**
+- When the user's request matches a skill's domain (e.g., "research X" → web-research skill)
+- When you need specialized knowledge or structured workflows
+- When a skill provides proven patterns for complex tasks
 
-**Skills are Self-Documenting:**
-- Each SKILL.md tells you exactly what the skill does and how to use it
-- The skill list above shows the full path for each skill's SKILL.md file
+**Skills are Self-Documenting:**
-- Each SKILL.md tells you exactly what the skill does and how to use it
+- Each SKILL.md tells you exactly what the skill does and how to use it
+- The skill list above shows the full path for each skill's SKILL.md file
 
-**Executing Skill Scripts:**
-Skills may contain scripts or other executable files. Always use absolute paths from the skill list.
+**Executing Skill Scripts:**
+Skills may contain scripts or other executable files. Always use absolute paths from the skill list.
 
-**Example Workflow:**
+**Example Workflow:**
 
-User: "Can you research the latest developments in quantum computing?"
+User: "Can you research the latest developments in quantum computing?"
 
-1. Check available skills above → See "web-research" skill with its full path
-2. Read the skill
-3. Follow the skill's research workflow (search → organize → synthesize)
-4. Use any helper scripts with absolute paths
+1. Check available skills above → See "web-research" skill with its full path
+2. Read the full skill file: \`read_file(path, limit=${DEFAULT_SKILL_READ_LINE_LIMIT})\`
+3. Follow the skill's research workflow (search → organize → synthesize)
+4. Use any helper scripts with absolute paths
 
-Remember: Skills are tools to make you more capable and consistent. When in doubt, check if a skill exists for the task!
+Remember: Skills are tools to make you more capable and consistent. When in doubt, check if a skill exists for the task!
 `;
 /**
  * Validate skill name per Agent Skills specification.
@@ -2575,7 +2994,8 @@ function parseSkillMetadataFromContent(content, skillPath, directoryName) {
 		metadata: validateMetadata(frontmatterData.metadata ?? {}, skillPath),
 		license: String(frontmatterData.license ?? "").trim() || null,
 		compatibility: compatibilityStr,
-		allowedTools
+		allowedTools,
+		module: validateModulePath(frontmatterData.module)
 	};
 }
 /**
@@ -2648,10 +3068,38 @@ function formatSkillsList(skills, sources) {
 		lines.push(descLine);
 		if (skill.allowedTools && skill.allowedTools.length > 0) lines.push(` → Allowed tools: ${skill.allowedTools.join(", ")}`);
 		lines.push(` → Read \`${skill.path}\` for full instructions`);
+		if (skill.module !== void 0) lines.push(` → Import: \`await import("@/skills/${skill.name}")\``);
 	}
 	return lines.join("\n");
 }
 /**
+ * Returns true when `value` ends with a recognized skill module extension.
+ */
+function endsWithModuleExtension(value) {
+	for (const ext of SKILL_MODULE_EXTENSIONS) if (value.endsWith(ext)) return true;
+	return false;
+}
+/**
+ * Validate and normalize the `module` frontmatter key from a `SKILL.md`.
+ *
+ * Returns the normalized path (e.g. `"index.ts"`, `"lib/entry.js"`) or
+ * `undefined` when the key is absent, empty, non-string, absolute, contains
+ * path traversal, or uses an unsupported extension. Invalid values silently
+ * degrade the skill to prose-only.
+ */
+function validateModulePath(raw) {
+	if (raw === null || raw === void 0) return;
+	if (typeof raw !== "string") return;
+	const stripped = raw.trim();
+	if (stripped === "") return;
+	const normalized = stripped.startsWith("./") ? stripped.slice(2) : stripped;
+	if (normalized.startsWith("/")) return;
+	if (normalized === ".." || normalized.startsWith("../") || normalized.includes("/../") || normalized.endsWith("/..")) return;
+	if (normalized.endsWith(".d.ts") || normalized.endsWith(".d.mts") || normalized.endsWith(".d.cts")) return;
+	if (!endsWithModuleExtension(normalized)) return;
+	return normalized;
+}
+/**
  * Create backend-agnostic middleware for loading and exposing agent skills.
  *
  * This middleware loads skills from configurable backend sources and injects
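Tracing `validateModulePath` against a few inputs makes the acceptance rules concrete. The inputs below are illustrative; each outcome follows directly from the checks in the function above.

```js
validateModulePath("./index.ts");    // -> "index.ts" (leading "./" stripped)
validateModulePath("lib/entry.js");  // -> "lib/entry.js"
validateModulePath("/abs/entry.js"); // -> undefined (absolute paths rejected)
validateModulePath("../escape.ts");  // -> undefined (path traversal rejected)
validateModulePath("types.d.ts");    // -> undefined (declaration files rejected)
validateModulePath("notes.md");      // -> undefined (not a recognized module extension)
validateModulePath("   ");           // -> undefined (empty after trimming)
```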
@@ -3151,6 +3599,7 @@ function createSummarizationMiddleware(options) {
  */
 async function getChatModel() {
 	if (cachedModel) return cachedModel;
+	if (!model) throw new Error("Summarization middleware could not resolve a model. Provide `options.model` or ensure `request.model` is present.");
 	if (typeof model === "string") cachedModel = await initChatModel(model);
 	else cachedModel = model;
 	return cachedModel;
@@ -3581,7 +4030,7 @@ function createSummarizationMiddleware(options) {
 	/**
 	 * Resolve the chat model and get max input tokens from its profile.
 	 */
-	const resolvedModel = await getChatModel();
+	const resolvedModel = request.model ?? await getChatModel();
 	const maxInputTokens = getMaxInputTokens(resolvedModel);
 	applyModelDefaults(resolvedModel);
 	/**
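Taken together, the two summarization changes give a clear precedence order: the per-request model wins, then the configured `options.model`, and a missing model now fails fast instead of reaching `initChatModel` with `undefined`. A sketch of that order; the helper name is hypothetical, and `initChatModel` is injected as a parameter to avoid assuming an import path.

```js
// Hypothetical helper mirroring the resolution order in this diff.
async function resolveSummarizationModel(requestModel, configuredModel, initChatModel) {
	if (requestModel) return requestModel; // request.model takes precedence
	if (!configuredModel) throw new Error("Summarization middleware could not resolve a model.");
	return typeof configuredModel === "string" ? await initChatModel(configuredModel) : configuredModel;
}
```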
@@ -4190,6 +4639,13 @@ function createAsyncSubAgentMiddleware(options) {
  * StoreBackend: Adapter for LangGraph's BaseStore (persistent, cross-thread).
  */
 const NAMESPACE_COMPONENT_RE = /^[A-Za-z0-9\-_.@+:~]+$/;
+function getObjectRecord(value) {
+	return value != null && typeof value === "object" ? value : void 0;
+}
+function getAssistantIdFromRecord(value) {
+	const assistantId = value?.assistant_id ?? value?.assistantId;
+	return typeof assistantId === "string" && assistantId.length > 0 ? assistantId : void 0;
+}
 /**
  * Validate a namespace array.
  *
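These two helpers exist so assistant-id lookup tolerates both key spellings — `assistant_id` as written by the Python side and the historical TypeScript `assistantId` — and rejects empty strings. Traced behavior against illustrative inputs:

```js
getAssistantIdFromRecord({ assistant_id: "asst_1" }); // -> "asst_1"
getAssistantIdFromRecord({ assistantId: "asst_2" });  // -> "asst_2"
getAssistantIdFromRecord({ assistant_id: "" });       // -> undefined (empty rejected)
getAssistantIdFromRecord(void 0);                     // -> undefined
getObjectRecord("not-an-object");                     // -> undefined (non-object)
getObjectRecord({ a: 1 });                            // -> { a: 1 }
```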
@@ -4222,536 +4678,236 @@ function validateNamespace(namespace) {
  */
 var StoreBackend = class {
 	stateAndStore;
+	storeOverride;
 	_namespace;
-	fileFormat;
-	constructor(stateAndStoreOrOptions, options) {
-		let opts;
-		if (stateAndStoreOrOptions != null && typeof stateAndStoreOrOptions === "object" && "state" in stateAndStoreOrOptions) {
-			this.stateAndStore = stateAndStoreOrOptions;
-			opts = options;
-		} else {
-			this.stateAndStore = void 0;
-			opts = stateAndStoreOrOptions;
-		}
-		if (opts?.namespace) this._namespace = validateNamespace(opts.namespace);
-		this.fileFormat = opts?.fileFormat ?? "v2";
-	}
-	/**
-	 * Get the BaseStore instance for persistent storage operations.
-	 *
-	 * In legacy mode, reads from the injected {@link StateAndStore}.
-	 * In zero-arg mode, retrieves the store from the LangGraph execution
-	 * context via {@link getLangGraphStore}.
-	 *
-	 * @returns BaseStore instance
-	 * @throws Error if no store is available in either mode
-	 */
-	getStore() {
-		if (this.stateAndStore) {
-			const store = this.stateAndStore.store;
-			if (!store) throw new Error("Store is required but not available in runtime");
-			return store;
-		}
-		const store = getStore();
-		if (!store) throw new Error("Store is required but not available in LangGraph execution context. Ensure the graph was configured with a store.");
-		return store;
-	}
-	/**
-	 * Get the namespace for store operations.
-	 *
-	 * Resolution order:
-	 * 1. Explicit namespace from constructor options (both modes)
-	 * 2. Legacy mode: `[assistantId, "filesystem"]` fallback from {@link StateAndStore}
-	 * 3. Zero-arg mode without namespace: `["filesystem"]` with a deprecation warning
-	 * nudging callers to pass an explicit namespace
-	 * 4. Legacy mode without assistantId: `["filesystem"]`
-	 */
-	getNamespace() {
-		if (this._namespace) return this._namespace;
-		if (this.stateAndStore) {
-			const assistantId = this.stateAndStore.assistantId;
-			if (assistantId) return [assistantId, "filesystem"];
-		}
-		return ["filesystem"];
-	}
-	/**
-	 * Convert a store Item to FileData format.
-	 *
-	 * @param storeItem - The store Item containing file data
-	 * @returns FileData object
-	 * @throws Error if required fields are missing or have incorrect types
-	 */
-	convertStoreItemToFileData(storeItem) {
-		const value = storeItem.value;
-		if (!(value.content !== void 0 && (Array.isArray(value.content) || typeof value.content === "string" || ArrayBuffer.isView(value.content))) || typeof value.created_at !== "string" || typeof value.modified_at !== "string") throw new Error(`Store item does not contain valid FileData fields. Got keys: ${Object.keys(value).join(", ")}`);
-		return {
-			content: value.content,
-			...value.mimeType ? { mimeType: value.mimeType } : {},
-			created_at: value.created_at,
-			modified_at: value.modified_at
-		};
-	}
-	/**
-	 * Convert FileData to a value suitable for store.put().
-	 *
-	 * @param fileData - The FileData to convert
-	 * @returns Object with content, mimeType, created_at, and modified_at fields
-	 */
-	convertFileDataToStoreValue(fileData) {
-		return {
-			content: fileData.content,
-			..."mimeType" in fileData ? { mimeType: fileData.mimeType } : {},
-			created_at: fileData.created_at,
-			modified_at: fileData.modified_at
-		};
-	}
-	/**
-	 * Search store with automatic pagination to retrieve all results.
-	 *
-	 * @param store - The store to search
-	 * @param namespace - Hierarchical path prefix to search within
-	 * @param options - Optional query, filter, and page_size
-	 * @returns List of all items matching the search criteria
-	 */
-	async searchStorePaginated(store, namespace, options = {}) {
-		const { query, filter, pageSize = 100 } = options;
-		const allItems = [];
-		let offset = 0;
-		while (true) {
-			const pageItems = await store.search(namespace, {
-				query,
-				filter,
-				limit: pageSize,
-				offset
-			});
-			if (!pageItems || pageItems.length === 0) break;
-			allItems.push(...pageItems);
-			if (pageItems.length < pageSize) break;
-			offset += pageSize;
-		}
-		return allItems;
-	}
-	/**
-	 * List files and directories in the specified directory (non-recursive).
-	 *
-	 * @param path - Absolute path to directory
-	 * @returns LsResult with list of FileInfo objects on success or error on failure.
-	 * Directories have a trailing / in their path and is_dir=true.
-	 */
-	async ls(path) {
-		const store = this.getStore();
-		const namespace = this.getNamespace();
-		const items = await this.searchStorePaginated(store, namespace);
-		const infos = [];
-		const subdirs = /* @__PURE__ */ new Set();
-		const normalizedPath = path.endsWith("/") ? path : path + "/";
-		for (const item of items) {
-			const itemKey = String(item.key);
-			if (!itemKey.startsWith(normalizedPath)) continue;
-			const relative = itemKey.substring(normalizedPath.length);
-			if (relative.includes("/")) {
-				const subdirName = relative.split("/")[0];
-				subdirs.add(normalizedPath + subdirName + "/");
-				continue;
-			}
-			try {
-				const fd = this.convertStoreItemToFileData(item);
-				const size = isFileDataV1(fd) ? fd.content.join("\n").length : isFileDataBinary(fd) ? fd.content.byteLength : fd.content.length;
-				infos.push({
-					path: itemKey,
-					is_dir: false,
-					size,
-					modified_at: fd.modified_at
-				});
-			} catch {
-				continue;
-			}
-		}
-		for (const subdir of Array.from(subdirs).sort()) infos.push({
-			path: subdir,
-			is_dir: true,
-			size: 0,
-			modified_at: ""
-		});
-		infos.sort((a, b) => a.path.localeCompare(b.path));
-		return { files: infos };
-	}
-	/**
-	 * Read file content.
-	 *
-	 * Text files are paginated by line offset/limit.
-	 * Binary files return full Uint8Array content (offset/limit ignored).
-	 *
-	 * @param filePath - Absolute file path
-	 * @param offset - Line offset to start reading from (0-indexed)
-	 * @param limit - Maximum number of lines to read
-	 * @returns ReadResult with content on success or error on failure
-	 */
-	async read(filePath, offset = 0, limit = 500) {
-		try {
-			const readRawResult = await this.readRaw(filePath);
-			if (readRawResult.error || !readRawResult.data) return { error: readRawResult.error || "File data not found" };
-			const fileDataV2 = migrateToFileDataV2(readRawResult.data, filePath);
-			if (!isTextMimeType(fileDataV2.mimeType)) return {
-				content: fileDataV2.content,
-				mimeType: fileDataV2.mimeType
-			};
-			if (typeof fileDataV2.content !== "string") return { error: `File '${filePath}' has binary content but text MIME type` };
-			return {
-				content: fileDataV2.content.split("\n").slice(offset, offset + limit).join("\n"),
-				mimeType: fileDataV2.mimeType
-			};
-		} catch (e) {
-			return { error: e.message };
-		}
-	}
-	/**
-	 * Read file content as raw FileData.
-	 *
-	 * @param filePath - Absolute file path
-	 * @returns ReadRawResult with raw file data on success or error on failure
-	 */
-	async readRaw(filePath) {
-		const store = this.getStore();
-		const namespace = this.getNamespace();
-		const item = await store.get(namespace, filePath);
-		if (!item) return { error: `File '${filePath}' not found` };
-		return { data: this.convertStoreItemToFileData(item) };
-	}
-	/**
-	 * Create a new file with content.
-	 * Returns WriteResult. External storage sets filesUpdate=null.
-	 */
-	async write(filePath, content) {
-		const store = this.getStore();
-		const namespace = this.getNamespace();
-		if (await store.get(namespace, filePath)) return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
-		const mimeType = getMimeType(filePath);
-		const fileData = createFileData(content, void 0, this.fileFormat, mimeType);
-		const storeValue = this.convertFileDataToStoreValue(fileData);
-		await store.put(namespace, filePath, storeValue);
-		return {
-			path: filePath,
-			filesUpdate: null
-		};
-	}
-	/**
-	 * Edit a file by replacing string occurrences.
-	 * Returns EditResult. External storage sets filesUpdate=null.
-	 */
-	async edit(filePath, oldString, newString, replaceAll = false) {
-		const store = this.getStore();
-		const namespace = this.getNamespace();
-		const item = await store.get(namespace, filePath);
-		if (!item) return { error: `Error: File '${filePath}' not found` };
-		try {
-			const fileData = this.convertStoreItemToFileData(item);
-			const result = performStringReplacement(fileDataToString(fileData), oldString, newString, replaceAll);
-			if (typeof result === "string") return { error: result };
-			const [newContent, occurrences] = result;
-			const newFileData = updateFileData(fileData, newContent);
-			const storeValue = this.convertFileDataToStoreValue(newFileData);
-			await store.put(namespace, filePath, storeValue);
-			return {
-				path: filePath,
-				filesUpdate: null,
-				occurrences
-			};
-		} catch (e) {
-			return { error: `Error: ${e.message}` };
+	fileFormat;
+	constructor(stateAndStoreOrOptions, options) {
+		let opts;
+		if (stateAndStoreOrOptions != null && typeof stateAndStoreOrOptions === "object" && "state" in stateAndStoreOrOptions) {
+			this.stateAndStore = stateAndStoreOrOptions;
+			opts = options;
+		} else {
+			this.stateAndStore = void 0;
+			opts = stateAndStoreOrOptions;
 		}
+		if (Array.isArray(opts?.namespace)) this._namespace = validateNamespace(opts.namespace);
+		else if (opts?.namespace) this._namespace = opts.namespace;
+		this.storeOverride = opts?.store;
+		this.fileFormat = opts?.fileFormat ?? "v2";
 	}
 	/**
-	 *
-	 *
+	 * Get the BaseStore instance for persistent storage operations.
+	 *
+	 * In legacy mode, reads from the injected {@link StateAndStore}.
+	 * In zero-arg mode, retrieves the store from the LangGraph execution
+	 * context via {@link getLangGraphStore}.
+	 *
+	 * @returns BaseStore instance
+	 * @throws Error if no store is available in either mode
 	 */
-
-
-
-
-
-		for (const item of items) try {
-			files[item.key] = this.convertStoreItemToFileData(item);
-		} catch {
-			continue;
+	getStore() {
+		if (this.stateAndStore) {
+			const store = this.stateAndStore.store;
+			if (!store) throw new Error("Store is required but not available in runtime");
+			return store;
 		}
-
+		if (this.storeOverride) return this.storeOverride;
+		const store = getStore();
+		if (!store) throw new Error("Store is required but not available in LangGraph execution context. Ensure the graph was configured with a store.");
+		return store;
 	}
 	/**
-	 *
+	 * Get the current graph state when available.
 	 */
-
-
-
-
-		const files = {};
-		for (const item of items) try {
-			files[item.key] = this.convertStoreItemToFileData(item);
+	getState() {
+		if (this.stateAndStore) return this.stateAndStore.state;
+		try {
+			return getCurrentTaskInput();
 		} catch {
-
+			return;
 		}
-		const result = globSearchFiles(files, pattern, path);
-		if (result === "No files found") return { files: [] };
-		const paths = result.split("\n");
-		const infos = [];
-		for (const p of paths) {
-			const fd = files[p];
-			const size = fd ? isFileDataV1(fd) ? fd.content.join("\n").length : isFileDataBinary(fd) ? fd.content.byteLength : fd.content.length : 0;
-			infos.push({
-				path: p,
-				is_dir: false,
-				size,
-				modified_at: fd?.modified_at || ""
-			});
-		}
-		return { files: infos };
 	}
 	/**
-	 *
-	 *
-	 * @param files - List of [path, content] tuples to upload
-	 * @returns List of FileUploadResponse objects, one per input file
+	 * Get the most relevant runnable config for namespace resolution.
 	 */
-
-	const
-
-
-
-
-
-
-	if (
-
-
-
-
-			path,
-			error: null
-		});
+	getNamespaceConfig() {
+		const injectedConfig = getObjectRecord(this.stateAndStore?.config);
+		if (injectedConfig) return {
+			metadata: getObjectRecord(injectedConfig.metadata),
+			configurable: getObjectRecord(injectedConfig.configurable)
+		};
+		try {
+			const configRecord = getObjectRecord(getConfig());
+			if (!configRecord) return;
+			return {
+				metadata: getObjectRecord(configRecord.metadata),
+				configurable: getObjectRecord(configRecord.configurable)
+			};
 		} catch {
-
-			path,
-			error: "invalid_path"
-		});
+			return;
 		}
-		return responses;
 	}
 	/**
-	 *
-	 *
-	 * @param paths - List of file paths to download
-	 * @returns List of FileDownloadResponse objects, one per input path
+	 * Legacy assistant-id detection compatible with both Python and the
+	 * historical TypeScript `assistantId` runtime property.
 	 */
-
-	const
-	const
-
-
-
-		if (!item) {
-			responses.push({
-				path,
-				content: null,
-				error: "file_not_found"
-			});
-			continue;
-		}
-		const fileDataV2 = migrateToFileDataV2(this.convertStoreItemToFileData(item), path);
-		if (typeof fileDataV2.content === "string") {
-			const content = new TextEncoder().encode(fileDataV2.content);
-			responses.push({
-				path,
-				content,
-				error: null
-			});
-		} else responses.push({
-			path,
-			content: fileDataV2.content,
-			error: null
-		});
-		} catch {
-			responses.push({
-				path,
-				content: null,
-				error: "file_not_found"
-			});
-		}
-		return responses;
-	}
-};
-//#endregion
-//#region src/backends/filesystem.ts
-/**
- * FilesystemBackend: Read and write files directly from the filesystem.
- *
- * Security and search upgrades:
- * - Secure path resolution with root containment when in virtual_mode (sandboxed to cwd)
- * - Prevent symlink-following on file I/O using O_NOFOLLOW when available
- * - Ripgrep-powered grep with literal (fixed-string) search, plus substring fallback
- *   and optional glob include filtering, while preserving virtual path behavior
- */
-const SUPPORTS_NOFOLLOW = fs$1.constants.O_NOFOLLOW !== void 0;
-/**
- * Backend that reads and writes files directly from the filesystem.
- *
- * Files are accessed using their actual filesystem paths. Relative paths are
- * resolved relative to the current working directory. Content is read/written
- * as plain text, and metadata (timestamps) are derived from filesystem stats.
- */
-var FilesystemBackend = class {
-	cwd;
-	virtualMode;
-	maxFileSizeBytes;
-	constructor(options = {}) {
-		const { rootDir, virtualMode = false, maxFileSizeMb = 10 } = options;
-		this.cwd = rootDir ? path$1.resolve(rootDir) : process.cwd();
-		this.virtualMode = virtualMode;
-		this.maxFileSizeBytes = maxFileSizeMb * 1024 * 1024;
+	getLegacyAssistantId() {
+		const config = this.getNamespaceConfig();
+		const assistantIdFromConfig = getAssistantIdFromRecord(config?.metadata) ?? getAssistantIdFromRecord(config?.configurable);
+		if (assistantIdFromConfig) return assistantIdFromConfig;
+		const assistantId = this.stateAndStore?.assistantId;
+		return typeof assistantId === "string" && assistantId.length > 0 ? assistantId : void 0;
 	}
 	/**
-	 *
+	 * Get the namespace for store operations.
 	 *
-	 *
-	 *
-	 *
-	 *
+	 * Resolution order:
+	 * 1. Explicit namespace from constructor options
+	 * 2. Namespace factory resolved from the current backend context
+	 * 3. Assistant ID from runtime config / LangGraph config metadata
+	 * 4. Legacy `assistantId` from the injected runtime
+	 * 5. `["filesystem"]`
+	 */
+	getNamespace() {
+		if (Array.isArray(this._namespace)) return this._namespace;
+		if (this._namespace) return validateNamespace(this._namespace({
+			state: this.getState(),
+			config: this.getNamespaceConfig(),
+			assistantId: this.getLegacyAssistantId()
+		}));
+		const assistantId = this.getLegacyAssistantId();
+		if (assistantId) return [assistantId, "filesystem"];
+		return ["filesystem"];
+	}
+	/**
+	 * Convert a store Item to FileData format.
 	 *
-	 * @param
-	 * @returns
-	 * @throws Error if
+	 * @param storeItem - The store Item containing file data
+	 * @returns FileData object
+	 * @throws Error if required fields are missing or have incorrect types
 	 */
-
-
-
-
-
-
-
-
-	}
-	if (path$1.isAbsolute(key)) return key;
-	return path$1.resolve(this.cwd, key);
+	convertStoreItemToFileData(storeItem) {
+		const value = storeItem.value;
+		if (!(value.content !== void 0 && (Array.isArray(value.content) || typeof value.content === "string" || ArrayBuffer.isView(value.content))) || typeof value.created_at !== "string" || typeof value.modified_at !== "string") throw new Error(`Store item does not contain valid FileData fields. Got keys: ${Object.keys(value).join(", ")}`);
+		return {
+			content: value.content,
+			...value.mimeType ? { mimeType: value.mimeType } : {},
+			created_at: value.created_at,
+			modified_at: value.modified_at
+		};
 	}
 	/**
-	 *
+	 * Convert FileData to a value suitable for store.put().
 	 *
-	 * @param
-	 * @returns
-	 * Directories have a trailing / in their path and is_dir=true.
+	 * @param fileData - The FileData to convert
+	 * @returns Object with content, mimeType, created_at, and modified_at fields
 	 */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+	convertFileDataToStoreValue(fileData) {
+		return {
+			content: fileData.content,
+			..."mimeType" in fileData ? { mimeType: fileData.mimeType } : {},
+			created_at: fileData.created_at,
+			modified_at: fileData.modified_at
+		};
+	}
+	/**
+	 * Search store with automatic pagination to retrieve all results.
+	 *
+	 * @param store - The store to search
+	 * @param namespace - Hierarchical path prefix to search within
+	 * @param options - Optional query, filter, and page_size
+	 * @returns List of all items matching the search criteria
+	 */
+	async searchStorePaginated(store, namespace, options = {}) {
+		const { query, filter, pageSize = 100 } = options;
+		const allItems = [];
+		let offset = 0;
+		while (true) {
+			const pageItems = await store.search(namespace, {
+				query,
+				filter,
+				limit: pageSize,
+				offset
+			});
+			if (!pageItems || pageItems.length === 0) break;
+			allItems.push(...pageItems);
+			if (pageItems.length < pageSize) break;
+			offset += pageSize;
+		}
+		return allItems;
+	}
+	/**
+	 * List files and directories in the specified directory (non-recursive).
+	 *
+	 * @param path - Absolute path to directory
+	 * @returns LsResult with list of FileInfo objects on success or error on failure.
+	 * Directories have a trailing / in their path and is_dir=true.
+	 */
+	async ls(path) {
+		const store = this.getStore();
+		const namespace = this.getNamespace();
+		const items = await this.searchStorePaginated(store, namespace);
+		const infos = [];
+		const subdirs = /* @__PURE__ */ new Set();
+		const normalizedPath = path.endsWith("/") ? path : path + "/";
+		for (const item of items) {
+			const itemKey = String(item.key);
+			if (!itemKey.startsWith(normalizedPath)) continue;
+			const relative = itemKey.substring(normalizedPath.length);
+			if (relative.includes("/")) {
+				const subdirName = relative.split("/")[0];
+				subdirs.add(normalizedPath + subdirName + "/");
+				continue;
+			}
+			try {
+				const fd = this.convertStoreItemToFileData(item);
+				const size = isFileDataV1(fd) ? fd.content.join("\n").length : isFileDataBinary(fd) ? fd.content.byteLength : fd.content.length;
+				infos.push({
+					path: itemKey,
+					is_dir: false,
+					size,
+					modified_at: fd.modified_at
+				});
+			} catch {
+				continue;
 		}
-		results.sort((a, b) => a.path.localeCompare(b.path));
-		return { files: results };
-	} catch {
-		return { files: [] };
 		}
+		for (const subdir of Array.from(subdirs).sort()) infos.push({
+			path: subdir,
+			is_dir: true,
+			size: 0,
+			modified_at: ""
+		});
+		infos.sort((a, b) => a.path.localeCompare(b.path));
+		return { files: infos };
 	}
 	/**
-	 * Read file content
+	 * Read file content.
 	 *
-	 *
+	 * Text files are paginated by line offset/limit.
+	 * Binary files return full Uint8Array content (offset/limit ignored).
+	 *
+	 * @param filePath - Absolute file path
 	 * @param offset - Line offset to start reading from (0-indexed)
 	 * @param limit - Maximum number of lines to read
-	 * @returns
+	 * @returns ReadResult with content on success or error on failure
 	 */
 	async read(filePath, offset = 0, limit = 500) {
 		try {
-			const
-
-			const
-
-
-
-			const fd = await fs.open(resolvedPath, fs$1.constants.O_RDONLY | fs$1.constants.O_NOFOLLOW);
-			try {
-				if (isBinary) {
-					const buffer = await fd.readFile();
-					return {
-						content: new Uint8Array(buffer),
-						mimeType
-					};
-				}
-				content = await fd.readFile({ encoding: "utf-8" });
-			} finally {
-				await fd.close();
-			}
-		} else {
-			const stat = await fs.lstat(resolvedPath);
-			if (stat.isSymbolicLink()) return { error: `Symlinks are not allowed: ${filePath}` };
-			if (!stat.isFile()) return { error: `File '${filePath}' not found` };
-			if (isBinary) {
-				const buffer = await fs.readFile(resolvedPath);
-				return {
-					content: new Uint8Array(buffer),
-					mimeType
-				};
-			}
-			content = await fs.readFile(resolvedPath, "utf-8");
-		}
-		const emptyMsg = checkEmptyContent(content);
-		if (emptyMsg) return {
-			content: emptyMsg,
-			mimeType
+			const readRawResult = await this.readRaw(filePath);
+			if (readRawResult.error || !readRawResult.data) return { error: readRawResult.error || "File data not found" };
+			const fileDataV2 = migrateToFileDataV2(readRawResult.data, filePath);
+			if (!isTextMimeType(fileDataV2.mimeType)) return {
+				content: fileDataV2.content,
+				mimeType: fileDataV2.mimeType
 			};
-
-		const startIdx = offset;
-		const endIdx = Math.min(startIdx + limit, lines.length);
-		if (startIdx >= lines.length) return { error: `Line offset ${offset} exceeds file length (${lines.length} lines)` };
+			if (typeof fileDataV2.content !== "string") return { error: `File '${filePath}' has binary content but text MIME type` };
 			return {
-				content:
-				mimeType
+				content: fileDataV2.content.split("\n").slice(offset, offset + limit).join("\n"),
+				mimeType: fileDataV2.mimeType
 			};
 		} catch (e) {
-			return { error:
+			return { error: e.message };
 		}
 	}
 	/**
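The reworked `StoreBackend` constructor is the consumer-facing change in this hunk: `namespace` may now be either a literal array or a factory that receives `{ state, config, assistantId }`, and a `store` can be supplied directly instead of relying on the LangGraph execution context. A sketch under those assumptions; `InMemoryStore` is an illustrative concrete store choice, not mandated by the diff.

```js
import { InMemoryStore } from "@langchain/langgraph";

const backend = new StoreBackend({
	store: new InMemoryStore(), // store override: skips getStore() context lookup
	// factory form: called lazily with the current backend context
	namespace: ({ assistantId }) => [assistantId ?? "default", "filesystem"]
});

// Array form still works and is validated eagerly in the constructor:
const fixed = new StoreBackend({
	store: new InMemoryStore(),
	namespace: ["team-a", "filesystem"]
});
```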
@@ -4761,478 +4917,347 @@ var FilesystemBackend = class {
 	 * @returns ReadRawResult with raw file data on success or error on failure
 	 */
 	async readRaw(filePath) {
-		const
-		const
-		const
-
-
-		if (SUPPORTS_NOFOLLOW) {
-			stat = await fs.stat(resolvedPath);
-			if (!stat.isFile()) return { error: `File '${filePath}' not found` };
-			const fd = await fs.open(resolvedPath, fs$1.constants.O_RDONLY | fs$1.constants.O_NOFOLLOW);
-			try {
-				if (isBinary) {
-					const buffer = await fd.readFile();
-					return { data: {
-						content: new Uint8Array(buffer),
-						mimeType,
-						created_at: stat.ctime.toISOString(),
-						modified_at: stat.mtime.toISOString()
-					} };
-				}
-				content = await fd.readFile({ encoding: "utf-8" });
-			} finally {
-				await fd.close();
-			}
-		} else {
-			stat = await fs.lstat(resolvedPath);
-			if (stat.isSymbolicLink()) return { error: `Symlinks are not allowed: ${filePath}` };
-			if (!stat.isFile()) return { error: `File '${filePath}' not found` };
-			if (isBinary) {
-				const buffer = await fs.readFile(resolvedPath);
-				return { data: {
-					content: new Uint8Array(buffer),
-					mimeType,
-					created_at: stat.ctime.toISOString(),
-					modified_at: stat.mtime.toISOString()
-				} };
-			}
-			content = await fs.readFile(resolvedPath, "utf-8");
-		}
-		return { data: {
-			content,
-			mimeType,
-			created_at: stat.ctime.toISOString(),
-			modified_at: stat.mtime.toISOString()
-		} };
+		const store = this.getStore();
+		const namespace = this.getNamespace();
+		const item = await store.get(namespace, filePath);
+		if (!item) return { error: `File '${filePath}' not found` };
+		return { data: this.convertStoreItemToFileData(item) };
 	}
 	/**
 	 * Create a new file with content.
 	 * Returns WriteResult. External storage sets filesUpdate=null.
 	 */
 	async write(filePath, content) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-			path: filePath,
-			filesUpdate: null
-		};
-		} catch (e) {
-			return { error: `Error writing file '${filePath}': ${e.message}` };
-		}
-	}
-	/**
-	 * Edit a file by replacing string occurrences.
-	 * Returns EditResult. External storage sets filesUpdate=null.
-	 */
-	async edit(filePath, oldString, newString, replaceAll = false) {
-		try {
-			const resolvedPath = this.resolvePath(filePath);
-			let content;
-			if (SUPPORTS_NOFOLLOW) {
-				if (!(await fs.stat(resolvedPath)).isFile()) return { error: `Error: File '${filePath}' not found` };
-				const fd = await fs.open(resolvedPath, fs$1.constants.O_RDONLY | fs$1.constants.O_NOFOLLOW);
-				try {
-					content = await fd.readFile({ encoding: "utf-8" });
-				} finally {
-					await fd.close();
-				}
-			} else {
-				const stat = await fs.lstat(resolvedPath);
-				if (stat.isSymbolicLink()) return { error: `Error: Symlinks are not allowed: ${filePath}` };
-				if (!stat.isFile()) return { error: `Error: File '${filePath}' not found` };
-				content = await fs.readFile(resolvedPath, "utf-8");
-			}
-			const result = performStringReplacement(content, oldString, newString, replaceAll);
+		const store = this.getStore();
+		const namespace = this.getNamespace();
+		if (await store.get(namespace, filePath)) return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
+		const mimeType = getMimeType(filePath);
+		const fileData = createFileData(content, void 0, this.fileFormat, mimeType);
+		const storeValue = this.convertFileDataToStoreValue(fileData);
+		await store.put(namespace, filePath, storeValue);
+		return {
+			path: filePath,
+			filesUpdate: null
+		};
+	}
+	/**
+	 * Edit a file by replacing string occurrences.
+	 * Returns EditResult. External storage sets filesUpdate=null.
+	 */
+	async edit(filePath, oldString, newString, replaceAll = false) {
+		const store = this.getStore();
+		const namespace = this.getNamespace();
+		const item = await store.get(namespace, filePath);
+		if (!item) return { error: `Error: File '${filePath}' not found` };
+		try {
+			const fileData = this.convertStoreItemToFileData(item);
+			const result = performStringReplacement(fileDataToString(fileData), oldString, newString, replaceAll);
 			if (typeof result === "string") return { error: result };
 			const [newContent, occurrences] = result;
-
-
-
-			try {
-				await fd.writeFile(newContent, "utf-8");
-			} finally {
-				await fd.close();
-			}
-		} else await fs.writeFile(resolvedPath, newContent, "utf-8");
+			const newFileData = updateFileData(fileData, newContent);
+			const storeValue = this.convertFileDataToStoreValue(newFileData);
+			await store.put(namespace, filePath, storeValue);
 			return {
 				path: filePath,
 				filesUpdate: null,
 				occurrences
 			};
 		} catch (e) {
-			return { error: `Error
-		}
-	}
-	/**
-	 * Search for a literal text pattern in files.
-	 *
-	 * Uses ripgrep if available, falling back to substring search.
-	 *
-	 * @param pattern - Literal string to search for (NOT regex).
-	 * @param dirPath - Directory or file path to search in. Defaults to current directory.
-	 * @param glob - Optional glob pattern to filter which files to search.
-	 * @returns List of GrepMatch dicts containing path, line number, and matched text.
-	 */
-	async grep(pattern, dirPath = "/", glob = null) {
-		let baseFull;
-		try {
-			baseFull = this.resolvePath(dirPath || ".");
-		} catch {
-			return { matches: [] };
-		}
-		try {
-			await fs.stat(baseFull);
-		} catch {
-			return { matches: [] };
+			return { error: `Error: ${e.message}` };
 		}
-		let results = await this.ripgrepSearch(pattern, baseFull, glob);
-		if (results === null) results = await this.literalSearch(pattern, baseFull, glob);
-		const matches = [];
-		for (const [fpath, items] of Object.entries(results)) for (const [lineNum, lineText] of items) matches.push({
-			path: fpath,
-			line: lineNum,
-			text: lineText
-		});
-		return { matches };
-	}
-	/**
-	 * Search using ripgrep with fixed-string (literal) mode.
-	 *
-	 * @param pattern - Literal string to search for (unescaped).
-	 * @param baseFull - Resolved base path to search in.
-	 * @param includeGlob - Optional glob pattern to filter files.
-	 * @returns Dict mapping file paths to list of (line_number, line_text) tuples.
-	 * Returns null if ripgrep is unavailable or times out.
-	 */
-	async ripgrepSearch(pattern, baseFull, includeGlob) {
-		return new Promise((resolve) => {
-			const args = ["--json", "-F"];
-			if (includeGlob) args.push("--glob", includeGlob);
-			args.push("--", pattern, baseFull);
-			const proc = spawn("rg", args, { timeout: 3e4 });
-			const results = {};
-			let output = "";
-			proc.stdout.on("data", (data) => {
-				output += data.toString();
-			});
-			proc.on("close", (code) => {
-				if (code !== 0 && code !== 1) {
-					resolve(null);
-					return;
-				}
-				for (const line of output.split("\n")) {
-					if (!line.trim()) continue;
-					try {
-						const data = JSON.parse(line);
-						if (data.type !== "match") continue;
-						const pdata = data.data || {};
-						const ftext = pdata.path?.text;
-						if (!ftext) continue;
-						let virtPath;
-						if (this.virtualMode) try {
-							const resolved = path$1.resolve(ftext);
-							const relative = path$1.relative(this.cwd, resolved);
-							if (relative.startsWith("..")) continue;
-							virtPath = "/" + relative.split(path$1.sep).join("/");
-						} catch {
-							continue;
-						}
-						else virtPath = ftext;
-						const ln = pdata.line_number;
-						const lt = pdata.lines?.text?.replace(/\n$/, "") || "";
-						if (ln === void 0) continue;
-						if (!results[virtPath]) results[virtPath] = [];
-						results[virtPath].push([ln, lt]);
-					} catch {
-						continue;
-					}
-				}
-				resolve(results);
-			});
-			proc.on("error", () => {
-				resolve(null);
-			});
-		});
 	}
 	/**
-	 *
-	 *
-	 * Recursively searches files, respecting maxFileSizeBytes limit.
-	 *
-	 * @param pattern - Literal string to search for.
-	 * @param baseFull - Resolved base path to search in.
-	 * @param includeGlob - Optional glob pattern to filter files by name.
-	 * @returns Dict mapping file paths to list of (line_number, line_text) tuples.
+	 * Search file contents for a literal text pattern.
+	 * Binary files are skipped.
 	 */
-	async
-		const
-		const
-
-
-
-
-		});
-		for (const fp of files) try {
-			if (!isTextMimeType(getMimeType(fp))) continue;
-			if (includeGlob && !micromatch.isMatch(path$1.basename(fp), includeGlob)) continue;
-			if ((await fs.stat(fp)).size > this.maxFileSizeBytes) continue;
-			const lines = (await fs.readFile(fp, "utf-8")).split("\n");
-			for (let i = 0; i < lines.length; i++) {
-				const line = lines[i];
-				if (line.includes(pattern)) {
-					let virtPath;
-					if (this.virtualMode) try {
-						const relative = path$1.relative(this.cwd, fp);
-						if (relative.startsWith("..")) continue;
-						virtPath = "/" + relative.split(path$1.sep).join("/");
-					} catch {
-						continue;
-					}
-					else virtPath = fp;
-					if (!results[virtPath]) results[virtPath] = [];
-					results[virtPath].push([i + 1, line]);
-				}
-			}
+	async grep(pattern, path = "/", glob = null) {
+		const store = this.getStore();
+		const namespace = this.getNamespace();
+		const items = await this.searchStorePaginated(store, namespace);
+		const files = {};
+		for (const item of items) try {
+			files[item.key] = this.convertStoreItemToFileData(item);
 		} catch {
 			continue;
 		}
-		return
+		return { matches: grepMatchesFromFiles(files, pattern, path, glob) };
 	}
 	/**
 	 * Structured glob matching returning FileInfo objects.
 	 */
-	async glob(pattern,
-
-		const
-
-
+	async glob(pattern, path = "/") {
+		const store = this.getStore();
+		const namespace = this.getNamespace();
+		const items = await this.searchStorePaginated(store, namespace);
+		const files = {};
+		for (const item of items) try {
+			files[item.key] = this.convertStoreItemToFileData(item);
 		} catch {
-
+			continue;
 		}
-		const
-
-
-
-
-
-
+		const result = globSearchFiles(files, pattern, path);
+		if (result === "No files found") return { files: [] };
+		const paths = result.split("\n");
+		const infos = [];
+		for (const p of paths) {
+			const fd = files[p];
+			const size = fd ? isFileDataV1(fd) ? fd.content.join("\n").length : isFileDataBinary(fd) ? fd.content.byteLength : fd.content.length : 0;
+			infos.push({
+				path: p,
+				is_dir: false,
+				size,
+				modified_at: fd?.modified_at || ""
 			});
-
-
-			if (!stat.isFile()) continue;
-			const normalizedPath = matchedPath.split("/").join(path$1.sep);
-			if (!this.virtualMode) results.push({
-				path: normalizedPath,
-				is_dir: false,
-				size: stat.size,
-				modified_at: stat.mtime.toISOString()
-			});
-			else {
-				const cwdStr = this.cwd.endsWith(path$1.sep) ? this.cwd : this.cwd + path$1.sep;
-				let relativePath;
-				if (normalizedPath.startsWith(cwdStr)) relativePath = normalizedPath.substring(cwdStr.length);
-				else if (normalizedPath.startsWith(this.cwd)) relativePath = normalizedPath.substring(this.cwd.length).replace(/^[/\\]/, "");
-				else relativePath = normalizedPath;
-				relativePath = relativePath.split(path$1.sep).join("/");
-				const virt = "/" + relativePath;
-				results.push({
-					path: virt,
-					is_dir: false,
-					size: stat.size,
-					modified_at: stat.mtime.toISOString()
-				});
-			}
-		} catch {
-			continue;
-		}
-		} catch {}
-		results.sort((a, b) => a.path.localeCompare(b.path));
-		return { files: results };
+		}
+		return { files: infos };
 	}
 	/**
-	 * Upload multiple files
+	 * Upload multiple files.
 	 *
 	 * @param files - List of [path, content] tuples to upload
 	 * @returns List of FileUploadResponse objects, one per input file
 	 */
 	async uploadFiles(files) {
+		const store = this.getStore();
+		const namespace = this.getNamespace();
 		const responses = [];
-		for (const [
-		const
-
-
-
-
-
-
-
-
-
-			error: "file_not_found"
-		});
-		else if (e.code === "EACCES") responses.push({
-			path: filePath,
-			error: "permission_denied"
-		});
-		else if (e.code === "EISDIR") responses.push({
-			path: filePath,
-			error: "is_directory"
+		for (const [path, content] of files) try {
+			const mimeType = getMimeType(path);
+			const isBinary = this.fileFormat === "v2" && !isTextMimeType(mimeType);
|
|
5027
|
+
let fileData;
|
|
5028
|
+
if (isBinary) fileData = createFileData(content, void 0, "v2", mimeType);
|
|
5029
|
+
else fileData = createFileData(new TextDecoder().decode(content), void 0, this.fileFormat, mimeType);
|
|
5030
|
+
const storeValue = this.convertFileDataToStoreValue(fileData);
|
|
5031
|
+
await store.put(namespace, path, storeValue);
|
|
5032
|
+
responses.push({
|
|
5033
|
+
path,
|
|
5034
|
+
error: null
|
|
5102
5035
|
});
|
|
5103
|
-
|
|
5104
|
-
|
|
5036
|
+
} catch {
|
|
5037
|
+
responses.push({
|
|
5038
|
+
path,
|
|
5105
5039
|
error: "invalid_path"
|
|
5106
5040
|
});
|
|
5107
5041
|
}
|
|
5108
5042
|
return responses;
|
|
5109
5043
|
}
|
|
5110
5044
|
/**
|
|
5111
|
-
* Download multiple files
|
|
5045
|
+
* Download multiple files.
|
|
5112
5046
|
*
|
|
5113
5047
|
* @param paths - List of file paths to download
|
|
5114
5048
|
* @returns List of FileDownloadResponse objects, one per input path
|
|
5115
5049
|
*/
|
|
5116
5050
|
async downloadFiles(paths) {
|
|
5051
|
+
const store = this.getStore();
|
|
5052
|
+
const namespace = this.getNamespace();
|
|
5117
5053
|
const responses = [];
|
|
5118
|
-
for (const
|
|
5119
|
-
const
|
|
5120
|
-
|
|
5121
|
-
|
|
5122
|
-
|
|
5123
|
-
|
|
5054
|
+
for (const path of paths) try {
|
|
5055
|
+
const item = await store.get(namespace, path);
|
|
5056
|
+
if (!item) {
|
|
5057
|
+
responses.push({
|
|
5058
|
+
path,
|
|
5059
|
+
content: null,
|
|
5060
|
+
error: "file_not_found"
|
|
5061
|
+
});
|
|
5062
|
+
continue;
|
|
5063
|
+
}
|
|
5064
|
+
const fileDataV2 = migrateToFileDataV2(this.convertStoreItemToFileData(item), path);
|
|
5065
|
+
if (typeof fileDataV2.content === "string") {
|
|
5066
|
+
const content = new TextEncoder().encode(fileDataV2.content);
|
|
5067
|
+
responses.push({
|
|
5068
|
+
path,
|
|
5069
|
+
content,
|
|
5070
|
+
error: null
|
|
5071
|
+
});
|
|
5072
|
+
} else responses.push({
|
|
5073
|
+
path,
|
|
5074
|
+
content: fileDataV2.content,
|
|
5124
5075
|
error: null
|
|
5125
5076
|
});
|
|
5126
|
-
} catch
|
|
5127
|
-
|
|
5128
|
-
path
|
|
5077
|
+
} catch {
|
|
5078
|
+
responses.push({
|
|
5079
|
+
path,
|
|
5129
5080
|
content: null,
|
|
5130
5081
|
error: "file_not_found"
|
|
5131
5082
|
});
|
|
5132
|
-
else if (e.code === "EACCES") responses.push({
|
|
5133
|
-
path: filePath,
|
|
5134
|
-
content: null,
|
|
5135
|
-
error: "permission_denied"
|
|
5136
|
-
});
|
|
5137
|
-
else if (e.code === "EISDIR") responses.push({
|
|
5138
|
-
path: filePath,
|
|
5139
|
-
content: null,
|
|
5140
|
-
error: "is_directory"
|
|
5141
|
-
});
|
|
5142
|
-
else responses.push({
|
|
5143
|
-
path: filePath,
|
|
5144
|
-
content: null,
|
|
5145
|
-
error: "invalid_path"
|
|
5146
|
-
});
|
|
5147
5083
|
}
|
|
5148
5084
|
return responses;
|
|
5149
5085
|
}
|
|
5150
5086
|
};
|
|
5151
5087
|
//#endregion
|
|
5152
|
-
//#region src/backends/
|
|
5088
|
+
//#region src/backends/filesystem.ts
|
|
5153
5089
|
/**
|
|
5154
|
-
*
|
|
5090
|
+
* FilesystemBackend: Read and write files directly from the filesystem.
|
|
5155
5091
|
*
|
|
5156
|
-
*
|
|
5157
|
-
* -
|
|
5158
|
-
* -
|
|
5092
|
+
* Security and search upgrades:
|
|
5093
|
+
* - Secure path resolution with root containment when in virtual_mode (sandboxed to cwd)
|
|
5094
|
+
* - Prevent symlink-following on file I/O using O_NOFOLLOW when available
|
|
5095
|
+
* - Ripgrep-powered grep with literal (fixed-string) search, plus substring fallback
|
|
5096
|
+
* and optional glob include filtering, while preserving virtual path behavior
|
|
5097
|
+
*/
|
|
5098
|
+
const SUPPORTS_NOFOLLOW = fs$1.constants.O_NOFOLLOW !== void 0;
|
|
5099
|
+
/**
|
|
5100
|
+
* Backend that reads and writes files directly from the filesystem.
|
|
5159
5101
|
*
|
|
5160
|
-
*
|
|
5102
|
+
* Files are accessed using their actual filesystem paths. Relative paths are
|
|
5103
|
+
* resolved relative to the current working directory. Content is read/written
|
|
5104
|
+
* as plain text, and metadata (timestamps) are derived from filesystem stats.
|
|
5161
5105
|
*/
|
|
5162
|
-
var
|
|
5163
|
-
|
|
5164
|
-
|
|
5165
|
-
|
|
5166
|
-
constructor(
|
|
5167
|
-
|
|
5168
|
-
this.
|
|
5169
|
-
this.
|
|
5170
|
-
|
|
5171
|
-
/** Delegates to default backend's id if it is a sandbox, otherwise empty string. */
|
|
5172
|
-
get id() {
|
|
5173
|
-
return isSandboxBackend(this.default) ? this.default.id : "";
|
|
5106
|
+
var FilesystemBackend = class {
|
|
5107
|
+
cwd;
|
|
5108
|
+
virtualMode;
|
|
5109
|
+
maxFileSizeBytes;
|
|
5110
|
+
constructor(options = {}) {
|
|
5111
|
+
const { rootDir, virtualMode = false, maxFileSizeMb = 10 } = options;
|
|
5112
|
+
this.cwd = rootDir ? path$1.resolve(rootDir) : process.cwd();
|
|
5113
|
+
this.virtualMode = virtualMode;
|
|
5114
|
+
this.maxFileSizeBytes = maxFileSizeMb * 1024 * 1024;
|
|
5174
5115
|
}
|
|
5175
5116
|
/**
|
|
5176
|
-
*
|
|
5117
|
+
* Resolve a file path with security checks.
|
|
5177
5118
|
*
|
|
5178
|
-
*
|
|
5179
|
-
*
|
|
5180
|
-
*
|
|
5119
|
+
* When virtualMode=true, treat incoming paths as virtual absolute paths under
|
|
5120
|
+
* this.cwd, disallow traversal (.., ~) and ensure resolved path stays within root.
|
|
5121
|
+
* When virtualMode=false, preserve legacy behavior: absolute paths are allowed
|
|
5122
|
+
* as-is; relative paths resolve under cwd.
|
|
5123
|
+
*
|
|
5124
|
+
* @param key - File path (absolute, relative, or virtual when virtualMode=true)
|
|
5125
|
+
* @returns Resolved absolute path string
|
|
5126
|
+
* @throws Error if path traversal detected or path outside root
|
|
5181
5127
|
*/
|
|
5182
|
-
|
|
5183
|
-
|
|
5184
|
-
const
|
|
5185
|
-
|
|
5128
|
+
resolvePath(key) {
|
|
5129
|
+
if (this.virtualMode) {
|
|
5130
|
+
const vpath = key.startsWith("/") ? key : "/" + key;
|
|
5131
|
+
if (vpath.includes("..") || vpath.startsWith("~")) throw new Error("Path traversal not allowed");
|
|
5132
|
+
const full = path$1.resolve(this.cwd, vpath.substring(1));
|
|
5133
|
+
const relative = path$1.relative(this.cwd, full);
|
|
5134
|
+
if (relative.startsWith("..") || path$1.isAbsolute(relative)) throw new Error(`Path: ${full} outside root directory: ${this.cwd}`);
|
|
5135
|
+
return full;
|
|
5186
5136
|
}
|
|
5187
|
-
|
|
5137
|
+
if (path$1.isAbsolute(key)) return key;
|
|
5138
|
+
return path$1.resolve(this.cwd, key);
|
|
5188
5139
|
}
|
|
5189
5140
|
/**
|
|
5190
5141
|
* List files and directories in the specified directory (non-recursive).
|
|
5191
5142
|
*
|
|
5192
|
-
* @param
|
|
5193
|
-
* @returns
|
|
5143
|
+
* @param dirPath - Absolute directory path to list files from
|
|
5144
|
+
* @returns List of FileInfo objects for files and directories directly in the directory.
|
|
5194
5145
|
* Directories have a trailing / in their path and is_dir=true.
|
|
5195
5146
|
*/
|
|
5196
|
-
async ls(
|
|
5197
|
-
|
|
5198
|
-
const
|
|
5199
|
-
|
|
5200
|
-
const
|
|
5201
|
-
if (result.error) return result;
|
|
5202
|
-
const prefixed = [];
|
|
5203
|
-
for (const fi of result.files || []) prefixed.push({
|
|
5204
|
-
...fi,
|
|
5205
|
-
path: routePrefix.slice(0, -1) + fi.path
|
|
5206
|
-
});
|
|
5207
|
-
return { files: prefixed };
|
|
5208
|
-
}
|
|
5209
|
-
if (path === "/") {
|
|
5147
|
+
async ls(dirPath) {
|
|
5148
|
+
try {
|
|
5149
|
+
const resolvedPath = this.resolvePath(dirPath);
|
|
5150
|
+
if (!(await fs.stat(resolvedPath)).isDirectory()) return { files: [] };
|
|
5151
|
+
const entries = await fs.readdir(resolvedPath, { withFileTypes: true });
|
|
5210
5152
|
const results = [];
|
|
5211
|
-
const
|
|
5212
|
-
|
|
5213
|
-
|
|
5214
|
-
|
|
5215
|
-
|
|
5216
|
-
|
|
5217
|
-
|
|
5218
|
-
|
|
5219
|
-
|
|
5153
|
+
const cwdStr = this.cwd.endsWith(path$1.sep) ? this.cwd : this.cwd + path$1.sep;
|
|
5154
|
+
for (const entry of entries) {
|
|
5155
|
+
const fullPath = path$1.join(resolvedPath, entry.name);
|
|
5156
|
+
try {
|
|
5157
|
+
const entryStat = await fs.stat(fullPath);
|
|
5158
|
+
const isFile = entryStat.isFile();
|
|
5159
|
+
const isDir = entryStat.isDirectory();
|
|
5160
|
+
if (!this.virtualMode) {
|
|
5161
|
+
if (isFile) results.push({
|
|
5162
|
+
path: fullPath,
|
|
5163
|
+
is_dir: false,
|
|
5164
|
+
size: entryStat.size,
|
|
5165
|
+
modified_at: entryStat.mtime.toISOString()
|
|
5166
|
+
});
|
|
5167
|
+
else if (isDir) results.push({
|
|
5168
|
+
path: fullPath + path$1.sep,
|
|
5169
|
+
is_dir: true,
|
|
5170
|
+
size: 0,
|
|
5171
|
+
modified_at: entryStat.mtime.toISOString()
|
|
5172
|
+
});
|
|
5173
|
+
} else {
|
|
5174
|
+
let relativePath;
|
|
5175
|
+
if (fullPath.startsWith(cwdStr)) relativePath = fullPath.substring(cwdStr.length);
|
|
5176
|
+
else if (fullPath.startsWith(this.cwd)) relativePath = fullPath.substring(this.cwd.length).replace(/^[/\\]/, "");
|
|
5177
|
+
else relativePath = fullPath;
|
|
5178
|
+
relativePath = relativePath.split(path$1.sep).join("/");
|
|
5179
|
+
const virtPath = "/" + relativePath;
|
|
5180
|
+
if (isFile) results.push({
|
|
5181
|
+
path: virtPath,
|
|
5182
|
+
is_dir: false,
|
|
5183
|
+
size: entryStat.size,
|
|
5184
|
+
modified_at: entryStat.mtime.toISOString()
|
|
5185
|
+
});
|
|
5186
|
+
else if (isDir) results.push({
|
|
5187
|
+
path: virtPath + "/",
|
|
5188
|
+
is_dir: true,
|
|
5189
|
+
size: 0,
|
|
5190
|
+
modified_at: entryStat.mtime.toISOString()
|
|
5191
|
+
});
|
|
5192
|
+
}
|
|
5193
|
+
} catch {
|
|
5194
|
+
continue;
|
|
5195
|
+
}
|
|
5196
|
+
}
|
|
5220
5197
|
results.sort((a, b) => a.path.localeCompare(b.path));
|
|
5221
5198
|
return { files: results };
|
|
5199
|
+
} catch {
|
|
5200
|
+
return { files: [] };
|
|
5201
|
+
}
|
|
5202
|
+
}
|
|
5203
|
+
/**
|
|
5204
|
+
* Read file content with line numbers.
|
|
5205
|
+
*
|
|
5206
|
+
* @param filePath - Absolute or relative file path
|
|
5207
|
+
* @param offset - Line offset to start reading from (0-indexed)
|
|
5208
|
+
* @param limit - Maximum number of lines to read
|
|
5209
|
+
* @returns Formatted file content with line numbers, or error message
|
|
5210
|
+
*/
|
|
5211
|
+
async read(filePath, offset = 0, limit = 500) {
|
|
5212
|
+
try {
|
|
5213
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
5214
|
+
const mimeType = getMimeType(filePath);
|
|
5215
|
+
const isBinary = !isTextMimeType(mimeType);
|
|
5216
|
+
let content;
|
|
5217
|
+
if (SUPPORTS_NOFOLLOW) {
|
|
5218
|
+
if (!(await fs.stat(resolvedPath)).isFile()) return { error: `File '${filePath}' not found` };
|
|
5219
|
+
const fd = await fs.open(resolvedPath, fs$1.constants.O_RDONLY | fs$1.constants.O_NOFOLLOW);
|
|
5220
|
+
try {
|
|
5221
|
+
if (isBinary) {
|
|
5222
|
+
const buffer = await fd.readFile();
|
|
5223
|
+
return {
|
|
5224
|
+
content: new Uint8Array(buffer),
|
|
5225
|
+
mimeType
|
|
5226
|
+
};
|
|
5227
|
+
}
|
|
5228
|
+
content = await fd.readFile({ encoding: "utf-8" });
|
|
5229
|
+
} finally {
|
|
5230
|
+
await fd.close();
|
|
5231
|
+
}
|
|
5232
|
+
} else {
|
|
5233
|
+
const stat = await fs.lstat(resolvedPath);
|
|
5234
|
+
if (stat.isSymbolicLink()) return { error: `Symlinks are not allowed: ${filePath}` };
|
|
5235
|
+
if (!stat.isFile()) return { error: `File '${filePath}' not found` };
|
|
5236
|
+
if (isBinary) {
|
|
5237
|
+
const buffer = await fs.readFile(resolvedPath);
|
|
5238
|
+
return {
|
|
5239
|
+
content: new Uint8Array(buffer),
|
|
5240
|
+
mimeType
|
|
5241
|
+
};
|
|
5242
|
+
}
|
|
5243
|
+
content = await fs.readFile(resolvedPath, "utf-8");
|
|
5244
|
+
}
|
|
5245
|
+
const emptyMsg = checkEmptyContent(content);
|
|
5246
|
+
if (emptyMsg) return {
|
|
5247
|
+
content: emptyMsg,
|
|
5248
|
+
mimeType
|
|
5249
|
+
};
|
|
5250
|
+
const lines = content.split("\n");
|
|
5251
|
+
const startIdx = offset;
|
|
5252
|
+
const endIdx = Math.min(startIdx + limit, lines.length);
|
|
5253
|
+
if (startIdx >= lines.length) return { error: `Line offset ${offset} exceeds file length (${lines.length} lines)` };
|
|
5254
|
+
return {
|
|
5255
|
+
content: lines.slice(startIdx, endIdx).join("\n"),
|
|
5256
|
+
mimeType
|
|
5257
|
+
};
|
|
5258
|
+
} catch (e) {
|
|
5259
|
+
return { error: `Error reading file '${filePath}': ${e.message}` };
|
|
5222
5260
|
}
|
|
5223
|
-
return await this.default.ls(path);
|
|
5224
|
-
}
|
|
5225
|
-
/**
|
|
5226
|
-
* Read file content, routing to appropriate backend.
|
|
5227
|
-
*
|
|
5228
|
-
* @param filePath - Absolute file path
|
|
5229
|
-
* @param offset - Line offset to start reading from (0-indexed)
|
|
5230
|
-
* @param limit - Maximum number of lines to read
|
|
5231
|
-
* @returns Formatted file content with line numbers, or error message
|
|
5232
|
-
*/
|
|
5233
|
-
async read(filePath, offset = 0, limit = 500) {
|
|
5234
|
-
const [backend, strippedKey] = this.getBackendAndKey(filePath);
|
|
5235
|
-
return await backend.read(strippedKey, offset, limit);
|
|
5236
5261
|
}
|
|
5237
5262
|
/**
|
|
5238
5263
|
* Read file content as raw FileData.
|
|
@@ -5241,167 +5266,391 @@ var CompositeBackend = class {
|
|
|
5241
5266
|
* @returns ReadRawResult with raw file data on success or error on failure
|
|
5242
5267
|
*/
|
|
5243
5268
|
async readRaw(filePath) {
|
|
5244
|
-
const
|
|
5245
|
-
|
|
5269
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
5270
|
+
const mimeType = getMimeType(filePath);
|
|
5271
|
+
const isBinary = !isTextMimeType(mimeType);
|
|
5272
|
+
let content;
|
|
5273
|
+
let stat;
|
|
5274
|
+
if (SUPPORTS_NOFOLLOW) {
|
|
5275
|
+
stat = await fs.stat(resolvedPath);
|
|
5276
|
+
if (!stat.isFile()) return { error: `File '${filePath}' not found` };
|
|
5277
|
+
const fd = await fs.open(resolvedPath, fs$1.constants.O_RDONLY | fs$1.constants.O_NOFOLLOW);
|
|
5278
|
+
try {
|
|
5279
|
+
if (isBinary) {
|
|
5280
|
+
const buffer = await fd.readFile();
|
|
5281
|
+
return { data: {
|
|
5282
|
+
content: new Uint8Array(buffer),
|
|
5283
|
+
mimeType,
|
|
5284
|
+
created_at: stat.ctime.toISOString(),
|
|
5285
|
+
modified_at: stat.mtime.toISOString()
|
|
5286
|
+
} };
|
|
5287
|
+
}
|
|
5288
|
+
content = await fd.readFile({ encoding: "utf-8" });
|
|
5289
|
+
} finally {
|
|
5290
|
+
await fd.close();
|
|
5291
|
+
}
|
|
5292
|
+
} else {
|
|
5293
|
+
stat = await fs.lstat(resolvedPath);
|
|
5294
|
+
if (stat.isSymbolicLink()) return { error: `Symlinks are not allowed: ${filePath}` };
|
|
5295
|
+
if (!stat.isFile()) return { error: `File '${filePath}' not found` };
|
|
5296
|
+
if (isBinary) {
|
|
5297
|
+
const buffer = await fs.readFile(resolvedPath);
|
|
5298
|
+
return { data: {
|
|
5299
|
+
content: new Uint8Array(buffer),
|
|
5300
|
+
mimeType,
|
|
5301
|
+
created_at: stat.ctime.toISOString(),
|
|
5302
|
+
modified_at: stat.mtime.toISOString()
|
|
5303
|
+
} };
|
|
5304
|
+
}
|
|
5305
|
+
content = await fs.readFile(resolvedPath, "utf-8");
|
|
5306
|
+
}
|
|
5307
|
+
return { data: {
|
|
5308
|
+
content,
|
|
5309
|
+
mimeType,
|
|
5310
|
+
created_at: stat.ctime.toISOString(),
|
|
5311
|
+
modified_at: stat.mtime.toISOString()
|
|
5312
|
+
} };
|
|
5246
5313
|
}
|
|
5247
5314
|
/**
|
|
5248
|
-
*
|
|
5315
|
+
* Create a new file with content.
|
|
5316
|
+
* Returns WriteResult. External storage sets filesUpdate=null.
|
|
5249
5317
|
*/
|
|
5250
|
-
async
|
|
5251
|
-
|
|
5252
|
-
const
|
|
5253
|
-
const
|
|
5254
|
-
|
|
5255
|
-
|
|
5256
|
-
|
|
5257
|
-
|
|
5258
|
-
|
|
5318
|
+
async write(filePath, content) {
|
|
5319
|
+
try {
|
|
5320
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
5321
|
+
const isBinary = !isTextMimeType(getMimeType(filePath));
|
|
5322
|
+
try {
|
|
5323
|
+
if ((await fs.lstat(resolvedPath)).isSymbolicLink()) return { error: `Cannot write to ${filePath} because it is a symlink. Symlinks are not allowed.` };
|
|
5324
|
+
return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
|
|
5325
|
+
} catch {}
|
|
5326
|
+
await fs.mkdir(path$1.dirname(resolvedPath), { recursive: true });
|
|
5327
|
+
if (SUPPORTS_NOFOLLOW) {
|
|
5328
|
+
const flags = fs$1.constants.O_WRONLY | fs$1.constants.O_CREAT | fs$1.constants.O_TRUNC | fs$1.constants.O_NOFOLLOW;
|
|
5329
|
+
const fd = await fs.open(resolvedPath, flags, 420);
|
|
5330
|
+
try {
|
|
5331
|
+
if (isBinary) {
|
|
5332
|
+
const buffer = Buffer.from(content, "base64");
|
|
5333
|
+
await fd.writeFile(buffer);
|
|
5334
|
+
} else await fd.writeFile(content, "utf-8");
|
|
5335
|
+
} finally {
|
|
5336
|
+
await fd.close();
|
|
5337
|
+
}
|
|
5338
|
+
} else if (isBinary) {
|
|
5339
|
+
const buffer = Buffer.from(content, "base64");
|
|
5340
|
+
await fs.writeFile(resolvedPath, buffer);
|
|
5341
|
+
} else await fs.writeFile(resolvedPath, content, "utf-8");
|
|
5342
|
+
return {
|
|
5343
|
+
path: filePath,
|
|
5344
|
+
filesUpdate: null
|
|
5345
|
+
};
|
|
5346
|
+
} catch (e) {
|
|
5347
|
+
return { error: `Error writing file '${filePath}': ${e.message}` };
|
|
5259
5348
|
}
|
|
5260
|
-
|
|
5261
|
-
|
|
5262
|
-
|
|
5263
|
-
|
|
5264
|
-
|
|
5265
|
-
|
|
5266
|
-
|
|
5267
|
-
const
|
|
5268
|
-
|
|
5269
|
-
|
|
5270
|
-
|
|
5271
|
-
|
|
5349
|
+
}
|
|
5350
|
+
/**
|
|
5351
|
+
* Edit a file by replacing string occurrences.
|
|
5352
|
+
* Returns EditResult. External storage sets filesUpdate=null.
|
|
5353
|
+
*/
|
|
5354
|
+
async edit(filePath, oldString, newString, replaceAll = false) {
|
|
5355
|
+
try {
|
|
5356
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
5357
|
+
let content;
|
|
5358
|
+
if (SUPPORTS_NOFOLLOW) {
|
|
5359
|
+
if (!(await fs.stat(resolvedPath)).isFile()) return { error: `Error: File '${filePath}' not found` };
|
|
5360
|
+
const fd = await fs.open(resolvedPath, fs$1.constants.O_RDONLY | fs$1.constants.O_NOFOLLOW);
|
|
5361
|
+
try {
|
|
5362
|
+
content = await fd.readFile({ encoding: "utf-8" });
|
|
5363
|
+
} finally {
|
|
5364
|
+
await fd.close();
|
|
5365
|
+
}
|
|
5366
|
+
} else {
|
|
5367
|
+
const stat = await fs.lstat(resolvedPath);
|
|
5368
|
+
if (stat.isSymbolicLink()) return { error: `Error: Symlinks are not allowed: ${filePath}` };
|
|
5369
|
+
if (!stat.isFile()) return { error: `Error: File '${filePath}' not found` };
|
|
5370
|
+
content = await fs.readFile(resolvedPath, "utf-8");
|
|
5371
|
+
}
|
|
5372
|
+
const result = performStringReplacement(content, oldString, newString, replaceAll);
|
|
5373
|
+
if (typeof result === "string") return { error: result };
|
|
5374
|
+
const [newContent, occurrences] = result;
|
|
5375
|
+
if (SUPPORTS_NOFOLLOW) {
|
|
5376
|
+
const flags = fs$1.constants.O_WRONLY | fs$1.constants.O_TRUNC | fs$1.constants.O_NOFOLLOW;
|
|
5377
|
+
const fd = await fs.open(resolvedPath, flags);
|
|
5378
|
+
try {
|
|
5379
|
+
await fd.writeFile(newContent, "utf-8");
|
|
5380
|
+
} finally {
|
|
5381
|
+
await fd.close();
|
|
5382
|
+
}
|
|
5383
|
+
} else await fs.writeFile(resolvedPath, newContent, "utf-8");
|
|
5384
|
+
return {
|
|
5385
|
+
path: filePath,
|
|
5386
|
+
filesUpdate: null,
|
|
5387
|
+
occurrences
|
|
5388
|
+
};
|
|
5389
|
+
} catch (e) {
|
|
5390
|
+
return { error: `Error editing file '${filePath}': ${e.message}` };
|
|
5272
5391
|
}
|
|
5273
|
-
return { matches: allMatches };
|
|
5274
5392
|
}
|
|
5275
5393
|
/**
|
|
5276
|
-
*
|
|
5394
|
+
* Search for a literal text pattern in files.
|
|
5395
|
+
*
|
|
5396
|
+
* Uses ripgrep if available, falling back to substring search.
|
|
5397
|
+
*
|
|
5398
|
+
* @param pattern - Literal string to search for (NOT regex).
|
|
5399
|
+
* @param dirPath - Directory or file path to search in. Defaults to current directory.
|
|
5400
|
+
* @param glob - Optional glob pattern to filter which files to search.
|
|
5401
|
+
* @returns List of GrepMatch dicts containing path, line number, and matched text.
|
|
5277
5402
|
*/
|
|
5278
|
-
async
|
|
5279
|
-
|
|
5280
|
-
|
|
5281
|
-
|
|
5282
|
-
|
|
5283
|
-
|
|
5284
|
-
return { files: (result.files || []).map((fi) => ({
|
|
5285
|
-
...fi,
|
|
5286
|
-
path: routePrefix.slice(0, -1) + fi.path
|
|
5287
|
-
})) };
|
|
5403
|
+
async grep(pattern, dirPath = "/", glob = null) {
|
|
5404
|
+
let baseFull;
|
|
5405
|
+
try {
|
|
5406
|
+
baseFull = this.resolvePath(dirPath || ".");
|
|
5407
|
+
} catch {
|
|
5408
|
+
return { matches: [] };
|
|
5288
5409
|
}
|
|
5289
|
-
|
|
5290
|
-
|
|
5291
|
-
|
|
5292
|
-
|
|
5293
|
-
const result = await backend.glob(pattern, "/");
|
|
5294
|
-
if (result.error) continue;
|
|
5295
|
-
const files = (result.files || []).map((fi) => ({
|
|
5296
|
-
...fi,
|
|
5297
|
-
path: routePrefix.slice(0, -1) + fi.path
|
|
5298
|
-
}));
|
|
5299
|
-
results.push(...files);
|
|
5410
|
+
try {
|
|
5411
|
+
await fs.stat(baseFull);
|
|
5412
|
+
} catch {
|
|
5413
|
+
return { matches: [] };
|
|
5300
5414
|
}
|
|
5301
|
-
results.
|
|
5302
|
-
|
|
5415
|
+
let results = await this.ripgrepSearch(pattern, baseFull, glob);
|
|
5416
|
+
if (results === null) results = await this.literalSearch(pattern, baseFull, glob);
|
|
5417
|
+
const matches = [];
|
|
5418
|
+
for (const [fpath, items] of Object.entries(results)) for (const [lineNum, lineText] of items) matches.push({
|
|
5419
|
+
path: fpath,
|
|
5420
|
+
line: lineNum,
|
|
5421
|
+
text: lineText
|
|
5422
|
+
});
|
|
5423
|
+
return { matches };
|
|
5424
|
+
}
|
|
5425
|
+
/**
|
|
5426
|
+
* Search using ripgrep with fixed-string (literal) mode.
|
|
5427
|
+
*
|
|
5428
|
+
* @param pattern - Literal string to search for (unescaped).
|
|
5429
|
+
* @param baseFull - Resolved base path to search in.
|
|
5430
|
+
* @param includeGlob - Optional glob pattern to filter files.
|
|
5431
|
+
* @returns Dict mapping file paths to list of (line_number, line_text) tuples.
|
|
5432
|
+
* Returns null if ripgrep is unavailable or times out.
|
|
5433
|
+
*/
|
|
5434
|
+
async ripgrepSearch(pattern, baseFull, includeGlob) {
|
|
5435
|
+
return new Promise((resolve) => {
|
|
5436
|
+
const args = ["--json", "-F"];
|
|
5437
|
+
if (includeGlob) args.push("--glob", includeGlob);
|
|
5438
|
+
args.push("--", pattern, baseFull);
|
|
5439
|
+
const proc = spawn("rg", args, { timeout: 3e4 });
|
|
5440
|
+
const results = {};
|
|
5441
|
+
let output = "";
|
|
5442
|
+
proc.stdout.on("data", (data) => {
|
|
5443
|
+
output += data.toString();
|
|
5444
|
+
});
|
|
5445
|
+
proc.on("close", (code) => {
|
|
5446
|
+
if (code !== 0 && code !== 1) {
|
|
5447
|
+
resolve(null);
|
|
5448
|
+
return;
|
|
5449
|
+
}
|
|
5450
|
+
for (const line of output.split("\n")) {
|
|
5451
|
+
if (!line.trim()) continue;
|
|
5452
|
+
try {
|
|
5453
|
+
const data = JSON.parse(line);
|
|
5454
|
+
if (data.type !== "match") continue;
|
|
5455
|
+
const pdata = data.data || {};
|
|
5456
|
+
const ftext = pdata.path?.text;
|
|
5457
|
+
if (!ftext) continue;
|
|
5458
|
+
let virtPath;
|
|
5459
|
+
if (this.virtualMode) try {
|
|
5460
|
+
const resolved = path$1.resolve(ftext);
|
|
5461
|
+
const relative = path$1.relative(this.cwd, resolved);
|
|
5462
|
+
if (relative.startsWith("..")) continue;
|
|
5463
|
+
virtPath = "/" + relative.split(path$1.sep).join("/");
|
|
5464
|
+
} catch {
|
|
5465
|
+
continue;
|
|
5466
|
+
}
|
|
5467
|
+
else virtPath = ftext;
|
|
5468
|
+
const ln = pdata.line_number;
|
|
5469
|
+
const lt = pdata.lines?.text?.replace(/\n$/, "") || "";
|
|
5470
|
+
if (ln === void 0) continue;
|
|
5471
|
+
if (!results[virtPath]) results[virtPath] = [];
|
|
5472
|
+
results[virtPath].push([ln, lt]);
|
|
5473
|
+
} catch {
|
|
5474
|
+
continue;
|
|
5475
|
+
}
|
|
5476
|
+
}
|
|
5477
|
+
resolve(results);
|
|
5478
|
+
});
|
|
5479
|
+
proc.on("error", () => {
|
|
5480
|
+
resolve(null);
|
|
5481
|
+
});
|
|
5482
|
+
});
|
|
5303
5483
|
}
|
|
5304
5484
|
/**
|
|
5305
|
-
*
|
|
5485
|
+
* Fallback search using literal substring matching when ripgrep is unavailable.
|
|
5306
5486
|
*
|
|
5307
|
-
*
|
|
5308
|
-
* @param content - File content as string
|
|
5309
|
-
* @returns WriteResult with path or error
|
|
5310
|
-
*/
|
|
5311
|
-
async write(filePath, content) {
|
|
5312
|
-
const [backend, strippedKey] = this.getBackendAndKey(filePath);
|
|
5313
|
-
return await backend.write(strippedKey, content);
|
|
5314
|
-
}
|
|
5315
|
-
/**
|
|
5316
|
-
* Edit a file, routing to appropriate backend.
|
|
5487
|
+
* Recursively searches files, respecting maxFileSizeBytes limit.
|
|
5317
5488
|
*
|
|
5318
|
-
* @param
|
|
5319
|
-
* @param
|
|
5320
|
-
* @param
|
|
5321
|
-
* @
|
|
5322
|
-
* @returns EditResult with path, occurrences, or error
|
|
5489
|
+
* @param pattern - Literal string to search for.
|
|
5490
|
+
* @param baseFull - Resolved base path to search in.
|
|
5491
|
+
* @param includeGlob - Optional glob pattern to filter files by name.
|
|
5492
|
+
* @returns Dict mapping file paths to list of (line_number, line_text) tuples.
|
|
5323
5493
|
*/
|
|
5324
|
-
async
|
|
5325
|
-
const
|
|
5326
|
-
|
|
5494
|
+
async literalSearch(pattern, baseFull, includeGlob) {
|
|
5495
|
+
const results = {};
|
|
5496
|
+
const files = await fg("**/*", {
|
|
5497
|
+
cwd: (await fs.stat(baseFull)).isDirectory() ? baseFull : path$1.dirname(baseFull),
|
|
5498
|
+
absolute: true,
|
|
5499
|
+
onlyFiles: true,
|
|
5500
|
+
dot: true
|
|
5501
|
+
});
|
|
5502
|
+
for (const fp of files) try {
|
|
5503
|
+
if (!isTextMimeType(getMimeType(fp))) continue;
|
|
5504
|
+
if (includeGlob && !micromatch.isMatch(path$1.basename(fp), includeGlob)) continue;
|
|
5505
|
+
if ((await fs.stat(fp)).size > this.maxFileSizeBytes) continue;
|
|
5506
|
+
const lines = (await fs.readFile(fp, "utf-8")).split("\n");
|
|
5507
|
+
for (let i = 0; i < lines.length; i++) {
|
|
5508
|
+
const line = lines[i];
|
|
5509
|
+
if (line.includes(pattern)) {
|
|
5510
|
+
let virtPath;
|
|
5511
|
+
if (this.virtualMode) try {
|
|
5512
|
+
const relative = path$1.relative(this.cwd, fp);
|
|
5513
|
+
if (relative.startsWith("..")) continue;
|
|
5514
|
+
virtPath = "/" + relative.split(path$1.sep).join("/");
|
|
5515
|
+
} catch {
|
|
5516
|
+
continue;
|
|
5517
|
+
}
|
|
5518
|
+
else virtPath = fp;
|
|
5519
|
+
if (!results[virtPath]) results[virtPath] = [];
|
|
5520
|
+
results[virtPath].push([i + 1, line]);
|
|
5521
|
+
}
|
|
5522
|
+
}
|
|
5523
|
+
} catch {
|
|
5524
|
+
continue;
|
|
5525
|
+
}
|
|
5526
|
+
return results;
|
|
5327
5527
|
}
|
|
5328
5528
|
/**
|
|
5329
|
-
*
|
|
5330
|
-
* Execution is not path-specific, so it always delegates to the default backend.
|
|
5331
|
-
*
|
|
5332
|
-
* @param command - Full shell command string to execute
|
|
5333
|
-
* @returns ExecuteResponse with combined output, exit code, and truncation flag
|
|
5334
|
-
* @throws Error if the default backend doesn't support command execution
|
|
5529
|
+
* Structured glob matching returning FileInfo objects.
|
|
5335
5530
|
*/
|
|
5336
|
-
|
|
5337
|
-
if (
|
|
5338
|
-
|
|
5531
|
+
async glob(pattern, searchPath = "/") {
|
|
5532
|
+
if (pattern.startsWith("/")) pattern = pattern.substring(1);
|
|
5533
|
+
const resolvedSearchPath = searchPath === "/" ? this.cwd : this.resolvePath(searchPath);
|
|
5534
|
+
try {
|
|
5535
|
+
if (!(await fs.stat(resolvedSearchPath)).isDirectory()) return { files: [] };
|
|
5536
|
+
} catch {
|
|
5537
|
+
return { files: [] };
|
|
5538
|
+
}
|
|
5539
|
+
const results = [];
|
|
5540
|
+
try {
|
|
5541
|
+
const matches = await fg(pattern, {
|
|
5542
|
+
cwd: resolvedSearchPath,
|
|
5543
|
+
absolute: true,
|
|
5544
|
+
onlyFiles: true,
|
|
5545
|
+
dot: true
|
|
5546
|
+
});
|
|
5547
|
+
for (const matchedPath of matches) try {
|
|
5548
|
+
const stat = await fs.stat(matchedPath);
|
|
5549
|
+
if (!stat.isFile()) continue;
|
|
5550
|
+
const normalizedPath = matchedPath.split("/").join(path$1.sep);
|
|
5551
|
+
if (!this.virtualMode) results.push({
|
|
5552
|
+
path: normalizedPath,
|
|
5553
|
+
is_dir: false,
|
|
5554
|
+
size: stat.size,
|
|
5555
|
+
modified_at: stat.mtime.toISOString()
|
|
5556
|
+
});
|
|
5557
|
+
else {
|
|
5558
|
+
const cwdStr = this.cwd.endsWith(path$1.sep) ? this.cwd : this.cwd + path$1.sep;
|
|
5559
|
+
let relativePath;
|
|
5560
|
+
if (normalizedPath.startsWith(cwdStr)) relativePath = normalizedPath.substring(cwdStr.length);
|
|
5561
|
+
else if (normalizedPath.startsWith(this.cwd)) relativePath = normalizedPath.substring(this.cwd.length).replace(/^[/\\]/, "");
|
|
5562
|
+
else relativePath = normalizedPath;
|
|
5563
|
+
relativePath = relativePath.split(path$1.sep).join("/");
|
|
5564
|
+
const virt = "/" + relativePath;
|
|
5565
|
+
results.push({
|
|
5566
|
+
path: virt,
|
|
5567
|
+
is_dir: false,
|
|
5568
|
+
size: stat.size,
|
|
5569
|
+
modified_at: stat.mtime.toISOString()
|
|
5570
|
+
});
|
|
5571
|
+
}
|
|
5572
|
+
} catch {
|
|
5573
|
+
continue;
|
|
5574
|
+
}
|
|
5575
|
+
} catch {}
|
|
5576
|
+
results.sort((a, b) => a.path.localeCompare(b.path));
|
|
5577
|
+
return { files: results };
|
|
5339
5578
|
}
|
|
5340
5579
|
/**
|
|
5341
|
-
* Upload multiple files
|
|
5580
|
+
* Upload multiple files to the filesystem.
|
|
5342
5581
|
*
|
|
5343
5582
|
* @param files - List of [path, content] tuples to upload
|
|
5344
5583
|
* @returns List of FileUploadResponse objects, one per input file
|
|
5345
5584
|
*/
|
|
5346
5585
|
async uploadFiles(files) {
|
|
5347
|
-
const
|
|
5348
|
-
const
|
|
5349
|
-
|
|
5350
|
-
|
|
5351
|
-
|
|
5352
|
-
|
|
5353
|
-
|
|
5354
|
-
|
|
5355
|
-
|
|
5356
|
-
|
|
5586
|
+
const responses = [];
|
|
5587
|
+
for (const [filePath, content] of files) try {
|
|
5588
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
5589
|
+
await fs.mkdir(path$1.dirname(resolvedPath), { recursive: true });
|
|
5590
|
+
await fs.writeFile(resolvedPath, content);
|
|
5591
|
+
responses.push({
|
|
5592
|
+
path: filePath,
|
|
5593
|
+
error: null
|
|
5594
|
+
});
|
|
5595
|
+
} catch (e) {
|
|
5596
|
+
if (e.code === "ENOENT") responses.push({
|
|
5597
|
+
path: filePath,
|
|
5598
|
+
error: "file_not_found"
|
|
5599
|
+
});
|
|
5600
|
+
else if (e.code === "EACCES") responses.push({
|
|
5601
|
+
path: filePath,
|
|
5602
|
+
error: "permission_denied"
|
|
5603
|
+
});
|
|
5604
|
+
else if (e.code === "EISDIR") responses.push({
|
|
5605
|
+
path: filePath,
|
|
5606
|
+
error: "is_directory"
|
|
5607
|
+
});
|
|
5608
|
+
else responses.push({
|
|
5609
|
+
path: filePath,
|
|
5610
|
+
error: "invalid_path"
|
|
5357
5611
|
});
|
|
5358
5612
|
}
|
|
5359
|
-
|
|
5360
|
-
if (!backend.uploadFiles) throw new Error("Backend does not support uploadFiles");
|
|
5361
|
-
const batchFiles = batch.map((b) => [b.path, b.content]);
|
|
5362
|
-
const batchResponses = await backend.uploadFiles(batchFiles);
|
|
5363
|
-
for (let i = 0; i < batch.length; i++) {
|
|
5364
|
-
const originalIdx = batch[i].idx;
|
|
5365
|
-
results[originalIdx] = {
|
|
5366
|
-
path: files[originalIdx][0],
|
|
5367
|
-
error: batchResponses[i]?.error ?? null
|
|
5368
|
-
};
|
|
5369
|
-
}
|
|
5370
|
-
}
|
|
5371
|
-
return results;
|
|
5613
|
+
return responses;
|
|
5372
5614
|
}
|
|
5373
5615
|
/**
|
|
5374
|
-
* Download multiple files
|
|
5616
|
+
* Download multiple files from the filesystem.
|
|
5375
5617
|
*
|
|
5376
5618
|
* @param paths - List of file paths to download
|
|
5377
5619
|
* @returns List of FileDownloadResponse objects, one per input path
|
|
5378
5620
|
*/
|
|
5379
5621
|
async downloadFiles(paths) {
|
|
5380
|
-
const
|
|
5381
|
-
const
|
|
5382
|
-
|
|
5383
|
-
const
|
|
5384
|
-
|
|
5385
|
-
|
|
5386
|
-
|
|
5387
|
-
|
|
5388
|
-
|
|
5622
|
+
const responses = [];
|
|
5623
|
+
for (const filePath of paths) try {
|
|
5624
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
5625
|
+
const content = await fs.readFile(resolvedPath);
|
|
5626
|
+
responses.push({
|
|
5627
|
+
path: filePath,
|
|
5628
|
+
content,
|
|
5629
|
+
error: null
|
|
5630
|
+
});
|
|
5631
|
+
} catch (e) {
|
|
5632
|
+
if (e.code === "ENOENT") responses.push({
|
|
5633
|
+
path: filePath,
|
|
5634
|
+
content: null,
|
|
5635
|
+
error: "file_not_found"
|
|
5636
|
+
});
|
|
5637
|
+
else if (e.code === "EACCES") responses.push({
|
|
5638
|
+
path: filePath,
|
|
5639
|
+
content: null,
|
|
5640
|
+
error: "permission_denied"
|
|
5641
|
+
});
|
|
5642
|
+
else if (e.code === "EISDIR") responses.push({
|
|
5643
|
+
path: filePath,
|
|
5644
|
+
content: null,
|
|
5645
|
+
error: "is_directory"
|
|
5646
|
+
});
|
|
5647
|
+
else responses.push({
|
|
5648
|
+
path: filePath,
|
|
5649
|
+
content: null,
|
|
5650
|
+
error: "invalid_path"
|
|
5389
5651
|
});
|
|
5390
5652
|
}
|
|
5391
|
-
|
|
5392
|
-
if (!backend.downloadFiles) throw new Error("Backend does not support downloadFiles");
|
|
5393
|
-
const batchPaths = batch.map((b) => b.path);
|
|
5394
|
-
const batchResponses = await backend.downloadFiles(batchPaths);
|
|
5395
|
-
for (let i = 0; i < batch.length; i++) {
|
|
5396
|
-
const originalIdx = batch[i].idx;
|
|
5397
|
-
results[originalIdx] = {
|
|
5398
|
-
path: paths[originalIdx],
|
|
5399
|
-
content: batchResponses[i]?.content ?? null,
|
|
5400
|
-
error: batchResponses[i]?.error ?? null
|
|
5401
|
-
};
|
|
5402
|
-
}
|
|
5403
|
-
}
|
|
5404
|
-
return results;
|
|
5653
|
+
return responses;
|
|
5405
5654
|
}
|
|
5406
5655
|
};
|
|
5407
5656
|
//#endregion
|
|
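The new `FilesystemBackend.resolvePath` in the hunk above applies two layers of defense in virtual mode: it rejects `..` and `~` in the incoming path, then re-checks containment after resolution. A minimal standalone sketch of that containment predicate, assuming only Node's built-in `path` module (the helper name `isInsideRoot` is illustrative, not part of the package):

```js
import path from "node:path";

// Containment rule mirrored from FilesystemBackend.resolvePath (virtual mode):
// a resolved path stays inside the root iff path.relative(root, full) neither
// climbs upward ("..") nor comes back absolute (e.g. a different Windows drive).
function isInsideRoot(root, virtualPath) {
  const full = path.resolve(root, virtualPath.replace(/^\//, ""));
  const relative = path.relative(root, full);
  return !relative.startsWith("..") && !path.isAbsolute(relative);
}

console.log(isInsideRoot("/srv/agent", "/docs/readme.md"));        // true
console.log(isInsideRoot("/srv/agent", "/docs/../../etc/passwd")); // false
```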
@@ -5814,7 +6063,7 @@ const STAT_C_SCRIPT = "for f; do if [ -d \"$f\" ]; then t=d; elif [ -L \"$f\" ];
 */
 function buildLsCommand(dirPath) {
 const quotedPath = shellQuote(dirPath);
-const findBase = `find ${quotedPath} -maxdepth 1 -not -path ${quotedPath}`;
+const findBase = `find -L ${quotedPath} -maxdepth 1 -not -path ${quotedPath}`;
 return `if find /dev/null -maxdepth 0 -printf '' 2>/dev/null; then ${findBase} -printf '%s\\t%T@\\t%y\\t%p\\n' 2>/dev/null; elif stat -c %s /dev/null >/dev/null 2>&1; then ${findBase} -exec sh -c '${STAT_C_SCRIPT}' _ {} +; else ${findBase} -exec stat -f '%z\t%m\t%Sp\t%N' {} + 2>/dev/null; fi || true`;
 }
 /**
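`FilesystemBackend.ripgrepSearch` (two hunks above) drives `rg --json -F`, where every stdout line is a JSON event; only `type: "match"` events carry hits, and ripgrep exits 1 for "no matches", which is why the close handler accepts both 0 and 1. A minimal sketch of the same event parsing, assuming the full stdout has already been captured as a string:

```js
// Parse `rg --json` output into { [filePath]: [[lineNumber, lineText], ...] },
// the intermediate shape ripgrepSearch resolves with.
function parseRipgrepJson(output) {
  const results = {};
  for (const line of output.split("\n")) {
    if (!line.trim()) continue;
    let event;
    try {
      event = JSON.parse(line);
    } catch {
      continue; // tolerate partial or non-JSON lines
    }
    if (event.type !== "match") continue; // skip begin/end/summary events
    const file = event.data?.path?.text;
    const lineNumber = event.data?.line_number;
    if (!file || lineNumber === undefined) continue;
    const text = event.data?.lines?.text?.replace(/\n$/, "") ?? "";
    (results[file] ??= []).push([lineNumber, text]);
  }
  return results;
}
```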
@@ -5825,7 +6074,7 @@ function buildLsCommand(dirPath) {
 */
 function buildFindCommand(searchPath) {
 const quotedPath = shellQuote(searchPath);
-const findBase = `find ${quotedPath} -not -path ${quotedPath}`;
+const findBase = `find -L ${quotedPath} -not -path ${quotedPath}`;
 return `if find /dev/null -maxdepth 0 -printf '' 2>/dev/null; then ${findBase} -printf '%s\\t%T@\\t%y\\t%p\\n' 2>/dev/null; elif stat -c %s /dev/null >/dev/null 2>&1; then ${findBase} -exec sh -c '${STAT_C_SCRIPT}' _ {} +; else ${findBase} -exec stat -f '%z\t%m\t%Sp\t%N' {} + 2>/dev/null; fi || true`;
 }
 /**
@@ -5858,7 +6107,7 @@ function buildReadCommand(filePath, offset, limit) {
 function buildGrepCommand(pattern, searchPath, globPattern) {
 const patternEscaped = shellQuote(pattern);
 const searchPathQuoted = shellQuote(searchPath);
-if (globPattern) return `find ${searchPathQuoted} -type f -name ${shellQuote(globPattern)} -exec grep -HnF -e ${patternEscaped} {} + 2>/dev/null || true`;
+if (globPattern) return `find -L ${searchPathQuoted} -type f -name ${shellQuote(globPattern)} -exec grep -HnF -e ${patternEscaped} {} + 2>/dev/null || true`;
 return `grep -rHnF -e ${patternEscaped} ${searchPathQuoted} 2>/dev/null || true`;
 }
 /**
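The three sandbox command builders above now pass `-L` to `find`, so symlinked directories inside a sandbox are followed when listing, walking, and grepping. For illustrative inputs, `buildGrepCommand` with a glob filter now yields a command of roughly this shape (quoting simplified; the real code shell-quotes each value via `shellQuote`):

```js
// Hypothetical inputs: pattern "TODO", searchPath "/workspace", glob "*.ts".
const cmd =
  "find -L '/workspace' -type f -name '*.ts' " +
  "-exec grep -HnF -e 'TODO' {} + 2>/dev/null || true";
```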
@@ -6153,6 +6402,8 @@ var BaseSandbox = class {
 * await sandbox.close();
 * }
 * ```
+*
+* @module
 */
 /**
 * LangSmith Sandbox backend for deepagents.
@@ -6162,6 +6413,8 @@ var BaseSandbox = class {
 *
 * Use the static `LangSmithSandbox.create()` factory for the simplest setup,
 * or construct directly with an existing `Sandbox` instance.
+*
+* @experimental This feature is experimental, and breaking changes are expected.
 */
 var LangSmithSandbox = class LangSmithSandbox extends BaseSandbox {
 #sandbox;
@@ -6454,7 +6707,7 @@ function isAnthropicModel(model) {
 * ```
 */
 function createDeepAgent(params = {}) {
-const { model =
+const { model = "anthropic:claude-sonnet-4-6", tools = [], systemPrompt, middleware: customMiddleware = [], subagents = [], responseFormat, contextSchema, checkpointer, store, backend = (config) => new StateBackend(config), interruptOn, name, memory, skills, permissions = [] } = params;
 const collidingTools = tools.map((t) => t.name).filter((n) => typeof n === "string" && BUILTIN_TOOL_NAMES.has(n));
 if (collidingTools.length > 0) throw new ConfigurationError(`Tool name(s) [${collidingTools.join(", ")}] conflict with built-in tools. Rename your custom tools to avoid this.`, "TOOL_NAME_COLLISION");
 const anthropicModel = isAnthropicModel(model);
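`createDeepAgent` now defaults `model` to the string identifier "anthropic:claude-sonnet-4-6" (the previous constructed default is truncated in this hunk) and accepts a top-level `permissions` array that is threaded into the filesystem middleware. A hedged usage sketch relying only on parameters visible in this diff (the shape of individual permission entries is not shown here, so the array is left empty):

```js
import { createDeepAgent } from "deepagents";

const agent = createDeepAgent({
  // model omitted: falls back to "anthropic:claude-sonnet-4-6"
  tools: [],
  permissions: [], // forwarded as createFilesystemMiddleware({ backend, permissions })
});
```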
@@ -6470,13 +6723,14 @@ function createDeepAgent(params = {}) {
 * If a custom subagent needs skills, it must specify its own `skills` array.
 */
 const normalizeSubagentSpec = (input) => {
+const effectivePermissions = input.permissions ?? permissions;
 const subagentMiddleware = [
 todoListMiddleware(),
-createFilesystemMiddleware({
-createSummarizationMiddleware({
+createFilesystemMiddleware({
 backend,
-
+permissions: effectivePermissions
 }),
+createSummarizationMiddleware({ backend }),
 createPatchToolCallsMiddleware(),
 ...input.skills != null && input.skills.length > 0 ? [createSkillsMiddleware({
 backend,
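Within `normalizeSubagentSpec`, each subagent's effective permissions are resolved with the nullish-coalescing fallback shown above: a subagent-level `permissions` array always wins, and the parent's top-level array applies only when the subagent leaves the field unset. Note the edge case this implies:

```js
const effectivePermissions = input.permissions ?? permissions;

// input.permissions === undefined -> parent-level permissions apply
// input.permissions === []        -> subagent keeps an explicitly empty list;
//                                    ?? does not treat [] as "missing"
```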
@@ -6509,7 +6763,10 @@ function createDeepAgent(params = {}) {
 })] : [];
 const [todoMiddleware, fsMiddleware, subagentMiddleware, summarizationMiddleware, patchToolCallsMiddleware] = [
 todoListMiddleware(),
-createFilesystemMiddleware({
+createFilesystemMiddleware({
+backend,
+permissions
+}),
 createSubAgentMiddleware({
 defaultModel: model,
 defaultTools: tools,
@@ -6517,10 +6774,7 @@ function createDeepAgent(params = {}) {
 subagents: inlineSubagents,
 generalPurposeAgent: false
 }),
-createSummarizationMiddleware({
-model,
-backend
-}),
+createSummarizationMiddleware({ backend }),
 createPatchToolCallsMiddleware()
 ];
 const middleware = [
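Both the main-agent and subagent call sites now construct the summarization middleware with only a backend; the explicit `model` option was dropped, so model selection presumably happens inside `createSummarizationMiddleware` itself (not shown in this diff). The call-site change, side by side:

```js
// 1.9.0-alpha.1 (main agent call site):
createSummarizationMiddleware({
  model,
  backend
});

// 1.9.1:
createSummarizationMiddleware({ backend });
```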