deepagents 1.9.0 → 1.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -22,7 +22,6 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
22
22
  }) : target, mod));
23
23
  //#endregion
24
24
  let langchain = require("langchain");
25
- let _langchain_anthropic = require("@langchain/anthropic");
26
25
  let _langchain_langgraph = require("@langchain/langgraph");
27
26
  let zod_v4 = require("zod/v4");
28
27
  let micromatch = require("micromatch");
@@ -279,7 +278,7 @@ function truncateIfTooLong(result) {
279
278
  * validatePath("C:\\Users\\file") // Throws: Windows absolute paths not supported
280
279
  * ```
281
280
  */
282
- function validatePath(path$6) {
281
+ function validatePath$1(path$6) {
283
282
  const pathStr = path$6 || "/";
284
283
  if (!pathStr || pathStr.trim() === "") throw new Error("Path cannot be empty");
285
284
  let normalized = pathStr.startsWith("/") ? pathStr : "/" + pathStr;
@@ -305,7 +304,7 @@ function validatePath(path$6) {
305
304
  function globSearchFiles(files, pattern, path$8 = "/") {
306
305
  let normalizedPath;
307
306
  try {
308
- normalizedPath = validatePath(path$8);
307
+ normalizedPath = validatePath$1(path$8);
309
308
  } catch {
310
309
  return "No files found";
311
310
  }
@@ -337,7 +336,7 @@ function globSearchFiles(files, pattern, path$8 = "/") {
337
336
  function grepMatchesFromFiles(files, pattern, path$10 = null, glob = null) {
338
337
  let normalizedPath;
339
338
  try {
340
- normalizedPath = validatePath(path$10);
339
+ normalizedPath = validatePath$1(path$10);
341
340
  } catch {
342
341
  return [];
343
342
  }
@@ -572,6 +571,7 @@ async function resolveBackend(backend, runtime) {
572
571
  //#endregion
573
572
  //#region src/backends/state.ts
574
573
  const PREGEL_SEND_KEY = "__pregel_send";
574
+ const PREGEL_READ_KEY = "__pregel_read";
575
575
  /**
576
576
  * Backend that stores files in agent state (ephemeral).
577
577
  *
@@ -609,12 +609,13 @@ var StateBackend = class {
609
609
  * Get files from current state.
610
610
  *
611
611
  * In legacy mode, reads from the injected {@link BackendRuntime}.
612
- * In zero-arg mode, reads from the LangGraph execution context via
613
- * {@link getCurrentTaskInput}.
612
+ * In zero-arg mode, reads via {@link PREGEL_READ_KEY} with fresh=true,
613
+ * which applies any pending task writes through the reducer before returning.
614
614
  */
615
- getFiles() {
616
- if (this.runtime) return this.runtime.state.files || {};
617
- return (0, _langchain_langgraph.getCurrentTaskInput)()?.files || {};
615
+ get files() {
616
+ if (this.runtime) return this.runtime.state.files ?? {};
617
+ const read = (0, _langchain_langgraph.getConfig)().configurable?.[PREGEL_READ_KEY];
618
+ return read?.("files", true) ?? {};
618
619
  }
619
620
  /**
620
621
  * Push a files state update through LangGraph's internal send channel.
@@ -639,7 +640,7 @@ var StateBackend = class {
639
640
  * Directories have a trailing / in their path and is_dir=true.
640
641
  */
641
642
  ls(path) {
642
- const files = this.getFiles();
643
+ const files = this.files;
643
644
  const infos = [];
644
645
  const subdirs = /* @__PURE__ */ new Set();
645
646
  const normalizedPath = path.endsWith("/") ? path : path + "/";
@@ -680,7 +681,7 @@ var StateBackend = class {
680
681
  * @returns ReadResult with content on success or error on failure
681
682
  */
682
683
  read(filePath, offset = 0, limit = 500) {
683
- const fileData = this.getFiles()[filePath];
684
+ const fileData = this.files[filePath];
684
685
  if (!fileData) return { error: `File '${filePath}' not found` };
685
686
  const fileDataV2 = migrateToFileDataV2(fileData, filePath);
686
687
  if (!isTextMimeType(fileDataV2.mimeType)) return {
@@ -700,7 +701,7 @@ var StateBackend = class {
700
701
  * @returns ReadRawResult with raw file data on success or error on failure
701
702
  */
702
703
  readRaw(filePath) {
703
- const fileData = this.getFiles()[filePath];
704
+ const fileData = this.files[filePath];
704
705
  if (!fileData) return { error: `File '${filePath}' not found` };
705
706
  return { data: fileData };
706
707
  }
@@ -709,7 +710,7 @@ var StateBackend = class {
709
710
  * Returns WriteResult with filesUpdate to update LangGraph state.
710
711
  */
711
712
  write(filePath, content) {
712
- if (filePath in this.getFiles()) return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
713
+ if (filePath in this.files) return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
713
714
  const mimeType = getMimeType(filePath);
714
715
  const newFileData = createFileData(content, void 0, this.fileFormat, mimeType);
715
716
  const update = { [filePath]: newFileData };
@@ -727,7 +728,7 @@ var StateBackend = class {
727
728
  * Returns EditResult with filesUpdate and occurrences.
728
729
  */
729
730
  edit(filePath, oldString, newString, replaceAll = false) {
730
- const fileData = this.getFiles()[filePath];
731
+ const fileData = this.files[filePath];
731
732
  if (!fileData) return { error: `Error: File '${filePath}' not found` };
732
733
  const result = performStringReplacement(fileDataToString(fileData), oldString, newString, replaceAll);
733
734
  if (typeof result === "string") return { error: result };
@@ -752,13 +753,14 @@ var StateBackend = class {
752
753
  * Binary files are skipped.
753
754
  */
754
755
  grep(pattern, path = "/", glob = null) {
755
- return { matches: grepMatchesFromFiles(this.getFiles(), pattern, path, glob) };
756
+ const files = this.files;
757
+ return { matches: grepMatchesFromFiles(files, pattern, path, glob) };
756
758
  }
757
759
  /**
758
760
  * Structured glob matching returning FileInfo objects.
759
761
  */
760
762
  glob(pattern, path = "/") {
761
- const files = this.getFiles();
763
+ const files = this.files;
762
764
  const result = globSearchFiles(files, pattern, path);
763
765
  if (result === "No files found") return { files: [] };
764
766
  const paths = result.split("\n");
@@ -816,7 +818,7 @@ var StateBackend = class {
816
818
  * @returns List of FileDownloadResponse objects, one per input path
817
819
  */
818
820
  downloadFiles(paths) {
819
- const files = this.getFiles();
821
+ const files = this.files;
820
822
  const responses = [];
821
823
  for (const path of paths) {
822
824
  const fileData = files[path];
@@ -846,277 +848,633 @@ var StateBackend = class {
846
848
  }
847
849
  };
848
850
  //#endregion
849
- //#region src/middleware/fs.ts
850
- /**
851
- * Middleware for providing filesystem tools to an agent.
852
- *
853
- * Provides ls, read_file, write_file, edit_file, glob, and grep tools with support for:
854
- * - Pluggable backends (StateBackend, StoreBackend, FilesystemBackend, CompositeBackend)
855
- * - Tool result eviction for large outputs
856
- */
857
- const INT_FORMATTER = new Intl.NumberFormat("en-US");
858
- /**
859
- * Tools that should be excluded from the large result eviction logic.
860
- *
861
- * This array contains tools that should NOT have their results evicted to the filesystem
862
- * when they exceed token limits. Tools are excluded for different reasons:
863
- *
864
- * 1. Tools with built-in truncation (ls, glob, grep):
865
- * These tools truncate their own output when it becomes too large. When these tools
866
- * produce truncated output due to many matches, it typically indicates the query
867
- * needs refinement rather than full result preservation. In such cases, the truncated
868
- * matches are potentially more like noise and the LLM should be prompted to narrow
869
- * its search criteria instead.
870
- *
871
- * 2. Tools with problematic truncation behavior (read_file):
872
- * read_file is tricky to handle as the failure mode here is single long lines
873
- * (e.g., imagine a jsonl file with very long payloads on each line). If we try to
874
- * truncate the result of read_file, the agent may then attempt to re-read the
875
- * truncated file using read_file again, which won't help.
876
- *
877
- * 3. Tools that never exceed limits (edit_file, write_file):
878
- * These tools return minimal confirmation messages and are never expected to produce
879
- * output large enough to exceed token limits, so checking them would be unnecessary.
880
- */
881
- /**
882
- * All tool names registered by FilesystemMiddleware.
883
- * This is the single source of truth — used by createDeepAgent to detect
884
- * collisions with user-supplied tools at construction time.
885
- */
886
- const FILESYSTEM_TOOL_NAMES = [
887
- "ls",
888
- "read_file",
889
- "write_file",
890
- "edit_file",
891
- "glob",
892
- "grep",
893
- "execute"
894
- ];
895
- const TOOLS_EXCLUDED_FROM_EVICTION = [
896
- "ls",
897
- "glob",
898
- "grep",
899
- "read_file",
900
- "edit_file",
901
- "write_file"
902
- ];
903
- /**
904
- * Maximum size for binary (non-text) files read via read_file, in bytes.
905
- * Base64-encoded content is ~33% larger, so 10MB raw ≈ 13.3MB in context.
906
- * This keeps inline multimodal payloads within all major provider limits.
907
- */
908
- const MAX_BINARY_READ_SIZE_BYTES = 10 * 1024 * 1024;
909
- /**
910
- * Template for truncation message in read_file.
911
- * {file_path} will be filled in at runtime.
912
- */
913
- const READ_FILE_TRUNCATION_MSG = `
914
-
915
- [Output was truncated due to size limits. The file content is very large. Consider reformatting the file to make it easier to navigate. For example, if this is JSON, use execute(command='jq . {file_path}') to pretty-print it with line breaks. For other formats, you can use appropriate formatting tools to split long lines.]`;
916
- /**
917
- * Message template for evicted tool results.
918
- */
919
- const TOO_LARGE_TOOL_MSG = langchain.context`
920
- Tool result too large, the result of this tool call {tool_call_id} was saved in the filesystem at this path: {file_path}
921
- You can read the result from the filesystem by using the read_file tool, but make sure to only read part of the result at a time.
922
- You can do this by specifying an offset and limit in the read_file tool call.
923
- For example, to read the first 100 lines, you can use the read_file tool with offset=0 and limit=100.
924
-
925
- Here is a preview showing the head and tail of the result (lines of the form
926
- ... [N lines truncated] ...
927
- indicate omitted lines in the middle of the content):
928
-
929
- {content_sample}
930
- `;
931
- /**
932
- * Message template for evicted HumanMessages.
933
- */
934
- const TOO_LARGE_HUMAN_MSG = `Message content too large and was saved to the filesystem at: {file_path}
935
-
936
- You can read the full content using the read_file tool with pagination (offset and limit parameters).
937
-
938
- Here is a preview showing the head and tail of the content:
939
-
940
- {content_sample}`;
851
+ //#region src/permissions/enforce.ts
941
852
  /**
942
- * Extract text content from a message.
943
- *
944
- * For string content, returns it directly. For array content (mixed block types
945
- * like text + image), joins all text blocks. Returns empty string if no text found.
853
+ * Validate permission rule paths at setup time. Throws if any path is
854
+ * relative, contains `..`, or contains `~`.
946
855
  */
947
- function extractTextFromMessage(message) {
948
- if (typeof message.content === "string") return message.content;
949
- if (Array.isArray(message.content)) return message.content.filter((block) => block.type === "text" && typeof block.text === "string").map((block) => block.text).join("\n");
950
- return String(message.content);
856
+ function validatePermissionPaths(permissions) {
857
+ for (const permission of permissions) for (const path of permission.paths) validatePath(path);
951
858
  }
952
859
  /**
953
- * Build replacement content for an evicted HumanMessage, preserving non-text blocks.
954
- *
955
- * For plain string content, returns the replacement text directly. For list content
956
- * with mixed block types (e.g., text + image), replaces all text blocks with a single
957
- * text block containing the replacement text while keeping non-text blocks intact.
958
- */
959
- function buildEvictedHumanContent(message, replacementText) {
960
- if (typeof message.content === "string") return replacementText;
961
- if (Array.isArray(message.content)) {
962
- const mediaBlocks = message.content.filter((block) => typeof block === "object" && block !== null && block.type !== "text");
963
- if (mediaBlocks.length === 0) return replacementText;
964
- return [{
965
- type: "text",
966
- text: replacementText
967
- }, ...mediaBlocks];
968
- }
969
- return replacementText;
860
+ * Canonicalize and validate an absolute path before permission checking.
861
+ *
862
+ * Throws for:
863
+ * - Empty or non-string input
864
+ * - Non-absolute paths (must start with `/`)
865
+ * - Paths containing `..`
866
+ * - Paths containing `~`
867
+ */
868
+ function validatePath(raw) {
869
+ if (typeof raw !== "string" || raw.length === 0) throw new Error("path must be a non-empty string");
870
+ if (!raw.startsWith("/")) throw new Error(`path must be absolute: ${JSON.stringify(raw)}`);
871
+ const segments = raw.split("/").filter((s) => s.length > 0);
872
+ if (segments.includes("..")) throw new Error(`path must not contain "..": ${JSON.stringify(raw)}`);
873
+ if (segments.includes("~")) throw new Error(`path must not contain "~": ${JSON.stringify(raw)}`);
874
+ return `/${segments.join("/")}`;
970
875
  }
971
876
  /**
972
- * Build a truncated HumanMessage for the model request.
877
+ * Test whether `path` matches a glob `pattern`.
973
878
  *
974
- * Computes a preview from the full content still in state and returns a
975
- * lightweight replacement the model will see. Pure string computation — no
976
- * backend I/O.
977
- */
978
- function buildTruncatedHumanMessage(message, filePath) {
979
- const contentSample = createContentPreview(extractTextFromMessage(message));
980
- return new langchain.HumanMessage({
981
- content: buildEvictedHumanContent(message, TOO_LARGE_HUMAN_MSG.replace("{file_path}", filePath).replace("{content_sample}", contentSample)),
982
- id: message.id,
983
- additional_kwargs: { ...message.additional_kwargs },
984
- response_metadata: { ...message.response_metadata }
985
- });
986
- }
987
- /**
988
- * Create a preview of content showing head and tail with truncation marker.
879
+ * Supports:
880
+ * - `**` any number of directory levels
881
+ * - `*` — within a single path segment
882
+ * - `{a,b}` — brace expansion
989
883
  *
990
- * @param contentStr - The full content string to preview.
991
- * @param headLines - Number of lines to show from the start (default: 5).
992
- * @param tailLines - Number of lines to show from the end (default: 5).
993
- * @returns Formatted preview string with line numbers.
884
+ * Uses `micromatch` with `dot: true` so dotfiles are matched by default.
994
885
  */
995
- function createContentPreview(contentStr, headLines = 5, tailLines = 5) {
996
- const lines = contentStr.split("\n");
997
- if (lines.length <= headLines + tailLines) return formatContentWithLineNumbers(lines.map((line) => line.substring(0, 1e3)), 1);
998
- const head = lines.slice(0, headLines).map((line) => line.substring(0, 1e3));
999
- const tail = lines.slice(-tailLines).map((line) => line.substring(0, 1e3));
1000
- const headSample = formatContentWithLineNumbers(head, 1);
1001
- const truncationNotice = `\n... [${lines.length - headLines - tailLines} lines truncated] ...\n`;
1002
- const tailSample = formatContentWithLineNumbers(tail, lines.length - tailLines + 1);
1003
- return headSample + truncationNotice + tailSample;
886
+ function globMatch(path, pattern) {
887
+ return micromatch.default.isMatch(path, pattern, { dot: true });
1004
888
  }
1005
889
  /**
1006
- * Zod schema for legacy FileDataV1 (content as line array).
1007
- */
1008
- const FileDataV1Schema = zod_v4.z.object({
1009
- content: zod_v4.z.array(zod_v4.z.string()),
1010
- created_at: zod_v4.z.string(),
1011
- modified_at: zod_v4.z.string()
1012
- });
1013
- /**
1014
- * Zod schema for FileDataV2 (content as string for text or Uint8Array for binary).
1015
- */
1016
- const FileDataV2Schema = zod_v4.z.object({
1017
- content: zod_v4.z.union([zod_v4.z.string(), zod_v4.z.instanceof(Uint8Array)]),
1018
- mimeType: zod_v4.z.string(),
1019
- created_at: zod_v4.z.string(),
1020
- modified_at: zod_v4.z.string()
1021
- });
1022
- /**
1023
- * Zod v3 schema for FileData (re-export from backends)
1024
- */
1025
- const FileDataSchema = zod_v4.z.union([FileDataV1Schema, FileDataV2Schema]);
1026
- /**
1027
- * Reducer for files state that merges file updates with support for deletions.
1028
- * When a file value is null, the file is deleted from state.
1029
- * When a file value is non-null, it is added or updated in state.
890
+ * Evaluate permission rules against an operation + path and return the
891
+ * access decision.
1030
892
  *
1031
- * This reducer enables concurrent updates from parallel subagents by properly
1032
- * merging their file changes instead of requiring LastValue semantics.
893
+ * First-match-wins; permissive default.
1033
894
  *
1034
- * @param current - The current files record (from state)
1035
- * @param update - The new files record (from a subagent update), with null values for deletions
1036
- * @returns Merged files record with deletions applied
895
+ * @returns `"allow"` if the operation is permitted, `"deny"` otherwise.
1037
896
  */
1038
- function fileDataReducer(current, update) {
1039
- if (update === void 0) return current || {};
1040
- if (current === void 0) {
1041
- const result = {};
1042
- for (const [key, value] of Object.entries(update)) if (value !== null) result[key] = value;
1043
- return result;
897
+ function decidePathAccess(rules, operation, path) {
898
+ for (const rule of rules) {
899
+ if (!rule.operations.includes(operation)) continue;
900
+ if (rule.paths.some((pattern) => globMatch(path, pattern))) return rule.mode ?? "allow";
1044
901
  }
1045
- const result = { ...current };
1046
- for (const [key, value] of Object.entries(update)) if (value === null) delete result[key];
1047
- else result[key] = value;
1048
- return result;
902
+ return "allow";
1049
903
  }
904
+ //#endregion
905
+ //#region src/backends/composite.ts
1050
906
  /**
1051
- * Shared filesystem state schema.
1052
- * Defined at module level to ensure the same object identity is used across all agents,
1053
- * preventing "Channel already exists with different type" errors when multiple agents
1054
- * use createFilesystemMiddleware.
907
+ * Backend that routes file operations to different backends based on path prefix.
1055
908
  *
1056
- * Uses ReducedValue for files to allow concurrent updates from parallel subagents.
909
+ * This enables hybrid storage strategies like:
910
+ * - `/memories/` → StoreBackend (persistent, cross-thread)
911
+ * - Everything else → StateBackend (ephemeral, per-thread)
912
+ *
913
+ * The CompositeBackend handles path prefix stripping/re-adding transparently.
1057
914
  */
1058
- const FilesystemStateSchema = new _langchain_langgraph.StateSchema({ files: new _langchain_langgraph.ReducedValue(zod_v4.z.record(zod_v4.z.string(), FileDataSchema).default(() => ({})), {
1059
- inputSchema: zod_v4.z.record(zod_v4.z.string(), FileDataSchema.nullable()).optional(),
1060
- reducer: fileDataReducer
1061
- }) });
1062
- const FILESYSTEM_SYSTEM_PROMPT = langchain.context`
1063
- ## Following Conventions
1064
-
1065
- - Read files before editing understand existing content before making changes
1066
- - Mimic existing style, naming conventions, and patterns
1067
-
1068
- ## Filesystem Tools \`ls\`, \`read_file\`, \`write_file\`, \`edit_file\`, \`glob\`, \`grep\`
1069
-
1070
- You have access to a filesystem which you can interact with using these tools.
1071
- All file paths must start with a /.
1072
-
1073
- - ls: list files in a directory (requires absolute path)
1074
- - read_file: read a file from the filesystem
1075
- - write_file: write to a file in the filesystem
1076
- - edit_file: edit a file in the filesystem
1077
- - glob: find files matching a pattern (e.g., "**/*.py")
1078
- - grep: search for text within files
1079
- `;
1080
- const LS_TOOL_DESCRIPTION = langchain.context`
1081
- Lists all files in a directory.
1082
-
1083
- This is useful for exploring the filesystem and finding the right file to read or edit.
1084
- You should almost ALWAYS use this tool before using the read_file or edit_file tools.
1085
- `;
1086
- const READ_FILE_TOOL_DESCRIPTION = langchain.context`
1087
- Reads a file from the filesystem.
1088
-
1089
- Assume this tool is able to read all files. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned.
1090
-
1091
- Usage:
1092
- - By default, it reads up to 100 lines starting from the beginning of the file
1093
- - **IMPORTANT for large files and codebase exploration**: Use pagination with offset and limit parameters to avoid context overflow
1094
- - First scan: read_file(path, limit=100) to see file structure
1095
- - Read more sections: read_file(path, offset=100, limit=200) for next 200 lines
1096
- - Only omit limit (read full file) when necessary for editing
1097
- - Specify offset and limit: read_file(path, offset=0, limit=100) reads first 100 lines
1098
- - Results are returned using cat -n format, with line numbers starting at 1
1099
- - Lines longer than ${INT_FORMATTER.format(MAX_LINE_LENGTH)} characters will be split into multiple lines with continuation markers (e.g., 5.1, 5.2, etc.). When you specify a limit, these continuation lines count towards the limit.
1100
- - You have the capability to call multiple tools in a single response. It is always better to speculatively read multiple files as a batch that are potentially useful.
1101
- - If you read a file that exists but has empty contents you will receive a system reminder warning in place of file contents.
1102
- - You should ALWAYS make sure a file has been read before editing it.
1103
- `;
1104
- const WRITE_FILE_TOOL_DESCRIPTION = langchain.context`
1105
- Writes to a new file in the filesystem.
1106
-
1107
- Usage:
1108
- - The write_file tool will create a new file.
1109
- - Prefer to edit existing files (with the edit_file tool) over creating new ones when possible.
1110
- `;
1111
- const EDIT_FILE_TOOL_DESCRIPTION = langchain.context`
1112
- Performs exact string replacements in files.
1113
-
1114
- Usage:
1115
- - You must read the file before editing. This tool will error if you attempt an edit without reading the file first.
1116
- - When editing, preserve the exact indentation (tabs/spaces) from the read output. Never include line number prefixes in old_string or new_string.
1117
- - ALWAYS prefer editing existing files over creating new ones.
1118
- - Only use emojis if the user explicitly requests it.
1119
- `;
915
+ var CompositeBackend = class {
916
+ default;
917
+ routes;
918
+ sortedRoutes;
919
+ constructor(defaultBackend, routes) {
920
+ this.default = isSandboxProtocol(defaultBackend) ? adaptSandboxProtocol(defaultBackend) : adaptBackendProtocol(defaultBackend);
921
+ this.routes = Object.fromEntries(Object.entries(routes).map(([k, v]) => [k, isSandboxProtocol(v) ? adaptSandboxProtocol(v) : adaptBackendProtocol(v)]));
922
+ this.sortedRoutes = Object.entries(this.routes).sort((a, b) => b[0].length - a[0].length);
923
+ }
924
+ /** Delegates to default backend's id if it is a sandbox, otherwise empty string. */
925
+ get id() {
926
+ return isSandboxBackend(this.default) ? this.default.id : "";
927
+ }
928
+ /** Route prefixes registered on this backend (e.g. `["/workspace"]`). */
929
+ get routePrefixes() {
930
+ return Object.keys(this.routes);
931
+ }
932
+ /**
933
+ * Type guard returns true if `backend` is a {@link CompositeBackend}.
934
+ *
935
+ * Uses duck-typing on `routePrefixes` so it works across module boundaries
936
+ * where `instanceof` may fail.
937
+ */
938
+ static isInstance(backend) {
939
+ return typeof backend === "object" && backend !== null && Array.isArray(backend.routePrefixes);
940
+ }
941
+ /**
942
+ * Determine which backend handles this key and strip prefix.
943
+ *
944
+ * @param key - Original file path
945
+ * @returns Tuple of [backend, stripped_key] where stripped_key has the route
946
+ * prefix removed (but keeps leading slash).
947
+ */
948
+ getBackendAndKey(key) {
949
+ for (const [prefix, backend] of this.sortedRoutes) if (key.startsWith(prefix)) {
950
+ const suffix = key.substring(prefix.length);
951
+ return [backend, suffix ? "/" + suffix : "/"];
952
+ }
953
+ return [this.default, key];
954
+ }
955
+ /**
956
+ * List files and directories in the specified directory (non-recursive).
957
+ *
958
+ * @param path - Absolute path to directory
959
+ * @returns LsResult with list of FileInfo objects (with route prefixes added) on success or error on failure.
960
+ * Directories have a trailing / in their path and is_dir=true.
961
+ */
962
+ async ls(path) {
963
+ for (const [routePrefix, backend] of this.sortedRoutes) if (path.startsWith(routePrefix.replace(/\/$/, ""))) {
964
+ const suffix = path.substring(routePrefix.length);
965
+ const searchPath = suffix ? "/" + suffix : "/";
966
+ const result = await backend.ls(searchPath);
967
+ if (result.error) return result;
968
+ const prefixed = [];
969
+ for (const fi of result.files || []) prefixed.push({
970
+ ...fi,
971
+ path: routePrefix.slice(0, -1) + fi.path
972
+ });
973
+ return { files: prefixed };
974
+ }
975
+ if (path === "/") {
976
+ const results = [];
977
+ const defaultResult = await this.default.ls(path);
978
+ if (defaultResult.error) return defaultResult;
979
+ results.push(...defaultResult.files || []);
980
+ for (const [routePrefix] of this.sortedRoutes) results.push({
981
+ path: routePrefix,
982
+ is_dir: true,
983
+ size: 0,
984
+ modified_at: ""
985
+ });
986
+ results.sort((a, b) => a.path.localeCompare(b.path));
987
+ return { files: results };
988
+ }
989
+ return await this.default.ls(path);
990
+ }
991
+ /**
992
+ * Read file content, routing to appropriate backend.
993
+ *
994
+ * @param filePath - Absolute file path
995
+ * @param offset - Line offset to start reading from (0-indexed)
996
+ * @param limit - Maximum number of lines to read
997
+ * @returns Formatted file content with line numbers, or error message
998
+ */
999
+ async read(filePath, offset = 0, limit = 500) {
1000
+ const [backend, strippedKey] = this.getBackendAndKey(filePath);
1001
+ return await backend.read(strippedKey, offset, limit);
1002
+ }
1003
+ /**
1004
+ * Read file content as raw FileData.
1005
+ *
1006
+ * @param filePath - Absolute file path
1007
+ * @returns ReadRawResult with raw file data on success or error on failure
1008
+ */
1009
+ async readRaw(filePath) {
1010
+ const [backend, strippedKey] = this.getBackendAndKey(filePath);
1011
+ return await backend.readRaw(strippedKey);
1012
+ }
1013
+ /**
1014
+ * Structured search results or error string for invalid input.
1015
+ */
1016
+ async grep(pattern, path = "/", glob = null) {
1017
+ for (const [routePrefix, backend] of this.sortedRoutes) if (path.startsWith(routePrefix.replace(/\/$/, ""))) {
1018
+ const searchPath = path.substring(routePrefix.length - 1);
1019
+ const raw = await backend.grep(pattern, searchPath || "/", glob);
1020
+ if (raw.error) return raw;
1021
+ return { matches: (raw.matches || []).map((m) => ({
1022
+ ...m,
1023
+ path: routePrefix.slice(0, -1) + m.path
1024
+ })) };
1025
+ }
1026
+ const allMatches = [];
1027
+ const rawDefault = await this.default.grep(pattern, path, glob);
1028
+ if (rawDefault.error) return rawDefault;
1029
+ allMatches.push(...rawDefault.matches || []);
1030
+ for (const [routePrefix, backend] of Object.entries(this.routes)) {
1031
+ const raw = await backend.grep(pattern, "/", glob);
1032
+ if (raw.error) return raw;
1033
+ const matches = (raw.matches || []).map((m) => ({
1034
+ ...m,
1035
+ path: routePrefix.slice(0, -1) + m.path
1036
+ }));
1037
+ allMatches.push(...matches);
1038
+ }
1039
+ return { matches: allMatches };
1040
+ }
1041
+ /**
1042
+ * Structured glob matching returning FileInfo objects.
1043
+ */
1044
+ async glob(pattern, path = "/") {
1045
+ const results = [];
1046
+ for (const [routePrefix, backend] of this.sortedRoutes) if (path.startsWith(routePrefix.replace(/\/$/, ""))) {
1047
+ const searchPath = path.substring(routePrefix.length - 1);
1048
+ const result = await backend.glob(pattern, searchPath || "/");
1049
+ if (result.error) return result;
1050
+ return { files: (result.files || []).map((fi) => ({
1051
+ ...fi,
1052
+ path: routePrefix.slice(0, -1) + fi.path
1053
+ })) };
1054
+ }
1055
+ const defaultResult = await this.default.glob(pattern, path);
1056
+ if (defaultResult.error) return defaultResult;
1057
+ results.push(...defaultResult.files || []);
1058
+ for (const [routePrefix, backend] of Object.entries(this.routes)) {
1059
+ const result = await backend.glob(pattern, "/");
1060
+ if (result.error) continue;
1061
+ const files = (result.files || []).map((fi) => ({
1062
+ ...fi,
1063
+ path: routePrefix.slice(0, -1) + fi.path
1064
+ }));
1065
+ results.push(...files);
1066
+ }
1067
+ results.sort((a, b) => a.path.localeCompare(b.path));
1068
+ return { files: results };
1069
+ }
1070
+ /**
1071
+ * Create a new file, routing to appropriate backend.
1072
+ *
1073
+ * @param filePath - Absolute file path
1074
+ * @param content - File content as string
1075
+ * @returns WriteResult with path or error
1076
+ */
1077
+ async write(filePath, content) {
1078
+ const [backend, strippedKey] = this.getBackendAndKey(filePath);
1079
+ return await backend.write(strippedKey, content);
1080
+ }
1081
+ /**
1082
+ * Edit a file, routing to appropriate backend.
1083
+ *
1084
+ * @param filePath - Absolute file path
1085
+ * @param oldString - String to find and replace
1086
+ * @param newString - Replacement string
1087
+ * @param replaceAll - If true, replace all occurrences
1088
+ * @returns EditResult with path, occurrences, or error
1089
+ */
1090
+ async edit(filePath, oldString, newString, replaceAll = false) {
1091
+ const [backend, strippedKey] = this.getBackendAndKey(filePath);
1092
+ return await backend.edit(strippedKey, oldString, newString, replaceAll);
1093
+ }
1094
+ /**
1095
+ * Execute a command via the default backend.
1096
+ * Execution is not path-specific, so it always delegates to the default backend.
1097
+ *
1098
+ * @param command - Full shell command string to execute
1099
+ * @returns ExecuteResponse with combined output, exit code, and truncation flag
1100
+ * @throws Error if the default backend doesn't support command execution
1101
+ */
1102
+ execute(command) {
1103
+ if (!isSandboxBackend(this.default)) throw new Error("Default backend doesn't support command execution (SandboxBackendProtocol). To enable execution, provide a default backend that implements SandboxBackendProtocol.");
1104
+ return Promise.resolve(this.default.execute(command));
1105
+ }
1106
+ /**
1107
+ * Upload multiple files, batching by backend for efficiency.
1108
+ *
1109
+ * @param files - List of [path, content] tuples to upload
1110
+ * @returns List of FileUploadResponse objects, one per input file
1111
+ */
1112
+ async uploadFiles(files) {
1113
+ const results = Array.from({ length: files.length }, () => null);
1114
+ const batchesByBackend = /* @__PURE__ */ new Map();
1115
+ for (let idx = 0; idx < files.length; idx++) {
1116
+ const [path, content] = files[idx];
1117
+ const [backend, strippedPath] = this.getBackendAndKey(path);
1118
+ if (!batchesByBackend.has(backend)) batchesByBackend.set(backend, []);
1119
+ batchesByBackend.get(backend).push({
1120
+ idx,
1121
+ path: strippedPath,
1122
+ content
1123
+ });
1124
+ }
1125
+ for (const [backend, batch] of batchesByBackend) {
1126
+ if (!backend.uploadFiles) throw new Error("Backend does not support uploadFiles");
1127
+ const batchFiles = batch.map((b) => [b.path, b.content]);
1128
+ const batchResponses = await backend.uploadFiles(batchFiles);
1129
+ for (let i = 0; i < batch.length; i++) {
1130
+ const originalIdx = batch[i].idx;
1131
+ results[originalIdx] = {
1132
+ path: files[originalIdx][0],
1133
+ error: batchResponses[i]?.error ?? null
1134
+ };
1135
+ }
1136
+ }
1137
+ return results;
1138
+ }
1139
+ /**
1140
+ * Download multiple files, batching by backend for efficiency.
1141
+ *
1142
+ * @param paths - List of file paths to download
1143
+ * @returns List of FileDownloadResponse objects, one per input path
1144
+ */
1145
+ async downloadFiles(paths) {
1146
+ const results = Array.from({ length: paths.length }, () => null);
1147
+ const batchesByBackend = /* @__PURE__ */ new Map();
1148
+ for (let idx = 0; idx < paths.length; idx++) {
1149
+ const path = paths[idx];
1150
+ const [backend, strippedPath] = this.getBackendAndKey(path);
1151
+ if (!batchesByBackend.has(backend)) batchesByBackend.set(backend, []);
1152
+ batchesByBackend.get(backend).push({
1153
+ idx,
1154
+ path: strippedPath
1155
+ });
1156
+ }
1157
+ for (const [backend, batch] of batchesByBackend) {
1158
+ if (!backend.downloadFiles) throw new Error("Backend does not support downloadFiles");
1159
+ const batchPaths = batch.map((b) => b.path);
1160
+ const batchResponses = await backend.downloadFiles(batchPaths);
1161
+ for (let i = 0; i < batch.length; i++) {
1162
+ const originalIdx = batch[i].idx;
1163
+ results[originalIdx] = {
1164
+ path: paths[originalIdx],
1165
+ content: batchResponses[i]?.content ?? null,
1166
+ error: batchResponses[i]?.error ?? null
1167
+ };
1168
+ }
1169
+ }
1170
+ return results;
1171
+ }
1172
+ };
1173
+ //#endregion
1174
+ //#region src/middleware/fs.ts
1175
+ /**
1176
+ * Middleware for providing filesystem tools to an agent.
1177
+ *
1178
+ * Provides ls, read_file, write_file, edit_file, glob, and grep tools with support for:
1179
+ * - Pluggable backends (StateBackend, StoreBackend, FilesystemBackend, CompositeBackend)
1180
+ * - Tool result eviction for large outputs
1181
+ */
1182
/**
 * Locale-aware integer formatter used to embed numeric limits
 * (e.g. MAX_LINE_LENGTH) as "1,000"-style strings in tool descriptions.
 */
const INT_FORMATTER = new Intl.NumberFormat("en-US");
/**
 * All tool names registered by FilesystemMiddleware.
 * This is the single source of truth — used by createDeepAgent to detect
 * collisions with user-supplied tools at construction time.
 */
const FILESYSTEM_TOOL_NAMES = [
	"ls",
	"read_file",
	"write_file",
	"edit_file",
	"glob",
	"grep",
	"execute"
];
/**
 * Tools that should be excluded from the large result eviction logic.
 *
 * This array contains tools that should NOT have their results evicted to the filesystem
 * when they exceed token limits. Tools are excluded for different reasons:
 *
 * 1. Tools with built-in truncation (ls, glob, grep):
 *    These tools truncate their own output when it becomes too large. When these tools
 *    produce truncated output due to many matches, it typically indicates the query
 *    needs refinement rather than full result preservation. In such cases, the truncated
 *    matches are potentially more like noise and the LLM should be prompted to narrow
 *    its search criteria instead.
 *
 * 2. Tools with problematic truncation behavior (read_file):
 *    read_file is tricky to handle as the failure mode here is single long lines
 *    (e.g., imagine a jsonl file with very long payloads on each line). If we try to
 *    truncate the result of read_file, the agent may then attempt to re-read the
 *    truncated file using read_file again, which won't help.
 *
 * 3. Tools that never exceed limits (edit_file, write_file):
 *    These tools return minimal confirmation messages and are never expected to produce
 *    output large enough to exceed token limits, so checking them would be unnecessary.
 */
const TOOLS_EXCLUDED_FROM_EVICTION = [
	"ls",
	"glob",
	"grep",
	"read_file",
	"edit_file",
	"write_file"
];
/**
 * Maximum size for binary (non-text) files read via read_file, in bytes.
 * Base64-encoded content is ~33% larger, so 10MB raw ≈ 13.3MB in context.
 * This keeps inline multimodal payloads within all major provider limits.
 */
const MAX_BINARY_READ_SIZE_BYTES = 10 * 1024 * 1024;
/**
 * Template for the truncation notice appended by read_file.
 * {file_path} is filled in at runtime.
 */
const READ_FILE_TRUNCATION_MSG = `

[Output was truncated due to size limits. The file content is very large. Consider reformatting the file to make it easier to navigate. For example, if this is JSON, use execute(command='jq . {file_path}') to pretty-print it with line breaks. For other formats, you can use appropriate formatting tools to split long lines.]`;
1241
/**
 * Message template shown in place of an evicted (too-large) tool result.
 * {tool_call_id}, {file_path} and {content_sample} are substituted at runtime.
 */
const TOO_LARGE_TOOL_MSG = langchain.context`
Tool result too large, the result of this tool call {tool_call_id} was saved in the filesystem at this path: {file_path}
You can read the result from the filesystem by using the read_file tool, but make sure to only read part of the result at a time.
You can do this by specifying an offset and limit in the read_file tool call.
For example, to read the first ${100} lines, you can use the read_file tool with offset=0 and limit=${100}.

Here is a preview showing the head and tail of the result (lines of the form
... [N lines truncated] ...
indicate omitted lines in the middle of the content):

{content_sample}
`;
1256
/**
 * Message template shown in place of an evicted (too-large) HumanMessage.
 * {file_path} and {content_sample} are substituted at runtime.
 */
const TOO_LARGE_HUMAN_MSG = `Message content too large and was saved to the filesystem at: {file_path}

You can read the full content using the read_file tool with pagination (offset and limit parameters).

Here is a preview showing the head and tail of the content:

{content_sample}`;
1266
/**
 * Extract the textual portion of a message's content.
 *
 * String content is returned as-is. Block-array content (e.g. text mixed
 * with images) yields its text blocks joined by newlines. Anything else is
 * coerced via String().
 */
function extractTextFromMessage(message) {
  const { content } = message;
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return String(content);
  const texts = [];
  for (const block of content) {
    if (block.type === "text" && typeof block.text === "string") texts.push(block.text);
  }
  return texts.join("\n");
}
1277
/**
 * Build the replacement content for an evicted HumanMessage, keeping any
 * non-text blocks (e.g. images) intact.
 *
 * Plain-string content collapses to the replacement text. For block arrays,
 * every text block is replaced by one text block carrying the replacement,
 * placed ahead of the preserved non-text blocks.
 */
function buildEvictedHumanContent(message, replacementText) {
  const { content } = message;
  if (!Array.isArray(content)) return replacementText;
  const preserved = content.filter((block) => typeof block === "object" && block !== null && block.type !== "text");
  if (preserved.length === 0) return replacementText;
  return [{ type: "text", text: replacementText }, ...preserved];
}
1296
/**
 * Build the lightweight HumanMessage the model sees in place of an evicted
 * one. Pure string computation plus message construction — no backend I/O;
 * the full content remains in state and is retrievable at `filePath`.
 */
function buildTruncatedHumanMessage(message, filePath) {
  const preview = createContentPreview(extractTextFromMessage(message));
  const replacement = TOO_LARGE_HUMAN_MSG.replace("{file_path}", filePath).replace("{content_sample}", preview);
  return new langchain.HumanMessage({
    content: buildEvictedHumanContent(message, replacement),
    id: message.id,
    additional_kwargs: { ...message.additional_kwargs },
    response_metadata: { ...message.response_metadata }
  });
}
1312
/**
 * Produce a head+tail preview of `contentStr` with a truncation marker in
 * the middle. Individual lines are clipped to 1000 characters.
 *
 * @param contentStr - Full content to preview.
 * @param headLines - Lines kept from the start (default 5).
 * @param tailLines - Lines kept from the end (default 5).
 * @returns Line-numbered preview string.
 */
function createContentPreview(contentStr, headLines = 5, tailLines = 5) {
  const clip = (line) => line.substring(0, 1e3);
  const lines = contentStr.split("\n");
  // Short content: no truncation marker needed.
  if (lines.length <= headLines + tailLines) {
    return formatContentWithLineNumbers(lines.map(clip), 1);
  }
  const head = formatContentWithLineNumbers(lines.slice(0, headLines).map(clip), 1);
  const tail = formatContentWithLineNumbers(lines.slice(-tailLines).map(clip), lines.length - tailLines + 1);
  const omitted = lines.length - headLines - tailLines;
  return `${head}\n... [${omitted} lines truncated] ...\n${tail}`;
}
1330
/**
 * Zod schema for legacy FileDataV1 (content stored as an array of lines).
 */
const FileDataV1Schema = zod_v4.z.object({
	content: zod_v4.z.array(zod_v4.z.string()),
	created_at: zod_v4.z.string(),
	modified_at: zod_v4.z.string()
});
/**
 * Zod schema for FileDataV2 (content as string for text or Uint8Array for
 * binary, with an explicit MIME type).
 */
const FileDataV2Schema = zod_v4.z.object({
	content: zod_v4.z.union([zod_v4.z.string(), zod_v4.z.instanceof(Uint8Array)]),
	mimeType: zod_v4.z.string(),
	created_at: zod_v4.z.string(),
	modified_at: zod_v4.z.string()
});
/**
 * Combined FileData schema accepting either the legacy V1 or current V2
 * shape. NOTE(review): the original comment said "Zod v3 schema", but these
 * schemas are built with zod/v4 — presumably a stale comment; confirm.
 */
const FileDataSchema = zod_v4.z.union([FileDataV1Schema, FileDataV2Schema]);
1351
/**
 * State reducer for the `files` channel.
 *
 * Merges an update record into the current files record. A null value marks
 * a deletion; any other value inserts or overwrites. Merge semantics (rather
 * than LastValue) let parallel subagents update files concurrently.
 *
 * @param current - Current files record from state (may be undefined)
 * @param update - Incoming record; null values request deletion
 * @returns Merged files record with deletions applied
 */
function fileDataReducer(current, update) {
  if (update === void 0) return current || {};
  // Copy-then-apply: deleting a key absent from the copy is a harmless no-op,
  // which also covers the "no current state yet" case.
  const merged = { ...(current ?? {}) };
  for (const [key, value] of Object.entries(update)) {
    if (value === null) delete merged[key];
    else merged[key] = value;
  }
  return merged;
}
1375
/**
 * Shared filesystem state schema.
 * Defined at module level so the exact same object identity is used across
 * all agents, preventing "Channel already exists with different type" errors
 * when multiple agents use createFilesystemMiddleware.
 *
 * Uses ReducedValue with fileDataReducer so parallel subagents can update
 * `files` concurrently; the input schema additionally allows null values,
 * which the reducer interprets as deletions.
 */
const FilesystemStateSchema = new _langchain_langgraph.StateSchema({ files: new _langchain_langgraph.ReducedValue(zod_v4.z.record(zod_v4.z.string(), FileDataSchema).default(() => ({})), {
	inputSchema: zod_v4.z.record(zod_v4.z.string(), FileDataSchema.nullable()).optional(),
	reducer: fileDataReducer
}) });
1387
/**
 * Throw a permission-denied error when `path` is denied under `rules`.
 *
 * No-op for an empty rule set (permissive default). The path is
 * canonicalized with validatePath before the access decision is made.
 *
 * @internal
 */
function enforcePermission(rules, operation, path) {
  if (rules.length === 0) return;
  const canonical = validatePath(path);
  const decision = decidePathAccess(rules, operation, canonical);
  if (decision === "deny") {
    throw new Error(`Error: permission denied for ${operation} on ${canonical}`);
  }
}
1401
/**
 * Filter filesystem entries down to those the permission rules allow.
 *
 * `getPath` extracts the absolute path from each entry. Entries whose paths
 * cannot be canonicalized are kept rather than silently dropped. Returns the
 * input array unchanged when `rules` is empty.
 *
 * @internal
 */
function filterByPermissions(entries, rules, operation, getPath) {
  if (rules.length === 0) return entries;
  const allowed = (entry) => {
    try {
      const canonical = validatePath(getPath(entry));
      return decidePathAccess(rules, operation, canonical) !== "deny";
    } catch {
      // Unparsable path: keep the entry; downstream validation surfaces a
      // better error than dropping it here would.
      return true;
    }
  };
  return entries.filter(allowed);
}
1420
/**
 * System-prompt fragment describing the filesystem tools; appended to the
 * agent's prompt by FilesystemMiddleware.
 */
const FILESYSTEM_SYSTEM_PROMPT = langchain.context`
## Following Conventions

- Read files before editing — understand existing content before making changes
- Mimic existing style, naming conventions, and patterns

## Filesystem Tools \`ls\`, \`read_file\`, \`write_file\`, \`edit_file\`, \`glob\`, \`grep\`

You have access to a filesystem which you can interact with using these tools.
All file paths must start with a /.

- ls: list files in a directory (requires absolute path)
- read_file: read a file from the filesystem
- write_file: write to a file in the filesystem
- edit_file: edit a file in the filesystem
- glob: find files matching a pattern (e.g., "**/*.py")
- grep: search for text within files
`;
/** Description shown to the model for the ls tool. */
const LS_TOOL_DESCRIPTION = langchain.context`
Lists all files in a directory.

This is useful for exploring the filesystem and finding the right file to read or edit.
You should almost ALWAYS use this tool before using the read_file or edit_file tools.
`;
/** Description shown to the model for the read_file tool. */
const READ_FILE_TOOL_DESCRIPTION = langchain.context`
Reads a file from the filesystem.

Assume this tool is able to read all files. If the User provides a path to a file assume that path is valid. It is okay to read a file that does not exist; an error will be returned.

Usage:
- By default, it reads up to ${100} lines starting from the beginning of the file
- **IMPORTANT for large files and codebase exploration**: Use pagination with offset and limit parameters to avoid context overflow
- First scan: read_file(path, limit=${100}) to see file structure
- Read more sections: read_file(path, offset=${100}, limit=200) for next 200 lines
- Only omit limit (read full file) when necessary for editing
- Specify offset and limit: read_file(path, offset=0, limit=${100}) reads first ${100} lines
- Results are returned using cat -n format, with line numbers starting at 1
- Lines longer than ${INT_FORMATTER.format(MAX_LINE_LENGTH)} characters will be split into multiple lines with continuation markers (e.g., 5.1, 5.2, etc.). When you specify a limit, these continuation lines count towards the limit.
- You have the capability to call multiple tools in a single response. It is always better to speculatively read multiple files as a batch that are potentially useful.
- If you read a file that exists but has empty contents you will receive a system reminder warning in place of file contents.
- You should ALWAYS make sure a file has been read before editing it.
`;
/** Description shown to the model for the write_file tool. */
const WRITE_FILE_TOOL_DESCRIPTION = langchain.context`
Writes to a new file in the filesystem.

Usage:
- The write_file tool will create a new file.
- Prefer to edit existing files (with the edit_file tool) over creating new ones when possible.
`;
/** Description shown to the model for the edit_file tool. */
const EDIT_FILE_TOOL_DESCRIPTION = langchain.context`
Performs exact string replacements in files.

Usage:
- You must read the file before editing. This tool will error if you attempt an edit without reading the file first.
- When editing, preserve the exact indentation (tabs/spaces) from the read output. Never include line number prefixes in old_string or new_string.
- ALWAYS prefer editing existing files over creating new ones.
- Only use emojis if the user explicitly requests it.
+ `;
1120
1478
  const GLOB_TOOL_DESCRIPTION = langchain.context`
1121
1479
  Find files matching a glob pattern.
1122
1480
 
@@ -1198,13 +1556,14 @@ const EXECUTION_SYSTEM_PROMPT = langchain.context`
1198
1556
  * Create ls tool using backend.
1199
1557
  */
1200
1558
  function createLsTool(backend, options) {
1201
- const { customDescription } = options;
1559
+ const { customDescription, permissions } = options;
1202
1560
  return (0, langchain.tool)(async (input, runtime) => {
1561
+ enforcePermission(permissions, "read", input.path ?? "/");
1203
1562
  const resolvedBackend = await resolveBackend(backend, runtime);
1204
1563
  const path = input.path || "/";
1205
1564
  const lsResult = await resolvedBackend.ls(path);
1206
1565
  if (lsResult.error) return `Error listing files: ${lsResult.error}`;
1207
- const infos = lsResult.files || [];
1566
+ const infos = filterByPermissions(lsResult.files ?? [], permissions, "read", (info) => info.path);
1208
1567
  if (infos.length === 0) return `No files found in ${path}`;
1209
1568
  const lines = [];
1210
1569
  for (const info of infos) if (info.is_dir) lines.push(`${info.path} (directory)`);
@@ -1225,8 +1584,9 @@ function createLsTool(backend, options) {
1225
1584
  * Create read_file tool using backend.
1226
1585
  */
1227
1586
  function createReadFileTool(backend, options) {
1228
- const { customDescription, toolTokenLimitBeforeEvict } = options;
1587
+ const { customDescription, toolTokenLimitBeforeEvict, permissions } = options;
1229
1588
  return (0, langchain.tool)(async (input, runtime) => {
1589
+ enforcePermission(permissions, "read", input.file_path);
1230
1590
  const resolvedBackend = await resolveBackend(backend, runtime);
1231
1591
  const { file_path, offset = 0, limit = 100 } = input;
1232
1592
  const readResult = await resolvedBackend.read(file_path, offset, limit);
@@ -1301,8 +1661,9 @@ function createReadFileTool(backend, options) {
1301
1661
  * Create write_file tool using backend.
1302
1662
  */
1303
1663
  function createWriteFileTool(backend, options) {
1304
- const { customDescription } = options;
1664
+ const { customDescription, permissions } = options;
1305
1665
  return (0, langchain.tool)(async (input, runtime) => {
1666
+ enforcePermission(permissions, "write", input.file_path);
1306
1667
  const resolvedBackend = await resolveBackend(backend, runtime);
1307
1668
  const { file_path, content } = input;
1308
1669
  const result = await resolvedBackend.write(file_path, content);
@@ -1331,8 +1692,9 @@ function createWriteFileTool(backend, options) {
1331
1692
  * Create edit_file tool using backend.
1332
1693
  */
1333
1694
  function createEditFileTool(backend, options) {
1334
- const { customDescription } = options;
1695
+ const { customDescription, permissions } = options;
1335
1696
  return (0, langchain.tool)(async (input, runtime) => {
1697
+ enforcePermission(permissions, "write", input.file_path);
1336
1698
  const resolvedBackend = await resolveBackend(backend, runtime);
1337
1699
  const { file_path, old_string, new_string, replace_all = false } = input;
1338
1700
  const result = await resolvedBackend.edit(file_path, old_string, new_string, replace_all);
@@ -1363,13 +1725,14 @@ function createEditFileTool(backend, options) {
1363
1725
  * Create glob tool using backend.
1364
1726
  */
1365
1727
  function createGlobTool(backend, options) {
1366
- const { customDescription } = options;
1728
+ const { customDescription, permissions } = options;
1367
1729
  return (0, langchain.tool)(async (input, runtime) => {
1730
+ enforcePermission(permissions, "read", input.path ?? "/");
1368
1731
  const resolvedBackend = await resolveBackend(backend, runtime);
1369
1732
  const { pattern, path = "/" } = input;
1370
1733
  const globResult = await resolvedBackend.glob(pattern, path);
1371
1734
  if (globResult.error) return `Error finding files: ${globResult.error}`;
1372
- const infos = globResult.files || [];
1735
+ const infos = filterByPermissions(globResult.files ?? [], permissions, "read", (info) => info.path);
1373
1736
  if (infos.length === 0) return `No files found matching pattern '${pattern}'`;
1374
1737
  const result = truncateIfTooLong(infos.map((info) => info.path));
1375
1738
  if (Array.isArray(result)) return result.join("\n");
@@ -1387,13 +1750,14 @@ function createGlobTool(backend, options) {
1387
1750
  * Create grep tool using backend.
1388
1751
  */
1389
1752
  function createGrepTool(backend, options) {
1390
- const { customDescription } = options;
1753
+ const { customDescription, permissions } = options;
1391
1754
  return (0, langchain.tool)(async (input, runtime) => {
1755
+ enforcePermission(permissions, "read", input.path ?? "/");
1392
1756
  const resolvedBackend = await resolveBackend(backend, runtime);
1393
1757
  const { pattern, path = "/", glob = null } = input;
1394
1758
  const result = await resolvedBackend.grep(pattern, path, glob);
1395
1759
  if (result.error) return result.error;
1396
- const matches = result.matches ?? [];
1760
+ const matches = filterByPermissions(result.matches ?? [], permissions, "read", (m) => m.path);
1397
1761
  if (matches.length === 0) return `No matches found for pattern '${pattern}'`;
1398
1762
  const lines = [];
1399
1763
  let currentFile = null;
@@ -1421,10 +1785,11 @@ function createGrepTool(backend, options) {
1421
1785
  * Create execute tool using backend.
1422
1786
  */
1423
1787
  function createExecuteTool(backend, options) {
1424
- const { customDescription } = options;
1788
+ const { customDescription, permissions } = options;
1425
1789
  return (0, langchain.tool)(async (input, runtime) => {
1426
1790
  const resolvedBackend = await resolveBackend(backend, runtime);
1427
1791
  if (!isSandboxBackend(resolvedBackend)) return "Error: Execution not available. This agent's backend does not support command execution (SandboxBackendProtocol). To use the execute tool, provide a backend that implements SandboxBackendProtocol.";
1792
+ if (permissions.length > 0 && !allPathsScopedToRoutes(permissions, resolvedBackend)) return "Error: Execution not available. Filesystem permissions cannot be used with a backend that supports command execution because shell commands can access any path, making path-based rules ineffective.";
1428
1793
  const result = await resolvedBackend.execute(input.command);
1429
1794
  const parts = [result.output];
1430
1795
  if (result.exitCode !== null) {
@@ -1440,22 +1805,53 @@ function createExecuteTool(backend, options) {
1440
1805
  });
1441
1806
  }
1442
1807
  /**
1808
+ * Returns true only when backend exposes route prefixes (CompositeBackend) and
1809
+ * every permission path is scoped under one of them.
1810
+ */
1811
function allPathsScopedToRoutes(permissions, backend) {
  // Only a CompositeBackend publishes route prefixes; anything else cannot
  // scope permission paths.
  if (!CompositeBackend.isInstance(backend)) return false;
  const { routePrefixes } = backend;
  if (routePrefixes.length === 0) return false;
  // A path counts as scoped when it lives strictly under some route prefix.
  const scoped = (path) => routePrefixes.some((prefix) => path.startsWith(prefix.endsWith("/") ? prefix : `${prefix}/`));
  return permissions.every((rule) => rule.paths.every(scoped));
}
1817
+ /**
1443
1818
  * Create filesystem middleware with all tools and features.
1444
1819
  */
1445
1820
  function createFilesystemMiddleware(options = {}) {
1446
- const { backend = (runtime) => new StateBackend(runtime), systemPrompt: customSystemPrompt = null, customToolDescriptions = null, toolTokenLimitBeforeEvict = 2e4, humanMessageTokenLimitBeforeEvict = 5e4 } = options;
1821
+ const { backend = (runtime) => new StateBackend(runtime), systemPrompt: customSystemPrompt = null, customToolDescriptions = null, toolTokenLimitBeforeEvict = 2e4, humanMessageTokenLimitBeforeEvict = 5e4, permissions = [] } = options;
1822
+ if (permissions.length > 0) validatePermissionPaths(permissions);
1823
+ if (permissions.length > 0 && typeof backend !== "function" && isSandboxBackend(backend) && !allPathsScopedToRoutes(permissions, backend)) throw new Error("Filesystem permissions cannot be used with a backend that supports command execution. Shell commands can access any path, making path-based rules ineffective. Either remove permissions, use a backend without execution support, or use a CompositeBackend with all permission paths scoped to a route prefix.");
1447
1824
  const baseSystemPrompt = customSystemPrompt || FILESYSTEM_SYSTEM_PROMPT;
1448
1825
  const allToolsByName = {
1449
- ls: createLsTool(backend, { customDescription: customToolDescriptions?.ls }),
1826
+ ls: createLsTool(backend, {
1827
+ customDescription: customToolDescriptions?.ls,
1828
+ permissions
1829
+ }),
1450
1830
  read_file: createReadFileTool(backend, {
1451
1831
  customDescription: customToolDescriptions?.read_file,
1452
- toolTokenLimitBeforeEvict
1832
+ toolTokenLimitBeforeEvict,
1833
+ permissions
1834
+ }),
1835
+ write_file: createWriteFileTool(backend, {
1836
+ customDescription: customToolDescriptions?.write_file,
1837
+ permissions
1838
+ }),
1839
+ edit_file: createEditFileTool(backend, {
1840
+ customDescription: customToolDescriptions?.edit_file,
1841
+ permissions
1842
+ }),
1843
+ glob: createGlobTool(backend, {
1844
+ customDescription: customToolDescriptions?.glob,
1845
+ permissions
1846
+ }),
1847
+ grep: createGrepTool(backend, {
1848
+ customDescription: customToolDescriptions?.grep,
1849
+ permissions
1453
1850
  }),
1454
- write_file: createWriteFileTool(backend, { customDescription: customToolDescriptions?.write_file }),
1455
- edit_file: createEditFileTool(backend, { customDescription: customToolDescriptions?.edit_file }),
1456
- glob: createGlobTool(backend, { customDescription: customToolDescriptions?.glob }),
1457
- grep: createGrepTool(backend, { customDescription: customToolDescriptions?.grep }),
1458
- execute: createExecuteTool(backend, { customDescription: customToolDescriptions?.execute })
1851
+ execute: createExecuteTool(backend, {
1852
+ customDescription: customToolDescriptions?.execute,
1853
+ permissions
1854
+ })
1459
1855
  };
1460
1856
  return (0, langchain.createMiddleware)({
1461
1857
  name: "FilesystemMiddleware",
@@ -1915,7 +2311,14 @@ function createTaskTool(options) {
1915
2311
  const subagent = subagentGraphs[subagent_type];
1916
2312
  const subagentState = filterStateForSubagent((0, _langchain_langgraph.getCurrentTaskInput)());
1917
2313
  subagentState.messages = [new _langchain_core_messages.HumanMessage({ content: description })];
1918
- const result = await subagent.invoke(subagentState, config);
2314
+ const subagentConfig = {
2315
+ ...config,
2316
+ configurable: {
2317
+ ...config.configurable,
2318
+ ls_agent_type: "subagent"
2319
+ }
2320
+ };
2321
+ const result = await subagent.invoke(subagentState, subagentConfig);
1919
2322
  if (!config.toolCall?.id) {
1920
2323
  if (result.structuredResponse != null) return JSON.stringify(result.structuredResponse);
1921
2324
  const messages = result.messages;
@@ -2371,9 +2774,23 @@ function createMemoryMiddleware(options) {
2371
2774
  * ```
2372
2775
  */
2373
2776
  const MAX_SKILL_FILE_SIZE = 10 * 1024 * 1024;
2777
// Line limit suggested for read_file when loading a SKILL.md (1000 lines;
// the tool's default of 100 is too small for most skill files).
const DEFAULT_SKILL_READ_LINE_LIMIT = 1e3;
2374
2778
  const MAX_SKILL_NAME_LENGTH = 64;
2375
2779
  const MAX_SKILL_DESCRIPTION_LENGTH = 1024;
2376
2780
  /**
2781
 * File extensions a skill module entrypoint may use
 * (JavaScript/TypeScript source variants, including JSX/TSX).
 */
const SKILL_MODULE_EXTENSIONS = [
	".js",
	".mjs",
	".cjs",
	".ts",
	".mts",
	".cts",
	".jsx",
	".tsx"
];
2793
+ /**
2377
2794
  * Zod schema for a single skill metadata entry.
2378
2795
  */
2379
2796
  const SkillMetadataEntrySchema = zod.z.object({
@@ -2383,7 +2800,8 @@ const SkillMetadataEntrySchema = zod.z.object({
2383
2800
  license: zod.z.string().nullable().optional(),
2384
2801
  compatibility: zod.z.string().nullable().optional(),
2385
2802
  metadata: zod.z.record(zod.z.string(), zod.z.string()).optional(),
2386
- allowedTools: zod.z.array(zod.z.string()).optional()
2803
+ allowedTools: zod.z.array(zod.z.string()).optional(),
2804
+ module: zod.z.string().optional()
2387
2805
  });
2388
2806
  /**
2389
2807
  * Reducer for skillsMetadata that merges arrays from parallel subagents.
@@ -2415,48 +2833,49 @@ const SkillsStateSchema = new _langchain_langgraph.StateSchema({
2415
2833
  /**
2416
2834
  * Skills System Documentation prompt template.
2417
2835
  */
2418
- const SKILLS_SYSTEM_PROMPT = `
2419
- ## Skills System
2836
+ const SKILLS_SYSTEM_PROMPT = langchain.context`
2837
+ ## Skills System
2420
2838
 
2421
- You have access to a skills library that provides specialized capabilities and domain knowledge.
2839
+ You have access to a skills library that provides specialized capabilities and domain knowledge.
2422
2840
 
2423
- {skills_locations}
2841
+ {skills_locations}
2424
2842
 
2425
- **Available Skills:**
2843
+ **Available Skills:**
2426
2844
 
2427
- {skills_list}
2845
+ {skills_list}
2428
2846
 
2429
- **How to Use Skills (Progressive Disclosure):**
2847
+ **How to Use Skills (Progressive Disclosure):**
2430
2848
 
2431
- Skills follow a **progressive disclosure** pattern - you know they exist (name + description above), but you only read the full instructions when needed:
2849
+ Skills follow a **progressive disclosure** pattern - you know they exist (name + description above), but you only read the full instructions when needed:
2432
2850
 
2433
- 1. **Recognize when a skill applies**: Check if the user's task matches any skill's description
2434
- 2. **Read the skill's full instructions**: The skill list above shows the exact path to use with read_file
2435
- 3. **Follow the skill's instructions**: SKILL.md contains step-by-step workflows, best practices, and examples
2436
- 4. **Access supporting files**: Skills may include scripts, configs, or reference docs - use absolute paths
2851
+ 1. **Recognize when a skill applies**: Check if the user's task matches any skill's description
2852
+ 2. **Read the skill's full instructions**: Use \`read_file\` on the path shown in the skill list above.
2853
+ Pass \`limit=${DEFAULT_SKILL_READ_LINE_LIMIT}\` since the default of ${100} lines is too small for most skill files.
2854
+ 3. **Follow the skill's instructions**: SKILL.md contains step-by-step workflows, best practices, and examples
2855
+ 4. **Access supporting files**: Skills may include scripts, configs, or reference docs - use absolute paths
2437
2856
 
2438
- **When to Use Skills:**
2439
- - When the user's request matches a skill's domain (e.g., "research X" → web-research skill)
2440
- - When you need specialized knowledge or structured workflows
2441
- - When a skill provides proven patterns for complex tasks
2857
+ **When to Use Skills:**
2858
+ - When the user's request matches a skill's domain (e.g., "research X" → web-research skill)
2859
+ - When you need specialized knowledge or structured workflows
2860
+ - When a skill provides proven patterns for complex tasks
2442
2861
 
2443
- **Skills are Self-Documenting:**
2444
- - Each SKILL.md tells you exactly what the skill does and how to use it
2445
- - The skill list above shows the full path for each skill's SKILL.md file
2862
+ **Skills are Self-Documenting:**
2863
+ - Each SKILL.md tells you exactly what the skill does and how to use it
2864
+ - The skill list above shows the full path for each skill's SKILL.md file
2446
2865
 
2447
- **Executing Skill Scripts:**
2448
- Skills may contain scripts or other executable files. Always use absolute paths from the skill list.
2866
+ **Executing Skill Scripts:**
2867
+ Skills may contain scripts or other executable files. Always use absolute paths from the skill list.
2449
2868
 
2450
- **Example Workflow:**
2869
+ **Example Workflow:**
2451
2870
 
2452
- User: "Can you research the latest developments in quantum computing?"
2871
+ User: "Can you research the latest developments in quantum computing?"
2453
2872
 
2454
- 1. Check available skills above → See "web-research" skill with its full path
2455
- 2. Read the skill using the path shown in the list
2456
- 3. Follow the skill's research workflow (search → organize → synthesize)
2457
- 4. Use any helper scripts with absolute paths
2873
+ 1. Check available skills above → See "web-research" skill with its full path
2874
+ 2. Read the full skill file: \`read_file(path, limit=${DEFAULT_SKILL_READ_LINE_LIMIT})\`
2875
+ 3. Follow the skill's research workflow (search → organize → synthesize)
2876
+ 4. Use any helper scripts with absolute paths
2458
2877
 
2459
- Remember: Skills are tools to make you more capable and consistent. When in doubt, check if a skill exists for the task!
2878
+ Remember: Skills are tools to make you more capable and consistent. When in doubt, check if a skill exists for the task!
2460
2879
  `;
2461
2880
  /**
2462
2881
  * Validate skill name per Agent Skills specification.
@@ -2607,7 +3026,8 @@ function parseSkillMetadataFromContent(content, skillPath, directoryName) {
2607
3026
  metadata: validateMetadata(frontmatterData.metadata ?? {}, skillPath),
2608
3027
  license: String(frontmatterData.license ?? "").trim() || null,
2609
3028
  compatibility: compatibilityStr,
2610
- allowedTools
3029
+ allowedTools,
3030
+ module: validateModulePath(frontmatterData.module)
2611
3031
  };
2612
3032
  }
2613
3033
  /**
@@ -2680,10 +3100,38 @@ function formatSkillsList(skills, sources) {
2680
3100
  lines.push(descLine);
2681
3101
  if (skill.allowedTools && skill.allowedTools.length > 0) lines.push(` → Allowed tools: ${skill.allowedTools.join(", ")}`);
2682
3102
  lines.push(` → Read \`${skill.path}\` for full instructions`);
3103
+ if (skill.module !== void 0) lines.push(` → Import: \`await import("@/skills/${skill.name}")\``);
2683
3104
  }
2684
3105
  return lines.join("\n");
2685
3106
  }
2686
3107
  /**
3108
+ * Returns true when `value` ends with a recognized skill module extension.
3109
+ */
3110
+ function endsWithModuleExtension(value) {
3111
+ for (const ext of SKILL_MODULE_EXTENSIONS) if (value.endsWith(ext)) return true;
3112
+ return false;
3113
+ }
3114
+ /**
3115
+ * Validate and normalize the `module` frontmatter key from a `SKILL.md`.
3116
+ *
3117
+ * Returns the normalized path (e.g. `"index.ts"`, `"lib/entry.js"`) or
3118
+ * `undefined` when the key is absent, empty, non-string, absolute, contains
3119
+ * path traversal, or uses an unsupported extension. Invalid values silently
3120
+ * degrade the skill to prose-only.
3121
+ */
3122
+ function validateModulePath(raw) {
3123
+ if (raw === null || raw === void 0) return;
3124
+ if (typeof raw !== "string") return;
3125
+ const stripped = raw.trim();
3126
+ if (stripped === "") return;
3127
+ const normalized = stripped.startsWith("./") ? stripped.slice(2) : stripped;
3128
+ if (normalized.startsWith("/")) return;
3129
+ if (normalized === ".." || normalized.startsWith("../") || normalized.includes("/../") || normalized.endsWith("/..")) return;
3130
+ if (normalized.endsWith(".d.ts") || normalized.endsWith(".d.mts") || normalized.endsWith(".d.cts")) return;
3131
+ if (!endsWithModuleExtension(normalized)) return;
3132
+ return normalized;
3133
+ }
3134
+ /**
2687
3135
  * Create backend-agnostic middleware for loading and exposing agent skills.
2688
3136
  *
2689
3137
  * This middleware loads skills from configurable backend sources and injects
@@ -3190,6 +3638,7 @@ function createSummarizationMiddleware(options) {
3190
3638
  */
3191
3639
  async function getChatModel() {
3192
3640
  if (cachedModel) return cachedModel;
3641
+ if (!model) throw new Error("Summarization middleware could not resolve a model. Provide `options.model` or ensure `request.model` is present.");
3193
3642
  if (typeof model === "string") cachedModel = await (0, langchain_chat_models_universal.initChatModel)(model);
3194
3643
  else cachedModel = model;
3195
3644
  return cachedModel;
@@ -3620,7 +4069,7 @@ function createSummarizationMiddleware(options) {
3620
4069
  /**
3621
4070
  * Resolve the chat model and get max input tokens from its profile.
3622
4071
  */
3623
- const resolvedModel = await getChatModel();
4072
+ const resolvedModel = request.model ?? await getChatModel();
3624
4073
  const maxInputTokens = getMaxInputTokens(resolvedModel);
3625
4074
  applyModelDefaults(resolvedModel);
3626
4075
  /**
@@ -4229,6 +4678,13 @@ function createAsyncSubAgentMiddleware(options) {
4229
4678
  * StoreBackend: Adapter for LangGraph's BaseStore (persistent, cross-thread).
4230
4679
  */
4231
4680
  const NAMESPACE_COMPONENT_RE = /^[A-Za-z0-9\-_.@+:~]+$/;
4681
+ function getObjectRecord(value) {
4682
+ return value != null && typeof value === "object" ? value : void 0;
4683
+ }
4684
+ function getAssistantIdFromRecord(value) {
4685
+ const assistantId = value?.assistant_id ?? value?.assistantId;
4686
+ return typeof assistantId === "string" && assistantId.length > 0 ? assistantId : void 0;
4687
+ }
4232
4688
  /**
4233
4689
  * Validate a namespace array.
4234
4690
  *
@@ -4261,536 +4717,236 @@ function validateNamespace(namespace) {
4261
4717
  */
4262
4718
  var StoreBackend = class {
4263
4719
  stateAndStore;
4720
+ storeOverride;
4264
4721
  _namespace;
4265
- fileFormat;
4266
- constructor(stateAndStoreOrOptions, options) {
4267
- let opts;
4268
- if (stateAndStoreOrOptions != null && typeof stateAndStoreOrOptions === "object" && "state" in stateAndStoreOrOptions) {
4269
- this.stateAndStore = stateAndStoreOrOptions;
4270
- opts = options;
4271
- } else {
4272
- this.stateAndStore = void 0;
4273
- opts = stateAndStoreOrOptions;
4274
- }
4275
- if (opts?.namespace) this._namespace = validateNamespace(opts.namespace);
4276
- this.fileFormat = opts?.fileFormat ?? "v2";
4277
- }
4278
- /**
4279
- * Get the BaseStore instance for persistent storage operations.
4280
- *
4281
- * In legacy mode, reads from the injected {@link StateAndStore}.
4282
- * In zero-arg mode, retrieves the store from the LangGraph execution
4283
- * context via {@link getLangGraphStore}.
4284
- *
4285
- * @returns BaseStore instance
4286
- * @throws Error if no store is available in either mode
4287
- */
4288
- getStore() {
4289
- if (this.stateAndStore) {
4290
- const store = this.stateAndStore.store;
4291
- if (!store) throw new Error("Store is required but not available in runtime");
4292
- return store;
4293
- }
4294
- const store = (0, _langchain_langgraph.getStore)();
4295
- if (!store) throw new Error("Store is required but not available in LangGraph execution context. Ensure the graph was configured with a store.");
4296
- return store;
4297
- }
4298
- /**
4299
- * Get the namespace for store operations.
4300
- *
4301
- * Resolution order:
4302
- * 1. Explicit namespace from constructor options (both modes)
4303
- * 2. Legacy mode: `[assistantId, "filesystem"]` fallback from {@link StateAndStore}
4304
- * 3. Zero-arg mode without namespace: `["filesystem"]` with a deprecation warning
4305
- * nudging callers to pass an explicit namespace
4306
- * 4. Legacy mode without assistantId: `["filesystem"]`
4307
- */
4308
- getNamespace() {
4309
- if (this._namespace) return this._namespace;
4310
- if (this.stateAndStore) {
4311
- const assistantId = this.stateAndStore.assistantId;
4312
- if (assistantId) return [assistantId, "filesystem"];
4313
- }
4314
- return ["filesystem"];
4315
- }
4316
- /**
4317
- * Convert a store Item to FileData format.
4318
- *
4319
- * @param storeItem - The store Item containing file data
4320
- * @returns FileData object
4321
- * @throws Error if required fields are missing or have incorrect types
4322
- */
4323
- convertStoreItemToFileData(storeItem) {
4324
- const value = storeItem.value;
4325
- if (!(value.content !== void 0 && (Array.isArray(value.content) || typeof value.content === "string" || ArrayBuffer.isView(value.content))) || typeof value.created_at !== "string" || typeof value.modified_at !== "string") throw new Error(`Store item does not contain valid FileData fields. Got keys: ${Object.keys(value).join(", ")}`);
4326
- return {
4327
- content: value.content,
4328
- ...value.mimeType ? { mimeType: value.mimeType } : {},
4329
- created_at: value.created_at,
4330
- modified_at: value.modified_at
4331
- };
4332
- }
4333
- /**
4334
- * Convert FileData to a value suitable for store.put().
4335
- *
4336
- * @param fileData - The FileData to convert
4337
- * @returns Object with content, mimeType, created_at, and modified_at fields
4338
- */
4339
- convertFileDataToStoreValue(fileData) {
4340
- return {
4341
- content: fileData.content,
4342
- ..."mimeType" in fileData ? { mimeType: fileData.mimeType } : {},
4343
- created_at: fileData.created_at,
4344
- modified_at: fileData.modified_at
4345
- };
4346
- }
4347
- /**
4348
- * Search store with automatic pagination to retrieve all results.
4349
- *
4350
- * @param store - The store to search
4351
- * @param namespace - Hierarchical path prefix to search within
4352
- * @param options - Optional query, filter, and page_size
4353
- * @returns List of all items matching the search criteria
4354
- */
4355
- async searchStorePaginated(store, namespace, options = {}) {
4356
- const { query, filter, pageSize = 100 } = options;
4357
- const allItems = [];
4358
- let offset = 0;
4359
- while (true) {
4360
- const pageItems = await store.search(namespace, {
4361
- query,
4362
- filter,
4363
- limit: pageSize,
4364
- offset
4365
- });
4366
- if (!pageItems || pageItems.length === 0) break;
4367
- allItems.push(...pageItems);
4368
- if (pageItems.length < pageSize) break;
4369
- offset += pageSize;
4370
- }
4371
- return allItems;
4372
- }
4373
- /**
4374
- * List files and directories in the specified directory (non-recursive).
4375
- *
4376
- * @param path - Absolute path to directory
4377
- * @returns LsResult with list of FileInfo objects on success or error on failure.
4378
- * Directories have a trailing / in their path and is_dir=true.
4379
- */
4380
- async ls(path) {
4381
- const store = this.getStore();
4382
- const namespace = this.getNamespace();
4383
- const items = await this.searchStorePaginated(store, namespace);
4384
- const infos = [];
4385
- const subdirs = /* @__PURE__ */ new Set();
4386
- const normalizedPath = path.endsWith("/") ? path : path + "/";
4387
- for (const item of items) {
4388
- const itemKey = String(item.key);
4389
- if (!itemKey.startsWith(normalizedPath)) continue;
4390
- const relative = itemKey.substring(normalizedPath.length);
4391
- if (relative.includes("/")) {
4392
- const subdirName = relative.split("/")[0];
4393
- subdirs.add(normalizedPath + subdirName + "/");
4394
- continue;
4395
- }
4396
- try {
4397
- const fd = this.convertStoreItemToFileData(item);
4398
- const size = isFileDataV1(fd) ? fd.content.join("\n").length : isFileDataBinary(fd) ? fd.content.byteLength : fd.content.length;
4399
- infos.push({
4400
- path: itemKey,
4401
- is_dir: false,
4402
- size,
4403
- modified_at: fd.modified_at
4404
- });
4405
- } catch {
4406
- continue;
4407
- }
4408
- }
4409
- for (const subdir of Array.from(subdirs).sort()) infos.push({
4410
- path: subdir,
4411
- is_dir: true,
4412
- size: 0,
4413
- modified_at: ""
4414
- });
4415
- infos.sort((a, b) => a.path.localeCompare(b.path));
4416
- return { files: infos };
4417
- }
4418
- /**
4419
- * Read file content.
4420
- *
4421
- * Text files are paginated by line offset/limit.
4422
- * Binary files return full Uint8Array content (offset/limit ignored).
4423
- *
4424
- * @param filePath - Absolute file path
4425
- * @param offset - Line offset to start reading from (0-indexed)
4426
- * @param limit - Maximum number of lines to read
4427
- * @returns ReadResult with content on success or error on failure
4428
- */
4429
- async read(filePath, offset = 0, limit = 500) {
4430
- try {
4431
- const readRawResult = await this.readRaw(filePath);
4432
- if (readRawResult.error || !readRawResult.data) return { error: readRawResult.error || "File data not found" };
4433
- const fileDataV2 = migrateToFileDataV2(readRawResult.data, filePath);
4434
- if (!isTextMimeType(fileDataV2.mimeType)) return {
4435
- content: fileDataV2.content,
4436
- mimeType: fileDataV2.mimeType
4437
- };
4438
- if (typeof fileDataV2.content !== "string") return { error: `File '${filePath}' has binary content but text MIME type` };
4439
- return {
4440
- content: fileDataV2.content.split("\n").slice(offset, offset + limit).join("\n"),
4441
- mimeType: fileDataV2.mimeType
4442
- };
4443
- } catch (e) {
4444
- return { error: e.message };
4445
- }
4446
- }
4447
- /**
4448
- * Read file content as raw FileData.
4449
- *
4450
- * @param filePath - Absolute file path
4451
- * @returns ReadRawResult with raw file data on success or error on failure
4452
- */
4453
- async readRaw(filePath) {
4454
- const store = this.getStore();
4455
- const namespace = this.getNamespace();
4456
- const item = await store.get(namespace, filePath);
4457
- if (!item) return { error: `File '${filePath}' not found` };
4458
- return { data: this.convertStoreItemToFileData(item) };
4459
- }
4460
- /**
4461
- * Create a new file with content.
4462
- * Returns WriteResult. External storage sets filesUpdate=null.
4463
- */
4464
- async write(filePath, content) {
4465
- const store = this.getStore();
4466
- const namespace = this.getNamespace();
4467
- if (await store.get(namespace, filePath)) return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
4468
- const mimeType = getMimeType(filePath);
4469
- const fileData = createFileData(content, void 0, this.fileFormat, mimeType);
4470
- const storeValue = this.convertFileDataToStoreValue(fileData);
4471
- await store.put(namespace, filePath, storeValue);
4472
- return {
4473
- path: filePath,
4474
- filesUpdate: null
4475
- };
4476
- }
4477
- /**
4478
- * Edit a file by replacing string occurrences.
4479
- * Returns EditResult. External storage sets filesUpdate=null.
4480
- */
4481
- async edit(filePath, oldString, newString, replaceAll = false) {
4482
- const store = this.getStore();
4483
- const namespace = this.getNamespace();
4484
- const item = await store.get(namespace, filePath);
4485
- if (!item) return { error: `Error: File '${filePath}' not found` };
4486
- try {
4487
- const fileData = this.convertStoreItemToFileData(item);
4488
- const result = performStringReplacement(fileDataToString(fileData), oldString, newString, replaceAll);
4489
- if (typeof result === "string") return { error: result };
4490
- const [newContent, occurrences] = result;
4491
- const newFileData = updateFileData(fileData, newContent);
4492
- const storeValue = this.convertFileDataToStoreValue(newFileData);
4493
- await store.put(namespace, filePath, storeValue);
4494
- return {
4495
- path: filePath,
4496
- filesUpdate: null,
4497
- occurrences
4498
- };
4499
- } catch (e) {
4500
- return { error: `Error: ${e.message}` };
4722
+ fileFormat;
4723
+ constructor(stateAndStoreOrOptions, options) {
4724
+ let opts;
4725
+ if (stateAndStoreOrOptions != null && typeof stateAndStoreOrOptions === "object" && "state" in stateAndStoreOrOptions) {
4726
+ this.stateAndStore = stateAndStoreOrOptions;
4727
+ opts = options;
4728
+ } else {
4729
+ this.stateAndStore = void 0;
4730
+ opts = stateAndStoreOrOptions;
4501
4731
  }
4732
+ if (Array.isArray(opts?.namespace)) this._namespace = validateNamespace(opts.namespace);
4733
+ else if (opts?.namespace) this._namespace = opts.namespace;
4734
+ this.storeOverride = opts?.store;
4735
+ this.fileFormat = opts?.fileFormat ?? "v2";
4502
4736
  }
4503
4737
  /**
4504
- * Search file contents for a literal text pattern.
4505
- * Binary files are skipped.
4738
+ * Get the BaseStore instance for persistent storage operations.
4739
+ *
4740
+ * In legacy mode, reads from the injected {@link StateAndStore}.
4741
+ * In zero-arg mode, retrieves the store from the LangGraph execution
4742
+ * context via {@link getLangGraphStore}.
4743
+ *
4744
+ * @returns BaseStore instance
4745
+ * @throws Error if no store is available in either mode
4506
4746
  */
4507
- async grep(pattern, path = "/", glob = null) {
4508
- const store = this.getStore();
4509
- const namespace = this.getNamespace();
4510
- const items = await this.searchStorePaginated(store, namespace);
4511
- const files = {};
4512
- for (const item of items) try {
4513
- files[item.key] = this.convertStoreItemToFileData(item);
4514
- } catch {
4515
- continue;
4747
+ getStore() {
4748
+ if (this.stateAndStore) {
4749
+ const store = this.stateAndStore.store;
4750
+ if (!store) throw new Error("Store is required but not available in runtime");
4751
+ return store;
4516
4752
  }
4517
- return { matches: grepMatchesFromFiles(files, pattern, path, glob) };
4753
+ if (this.storeOverride) return this.storeOverride;
4754
+ const store = (0, _langchain_langgraph.getStore)();
4755
+ if (!store) throw new Error("Store is required but not available in LangGraph execution context. Ensure the graph was configured with a store.");
4756
+ return store;
4518
4757
  }
4519
4758
  /**
4520
- * Structured glob matching returning FileInfo objects.
4759
+ * Get the current graph state when available.
4521
4760
  */
4522
- async glob(pattern, path = "/") {
4523
- const store = this.getStore();
4524
- const namespace = this.getNamespace();
4525
- const items = await this.searchStorePaginated(store, namespace);
4526
- const files = {};
4527
- for (const item of items) try {
4528
- files[item.key] = this.convertStoreItemToFileData(item);
4761
+ getState() {
4762
+ if (this.stateAndStore) return this.stateAndStore.state;
4763
+ try {
4764
+ return (0, _langchain_langgraph.getCurrentTaskInput)();
4529
4765
  } catch {
4530
- continue;
4766
+ return;
4531
4767
  }
4532
- const result = globSearchFiles(files, pattern, path);
4533
- if (result === "No files found") return { files: [] };
4534
- const paths = result.split("\n");
4535
- const infos = [];
4536
- for (const p of paths) {
4537
- const fd = files[p];
4538
- const size = fd ? isFileDataV1(fd) ? fd.content.join("\n").length : isFileDataBinary(fd) ? fd.content.byteLength : fd.content.length : 0;
4539
- infos.push({
4540
- path: p,
4541
- is_dir: false,
4542
- size,
4543
- modified_at: fd?.modified_at || ""
4544
- });
4545
- }
4546
- return { files: infos };
4547
4768
  }
4548
4769
  /**
4549
- * Upload multiple files.
4550
- *
4551
- * @param files - List of [path, content] tuples to upload
4552
- * @returns List of FileUploadResponse objects, one per input file
4770
+ * Get the most relevant runnable config for namespace resolution.
4553
4771
  */
4554
- async uploadFiles(files) {
4555
- const store = this.getStore();
4556
- const namespace = this.getNamespace();
4557
- const responses = [];
4558
- for (const [path, content] of files) try {
4559
- const mimeType = getMimeType(path);
4560
- const isBinary = this.fileFormat === "v2" && !isTextMimeType(mimeType);
4561
- let fileData;
4562
- if (isBinary) fileData = createFileData(content, void 0, "v2", mimeType);
4563
- else fileData = createFileData(new TextDecoder().decode(content), void 0, this.fileFormat, mimeType);
4564
- const storeValue = this.convertFileDataToStoreValue(fileData);
4565
- await store.put(namespace, path, storeValue);
4566
- responses.push({
4567
- path,
4568
- error: null
4569
- });
4772
+ getNamespaceConfig() {
4773
+ const injectedConfig = getObjectRecord(this.stateAndStore?.config);
4774
+ if (injectedConfig) return {
4775
+ metadata: getObjectRecord(injectedConfig.metadata),
4776
+ configurable: getObjectRecord(injectedConfig.configurable)
4777
+ };
4778
+ try {
4779
+ const configRecord = getObjectRecord((0, _langchain_langgraph.getConfig)());
4780
+ if (!configRecord) return;
4781
+ return {
4782
+ metadata: getObjectRecord(configRecord.metadata),
4783
+ configurable: getObjectRecord(configRecord.configurable)
4784
+ };
4570
4785
  } catch {
4571
- responses.push({
4572
- path,
4573
- error: "invalid_path"
4574
- });
4786
+ return;
4575
4787
  }
4576
- return responses;
4577
4788
  }
4578
4789
  /**
4579
- * Download multiple files.
4580
- *
4581
- * @param paths - List of file paths to download
4582
- * @returns List of FileDownloadResponse objects, one per input path
4790
+ * Legacy assistant-id detection compatible with both Python and the
4791
+ * historical TypeScript `assistantId` runtime property.
4583
4792
  */
4584
- async downloadFiles(paths) {
4585
- const store = this.getStore();
4586
- const namespace = this.getNamespace();
4587
- const responses = [];
4588
- for (const path of paths) try {
4589
- const item = await store.get(namespace, path);
4590
- if (!item) {
4591
- responses.push({
4592
- path,
4593
- content: null,
4594
- error: "file_not_found"
4595
- });
4596
- continue;
4597
- }
4598
- const fileDataV2 = migrateToFileDataV2(this.convertStoreItemToFileData(item), path);
4599
- if (typeof fileDataV2.content === "string") {
4600
- const content = new TextEncoder().encode(fileDataV2.content);
4601
- responses.push({
4602
- path,
4603
- content,
4604
- error: null
4605
- });
4606
- } else responses.push({
4607
- path,
4608
- content: fileDataV2.content,
4609
- error: null
4610
- });
4611
- } catch {
4612
- responses.push({
4613
- path,
4614
- content: null,
4615
- error: "file_not_found"
4616
- });
4617
- }
4618
- return responses;
4619
- }
4620
- };
4621
- //#endregion
4622
- //#region src/backends/filesystem.ts
4623
- /**
4624
- * FilesystemBackend: Read and write files directly from the filesystem.
4625
- *
4626
- * Security and search upgrades:
4627
- * - Secure path resolution with root containment when in virtual_mode (sandboxed to cwd)
4628
- * - Prevent symlink-following on file I/O using O_NOFOLLOW when available
4629
- * - Ripgrep-powered grep with literal (fixed-string) search, plus substring fallback
4630
- * and optional glob include filtering, while preserving virtual path behavior
4631
- */
4632
- const SUPPORTS_NOFOLLOW = node_fs.default.constants.O_NOFOLLOW !== void 0;
4633
- /**
4634
- * Backend that reads and writes files directly from the filesystem.
4635
- *
4636
- * Files are accessed using their actual filesystem paths. Relative paths are
4637
- * resolved relative to the current working directory. Content is read/written
4638
- * as plain text, and metadata (timestamps) are derived from filesystem stats.
4639
- */
4640
- var FilesystemBackend = class {
4641
- cwd;
4642
- virtualMode;
4643
- maxFileSizeBytes;
4644
- constructor(options = {}) {
4645
- const { rootDir, virtualMode = false, maxFileSizeMb = 10 } = options;
4646
- this.cwd = rootDir ? node_path.default.resolve(rootDir) : process.cwd();
4647
- this.virtualMode = virtualMode;
4648
- this.maxFileSizeBytes = maxFileSizeMb * 1024 * 1024;
4793
+ getLegacyAssistantId() {
4794
+ const config = this.getNamespaceConfig();
4795
+ const assistantIdFromConfig = getAssistantIdFromRecord(config?.metadata) ?? getAssistantIdFromRecord(config?.configurable);
4796
+ if (assistantIdFromConfig) return assistantIdFromConfig;
4797
+ const assistantId = this.stateAndStore?.assistantId;
4798
+ return typeof assistantId === "string" && assistantId.length > 0 ? assistantId : void 0;
4649
4799
  }
4650
4800
  /**
4651
- * Resolve a file path with security checks.
4801
+ * Get the namespace for store operations.
4652
4802
  *
4653
- * When virtualMode=true, treat incoming paths as virtual absolute paths under
4654
- * this.cwd, disallow traversal (.., ~) and ensure resolved path stays within root.
4655
- * When virtualMode=false, preserve legacy behavior: absolute paths are allowed
4656
- * as-is; relative paths resolve under cwd.
4803
+ * Resolution order:
4804
+ * 1. Explicit namespace from constructor options
4805
+ * 2. Namespace factory resolved from the current backend context
4806
+ * 3. Assistant ID from runtime config / LangGraph config metadata
4807
+ * 4. Legacy `assistantId` from the injected runtime
4808
+ * 5. `["filesystem"]`
4809
+ */
4810
+ getNamespace() {
4811
+ if (Array.isArray(this._namespace)) return this._namespace;
4812
+ if (this._namespace) return validateNamespace(this._namespace({
4813
+ state: this.getState(),
4814
+ config: this.getNamespaceConfig(),
4815
+ assistantId: this.getLegacyAssistantId()
4816
+ }));
4817
+ const assistantId = this.getLegacyAssistantId();
4818
+ if (assistantId) return [assistantId, "filesystem"];
4819
+ return ["filesystem"];
4820
+ }
4821
+ /**
4822
+ * Convert a store Item to FileData format.
4657
4823
  *
4658
- * @param key - File path (absolute, relative, or virtual when virtualMode=true)
4659
- * @returns Resolved absolute path string
4660
- * @throws Error if path traversal detected or path outside root
4824
+ * @param storeItem - The store Item containing file data
4825
+ * @returns FileData object
4826
+ * @throws Error if required fields are missing or have incorrect types
4661
4827
  */
4662
- resolvePath(key) {
4663
- if (this.virtualMode) {
4664
- const vpath = key.startsWith("/") ? key : "/" + key;
4665
- if (vpath.includes("..") || vpath.startsWith("~")) throw new Error("Path traversal not allowed");
4666
- const full = node_path.default.resolve(this.cwd, vpath.substring(1));
4667
- const relative = node_path.default.relative(this.cwd, full);
4668
- if (relative.startsWith("..") || node_path.default.isAbsolute(relative)) throw new Error(`Path: ${full} outside root directory: ${this.cwd}`);
4669
- return full;
4670
- }
4671
- if (node_path.default.isAbsolute(key)) return key;
4672
- return node_path.default.resolve(this.cwd, key);
4828
+ convertStoreItemToFileData(storeItem) {
4829
+ const value = storeItem.value;
4830
+ if (!(value.content !== void 0 && (Array.isArray(value.content) || typeof value.content === "string" || ArrayBuffer.isView(value.content))) || typeof value.created_at !== "string" || typeof value.modified_at !== "string") throw new Error(`Store item does not contain valid FileData fields. Got keys: ${Object.keys(value).join(", ")}`);
4831
+ return {
4832
+ content: value.content,
4833
+ ...value.mimeType ? { mimeType: value.mimeType } : {},
4834
+ created_at: value.created_at,
4835
+ modified_at: value.modified_at
4836
+ };
4673
4837
  }
4674
4838
  /**
4675
- * List files and directories in the specified directory (non-recursive).
4839
+ * Convert FileData to a value suitable for store.put().
4676
4840
  *
4677
- * @param dirPath - Absolute directory path to list files from
4678
- * @returns List of FileInfo objects for files and directories directly in the directory.
4679
- * Directories have a trailing / in their path and is_dir=true.
4841
+ * @param fileData - The FileData to convert
4842
+ * @returns Object with content, mimeType, created_at, and modified_at fields
4680
4843
  */
4681
- async ls(dirPath) {
4682
- try {
4683
- const resolvedPath = this.resolvePath(dirPath);
4684
- if (!(await node_fs_promises.default.stat(resolvedPath)).isDirectory()) return { files: [] };
4685
- const entries = await node_fs_promises.default.readdir(resolvedPath, { withFileTypes: true });
4686
- const results = [];
4687
- const cwdStr = this.cwd.endsWith(node_path.default.sep) ? this.cwd : this.cwd + node_path.default.sep;
4688
- for (const entry of entries) {
4689
- const fullPath = node_path.default.join(resolvedPath, entry.name);
4690
- try {
4691
- const entryStat = await node_fs_promises.default.stat(fullPath);
4692
- const isFile = entryStat.isFile();
4693
- const isDir = entryStat.isDirectory();
4694
- if (!this.virtualMode) {
4695
- if (isFile) results.push({
4696
- path: fullPath,
4697
- is_dir: false,
4698
- size: entryStat.size,
4699
- modified_at: entryStat.mtime.toISOString()
4700
- });
4701
- else if (isDir) results.push({
4702
- path: fullPath + node_path.default.sep,
4703
- is_dir: true,
4704
- size: 0,
4705
- modified_at: entryStat.mtime.toISOString()
4706
- });
4707
- } else {
4708
- let relativePath;
4709
- if (fullPath.startsWith(cwdStr)) relativePath = fullPath.substring(cwdStr.length);
4710
- else if (fullPath.startsWith(this.cwd)) relativePath = fullPath.substring(this.cwd.length).replace(/^[/\\]/, "");
4711
- else relativePath = fullPath;
4712
- relativePath = relativePath.split(node_path.default.sep).join("/");
4713
- const virtPath = "/" + relativePath;
4714
- if (isFile) results.push({
4715
- path: virtPath,
4716
- is_dir: false,
4717
- size: entryStat.size,
4718
- modified_at: entryStat.mtime.toISOString()
4719
- });
4720
- else if (isDir) results.push({
4721
- path: virtPath + "/",
4722
- is_dir: true,
4723
- size: 0,
4724
- modified_at: entryStat.mtime.toISOString()
4725
- });
4726
- }
4727
- } catch {
4728
- continue;
4729
- }
4844
+ convertFileDataToStoreValue(fileData) {
4845
+ return {
4846
+ content: fileData.content,
4847
+ ..."mimeType" in fileData ? { mimeType: fileData.mimeType } : {},
4848
+ created_at: fileData.created_at,
4849
+ modified_at: fileData.modified_at
4850
+ };
4851
+ }
4852
+ /**
4853
+ * Search store with automatic pagination to retrieve all results.
4854
+ *
4855
+ * @param store - The store to search
4856
+ * @param namespace - Hierarchical path prefix to search within
4857
+ * @param options - Optional query, filter, and page_size
4858
+ * @returns List of all items matching the search criteria
4859
+ */
4860
+ async searchStorePaginated(store, namespace, options = {}) {
4861
+ const { query, filter, pageSize = 100 } = options;
4862
+ const allItems = [];
4863
+ let offset = 0;
4864
+ while (true) {
4865
+ const pageItems = await store.search(namespace, {
4866
+ query,
4867
+ filter,
4868
+ limit: pageSize,
4869
+ offset
4870
+ });
4871
+ if (!pageItems || pageItems.length === 0) break;
4872
+ allItems.push(...pageItems);
4873
+ if (pageItems.length < pageSize) break;
4874
+ offset += pageSize;
4875
+ }
4876
+ return allItems;
4877
+ }
4878
+ /**
4879
+ * List files and directories in the specified directory (non-recursive).
4880
+ *
4881
+ * @param path - Absolute path to directory
4882
+ * @returns LsResult with list of FileInfo objects on success or error on failure.
4883
+ * Directories have a trailing / in their path and is_dir=true.
4884
+ */
4885
+ async ls(path) {
4886
+ const store = this.getStore();
4887
+ const namespace = this.getNamespace();
4888
+ const items = await this.searchStorePaginated(store, namespace);
4889
+ const infos = [];
4890
+ const subdirs = /* @__PURE__ */ new Set();
4891
+ const normalizedPath = path.endsWith("/") ? path : path + "/";
4892
+ for (const item of items) {
4893
+ const itemKey = String(item.key);
4894
+ if (!itemKey.startsWith(normalizedPath)) continue;
4895
+ const relative = itemKey.substring(normalizedPath.length);
4896
+ if (relative.includes("/")) {
4897
+ const subdirName = relative.split("/")[0];
4898
+ subdirs.add(normalizedPath + subdirName + "/");
4899
+ continue;
4900
+ }
4901
+ try {
4902
+ const fd = this.convertStoreItemToFileData(item);
4903
+ const size = isFileDataV1(fd) ? fd.content.join("\n").length : isFileDataBinary(fd) ? fd.content.byteLength : fd.content.length;
4904
+ infos.push({
4905
+ path: itemKey,
4906
+ is_dir: false,
4907
+ size,
4908
+ modified_at: fd.modified_at
4909
+ });
4910
+ } catch {
4911
+ continue;
4730
4912
  }
4731
- results.sort((a, b) => a.path.localeCompare(b.path));
4732
- return { files: results };
4733
- } catch {
4734
- return { files: [] };
4735
4913
  }
4914
+ for (const subdir of Array.from(subdirs).sort()) infos.push({
4915
+ path: subdir,
4916
+ is_dir: true,
4917
+ size: 0,
4918
+ modified_at: ""
4919
+ });
4920
+ infos.sort((a, b) => a.path.localeCompare(b.path));
4921
+ return { files: infos };
4736
4922
  }
4737
4923
  /**
4738
- * Read file content with line numbers.
4924
+ * Read file content.
4739
4925
  *
4740
- * @param filePath - Absolute or relative file path
4926
+ * Text files are paginated by line offset/limit.
4927
+ * Binary files return full Uint8Array content (offset/limit ignored).
4928
+ *
4929
+ * @param filePath - Absolute file path
4741
4930
  * @param offset - Line offset to start reading from (0-indexed)
4742
4931
  * @param limit - Maximum number of lines to read
4743
- * @returns Formatted file content with line numbers, or error message
4932
+ * @returns ReadResult with content on success or error on failure
4744
4933
  */
4745
4934
  async read(filePath, offset = 0, limit = 500) {
4746
4935
  try {
4747
- const resolvedPath = this.resolvePath(filePath);
4748
- const mimeType = getMimeType(filePath);
4749
- const isBinary = !isTextMimeType(mimeType);
4750
- let content;
4751
- if (SUPPORTS_NOFOLLOW) {
4752
- if (!(await node_fs_promises.default.stat(resolvedPath)).isFile()) return { error: `File '${filePath}' not found` };
4753
- const fd = await node_fs_promises.default.open(resolvedPath, node_fs.default.constants.O_RDONLY | node_fs.default.constants.O_NOFOLLOW);
4754
- try {
4755
- if (isBinary) {
4756
- const buffer = await fd.readFile();
4757
- return {
4758
- content: new Uint8Array(buffer),
4759
- mimeType
4760
- };
4761
- }
4762
- content = await fd.readFile({ encoding: "utf-8" });
4763
- } finally {
4764
- await fd.close();
4765
- }
4766
- } else {
4767
- const stat = await node_fs_promises.default.lstat(resolvedPath);
4768
- if (stat.isSymbolicLink()) return { error: `Symlinks are not allowed: ${filePath}` };
4769
- if (!stat.isFile()) return { error: `File '${filePath}' not found` };
4770
- if (isBinary) {
4771
- const buffer = await node_fs_promises.default.readFile(resolvedPath);
4772
- return {
4773
- content: new Uint8Array(buffer),
4774
- mimeType
4775
- };
4776
- }
4777
- content = await node_fs_promises.default.readFile(resolvedPath, "utf-8");
4778
- }
4779
- const emptyMsg = checkEmptyContent(content);
4780
- if (emptyMsg) return {
4781
- content: emptyMsg,
4782
- mimeType
4936
+ const readRawResult = await this.readRaw(filePath);
4937
+ if (readRawResult.error || !readRawResult.data) return { error: readRawResult.error || "File data not found" };
4938
+ const fileDataV2 = migrateToFileDataV2(readRawResult.data, filePath);
4939
+ if (!isTextMimeType(fileDataV2.mimeType)) return {
4940
+ content: fileDataV2.content,
4941
+ mimeType: fileDataV2.mimeType
4783
4942
  };
4784
- const lines = content.split("\n");
4785
- const startIdx = offset;
4786
- const endIdx = Math.min(startIdx + limit, lines.length);
4787
- if (startIdx >= lines.length) return { error: `Line offset ${offset} exceeds file length (${lines.length} lines)` };
4943
+ if (typeof fileDataV2.content !== "string") return { error: `File '${filePath}' has binary content but text MIME type` };
4788
4944
  return {
4789
- content: lines.slice(startIdx, endIdx).join("\n"),
4790
- mimeType
4945
+ content: fileDataV2.content.split("\n").slice(offset, offset + limit).join("\n"),
4946
+ mimeType: fileDataV2.mimeType
4791
4947
  };
4792
4948
  } catch (e) {
4793
- return { error: `Error reading file '${filePath}': ${e.message}` };
4949
+ return { error: e.message };
4794
4950
  }
4795
4951
  }
4796
4952
  /**
@@ -4800,478 +4956,347 @@ var FilesystemBackend = class {
4800
4956
  * @returns ReadRawResult with raw file data on success or error on failure
4801
4957
  */
4802
4958
  async readRaw(filePath) {
4803
- const resolvedPath = this.resolvePath(filePath);
4804
- const mimeType = getMimeType(filePath);
4805
- const isBinary = !isTextMimeType(mimeType);
4806
- let content;
4807
- let stat;
4808
- if (SUPPORTS_NOFOLLOW) {
4809
- stat = await node_fs_promises.default.stat(resolvedPath);
4810
- if (!stat.isFile()) return { error: `File '${filePath}' not found` };
4811
- const fd = await node_fs_promises.default.open(resolvedPath, node_fs.default.constants.O_RDONLY | node_fs.default.constants.O_NOFOLLOW);
4812
- try {
4813
- if (isBinary) {
4814
- const buffer = await fd.readFile();
4815
- return { data: {
4816
- content: new Uint8Array(buffer),
4817
- mimeType,
4818
- created_at: stat.ctime.toISOString(),
4819
- modified_at: stat.mtime.toISOString()
4820
- } };
4821
- }
4822
- content = await fd.readFile({ encoding: "utf-8" });
4823
- } finally {
4824
- await fd.close();
4825
- }
4826
- } else {
4827
- stat = await node_fs_promises.default.lstat(resolvedPath);
4828
- if (stat.isSymbolicLink()) return { error: `Symlinks are not allowed: ${filePath}` };
4829
- if (!stat.isFile()) return { error: `File '${filePath}' not found` };
4830
- if (isBinary) {
4831
- const buffer = await node_fs_promises.default.readFile(resolvedPath);
4832
- return { data: {
4833
- content: new Uint8Array(buffer),
4834
- mimeType,
4835
- created_at: stat.ctime.toISOString(),
4836
- modified_at: stat.mtime.toISOString()
4837
- } };
4838
- }
4839
- content = await node_fs_promises.default.readFile(resolvedPath, "utf-8");
4840
- }
4841
- return { data: {
4842
- content,
4843
- mimeType,
4844
- created_at: stat.ctime.toISOString(),
4845
- modified_at: stat.mtime.toISOString()
4846
- } };
4959
+ const store = this.getStore();
4960
+ const namespace = this.getNamespace();
4961
+ const item = await store.get(namespace, filePath);
4962
+ if (!item) return { error: `File '${filePath}' not found` };
4963
+ return { data: this.convertStoreItemToFileData(item) };
4847
4964
  }
4848
4965
  /**
4849
4966
  * Create a new file with content.
4850
4967
  * Returns WriteResult. External storage sets filesUpdate=null.
4851
4968
  */
4852
4969
  async write(filePath, content) {
4853
- try {
4854
- const resolvedPath = this.resolvePath(filePath);
4855
- const isBinary = !isTextMimeType(getMimeType(filePath));
4856
- try {
4857
- if ((await node_fs_promises.default.lstat(resolvedPath)).isSymbolicLink()) return { error: `Cannot write to ${filePath} because it is a symlink. Symlinks are not allowed.` };
4858
- return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
4859
- } catch {}
4860
- await node_fs_promises.default.mkdir(node_path.default.dirname(resolvedPath), { recursive: true });
4861
- if (SUPPORTS_NOFOLLOW) {
4862
- const flags = node_fs.default.constants.O_WRONLY | node_fs.default.constants.O_CREAT | node_fs.default.constants.O_TRUNC | node_fs.default.constants.O_NOFOLLOW;
4863
- const fd = await node_fs_promises.default.open(resolvedPath, flags, 420);
4864
- try {
4865
- if (isBinary) {
4866
- const buffer = Buffer.from(content, "base64");
4867
- await fd.writeFile(buffer);
4868
- } else await fd.writeFile(content, "utf-8");
4869
- } finally {
4870
- await fd.close();
4871
- }
4872
- } else if (isBinary) {
4873
- const buffer = Buffer.from(content, "base64");
4874
- await node_fs_promises.default.writeFile(resolvedPath, buffer);
4875
- } else await node_fs_promises.default.writeFile(resolvedPath, content, "utf-8");
4876
- return {
4877
- path: filePath,
4878
- filesUpdate: null
4879
- };
4880
- } catch (e) {
4881
- return { error: `Error writing file '${filePath}': ${e.message}` };
4882
- }
4883
- }
4884
- /**
4885
- * Edit a file by replacing string occurrences.
4886
- * Returns EditResult. External storage sets filesUpdate=null.
4887
- */
4888
- async edit(filePath, oldString, newString, replaceAll = false) {
4889
- try {
4890
- const resolvedPath = this.resolvePath(filePath);
4891
- let content;
4892
- if (SUPPORTS_NOFOLLOW) {
4893
- if (!(await node_fs_promises.default.stat(resolvedPath)).isFile()) return { error: `Error: File '${filePath}' not found` };
4894
- const fd = await node_fs_promises.default.open(resolvedPath, node_fs.default.constants.O_RDONLY | node_fs.default.constants.O_NOFOLLOW);
4895
- try {
4896
- content = await fd.readFile({ encoding: "utf-8" });
4897
- } finally {
4898
- await fd.close();
4899
- }
4900
- } else {
4901
- const stat = await node_fs_promises.default.lstat(resolvedPath);
4902
- if (stat.isSymbolicLink()) return { error: `Error: Symlinks are not allowed: ${filePath}` };
4903
- if (!stat.isFile()) return { error: `Error: File '${filePath}' not found` };
4904
- content = await node_fs_promises.default.readFile(resolvedPath, "utf-8");
4905
- }
4906
- const result = performStringReplacement(content, oldString, newString, replaceAll);
4970
+ const store = this.getStore();
4971
+ const namespace = this.getNamespace();
4972
+ if (await store.get(namespace, filePath)) return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
4973
+ const mimeType = getMimeType(filePath);
4974
+ const fileData = createFileData(content, void 0, this.fileFormat, mimeType);
4975
+ const storeValue = this.convertFileDataToStoreValue(fileData);
4976
+ await store.put(namespace, filePath, storeValue);
4977
+ return {
4978
+ path: filePath,
4979
+ filesUpdate: null
4980
+ };
4981
+ }
4982
+ /**
4983
+ * Edit a file by replacing string occurrences.
4984
+ * Returns EditResult. External storage sets filesUpdate=null.
4985
+ */
4986
+ async edit(filePath, oldString, newString, replaceAll = false) {
4987
+ const store = this.getStore();
4988
+ const namespace = this.getNamespace();
4989
+ const item = await store.get(namespace, filePath);
4990
+ if (!item) return { error: `Error: File '${filePath}' not found` };
4991
+ try {
4992
+ const fileData = this.convertStoreItemToFileData(item);
4993
+ const result = performStringReplacement(fileDataToString(fileData), oldString, newString, replaceAll);
4907
4994
  if (typeof result === "string") return { error: result };
4908
4995
  const [newContent, occurrences] = result;
4909
- if (SUPPORTS_NOFOLLOW) {
4910
- const flags = node_fs.default.constants.O_WRONLY | node_fs.default.constants.O_TRUNC | node_fs.default.constants.O_NOFOLLOW;
4911
- const fd = await node_fs_promises.default.open(resolvedPath, flags);
4912
- try {
4913
- await fd.writeFile(newContent, "utf-8");
4914
- } finally {
4915
- await fd.close();
4916
- }
4917
- } else await node_fs_promises.default.writeFile(resolvedPath, newContent, "utf-8");
4996
+ const newFileData = updateFileData(fileData, newContent);
4997
+ const storeValue = this.convertFileDataToStoreValue(newFileData);
4998
+ await store.put(namespace, filePath, storeValue);
4918
4999
  return {
4919
5000
  path: filePath,
4920
5001
  filesUpdate: null,
4921
5002
  occurrences
4922
5003
  };
4923
5004
  } catch (e) {
4924
- return { error: `Error editing file '${filePath}': ${e.message}` };
4925
- }
4926
- }
4927
- /**
4928
- * Search for a literal text pattern in files.
4929
- *
4930
- * Uses ripgrep if available, falling back to substring search.
4931
- *
4932
- * @param pattern - Literal string to search for (NOT regex).
4933
- * @param dirPath - Directory or file path to search in. Defaults to current directory.
4934
- * @param glob - Optional glob pattern to filter which files to search.
4935
- * @returns List of GrepMatch dicts containing path, line number, and matched text.
4936
- */
4937
- async grep(pattern, dirPath = "/", glob = null) {
4938
- let baseFull;
4939
- try {
4940
- baseFull = this.resolvePath(dirPath || ".");
4941
- } catch {
4942
- return { matches: [] };
4943
- }
4944
- try {
4945
- await node_fs_promises.default.stat(baseFull);
4946
- } catch {
4947
- return { matches: [] };
5005
+ return { error: `Error: ${e.message}` };
4948
5006
  }
4949
- let results = await this.ripgrepSearch(pattern, baseFull, glob);
4950
- if (results === null) results = await this.literalSearch(pattern, baseFull, glob);
4951
- const matches = [];
4952
- for (const [fpath, items] of Object.entries(results)) for (const [lineNum, lineText] of items) matches.push({
4953
- path: fpath,
4954
- line: lineNum,
4955
- text: lineText
4956
- });
4957
- return { matches };
4958
- }
4959
- /**
4960
- * Search using ripgrep with fixed-string (literal) mode.
4961
- *
4962
- * @param pattern - Literal string to search for (unescaped).
4963
- * @param baseFull - Resolved base path to search in.
4964
- * @param includeGlob - Optional glob pattern to filter files.
4965
- * @returns Dict mapping file paths to list of (line_number, line_text) tuples.
4966
- * Returns null if ripgrep is unavailable or times out.
4967
- */
4968
- async ripgrepSearch(pattern, baseFull, includeGlob) {
4969
- return new Promise((resolve) => {
4970
- const args = ["--json", "-F"];
4971
- if (includeGlob) args.push("--glob", includeGlob);
4972
- args.push("--", pattern, baseFull);
4973
- const proc = (0, node_child_process.spawn)("rg", args, { timeout: 3e4 });
4974
- const results = {};
4975
- let output = "";
4976
- proc.stdout.on("data", (data) => {
4977
- output += data.toString();
4978
- });
4979
- proc.on("close", (code) => {
4980
- if (code !== 0 && code !== 1) {
4981
- resolve(null);
4982
- return;
4983
- }
4984
- for (const line of output.split("\n")) {
4985
- if (!line.trim()) continue;
4986
- try {
4987
- const data = JSON.parse(line);
4988
- if (data.type !== "match") continue;
4989
- const pdata = data.data || {};
4990
- const ftext = pdata.path?.text;
4991
- if (!ftext) continue;
4992
- let virtPath;
4993
- if (this.virtualMode) try {
4994
- const resolved = node_path.default.resolve(ftext);
4995
- const relative = node_path.default.relative(this.cwd, resolved);
4996
- if (relative.startsWith("..")) continue;
4997
- virtPath = "/" + relative.split(node_path.default.sep).join("/");
4998
- } catch {
4999
- continue;
5000
- }
5001
- else virtPath = ftext;
5002
- const ln = pdata.line_number;
5003
- const lt = pdata.lines?.text?.replace(/\n$/, "") || "";
5004
- if (ln === void 0) continue;
5005
- if (!results[virtPath]) results[virtPath] = [];
5006
- results[virtPath].push([ln, lt]);
5007
- } catch {
5008
- continue;
5009
- }
5010
- }
5011
- resolve(results);
5012
- });
5013
- proc.on("error", () => {
5014
- resolve(null);
5015
- });
5016
- });
5017
5007
  }
5018
5008
  /**
5019
- * Fallback search using literal substring matching when ripgrep is unavailable.
5020
- *
5021
- * Recursively searches files, respecting maxFileSizeBytes limit.
5022
- *
5023
- * @param pattern - Literal string to search for.
5024
- * @param baseFull - Resolved base path to search in.
5025
- * @param includeGlob - Optional glob pattern to filter files by name.
5026
- * @returns Dict mapping file paths to list of (line_number, line_text) tuples.
5009
+ * Search file contents for a literal text pattern.
5010
+ * Binary files are skipped.
5027
5011
  */
5028
- async literalSearch(pattern, baseFull, includeGlob) {
5029
- const results = {};
5030
- const files = await (0, fast_glob.default)("**/*", {
5031
- cwd: (await node_fs_promises.default.stat(baseFull)).isDirectory() ? baseFull : node_path.default.dirname(baseFull),
5032
- absolute: true,
5033
- onlyFiles: true,
5034
- dot: true
5035
- });
5036
- for (const fp of files) try {
5037
- if (!isTextMimeType(getMimeType(fp))) continue;
5038
- if (includeGlob && !micromatch.default.isMatch(node_path.default.basename(fp), includeGlob)) continue;
5039
- if ((await node_fs_promises.default.stat(fp)).size > this.maxFileSizeBytes) continue;
5040
- const lines = (await node_fs_promises.default.readFile(fp, "utf-8")).split("\n");
5041
- for (let i = 0; i < lines.length; i++) {
5042
- const line = lines[i];
5043
- if (line.includes(pattern)) {
5044
- let virtPath;
5045
- if (this.virtualMode) try {
5046
- const relative = node_path.default.relative(this.cwd, fp);
5047
- if (relative.startsWith("..")) continue;
5048
- virtPath = "/" + relative.split(node_path.default.sep).join("/");
5049
- } catch {
5050
- continue;
5051
- }
5052
- else virtPath = fp;
5053
- if (!results[virtPath]) results[virtPath] = [];
5054
- results[virtPath].push([i + 1, line]);
5055
- }
5056
- }
5012
+ async grep(pattern, path = "/", glob = null) {
5013
+ const store = this.getStore();
5014
+ const namespace = this.getNamespace();
5015
+ const items = await this.searchStorePaginated(store, namespace);
5016
+ const files = {};
5017
+ for (const item of items) try {
5018
+ files[item.key] = this.convertStoreItemToFileData(item);
5057
5019
  } catch {
5058
5020
  continue;
5059
5021
  }
5060
- return results;
5022
+ return { matches: grepMatchesFromFiles(files, pattern, path, glob) };
5061
5023
  }
5062
5024
  /**
5063
5025
  * Structured glob matching returning FileInfo objects.
5064
5026
  */
5065
- async glob(pattern, searchPath = "/") {
5066
- if (pattern.startsWith("/")) pattern = pattern.substring(1);
5067
- const resolvedSearchPath = searchPath === "/" ? this.cwd : this.resolvePath(searchPath);
5068
- try {
5069
- if (!(await node_fs_promises.default.stat(resolvedSearchPath)).isDirectory()) return { files: [] };
5027
+ async glob(pattern, path = "/") {
5028
+ const store = this.getStore();
5029
+ const namespace = this.getNamespace();
5030
+ const items = await this.searchStorePaginated(store, namespace);
5031
+ const files = {};
5032
+ for (const item of items) try {
5033
+ files[item.key] = this.convertStoreItemToFileData(item);
5070
5034
  } catch {
5071
- return { files: [] };
5035
+ continue;
5072
5036
  }
5073
- const results = [];
5074
- try {
5075
- const matches = await (0, fast_glob.default)(pattern, {
5076
- cwd: resolvedSearchPath,
5077
- absolute: true,
5078
- onlyFiles: true,
5079
- dot: true
5037
+ const result = globSearchFiles(files, pattern, path);
5038
+ if (result === "No files found") return { files: [] };
5039
+ const paths = result.split("\n");
5040
+ const infos = [];
5041
+ for (const p of paths) {
5042
+ const fd = files[p];
5043
+ const size = fd ? isFileDataV1(fd) ? fd.content.join("\n").length : isFileDataBinary(fd) ? fd.content.byteLength : fd.content.length : 0;
5044
+ infos.push({
5045
+ path: p,
5046
+ is_dir: false,
5047
+ size,
5048
+ modified_at: fd?.modified_at || ""
5080
5049
  });
5081
- for (const matchedPath of matches) try {
5082
- const stat = await node_fs_promises.default.stat(matchedPath);
5083
- if (!stat.isFile()) continue;
5084
- const normalizedPath = matchedPath.split("/").join(node_path.default.sep);
5085
- if (!this.virtualMode) results.push({
5086
- path: normalizedPath,
5087
- is_dir: false,
5088
- size: stat.size,
5089
- modified_at: stat.mtime.toISOString()
5090
- });
5091
- else {
5092
- const cwdStr = this.cwd.endsWith(node_path.default.sep) ? this.cwd : this.cwd + node_path.default.sep;
5093
- let relativePath;
5094
- if (normalizedPath.startsWith(cwdStr)) relativePath = normalizedPath.substring(cwdStr.length);
5095
- else if (normalizedPath.startsWith(this.cwd)) relativePath = normalizedPath.substring(this.cwd.length).replace(/^[/\\]/, "");
5096
- else relativePath = normalizedPath;
5097
- relativePath = relativePath.split(node_path.default.sep).join("/");
5098
- const virt = "/" + relativePath;
5099
- results.push({
5100
- path: virt,
5101
- is_dir: false,
5102
- size: stat.size,
5103
- modified_at: stat.mtime.toISOString()
5104
- });
5105
- }
5106
- } catch {
5107
- continue;
5108
- }
5109
- } catch {}
5110
- results.sort((a, b) => a.path.localeCompare(b.path));
5111
- return { files: results };
5050
+ }
5051
+ return { files: infos };
5112
5052
  }
5113
5053
  /**
5114
- * Upload multiple files to the filesystem.
5054
+ * Upload multiple files.
5115
5055
  *
5116
5056
  * @param files - List of [path, content] tuples to upload
5117
5057
  * @returns List of FileUploadResponse objects, one per input file
5118
5058
  */
5119
5059
  async uploadFiles(files) {
5060
+ const store = this.getStore();
5061
+ const namespace = this.getNamespace();
5120
5062
  const responses = [];
5121
- for (const [filePath, content] of files) try {
5122
- const resolvedPath = this.resolvePath(filePath);
5123
- await node_fs_promises.default.mkdir(node_path.default.dirname(resolvedPath), { recursive: true });
5124
- await node_fs_promises.default.writeFile(resolvedPath, content);
5125
- responses.push({
5126
- path: filePath,
5127
- error: null
5128
- });
5129
- } catch (e) {
5130
- if (e.code === "ENOENT") responses.push({
5131
- path: filePath,
5132
- error: "file_not_found"
5133
- });
5134
- else if (e.code === "EACCES") responses.push({
5135
- path: filePath,
5136
- error: "permission_denied"
5137
- });
5138
- else if (e.code === "EISDIR") responses.push({
5139
- path: filePath,
5140
- error: "is_directory"
5063
+ for (const [path, content] of files) try {
5064
+ const mimeType = getMimeType(path);
5065
+ const isBinary = this.fileFormat === "v2" && !isTextMimeType(mimeType);
5066
+ let fileData;
5067
+ if (isBinary) fileData = createFileData(content, void 0, "v2", mimeType);
5068
+ else fileData = createFileData(new TextDecoder().decode(content), void 0, this.fileFormat, mimeType);
5069
+ const storeValue = this.convertFileDataToStoreValue(fileData);
5070
+ await store.put(namespace, path, storeValue);
5071
+ responses.push({
5072
+ path,
5073
+ error: null
5141
5074
  });
5142
- else responses.push({
5143
- path: filePath,
5075
+ } catch {
5076
+ responses.push({
5077
+ path,
5144
5078
  error: "invalid_path"
5145
5079
  });
5146
5080
  }
5147
5081
  return responses;
5148
5082
  }
5149
5083
  /**
5150
- * Download multiple files from the filesystem.
5084
+ * Download multiple files.
5151
5085
  *
5152
5086
  * @param paths - List of file paths to download
5153
5087
  * @returns List of FileDownloadResponse objects, one per input path
5154
5088
  */
5155
5089
  async downloadFiles(paths) {
5090
+ const store = this.getStore();
5091
+ const namespace = this.getNamespace();
5156
5092
  const responses = [];
5157
- for (const filePath of paths) try {
5158
- const resolvedPath = this.resolvePath(filePath);
5159
- const content = await node_fs_promises.default.readFile(resolvedPath);
5160
- responses.push({
5161
- path: filePath,
5162
- content,
5093
+ for (const path of paths) try {
5094
+ const item = await store.get(namespace, path);
5095
+ if (!item) {
5096
+ responses.push({
5097
+ path,
5098
+ content: null,
5099
+ error: "file_not_found"
5100
+ });
5101
+ continue;
5102
+ }
5103
+ const fileDataV2 = migrateToFileDataV2(this.convertStoreItemToFileData(item), path);
5104
+ if (typeof fileDataV2.content === "string") {
5105
+ const content = new TextEncoder().encode(fileDataV2.content);
5106
+ responses.push({
5107
+ path,
5108
+ content,
5109
+ error: null
5110
+ });
5111
+ } else responses.push({
5112
+ path,
5113
+ content: fileDataV2.content,
5163
5114
  error: null
5164
5115
  });
5165
- } catch (e) {
5166
- if (e.code === "ENOENT") responses.push({
5167
- path: filePath,
5116
+ } catch {
5117
+ responses.push({
5118
+ path,
5168
5119
  content: null,
5169
5120
  error: "file_not_found"
5170
5121
  });
5171
- else if (e.code === "EACCES") responses.push({
5172
- path: filePath,
5173
- content: null,
5174
- error: "permission_denied"
5175
- });
5176
- else if (e.code === "EISDIR") responses.push({
5177
- path: filePath,
5178
- content: null,
5179
- error: "is_directory"
5180
- });
5181
- else responses.push({
5182
- path: filePath,
5183
- content: null,
5184
- error: "invalid_path"
5185
- });
5186
5122
  }
5187
5123
  return responses;
5188
5124
  }
5189
5125
  };
5190
5126
  //#endregion
5191
- //#region src/backends/composite.ts
5127
+ //#region src/backends/filesystem.ts
5192
5128
  /**
5193
- * Backend that routes file operations to different backends based on path prefix.
5129
+ * FilesystemBackend: Read and write files directly from the filesystem.
5194
5130
  *
5195
- * This enables hybrid storage strategies like:
5196
- * - `/memories/` StoreBackend (persistent, cross-thread)
5197
- * - Everything else StateBackend (ephemeral, per-thread)
5131
+ * Security and search upgrades:
5132
+ * - Secure path resolution with root containment when in virtual_mode (sandboxed to cwd)
5133
+ * - Prevent symlink-following on file I/O using O_NOFOLLOW when available
5134
+ * - Ripgrep-powered grep with literal (fixed-string) search, plus substring fallback
5135
+ * and optional glob include filtering, while preserving virtual path behavior
5136
+ */
5137
+ const SUPPORTS_NOFOLLOW = node_fs.default.constants.O_NOFOLLOW !== void 0;
5138
+ /**
5139
+ * Backend that reads and writes files directly from the filesystem.
5198
5140
  *
5199
- * The CompositeBackend handles path prefix stripping/re-adding transparently.
5141
+ * Files are accessed using their actual filesystem paths. Relative paths are
5142
+ * resolved relative to the current working directory. Content is read/written
5143
+ * as plain text, and metadata (timestamps) are derived from filesystem stats.
5200
5144
  */
5201
- var CompositeBackend = class {
5202
- default;
5203
- routes;
5204
- sortedRoutes;
5205
- constructor(defaultBackend, routes) {
5206
- this.default = isSandboxProtocol(defaultBackend) ? adaptSandboxProtocol(defaultBackend) : adaptBackendProtocol(defaultBackend);
5207
- this.routes = Object.fromEntries(Object.entries(routes).map(([k, v]) => [k, isSandboxProtocol(v) ? adaptSandboxProtocol(v) : adaptBackendProtocol(v)]));
5208
- this.sortedRoutes = Object.entries(this.routes).sort((a, b) => b[0].length - a[0].length);
5209
- }
5210
- /** Delegates to default backend's id if it is a sandbox, otherwise empty string. */
5211
- get id() {
5212
- return isSandboxBackend(this.default) ? this.default.id : "";
5145
+ var FilesystemBackend = class {
5146
+ cwd;
5147
+ virtualMode;
5148
+ maxFileSizeBytes;
5149
+ constructor(options = {}) {
5150
+ const { rootDir, virtualMode = false, maxFileSizeMb = 10 } = options;
5151
+ this.cwd = rootDir ? node_path.default.resolve(rootDir) : process.cwd();
5152
+ this.virtualMode = virtualMode;
5153
+ this.maxFileSizeBytes = maxFileSizeMb * 1024 * 1024;
5213
5154
  }
5214
5155
  /**
5215
- * Determine which backend handles this key and strip prefix.
5156
+ * Resolve a file path with security checks.
5216
5157
  *
5217
- * @param key - Original file path
5218
- * @returns Tuple of [backend, stripped_key] where stripped_key has the route
5219
- * prefix removed (but keeps leading slash).
5158
+ * When virtualMode=true, treat incoming paths as virtual absolute paths under
5159
+ * this.cwd, disallow traversal (.., ~) and ensure resolved path stays within root.
5160
+ * When virtualMode=false, preserve legacy behavior: absolute paths are allowed
5161
+ * as-is; relative paths resolve under cwd.
5162
+ *
5163
+ * @param key - File path (absolute, relative, or virtual when virtualMode=true)
5164
+ * @returns Resolved absolute path string
5165
+ * @throws Error if path traversal detected or path outside root
5220
5166
  */
5221
- getBackendAndKey(key) {
5222
- for (const [prefix, backend] of this.sortedRoutes) if (key.startsWith(prefix)) {
5223
- const suffix = key.substring(prefix.length);
5224
- return [backend, suffix ? "/" + suffix : "/"];
5167
+ resolvePath(key) {
5168
+ if (this.virtualMode) {
5169
+ const vpath = key.startsWith("/") ? key : "/" + key;
5170
+ if (vpath.includes("..") || vpath.startsWith("~")) throw new Error("Path traversal not allowed");
5171
+ const full = node_path.default.resolve(this.cwd, vpath.substring(1));
5172
+ const relative = node_path.default.relative(this.cwd, full);
5173
+ if (relative.startsWith("..") || node_path.default.isAbsolute(relative)) throw new Error(`Path: ${full} outside root directory: ${this.cwd}`);
5174
+ return full;
5225
5175
  }
5226
- return [this.default, key];
5176
+ if (node_path.default.isAbsolute(key)) return key;
5177
+ return node_path.default.resolve(this.cwd, key);
5227
5178
  }
5228
5179
  /**
5229
5180
  * List files and directories in the specified directory (non-recursive).
5230
5181
  *
5231
- * @param path - Absolute path to directory
5232
- * @returns LsResult with list of FileInfo objects (with route prefixes added) on success or error on failure.
5182
+ * @param dirPath - Absolute directory path to list files from
5183
+ * @returns List of FileInfo objects for files and directories directly in the directory.
5233
5184
  * Directories have a trailing / in their path and is_dir=true.
5234
5185
  */
5235
- async ls(path) {
5236
- for (const [routePrefix, backend] of this.sortedRoutes) if (path.startsWith(routePrefix.replace(/\/$/, ""))) {
5237
- const suffix = path.substring(routePrefix.length);
5238
- const searchPath = suffix ? "/" + suffix : "/";
5239
- const result = await backend.ls(searchPath);
5240
- if (result.error) return result;
5241
- const prefixed = [];
5242
- for (const fi of result.files || []) prefixed.push({
5243
- ...fi,
5244
- path: routePrefix.slice(0, -1) + fi.path
5245
- });
5246
- return { files: prefixed };
5247
- }
5248
- if (path === "/") {
5186
+ async ls(dirPath) {
5187
+ try {
5188
+ const resolvedPath = this.resolvePath(dirPath);
5189
+ if (!(await node_fs_promises.default.stat(resolvedPath)).isDirectory()) return { files: [] };
5190
+ const entries = await node_fs_promises.default.readdir(resolvedPath, { withFileTypes: true });
5249
5191
  const results = [];
5250
- const defaultResult = await this.default.ls(path);
5251
- if (defaultResult.error) return defaultResult;
5252
- results.push(...defaultResult.files || []);
5253
- for (const [routePrefix] of this.sortedRoutes) results.push({
5254
- path: routePrefix,
5255
- is_dir: true,
5256
- size: 0,
5257
- modified_at: ""
5258
- });
5192
+ const cwdStr = this.cwd.endsWith(node_path.default.sep) ? this.cwd : this.cwd + node_path.default.sep;
5193
+ for (const entry of entries) {
5194
+ const fullPath = node_path.default.join(resolvedPath, entry.name);
5195
+ try {
5196
+ const entryStat = await node_fs_promises.default.stat(fullPath);
5197
+ const isFile = entryStat.isFile();
5198
+ const isDir = entryStat.isDirectory();
5199
+ if (!this.virtualMode) {
5200
+ if (isFile) results.push({
5201
+ path: fullPath,
5202
+ is_dir: false,
5203
+ size: entryStat.size,
5204
+ modified_at: entryStat.mtime.toISOString()
5205
+ });
5206
+ else if (isDir) results.push({
5207
+ path: fullPath + node_path.default.sep,
5208
+ is_dir: true,
5209
+ size: 0,
5210
+ modified_at: entryStat.mtime.toISOString()
5211
+ });
5212
+ } else {
5213
+ let relativePath;
5214
+ if (fullPath.startsWith(cwdStr)) relativePath = fullPath.substring(cwdStr.length);
5215
+ else if (fullPath.startsWith(this.cwd)) relativePath = fullPath.substring(this.cwd.length).replace(/^[/\\]/, "");
5216
+ else relativePath = fullPath;
5217
+ relativePath = relativePath.split(node_path.default.sep).join("/");
5218
+ const virtPath = "/" + relativePath;
5219
+ if (isFile) results.push({
5220
+ path: virtPath,
5221
+ is_dir: false,
5222
+ size: entryStat.size,
5223
+ modified_at: entryStat.mtime.toISOString()
5224
+ });
5225
+ else if (isDir) results.push({
5226
+ path: virtPath + "/",
5227
+ is_dir: true,
5228
+ size: 0,
5229
+ modified_at: entryStat.mtime.toISOString()
5230
+ });
5231
+ }
5232
+ } catch {
5233
+ continue;
5234
+ }
5235
+ }
5259
5236
  results.sort((a, b) => a.path.localeCompare(b.path));
5260
5237
  return { files: results };
5238
+ } catch {
5239
+ return { files: [] };
5240
+ }
5241
+ }
5242
+ /**
5243
+ * Read file content with line numbers.
5244
+ *
5245
+ * @param filePath - Absolute or relative file path
5246
+ * @param offset - Line offset to start reading from (0-indexed)
5247
+ * @param limit - Maximum number of lines to read
5248
+ * @returns Formatted file content with line numbers, or error message
5249
+ */
5250
+ async read(filePath, offset = 0, limit = 500) {
5251
+ try {
5252
+ const resolvedPath = this.resolvePath(filePath);
5253
+ const mimeType = getMimeType(filePath);
5254
+ const isBinary = !isTextMimeType(mimeType);
5255
+ let content;
5256
+ if (SUPPORTS_NOFOLLOW) {
5257
+ if (!(await node_fs_promises.default.stat(resolvedPath)).isFile()) return { error: `File '${filePath}' not found` };
5258
+ const fd = await node_fs_promises.default.open(resolvedPath, node_fs.default.constants.O_RDONLY | node_fs.default.constants.O_NOFOLLOW);
5259
+ try {
5260
+ if (isBinary) {
5261
+ const buffer = await fd.readFile();
5262
+ return {
5263
+ content: new Uint8Array(buffer),
5264
+ mimeType
5265
+ };
5266
+ }
5267
+ content = await fd.readFile({ encoding: "utf-8" });
5268
+ } finally {
5269
+ await fd.close();
5270
+ }
5271
+ } else {
5272
+ const stat = await node_fs_promises.default.lstat(resolvedPath);
5273
+ if (stat.isSymbolicLink()) return { error: `Symlinks are not allowed: ${filePath}` };
5274
+ if (!stat.isFile()) return { error: `File '${filePath}' not found` };
5275
+ if (isBinary) {
5276
+ const buffer = await node_fs_promises.default.readFile(resolvedPath);
5277
+ return {
5278
+ content: new Uint8Array(buffer),
5279
+ mimeType
5280
+ };
5281
+ }
5282
+ content = await node_fs_promises.default.readFile(resolvedPath, "utf-8");
5283
+ }
5284
+ const emptyMsg = checkEmptyContent(content);
5285
+ if (emptyMsg) return {
5286
+ content: emptyMsg,
5287
+ mimeType
5288
+ };
5289
+ const lines = content.split("\n");
5290
+ const startIdx = offset;
5291
+ const endIdx = Math.min(startIdx + limit, lines.length);
5292
+ if (startIdx >= lines.length) return { error: `Line offset ${offset} exceeds file length (${lines.length} lines)` };
5293
+ return {
5294
+ content: lines.slice(startIdx, endIdx).join("\n"),
5295
+ mimeType
5296
+ };
5297
+ } catch (e) {
5298
+ return { error: `Error reading file '${filePath}': ${e.message}` };
5261
5299
  }
5262
- return await this.default.ls(path);
5263
- }
5264
- /**
5265
- * Read file content, routing to appropriate backend.
5266
- *
5267
- * @param filePath - Absolute file path
5268
- * @param offset - Line offset to start reading from (0-indexed)
5269
- * @param limit - Maximum number of lines to read
5270
- * @returns Formatted file content with line numbers, or error message
5271
- */
5272
- async read(filePath, offset = 0, limit = 500) {
5273
- const [backend, strippedKey] = this.getBackendAndKey(filePath);
5274
- return await backend.read(strippedKey, offset, limit);
5275
5300
  }
5276
5301
  /**
5277
5302
  * Read file content as raw FileData.
@@ -5280,167 +5305,391 @@ var CompositeBackend = class {
5280
5305
  * @returns ReadRawResult with raw file data on success or error on failure
5281
5306
  */
5282
5307
  async readRaw(filePath) {
5283
- const [backend, strippedKey] = this.getBackendAndKey(filePath);
5284
- return await backend.readRaw(strippedKey);
5308
+ const resolvedPath = this.resolvePath(filePath);
5309
+ const mimeType = getMimeType(filePath);
5310
+ const isBinary = !isTextMimeType(mimeType);
5311
+ let content;
5312
+ let stat;
5313
+ if (SUPPORTS_NOFOLLOW) {
5314
+ stat = await node_fs_promises.default.stat(resolvedPath);
5315
+ if (!stat.isFile()) return { error: `File '${filePath}' not found` };
5316
+ const fd = await node_fs_promises.default.open(resolvedPath, node_fs.default.constants.O_RDONLY | node_fs.default.constants.O_NOFOLLOW);
5317
+ try {
5318
+ if (isBinary) {
5319
+ const buffer = await fd.readFile();
5320
+ return { data: {
5321
+ content: new Uint8Array(buffer),
5322
+ mimeType,
5323
+ created_at: stat.ctime.toISOString(),
5324
+ modified_at: stat.mtime.toISOString()
5325
+ } };
5326
+ }
5327
+ content = await fd.readFile({ encoding: "utf-8" });
5328
+ } finally {
5329
+ await fd.close();
5330
+ }
5331
+ } else {
5332
+ stat = await node_fs_promises.default.lstat(resolvedPath);
5333
+ if (stat.isSymbolicLink()) return { error: `Symlinks are not allowed: ${filePath}` };
5334
+ if (!stat.isFile()) return { error: `File '${filePath}' not found` };
5335
+ if (isBinary) {
5336
+ const buffer = await node_fs_promises.default.readFile(resolvedPath);
5337
+ return { data: {
5338
+ content: new Uint8Array(buffer),
5339
+ mimeType,
5340
+ created_at: stat.ctime.toISOString(),
5341
+ modified_at: stat.mtime.toISOString()
5342
+ } };
5343
+ }
5344
+ content = await node_fs_promises.default.readFile(resolvedPath, "utf-8");
5345
+ }
5346
+ return { data: {
5347
+ content,
5348
+ mimeType,
5349
+ created_at: stat.ctime.toISOString(),
5350
+ modified_at: stat.mtime.toISOString()
5351
+ } };
5285
5352
  }
5286
5353
  /**
5287
- * Structured search results or error string for invalid input.
5354
+ * Create a new file with content.
5355
+ * Returns WriteResult. External storage sets filesUpdate=null.
5288
5356
  */
5289
- async grep(pattern, path = "/", glob = null) {
5290
- for (const [routePrefix, backend] of this.sortedRoutes) if (path.startsWith(routePrefix.replace(/\/$/, ""))) {
5291
- const searchPath = path.substring(routePrefix.length - 1);
5292
- const raw = await backend.grep(pattern, searchPath || "/", glob);
5293
- if (raw.error) return raw;
5294
- return { matches: (raw.matches || []).map((m) => ({
5295
- ...m,
5296
- path: routePrefix.slice(0, -1) + m.path
5297
- })) };
5357
+ async write(filePath, content) {
5358
+ try {
5359
+ const resolvedPath = this.resolvePath(filePath);
5360
+ const isBinary = !isTextMimeType(getMimeType(filePath));
5361
+ try {
5362
+ if ((await node_fs_promises.default.lstat(resolvedPath)).isSymbolicLink()) return { error: `Cannot write to ${filePath} because it is a symlink. Symlinks are not allowed.` };
5363
+ return { error: `Cannot write to ${filePath} because it already exists. Read and then make an edit, or write to a new path.` };
5364
+ } catch {}
5365
+ await node_fs_promises.default.mkdir(node_path.default.dirname(resolvedPath), { recursive: true });
5366
+ if (SUPPORTS_NOFOLLOW) {
5367
+ const flags = node_fs.default.constants.O_WRONLY | node_fs.default.constants.O_CREAT | node_fs.default.constants.O_TRUNC | node_fs.default.constants.O_NOFOLLOW;
5368
+ const fd = await node_fs_promises.default.open(resolvedPath, flags, 420);
5369
+ try {
5370
+ if (isBinary) {
5371
+ const buffer = Buffer.from(content, "base64");
5372
+ await fd.writeFile(buffer);
5373
+ } else await fd.writeFile(content, "utf-8");
5374
+ } finally {
5375
+ await fd.close();
5376
+ }
5377
+ } else if (isBinary) {
5378
+ const buffer = Buffer.from(content, "base64");
5379
+ await node_fs_promises.default.writeFile(resolvedPath, buffer);
5380
+ } else await node_fs_promises.default.writeFile(resolvedPath, content, "utf-8");
5381
+ return {
5382
+ path: filePath,
5383
+ filesUpdate: null
5384
+ };
5385
+ } catch (e) {
5386
+ return { error: `Error writing file '${filePath}': ${e.message}` };
5298
5387
  }
5299
- const allMatches = [];
5300
- const rawDefault = await this.default.grep(pattern, path, glob);
5301
- if (rawDefault.error) return rawDefault;
5302
- allMatches.push(...rawDefault.matches || []);
5303
- for (const [routePrefix, backend] of Object.entries(this.routes)) {
5304
- const raw = await backend.grep(pattern, "/", glob);
5305
- if (raw.error) return raw;
5306
- const matches = (raw.matches || []).map((m) => ({
5307
- ...m,
5308
- path: routePrefix.slice(0, -1) + m.path
5309
- }));
5310
- allMatches.push(...matches);
5388
+ }
5389
+ /**
5390
+ * Edit a file by replacing string occurrences.
5391
+ * Returns EditResult. External storage sets filesUpdate=null.
5392
+ */
5393
+ async edit(filePath, oldString, newString, replaceAll = false) {
5394
+ try {
5395
+ const resolvedPath = this.resolvePath(filePath);
5396
+ let content;
5397
+ if (SUPPORTS_NOFOLLOW) {
5398
+ if (!(await node_fs_promises.default.stat(resolvedPath)).isFile()) return { error: `Error: File '${filePath}' not found` };
5399
+ const fd = await node_fs_promises.default.open(resolvedPath, node_fs.default.constants.O_RDONLY | node_fs.default.constants.O_NOFOLLOW);
5400
+ try {
5401
+ content = await fd.readFile({ encoding: "utf-8" });
5402
+ } finally {
5403
+ await fd.close();
5404
+ }
5405
+ } else {
5406
+ const stat = await node_fs_promises.default.lstat(resolvedPath);
5407
+ if (stat.isSymbolicLink()) return { error: `Error: Symlinks are not allowed: ${filePath}` };
5408
+ if (!stat.isFile()) return { error: `Error: File '${filePath}' not found` };
5409
+ content = await node_fs_promises.default.readFile(resolvedPath, "utf-8");
5410
+ }
5411
+ const result = performStringReplacement(content, oldString, newString, replaceAll);
5412
+ if (typeof result === "string") return { error: result };
5413
+ const [newContent, occurrences] = result;
5414
+ if (SUPPORTS_NOFOLLOW) {
5415
+ const flags = node_fs.default.constants.O_WRONLY | node_fs.default.constants.O_TRUNC | node_fs.default.constants.O_NOFOLLOW;
5416
+ const fd = await node_fs_promises.default.open(resolvedPath, flags);
5417
+ try {
5418
+ await fd.writeFile(newContent, "utf-8");
5419
+ } finally {
5420
+ await fd.close();
5421
+ }
5422
+ } else await node_fs_promises.default.writeFile(resolvedPath, newContent, "utf-8");
5423
+ return {
5424
+ path: filePath,
5425
+ filesUpdate: null,
5426
+ occurrences
5427
+ };
5428
+ } catch (e) {
5429
+ return { error: `Error editing file '${filePath}': ${e.message}` };
5311
5430
  }
5312
- return { matches: allMatches };
5313
5431
  }
5314
5432
  /**
5315
- * Structured glob matching returning FileInfo objects.
5433
+ * Search for a literal text pattern in files.
5434
+ *
5435
+ * Uses ripgrep if available, falling back to substring search.
5436
+ *
5437
+ * @param pattern - Literal string to search for (NOT regex).
5438
+ * @param dirPath - Directory or file path to search in. Defaults to current directory.
5439
+ * @param glob - Optional glob pattern to filter which files to search.
5440
+ * @returns List of GrepMatch dicts containing path, line number, and matched text.
5316
5441
  */
5317
- async glob(pattern, path = "/") {
5318
- const results = [];
5319
- for (const [routePrefix, backend] of this.sortedRoutes) if (path.startsWith(routePrefix.replace(/\/$/, ""))) {
5320
- const searchPath = path.substring(routePrefix.length - 1);
5321
- const result = await backend.glob(pattern, searchPath || "/");
5322
- if (result.error) return result;
5323
- return { files: (result.files || []).map((fi) => ({
5324
- ...fi,
5325
- path: routePrefix.slice(0, -1) + fi.path
5326
- })) };
5442
+ async grep(pattern, dirPath = "/", glob = null) {
5443
+ let baseFull;
5444
+ try {
5445
+ baseFull = this.resolvePath(dirPath || ".");
5446
+ } catch {
5447
+ return { matches: [] };
5327
5448
  }
5328
- const defaultResult = await this.default.glob(pattern, path);
5329
- if (defaultResult.error) return defaultResult;
5330
- results.push(...defaultResult.files || []);
5331
- for (const [routePrefix, backend] of Object.entries(this.routes)) {
5332
- const result = await backend.glob(pattern, "/");
5333
- if (result.error) continue;
5334
- const files = (result.files || []).map((fi) => ({
5335
- ...fi,
5336
- path: routePrefix.slice(0, -1) + fi.path
5337
- }));
5338
- results.push(...files);
5449
+ try {
5450
+ await node_fs_promises.default.stat(baseFull);
5451
+ } catch {
5452
+ return { matches: [] };
5339
5453
  }
5340
- results.sort((a, b) => a.path.localeCompare(b.path));
5341
- return { files: results };
5454
+ let results = await this.ripgrepSearch(pattern, baseFull, glob);
5455
+ if (results === null) results = await this.literalSearch(pattern, baseFull, glob);
5456
+ const matches = [];
5457
+ for (const [fpath, items] of Object.entries(results)) for (const [lineNum, lineText] of items) matches.push({
5458
+ path: fpath,
5459
+ line: lineNum,
5460
+ text: lineText
5461
+ });
5462
+ return { matches };
5463
+ }
5464
+ /**
5465
+ * Search using ripgrep with fixed-string (literal) mode.
5466
+ *
5467
+ * @param pattern - Literal string to search for (unescaped).
5468
+ * @param baseFull - Resolved base path to search in.
5469
+ * @param includeGlob - Optional glob pattern to filter files.
5470
+ * @returns Dict mapping file paths to list of (line_number, line_text) tuples.
5471
+ * Returns null if ripgrep is unavailable or times out.
5472
+ */
5473
+ async ripgrepSearch(pattern, baseFull, includeGlob) {
5474
+ return new Promise((resolve) => {
5475
+ const args = ["--json", "-F"];
5476
+ if (includeGlob) args.push("--glob", includeGlob);
5477
+ args.push("--", pattern, baseFull);
5478
+ const proc = (0, node_child_process.spawn)("rg", args, { timeout: 3e4 });
5479
+ const results = {};
5480
+ let output = "";
5481
+ proc.stdout.on("data", (data) => {
5482
+ output += data.toString();
5483
+ });
5484
+ proc.on("close", (code) => {
5485
+ if (code !== 0 && code !== 1) {
5486
+ resolve(null);
5487
+ return;
5488
+ }
5489
+ for (const line of output.split("\n")) {
5490
+ if (!line.trim()) continue;
5491
+ try {
5492
+ const data = JSON.parse(line);
5493
+ if (data.type !== "match") continue;
5494
+ const pdata = data.data || {};
5495
+ const ftext = pdata.path?.text;
5496
+ if (!ftext) continue;
5497
+ let virtPath;
5498
+ if (this.virtualMode) try {
5499
+ const resolved = node_path.default.resolve(ftext);
5500
+ const relative = node_path.default.relative(this.cwd, resolved);
5501
+ if (relative.startsWith("..")) continue;
5502
+ virtPath = "/" + relative.split(node_path.default.sep).join("/");
5503
+ } catch {
5504
+ continue;
5505
+ }
5506
+ else virtPath = ftext;
5507
+ const ln = pdata.line_number;
5508
+ const lt = pdata.lines?.text?.replace(/\n$/, "") || "";
5509
+ if (ln === void 0) continue;
5510
+ if (!results[virtPath]) results[virtPath] = [];
5511
+ results[virtPath].push([ln, lt]);
5512
+ } catch {
5513
+ continue;
5514
+ }
5515
+ }
5516
+ resolve(results);
5517
+ });
5518
+ proc.on("error", () => {
5519
+ resolve(null);
5520
+ });
5521
+ });
5342
5522
  }
5343
5523
  /**
5344
- * Create a new file, routing to appropriate backend.
5524
+ * Fallback search using literal substring matching when ripgrep is unavailable.
5345
5525
  *
5346
- * @param filePath - Absolute file path
5347
- * @param content - File content as string
5348
- * @returns WriteResult with path or error
5349
- */
5350
- async write(filePath, content) {
5351
- const [backend, strippedKey] = this.getBackendAndKey(filePath);
5352
- return await backend.write(strippedKey, content);
5353
- }
5354
- /**
5355
- * Edit a file, routing to appropriate backend.
5526
+ * Recursively searches files, respecting maxFileSizeBytes limit.
5356
5527
  *
5357
- * @param filePath - Absolute file path
5358
- * @param oldString - String to find and replace
5359
- * @param newString - Replacement string
5360
- * @param replaceAll - If true, replace all occurrences
5361
- * @returns EditResult with path, occurrences, or error
5528
+ * @param pattern - Literal string to search for.
5529
+ * @param baseFull - Resolved base path to search in.
5530
+ * @param includeGlob - Optional glob pattern to filter files by name.
5531
+ * @returns Dict mapping file paths to list of (line_number, line_text) tuples.
5362
5532
  */
5363
- async edit(filePath, oldString, newString, replaceAll = false) {
5364
- const [backend, strippedKey] = this.getBackendAndKey(filePath);
5365
- return await backend.edit(strippedKey, oldString, newString, replaceAll);
5533
+ async literalSearch(pattern, baseFull, includeGlob) {
5534
+ const results = {};
5535
+ const files = await (0, fast_glob.default)("**/*", {
5536
+ cwd: (await node_fs_promises.default.stat(baseFull)).isDirectory() ? baseFull : node_path.default.dirname(baseFull),
5537
+ absolute: true,
5538
+ onlyFiles: true,
5539
+ dot: true
5540
+ });
5541
+ for (const fp of files) try {
5542
+ if (!isTextMimeType(getMimeType(fp))) continue;
5543
+ if (includeGlob && !micromatch.default.isMatch(node_path.default.basename(fp), includeGlob)) continue;
5544
+ if ((await node_fs_promises.default.stat(fp)).size > this.maxFileSizeBytes) continue;
5545
+ const lines = (await node_fs_promises.default.readFile(fp, "utf-8")).split("\n");
5546
+ for (let i = 0; i < lines.length; i++) {
5547
+ const line = lines[i];
5548
+ if (line.includes(pattern)) {
5549
+ let virtPath;
5550
+ if (this.virtualMode) try {
5551
+ const relative = node_path.default.relative(this.cwd, fp);
5552
+ if (relative.startsWith("..")) continue;
5553
+ virtPath = "/" + relative.split(node_path.default.sep).join("/");
5554
+ } catch {
5555
+ continue;
5556
+ }
5557
+ else virtPath = fp;
5558
+ if (!results[virtPath]) results[virtPath] = [];
5559
+ results[virtPath].push([i + 1, line]);
5560
+ }
5561
+ }
5562
+ } catch {
5563
+ continue;
5564
+ }
5565
+ return results;
5366
5566
  }
5367
5567
  /**
5368
- * Execute a command via the default backend.
5369
- * Execution is not path-specific, so it always delegates to the default backend.
5370
- *
5371
- * @param command - Full shell command string to execute
5372
- * @returns ExecuteResponse with combined output, exit code, and truncation flag
5373
- * @throws Error if the default backend doesn't support command execution
5568
+ * Structured glob matching returning FileInfo objects.
5374
5569
  */
5375
- execute(command) {
5376
- if (!isSandboxBackend(this.default)) throw new Error("Default backend doesn't support command execution (SandboxBackendProtocol). To enable execution, provide a default backend that implements SandboxBackendProtocol.");
5377
- return Promise.resolve(this.default.execute(command));
5570
+ async glob(pattern, searchPath = "/") {
5571
+ if (pattern.startsWith("/")) pattern = pattern.substring(1);
5572
+ const resolvedSearchPath = searchPath === "/" ? this.cwd : this.resolvePath(searchPath);
5573
+ try {
5574
+ if (!(await node_fs_promises.default.stat(resolvedSearchPath)).isDirectory()) return { files: [] };
5575
+ } catch {
5576
+ return { files: [] };
5577
+ }
5578
+ const results = [];
5579
+ try {
5580
+ const matches = await (0, fast_glob.default)(pattern, {
5581
+ cwd: resolvedSearchPath,
5582
+ absolute: true,
5583
+ onlyFiles: true,
5584
+ dot: true
5585
+ });
5586
+ for (const matchedPath of matches) try {
5587
+ const stat = await node_fs_promises.default.stat(matchedPath);
5588
+ if (!stat.isFile()) continue;
5589
+ const normalizedPath = matchedPath.split("/").join(node_path.default.sep);
5590
+ if (!this.virtualMode) results.push({
5591
+ path: normalizedPath,
5592
+ is_dir: false,
5593
+ size: stat.size,
5594
+ modified_at: stat.mtime.toISOString()
5595
+ });
5596
+ else {
5597
+ const cwdStr = this.cwd.endsWith(node_path.default.sep) ? this.cwd : this.cwd + node_path.default.sep;
5598
+ let relativePath;
5599
+ if (normalizedPath.startsWith(cwdStr)) relativePath = normalizedPath.substring(cwdStr.length);
5600
+ else if (normalizedPath.startsWith(this.cwd)) relativePath = normalizedPath.substring(this.cwd.length).replace(/^[/\\]/, "");
5601
+ else relativePath = normalizedPath;
5602
+ relativePath = relativePath.split(node_path.default.sep).join("/");
5603
+ const virt = "/" + relativePath;
5604
+ results.push({
5605
+ path: virt,
5606
+ is_dir: false,
5607
+ size: stat.size,
5608
+ modified_at: stat.mtime.toISOString()
5609
+ });
5610
+ }
5611
+ } catch {
5612
+ continue;
5613
+ }
5614
+ } catch {}
5615
+ results.sort((a, b) => a.path.localeCompare(b.path));
5616
+ return { files: results };
5378
5617
  }
5379
5618
  /**
5380
- * Upload multiple files, batching by backend for efficiency.
5619
+ * Upload multiple files to the filesystem.
5381
5620
  *
5382
5621
  * @param files - List of [path, content] tuples to upload
5383
5622
  * @returns List of FileUploadResponse objects, one per input file
5384
5623
  */
5385
5624
  async uploadFiles(files) {
5386
- const results = Array.from({ length: files.length }, () => null);
5387
- const batchesByBackend = /* @__PURE__ */ new Map();
5388
- for (let idx = 0; idx < files.length; idx++) {
5389
- const [path, content] = files[idx];
5390
- const [backend, strippedPath] = this.getBackendAndKey(path);
5391
- if (!batchesByBackend.has(backend)) batchesByBackend.set(backend, []);
5392
- batchesByBackend.get(backend).push({
5393
- idx,
5394
- path: strippedPath,
5395
- content
5625
+ const responses = [];
5626
+ for (const [filePath, content] of files) try {
5627
+ const resolvedPath = this.resolvePath(filePath);
5628
+ await node_fs_promises.default.mkdir(node_path.default.dirname(resolvedPath), { recursive: true });
5629
+ await node_fs_promises.default.writeFile(resolvedPath, content);
5630
+ responses.push({
5631
+ path: filePath,
5632
+ error: null
5633
+ });
5634
+ } catch (e) {
5635
+ if (e.code === "ENOENT") responses.push({
5636
+ path: filePath,
5637
+ error: "file_not_found"
5638
+ });
5639
+ else if (e.code === "EACCES") responses.push({
5640
+ path: filePath,
5641
+ error: "permission_denied"
5642
+ });
5643
+ else if (e.code === "EISDIR") responses.push({
5644
+ path: filePath,
5645
+ error: "is_directory"
5646
+ });
5647
+ else responses.push({
5648
+ path: filePath,
5649
+ error: "invalid_path"
5396
5650
  });
5397
5651
  }
5398
- for (const [backend, batch] of batchesByBackend) {
5399
- if (!backend.uploadFiles) throw new Error("Backend does not support uploadFiles");
5400
- const batchFiles = batch.map((b) => [b.path, b.content]);
5401
- const batchResponses = await backend.uploadFiles(batchFiles);
5402
- for (let i = 0; i < batch.length; i++) {
5403
- const originalIdx = batch[i].idx;
5404
- results[originalIdx] = {
5405
- path: files[originalIdx][0],
5406
- error: batchResponses[i]?.error ?? null
5407
- };
5408
- }
5409
- }
5410
- return results;
5652
+ return responses;
5411
5653
  }
5412
5654
  /**
5413
- * Download multiple files, batching by backend for efficiency.
5655
+ * Download multiple files from the filesystem.
5414
5656
  *
5415
5657
  * @param paths - List of file paths to download
5416
5658
  * @returns List of FileDownloadResponse objects, one per input path
5417
5659
  */
5418
5660
  async downloadFiles(paths) {
5419
- const results = Array.from({ length: paths.length }, () => null);
5420
- const batchesByBackend = /* @__PURE__ */ new Map();
5421
- for (let idx = 0; idx < paths.length; idx++) {
5422
- const path = paths[idx];
5423
- const [backend, strippedPath] = this.getBackendAndKey(path);
5424
- if (!batchesByBackend.has(backend)) batchesByBackend.set(backend, []);
5425
- batchesByBackend.get(backend).push({
5426
- idx,
5427
- path: strippedPath
5661
+ const responses = [];
5662
+ for (const filePath of paths) try {
5663
+ const resolvedPath = this.resolvePath(filePath);
5664
+ const content = await node_fs_promises.default.readFile(resolvedPath);
5665
+ responses.push({
5666
+ path: filePath,
5667
+ content,
5668
+ error: null
5669
+ });
5670
+ } catch (e) {
5671
+ if (e.code === "ENOENT") responses.push({
5672
+ path: filePath,
5673
+ content: null,
5674
+ error: "file_not_found"
5675
+ });
5676
+ else if (e.code === "EACCES") responses.push({
5677
+ path: filePath,
5678
+ content: null,
5679
+ error: "permission_denied"
5680
+ });
5681
+ else if (e.code === "EISDIR") responses.push({
5682
+ path: filePath,
5683
+ content: null,
5684
+ error: "is_directory"
5685
+ });
5686
+ else responses.push({
5687
+ path: filePath,
5688
+ content: null,
5689
+ error: "invalid_path"
5428
5690
  });
5429
5691
  }
5430
- for (const [backend, batch] of batchesByBackend) {
5431
- if (!backend.downloadFiles) throw new Error("Backend does not support downloadFiles");
5432
- const batchPaths = batch.map((b) => b.path);
5433
- const batchResponses = await backend.downloadFiles(batchPaths);
5434
- for (let i = 0; i < batch.length; i++) {
5435
- const originalIdx = batch[i].idx;
5436
- results[originalIdx] = {
5437
- path: paths[originalIdx],
5438
- content: batchResponses[i]?.content ?? null,
5439
- error: batchResponses[i]?.error ?? null
5440
- };
5441
- }
5442
- }
5443
- return results;
5692
+ return responses;
5444
5693
  }
5445
5694
  };
5446
5695
  //#endregion
@@ -5853,7 +6102,7 @@ const STAT_C_SCRIPT = "for f; do if [ -d \"$f\" ]; then t=d; elif [ -L \"$f\" ];
5853
6102
  */
5854
6103
  function buildLsCommand(dirPath) {
5855
6104
  const quotedPath = shellQuote(dirPath);
5856
- const findBase = `find ${quotedPath} -maxdepth 1 -not -path ${quotedPath}`;
6105
+ const findBase = `find -L ${quotedPath} -maxdepth 1 -not -path ${quotedPath}`;
5857
6106
  return `if find /dev/null -maxdepth 0 -printf '' 2>/dev/null; then ${findBase} -printf '%s\\t%T@\\t%y\\t%p\\n' 2>/dev/null; elif stat -c %s /dev/null >/dev/null 2>&1; then ${findBase} -exec sh -c '${STAT_C_SCRIPT}' _ {} +; else ${findBase} -exec stat -f '%z\t%m\t%Sp\t%N' {} + 2>/dev/null; fi || true`;
5858
6107
  }
5859
6108
  /**
@@ -5864,7 +6113,7 @@ function buildLsCommand(dirPath) {
5864
6113
  */
5865
6114
  function buildFindCommand(searchPath) {
5866
6115
  const quotedPath = shellQuote(searchPath);
5867
- const findBase = `find ${quotedPath} -not -path ${quotedPath}`;
6116
+ const findBase = `find -L ${quotedPath} -not -path ${quotedPath}`;
5868
6117
  return `if find /dev/null -maxdepth 0 -printf '' 2>/dev/null; then ${findBase} -printf '%s\\t%T@\\t%y\\t%p\\n' 2>/dev/null; elif stat -c %s /dev/null >/dev/null 2>&1; then ${findBase} -exec sh -c '${STAT_C_SCRIPT}' _ {} +; else ${findBase} -exec stat -f '%z\t%m\t%Sp\t%N' {} + 2>/dev/null; fi || true`;
5869
6118
  }
5870
6119
  /**
@@ -5897,7 +6146,7 @@ function buildReadCommand(filePath, offset, limit) {
5897
6146
  function buildGrepCommand(pattern, searchPath, globPattern) {
5898
6147
  const patternEscaped = shellQuote(pattern);
5899
6148
  const searchPathQuoted = shellQuote(searchPath);
5900
- if (globPattern) return `find ${searchPathQuoted} -type f -name ${shellQuote(globPattern)} -exec grep -HnF -e ${patternEscaped} {} + 2>/dev/null || true`;
6149
+ if (globPattern) return `find -L ${searchPathQuoted} -type f -name ${shellQuote(globPattern)} -exec grep -HnF -e ${patternEscaped} {} + 2>/dev/null || true`;
5901
6150
  return `grep -rHnF -e ${patternEscaped} ${searchPathQuoted} 2>/dev/null || true`;
5902
6151
  }
5903
6152
  /**
@@ -6192,6 +6441,8 @@ var BaseSandbox = class {
6192
6441
  * await sandbox.close();
6193
6442
  * }
6194
6443
  * ```
6444
+ *
6445
+ * @module
6195
6446
  */
6196
6447
  /**
6197
6448
  * LangSmith Sandbox backend for deepagents.
@@ -6201,6 +6452,8 @@ var BaseSandbox = class {
6201
6452
  *
6202
6453
  * Use the static `LangSmithSandbox.create()` factory for the simplest setup,
6203
6454
  * or construct directly with an existing `Sandbox` instance.
6455
+ *
6456
+ * @experimental This feature is experimental, and breaking changes are expected.
6204
6457
  */
6205
6458
  var LangSmithSandbox = class LangSmithSandbox extends BaseSandbox {
6206
6459
  #sandbox;
@@ -6493,7 +6746,7 @@ function isAnthropicModel(model) {
6493
6746
  * ```
6494
6747
  */
6495
6748
  function createDeepAgent(params = {}) {
6496
- const { model = new _langchain_anthropic.ChatAnthropic("claude-sonnet-4-6"), tools = [], systemPrompt, middleware: customMiddleware = [], subagents = [], responseFormat, contextSchema, checkpointer, store, backend = (config) => new StateBackend(config), interruptOn, name, memory, skills } = params;
6749
+ const { model = "anthropic:claude-sonnet-4-6", tools = [], systemPrompt, middleware: customMiddleware = [], subagents = [], responseFormat, contextSchema, checkpointer, store, backend = (config) => new StateBackend(config), interruptOn, name, memory, skills, permissions = [] } = params;
6497
6750
  const collidingTools = tools.map((t) => t.name).filter((n) => typeof n === "string" && BUILTIN_TOOL_NAMES.has(n));
6498
6751
  if (collidingTools.length > 0) throw new ConfigurationError(`Tool name(s) [${collidingTools.join(", ")}] conflict with built-in tools. Rename your custom tools to avoid this.`, "TOOL_NAME_COLLISION");
6499
6752
  const anthropicModel = isAnthropicModel(model);
@@ -6509,13 +6762,14 @@ function createDeepAgent(params = {}) {
6509
6762
  * If a custom subagent needs skills, it must specify its own `skills` array.
6510
6763
  */
6511
6764
  const normalizeSubagentSpec = (input) => {
6765
+ const effectivePermissions = input.permissions ?? permissions;
6512
6766
  const subagentMiddleware = [
6513
6767
  (0, langchain.todoListMiddleware)(),
6514
- createFilesystemMiddleware({ backend }),
6515
- createSummarizationMiddleware({
6768
+ createFilesystemMiddleware({
6516
6769
  backend,
6517
- model
6770
+ permissions: effectivePermissions
6518
6771
  }),
6772
+ createSummarizationMiddleware({ backend }),
6519
6773
  createPatchToolCallsMiddleware(),
6520
6774
  ...input.skills != null && input.skills.length > 0 ? [createSkillsMiddleware({
6521
6775
  backend,
@@ -6548,7 +6802,10 @@ function createDeepAgent(params = {}) {
6548
6802
  })] : [];
6549
6803
  const [todoMiddleware, fsMiddleware, subagentMiddleware, summarizationMiddleware, patchToolCallsMiddleware] = [
6550
6804
  (0, langchain.todoListMiddleware)(),
6551
- createFilesystemMiddleware({ backend }),
6805
+ createFilesystemMiddleware({
6806
+ backend,
6807
+ permissions
6808
+ }),
6552
6809
  createSubAgentMiddleware({
6553
6810
  defaultModel: model,
6554
6811
  defaultTools: tools,
@@ -6556,10 +6813,7 @@ function createDeepAgent(params = {}) {
6556
6813
  subagents: inlineSubagents,
6557
6814
  generalPurposeAgent: false
6558
6815
  }),
6559
- createSummarizationMiddleware({
6560
- model,
6561
- backend
6562
- }),
6816
+ createSummarizationMiddleware({ backend }),
6563
6817
  createPatchToolCallsMiddleware()
6564
6818
  ];
6565
6819
  const middleware = [