s3-files-ai-sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +195 -0
- package/dist/chunk-5U3BLEEU.js +181 -0
- package/dist/chunk-5U3BLEEU.js.map +1 -0
- package/dist/chunk-CE7EMM3L.js +572 -0
- package/dist/chunk-CE7EMM3L.js.map +1 -0
- package/dist/chunk-IMGFCLOX.js +427 -0
- package/dist/chunk-IMGFCLOX.js.map +1 -0
- package/dist/index.node.d.ts +12 -0
- package/dist/index.node.js +53 -0
- package/dist/index.node.js.map +1 -0
- package/dist/index.web.d.ts +12 -0
- package/dist/index.web.js +28 -0
- package/dist/index.web.js.map +1 -0
- package/dist/proxy.d.ts +9 -0
- package/dist/proxy.js +120 -0
- package/dist/proxy.js.map +1 -0
- package/dist/tool-schema-C7yc_tDK.d.ts +106 -0
- package/dist/types-Cr6q34CI.d.ts +114 -0
- package/package.json +77 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Tomas Holtz
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
# s3-files-ai-sdk
|
|
2
|
+
|
|
3
|
+
`s3-files-ai-sdk` gives AI agents using the Vercel AI SDK a real, persistent filesystem backed by [Amazon S3 Files](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-files.html).
|
|
4
|
+
|
|
5
|
+
It is designed for agent workloads that want filesystem semantics without paying for EFS or relying on local disk. Each agent gets its own isolated root at `/agents/${agentId}/`, and the tool works in either direct mount mode or through a lightweight HTTP proxy for edge-style runtimes.
|
|
6
|
+
|
|
7
|
+
## Why this package exists
|
|
8
|
+
|
|
9
|
+
Amazon S3 Files turns an S3 bucket into a POSIX filesystem that you mount with:
|
|
10
|
+
|
|
11
|
+
```bash
|
|
12
|
+
sudo mount -t s3files <file-system-id>:/ /mnt/agent-fs
|
|
13
|
+
```
|
|
14
|
+
|
|
15
|
+
That is a great fit for AI agents, but most agent frameworks want a model-facing tool, not raw `fs.promises`. This package wraps that mounted filesystem in a compact AI SDK tool that supports safe reads, edits, writes, listings, and deletes while staying scoped to a single agent.
|
|
16
|
+
|
|
17
|
+
## Features
|
|
18
|
+
|
|
19
|
+
- Works with `generateText`, `streamText`, and `ToolLoopAgent`
|
|
20
|
+
- Two runtime modes:
|
|
21
|
+
- `direct` for Node.js runtimes with a mounted S3 Files filesystem
|
|
22
|
+
- `remote` for edge or proxy-based runtimes that call a mounted backend over HTTP
|
|
23
|
+
- Per-agent isolation under `/agents/${agentId}/`
|
|
24
|
+
- Compact tool responses to keep token usage down
|
|
25
|
+
- Optional advisory locking for mutating operations
|
|
26
|
+
- Strict TypeScript types and exported public config/input/output types
|
|
27
|
+
|
|
28
|
+
## Install
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
npm install s3-files-ai-sdk ai zod
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
Peer/runtime requirements:
|
|
35
|
+
|
|
36
|
+
- Node.js `>=20`
|
|
37
|
+
- `ai@^6`
|
|
38
|
+
- A mounted Amazon S3 Files filesystem for direct mode or for the proxy backend
|
|
39
|
+
|
|
40
|
+
## Quickstart: Direct Mode
|
|
41
|
+
|
|
42
|
+
The direct mode is for environments that already have S3 Files mounted, such as Lambda, EC2, EKS, or ECS.
|
|
43
|
+
|
|
44
|
+
```ts
|
|
45
|
+
import { generateText, type LanguageModel } from "ai";
|
|
46
|
+
import { createS3FilesTool } from "s3-files-ai-sdk";
|
|
47
|
+
|
|
48
|
+
export async function summarizeWorkspace(model: LanguageModel) {
|
|
49
|
+
const agentFs = createS3FilesTool({
|
|
50
|
+
mode: "direct",
|
|
51
|
+
mountPath: process.env.S3_FILES_MOUNT_PATH ?? "/mnt/agent-fs",
|
|
52
|
+
agentId: process.env.AGENT_ID ?? "agent-direct-demo",
|
|
53
|
+
lockTimeoutMs: 10_000,
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
const result = await generateText({
|
|
57
|
+
model,
|
|
58
|
+
tools: agentFs.tools,
|
|
59
|
+
prompt:
|
|
60
|
+
"List /notes, read the most relevant file, and summarize the current project state.",
|
|
61
|
+
});
|
|
62
|
+
|
|
63
|
+
return result.text;
|
|
64
|
+
}
|
|
65
|
+
```
|
|
66
|
+
|
|
67
|
+
See the runnable example in [`examples/generate-text-direct.ts`](./examples/generate-text-direct.ts).
|
|
68
|
+
|
|
69
|
+
## Quickstart: Remote Mode
|
|
70
|
+
|
|
71
|
+
The remote mode is for environments that cannot mount S3 Files directly, such as Vercel or other edge-style runtimes.
|
|
72
|
+
|
|
73
|
+
```ts
|
|
74
|
+
import { ToolLoopAgent, type LanguageModel } from "ai";
|
|
75
|
+
import { createS3FilesTool } from "s3-files-ai-sdk";
|
|
76
|
+
|
|
77
|
+
export function createRemoteWorkspaceAgent(model: LanguageModel) {
|
|
78
|
+
const agentFs = createS3FilesTool({
|
|
79
|
+
mode: "remote",
|
|
80
|
+
remoteEndpoint: process.env.S3_FILES_ENDPOINT!,
|
|
81
|
+
bearerToken: process.env.S3_FILES_BEARER_TOKEN!,
|
|
82
|
+
agentId: process.env.AGENT_ID ?? "agent-remote-demo",
|
|
83
|
+
lockTimeoutMs: 10_000,
|
|
84
|
+
});
|
|
85
|
+
|
|
86
|
+
return new ToolLoopAgent({
|
|
87
|
+
model,
|
|
88
|
+
tools: agentFs.tools,
|
|
89
|
+
instructions:
|
|
90
|
+
"Use the filesystem tool to inspect, update, and persist project files.",
|
|
91
|
+
});
|
|
92
|
+
}
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
See [`examples/tool-loop-agent-remote.ts`](./examples/tool-loop-agent-remote.ts).
|
|
96
|
+
|
|
97
|
+
## Quickstart: Proxy Endpoint
|
|
98
|
+
|
|
99
|
+
The proxy runs in a Node.js environment that can access the mount.
|
|
100
|
+
|
|
101
|
+
```ts
|
|
102
|
+
import { createS3FilesProxy } from "s3-files-ai-sdk/proxy";
|
|
103
|
+
|
|
104
|
+
export const POST = createS3FilesProxy({
|
|
105
|
+
mountPath: process.env.S3_FILES_MOUNT_PATH ?? "/mnt/agent-fs",
|
|
106
|
+
bearerToken: process.env.S3_FILES_BEARER_TOKEN!,
|
|
107
|
+
});
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
See [`examples/next-route-proxy.ts`](./examples/next-route-proxy.ts).
|
|
111
|
+
|
|
112
|
+
## Supported Commands
|
|
113
|
+
|
|
114
|
+
The tool exposes a single AI SDK tool with these commands:
|
|
115
|
+
|
|
116
|
+
| Command | Purpose | Notes |
|
|
117
|
+
| --- | --- | --- |
|
|
118
|
+
| `list` | List directory contents | Supports `depth` and `limit` |
|
|
119
|
+
| `view` | Read file contents | Supports `startLine` and `endLine`; output is truncated by configured byte/line limits |
|
|
120
|
+
| `write` | Create, overwrite, or append to a file | Supports `append` and `createParents` |
|
|
121
|
+
| `mkdir` | Create a directory | Supports `recursive` |
|
|
122
|
+
| `delete` | Delete a file or directory | `recursive` is required for non-empty directories |
|
|
123
|
+
| `stat` | Inspect metadata | Returns type, size, mtime, and mode when available |
|
|
124
|
+
| `str_replace` | Perform safe text replacement | Errors on 0 or multiple matches unless `replaceAll: true` |
|
|
125
|
+
|
|
126
|
+
The tool returns compact, command-specific output objects and an even smaller model-facing string output for the LLM.
|
|
127
|
+
|
|
128
|
+
## Security and Isolation
|
|
129
|
+
|
|
130
|
+
- Every tool instance is scoped to `/agents/${agentId}/`
|
|
131
|
+
- Paths are normalized and rejected if they attempt traversal, target reserved S3 Files recovery paths, or access the internal lock directory
|
|
132
|
+
- Remote mode uses bearer-token authentication and keeps the protocol internal to the package
|
|
133
|
+
|
|
134
|
+
## Locking
|
|
135
|
+
|
|
136
|
+
Set `lockTimeoutMs` to enable library-level advisory locking for mutating commands.
|
|
137
|
+
|
|
138
|
+
- Locks are cooperative, not OS-level mandatory locks
|
|
139
|
+
- Lock directories live under `/.s3-files-locks/` inside the scoped agent root
|
|
140
|
+
- The lock manager cleans up stale leases and times out rather than blocking forever
|
|
141
|
+
|
|
142
|
+
Amazon S3 Files itself supports POSIX locking, but this package uses portable sidecar locks so the behavior is predictable across both direct and proxied flows.
|
|
143
|
+
|
|
144
|
+
## S3 Files Behavior to Know About
|
|
145
|
+
|
|
146
|
+
This library intentionally documents S3 Files behavior instead of hiding it:
|
|
147
|
+
|
|
148
|
+
- First access to a directory can be slower while metadata is warmed
|
|
149
|
+
- Sync between the mounted filesystem and S3 is asynchronous
|
|
150
|
+
- S3 remains the source of truth if another writer updates the same keys outside the mount
|
|
151
|
+
|
|
152
|
+
For the official details, see the AWS docs:
|
|
153
|
+
|
|
154
|
+
- [Working with Amazon S3 Files](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-files.html)
|
|
155
|
+
- [Getting started with Amazon S3 Files](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-files-getting-started.html)
|
|
156
|
+
- [Launch blog post](https://aws.amazon.com/blogs/aws/launching-s3-files-making-s3-buckets-accessible-as-file-systems/)
|
|
157
|
+
|
|
158
|
+
## Public API
|
|
159
|
+
|
|
160
|
+
The package exports:
|
|
161
|
+
|
|
162
|
+
- `createS3FilesTool`
|
|
163
|
+
- `createS3FilesProxy`
|
|
164
|
+
- `S3FilesToolConfig`
|
|
165
|
+
- `CreateS3FilesToolResult`
|
|
166
|
+
- `S3FilesProxyConfig`
|
|
167
|
+
- `RetryPolicy`
|
|
168
|
+
- `S3FilesToolInput`
|
|
169
|
+
- `S3FilesToolOutput`
|
|
170
|
+
|
|
171
|
+
Type definitions live in [`src/types.ts`](./src/types.ts).
|
|
172
|
+
|
|
173
|
+
## Examples and Use Cases
|
|
174
|
+
|
|
175
|
+
- [`examples/generate-text-direct.ts`](./examples/generate-text-direct.ts)
|
|
176
|
+
- [`examples/tool-loop-agent-remote.ts`](./examples/tool-loop-agent-remote.ts)
|
|
177
|
+
- [`examples/next-route-proxy.ts`](./examples/next-route-proxy.ts)
|
|
178
|
+
- [`examples/persistent-workspace-cycle.ts`](./examples/persistent-workspace-cycle.ts)
|
|
179
|
+
- [`docs/use-cases.md`](./docs/use-cases.md)
|
|
180
|
+
|
|
181
|
+
## Development
|
|
182
|
+
|
|
183
|
+
```bash
|
|
184
|
+
npm install
|
|
185
|
+
npm run ci
|
|
186
|
+
```
|
|
187
|
+
|
|
188
|
+
## Releasing
|
|
189
|
+
|
|
190
|
+
This repo uses Changesets and GitHub Actions for releases.
|
|
191
|
+
|
|
192
|
+
- Normal releases can use npm trusted publishing with GitHub OIDC
|
|
193
|
+
- The first-ever publish of a brand-new package still needs `NPM_TOKEN` so npm can create the package before a trusted publisher is attached
|
|
194
|
+
|
|
195
|
+
For contribution and release details, see [`CONTRIBUTING.md`](./CONTRIBUTING.md).
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
import {
|
|
2
|
+
PROTOCOL_VERSION,
|
|
3
|
+
S3FilesError,
|
|
4
|
+
fromSerializedError,
|
|
5
|
+
proxyResponseSchema,
|
|
6
|
+
s3FilesToolInputSchema,
|
|
7
|
+
s3FilesToolOutputSchema,
|
|
8
|
+
withRetry
|
|
9
|
+
} from "./chunk-IMGFCLOX.js";
|
|
10
|
+
|
|
11
|
+
// src/adapters/remote-backend.ts
|
|
12
|
+
// Builds the command executor used in "remote" mode: each tool invocation is
// POSTed to the configured proxy endpoint, the response is validated by
// parseResponse, and error envelopes are rehydrated into S3FilesError
// instances via fromSerializedError. The whole request/parse cycle is wrapped
// in withRetry using config.retryPolicy.
function createRemoteCommandExecutor(config) {
  // Allow an injected fetch (tests, custom runtimes); fall back to the global.
  const fetchImpl = config.fetch ?? globalThis.fetch;
  if (!fetchImpl) {
    // Fail fast at construction time rather than on the first tool call.
    throw new S3FilesError({
      code: "NOT_SUPPORTED",
      message: "Remote mode requires a global fetch implementation.",
      statusCode: 500
    });
  }
  return async (input) => withRetry({
    policy: config.retryPolicy,
    run: async () => {
      const response = await fetchImpl(config.remoteEndpoint, {
        method: "POST",
        headers: {
          "content-type": "application/json",
          authorization: `Bearer ${config.bearerToken}`,
          // Caller-supplied headers are spread last so they can override the
          // defaults above (including authorization).
          ...config.headers
        },
        // Wire format shared with the proxy; versioned via PROTOCOL_VERSION so
        // mismatched client/proxy builds can be detected server-side.
        body: JSON.stringify({
          version: PROTOCOL_VERSION,
          agentId: config.agentId,
          command: input,
          options: {
            lockTimeoutMs: config.lockTimeoutMs,
            maxReadBytes: config.maxReadBytes,
            maxReadLines: config.maxReadLines,
            maxListEntries: config.maxListEntries
          }
        })
      });
      const payload = await parseResponse(response);
      if (!payload.ok) {
        const errorOptions = {
          code: payload.error.code,
          message: payload.error.message,
          statusCode: payload.error.statusCode
        };
        // Optional fields are copied only when present so that
        // fromSerializedError can apply its own defaults otherwise.
        if (payload.error.details !== void 0) {
          errorOptions.details = payload.error.details;
        }
        if (payload.error.retryable !== void 0) {
          errorOptions.retryable = payload.error.retryable;
        }
        throw fromSerializedError(errorOptions);
      }
      return payload.result;
    },
    shouldRetry: (error) => {
      // Retry on network-level failures (fetch rejects with TypeError) and on
      // S3FilesError when explicitly retryable or when the status indicates a
      // transient condition (429 / 5xx).
      return error instanceof TypeError || error instanceof S3FilesError && (error.retryable || error.statusCode === 429 || error.statusCode >= 500);
    }
  });
}
|
|
65
|
+
// Decodes and validates a proxy HTTP response.
//
// The body is read as raw text first so that non-JSON error pages (load
// balancers, auth gateways) can be surfaced verbatim in the thrown error
// instead of producing an opaque JSON parse failure.
async function parseResponse(response) {
  // 429 and 5xx are treated as transient everywhere below.
  const retriableStatus = response.status === 429 || response.status >= 500;

  const bodyText = await response.text();
  let decoded = null;
  if (bodyText.length > 0) {
    try {
      decoded = JSON.parse(bodyText);
    } catch {
      decoded = null;
    }
  }

  // Transport-level failure with no parseable JSON envelope at all.
  if (!response.ok && decoded === null) {
    const unauthorized = response.status === 401;
    throw new S3FilesError({
      code: unauthorized ? "AUTHENTICATION_FAILED" : "BACKEND_UNAVAILABLE",
      message: unauthorized ? "Proxy authentication failed." : `Remote filesystem request failed with status ${response.status}.`,
      statusCode: response.status,
      retryable: retriableStatus,
      details: { body: bodyText || void 0 }
    });
  }

  // The envelope must match the internal proxy protocol schema.
  const checked = proxyResponseSchema.safeParse(decoded);
  if (!checked.success) {
    throw new S3FilesError({
      code: "REMOTE_ERROR",
      message: "Remote filesystem response did not match the expected protocol.",
      statusCode: response.status || 500,
      details: { issues: checked.error.issues }
    });
  }

  // A non-2xx status paired with an "ok" envelope is a protocol violation;
  // never hand a success payload back from a failed request.
  if (!response.ok && checked.data.ok) {
    throw new S3FilesError({
      code: "REMOTE_ERROR",
      message: `Remote filesystem request failed with status ${response.status}.`,
      statusCode: response.status,
      retryable: retriableStatus
    });
  }

  return checked.data;
}
|
|
103
|
+
|
|
104
|
+
// src/core/create-tool.ts
|
|
105
|
+
import { tool } from "ai";
|
|
106
|
+
|
|
107
|
+
// src/core/tool-output.ts
|
|
108
|
+
// Renders a compact, model-facing summary string for a tool result. Keeping
// these strings terse is deliberate: this is the text the LLM sees, so every
// character costs tokens.
function toModelOutput(output) {
  if (output.command === "list") {
    const count = output.entries.length;
    const header = `Listed ${count} entr${count === 1 ? "y" : "ies"} in ${output.path}.`;
    let body;
    if (count === 0) {
      body = "Directory is empty.";
    } else {
      const lines = [];
      for (const entry of output.entries) {
        const pieces = [`[${entry.type}]`, entry.path];
        if (entry.type === "file") {
          pieces.push(`${entry.size} B`);
        }
        if (entry.mtime) {
          pieces.push(`mtime ${entry.mtime}`);
        }
        lines.push(pieces.join(" "));
      }
      body = lines.join("\n");
    }
    const suffix = output.truncated ? `\nOutput truncated to ${output.limit} entries.` : "";
    return `${header}\n${body}${suffix}`;
  }

  if (output.command === "view") {
    const heading = `Viewing ${output.path} lines ${output.startLine}-${output.endLine} of ${output.totalLines}.`;
    let note = "";
    if (output.truncated) {
      const byBytes = output.truncatedByBytes ? " by byte limit" : "";
      const byLines = output.truncatedByLines ? " by line limit" : "";
      note = `\nTruncated${byBytes}${byLines}.`;
    }
    return `${heading}\n${output.content}${note}`;
  }

  if (output.command === "write") {
    const verb = output.appended ? "Appended" : "Wrote";
    return `${verb} ${output.bytesWritten} bytes to ${output.path}.`;
  }

  if (output.command === "mkdir") {
    return `Created directory ${output.path}.`;
  }

  if (output.command === "delete") {
    return `Deleted ${output.path}.`;
  }

  if (output.command === "stat") {
    const entry = output.entry;
    const fields = [`${entry.path} is a ${entry.type}`, `${entry.size} B`];
    if (entry.mtime) {
      fields.push(`mtime ${entry.mtime}`);
    }
    // mode is optional; render it in octal when present, matching ls/chmod.
    if (entry.mode !== void 0) {
      fields.push(`mode ${entry.mode.toString(8)}`);
    }
    return fields.join(", ");
  }

  if (output.command === "str_replace") {
    const plural = output.replacements === 1 ? "" : "es";
    return `Replaced ${output.replacements} match${plural} in ${output.path} and wrote ${output.bytesWritten} bytes.`;
  }
  // Unknown command: fall through and return undefined, as the original
  // switch (with no default case) did.
}
|
|
154
|
+
|
|
155
|
+
// src/core/create-tool.ts
|
|
156
|
+
// Wraps a command executor in an AI SDK `tool` definition and returns the
// public result shape: { name, tool, tools, agentRoot }. The `tools` map is
// keyed by options.toolName so it can be spread straight into generateText /
// streamText / agent configurations.
function createToolFromExecutor(options) {
  const fallbackDescription = "Read, write, list, inspect, create, delete, and safely edit files inside the agent's isolated filesystem root.";
  const filesystemTool = tool({
    description: options.description ?? fallbackDescription,
    inputSchema: s3FilesToolInputSchema,
    outputSchema: s3FilesToolOutputSchema,
    execute: async (input) => options.execute(input),
    // The model-facing rendering is plain text produced by toModelOutput,
    // keeping the LLM-visible payload much smaller than the full output object.
    toModelOutput: ({ output }) => ({
      type: "text",
      value: toModelOutput(output)
    })
  });
  return {
    name: options.toolName,
    tool: filesystemTool,
    tools: {
      [options.toolName]: filesystemTool
    },
    // NOTE(review): agentRoot is hard-coded to "/" here; per-agent scoping
    // presumably happens inside the executor — confirm against the adapters.
    agentRoot: "/"
  };
}
|
|
176
|
+
|
|
177
|
+
export {
|
|
178
|
+
createRemoteCommandExecutor,
|
|
179
|
+
createToolFromExecutor
|
|
180
|
+
};
|
|
181
|
+
//# sourceMappingURL=chunk-5U3BLEEU.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/adapters/remote-backend.ts","../src/core/create-tool.ts","../src/core/tool-output.ts"],"sourcesContent":["import type { S3FilesToolInput, S3FilesToolOutput } from \"../core/tool-schema.js\";\nimport { fromSerializedError, S3FilesError } from \"../core/errors.js\";\nimport { withRetry } from \"../core/retry-policy.js\";\nimport { PROTOCOL_VERSION, proxyResponseSchema } from \"../http/protocol.js\";\nimport type { RemoteS3FilesToolConfig } from \"../types.js\";\n\nexport function createRemoteCommandExecutor(\n config: RemoteS3FilesToolConfig,\n): (input: S3FilesToolInput) => Promise<S3FilesToolOutput> {\n const fetchImpl = config.fetch ?? globalThis.fetch;\n\n if (!fetchImpl) {\n throw new S3FilesError({\n code: \"NOT_SUPPORTED\",\n message: \"Remote mode requires a global fetch implementation.\",\n statusCode: 500,\n });\n }\n\n return async (input) =>\n withRetry({\n policy: config.retryPolicy,\n run: async () => {\n const response = await fetchImpl(config.remoteEndpoint, {\n method: \"POST\",\n headers: {\n \"content-type\": \"application/json\",\n authorization: `Bearer ${config.bearerToken}`,\n ...config.headers,\n },\n body: JSON.stringify({\n version: PROTOCOL_VERSION,\n agentId: config.agentId,\n command: input,\n options: {\n lockTimeoutMs: config.lockTimeoutMs,\n maxReadBytes: config.maxReadBytes,\n maxReadLines: config.maxReadLines,\n maxListEntries: config.maxListEntries,\n },\n }),\n });\n\n const payload = await parseResponse(response);\n\n if (!payload.ok) {\n const errorOptions: Parameters<typeof fromSerializedError>[0] = {\n code: payload.error.code as Parameters<\n typeof fromSerializedError\n >[0][\"code\"],\n message: payload.error.message,\n statusCode: payload.error.statusCode,\n };\n\n if (payload.error.details !== undefined) {\n errorOptions.details = payload.error.details;\n }\n\n if (payload.error.retryable !== undefined) {\n errorOptions.retryable = payload.error.retryable;\n }\n\n throw 
fromSerializedError(errorOptions);\n }\n\n return payload.result;\n },\n shouldRetry: (error) => {\n return (\n error instanceof TypeError ||\n (error instanceof S3FilesError &&\n (error.retryable || error.statusCode === 429 || error.statusCode >= 500))\n );\n },\n });\n}\n\nasync function parseResponse(response: Response) {\n const rawText = await response.text();\n let parsed: unknown = null;\n\n if (rawText.length > 0) {\n try {\n parsed = JSON.parse(rawText);\n } catch {\n parsed = null;\n }\n }\n\n if (!response.ok && parsed === null) {\n throw new S3FilesError({\n code: response.status === 401 ? \"AUTHENTICATION_FAILED\" : \"BACKEND_UNAVAILABLE\",\n message:\n response.status === 401\n ? \"Proxy authentication failed.\"\n : `Remote filesystem request failed with status ${response.status}.`,\n statusCode: response.status,\n retryable: response.status === 429 || response.status >= 500,\n details: { body: rawText || undefined },\n });\n }\n\n const validated = proxyResponseSchema.safeParse(parsed);\n if (!validated.success) {\n throw new S3FilesError({\n code: \"REMOTE_ERROR\",\n message: \"Remote filesystem response did not match the expected protocol.\",\n statusCode: response.status || 500,\n details: { issues: validated.error.issues },\n });\n }\n\n if (!response.ok && validated.data.ok) {\n throw new S3FilesError({\n code: \"REMOTE_ERROR\",\n message: `Remote filesystem request failed with status ${response.status}.`,\n statusCode: response.status,\n retryable: response.status === 429 || response.status >= 500,\n });\n }\n\n return validated.data;\n}\n","import { tool } from \"ai\";\n\nimport type { CreateS3FilesToolResult } from \"../types.js\";\nimport type { S3FilesToolInput, S3FilesToolOutput } from \"./tool-schema.js\";\nimport { s3FilesToolInputSchema, s3FilesToolOutputSchema } from \"./tool-schema.js\";\nimport { toModelOutput } from \"./tool-output.js\";\n\nexport interface CreateToolOptions {\n toolName: string;\n description?: string;\n execute: 
(input: S3FilesToolInput) => Promise<S3FilesToolOutput>;\n}\n\nexport function createToolFromExecutor(\n options: CreateToolOptions,\n): CreateS3FilesToolResult {\n const filesystemTool = tool<S3FilesToolInput, S3FilesToolOutput>({\n description:\n options.description ??\n \"Read, write, list, inspect, create, delete, and safely edit files inside the agent's isolated filesystem root.\",\n inputSchema: s3FilesToolInputSchema,\n outputSchema: s3FilesToolOutputSchema,\n execute: async (input) => options.execute(input),\n toModelOutput: ({ output }) => ({\n type: \"text\",\n value: toModelOutput(output),\n }),\n });\n\n return {\n name: options.toolName,\n tool: filesystemTool,\n tools: {\n [options.toolName]: filesystemTool,\n },\n agentRoot: \"/\",\n };\n}\n","import type { S3FilesToolOutput } from \"./tool-schema.js\";\n\nexport function toModelOutput(output: S3FilesToolOutput): string {\n switch (output.command) {\n case \"list\": {\n const header = `Listed ${output.entries.length} entr${output.entries.length === 1 ? \"y\" : \"ies\"} in ${output.path}.`;\n const body =\n output.entries.length === 0\n ? \"Directory is empty.\"\n : output.entries\n .map((entry) => {\n const parts = [`[${entry.type}]`, entry.path];\n if (entry.type === \"file\") {\n parts.push(`${entry.size} B`);\n }\n if (entry.mtime) {\n parts.push(`mtime ${entry.mtime}`);\n }\n return parts.join(\" \");\n })\n .join(\"\\n\");\n\n const suffix = output.truncated\n ? `\\nOutput truncated to ${output.limit} entries.`\n : \"\";\n\n return `${header}\\n${body}${suffix}`;\n }\n\n case \"view\": {\n const range = `Viewing ${output.path} lines ${output.startLine}-${output.endLine} of ${output.totalLines}.`;\n const truncation = output.truncated\n ? `\\nTruncated${output.truncatedByBytes ? \" by byte limit\" : \"\"}${output.truncatedByLines ? \" by line limit\" : \"\"}.`\n : \"\";\n return `${range}\\n${output.content}${truncation}`;\n }\n\n case \"write\":\n return `${output.appended ? 
\"Appended\" : \"Wrote\"} ${output.bytesWritten} bytes to ${output.path}.`;\n\n case \"mkdir\":\n return `Created directory ${output.path}.`;\n\n case \"delete\":\n return `Deleted ${output.path}.`;\n\n case \"stat\": {\n const entry = output.entry;\n const parts = [\n `${entry.path} is a ${entry.type}`,\n `${entry.size} B`,\n entry.mtime ? `mtime ${entry.mtime}` : null,\n entry.mode !== undefined ? `mode ${entry.mode.toString(8)}` : null,\n ].filter(Boolean);\n\n return parts.join(\", \");\n }\n\n case \"str_replace\":\n return `Replaced ${output.replacements} match${output.replacements === 1 ? \"\" : \"es\"} in ${output.path} and wrote ${output.bytesWritten} bytes.`;\n }\n}\n"],"mappings":";;;;;;;;;;;AAMO,SAAS,4BACd,QACyD;AACzD,QAAM,YAAY,OAAO,SAAS,WAAW;AAE7C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI,aAAa;AAAA,MACrB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,YAAY;AAAA,IACd,CAAC;AAAA,EACH;AAEA,SAAO,OAAO,UACZ,UAAU;AAAA,IACR,QAAQ,OAAO;AAAA,IACf,KAAK,YAAY;AACf,YAAM,WAAW,MAAM,UAAU,OAAO,gBAAgB;AAAA,QACtD,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,OAAO,WAAW;AAAA,UAC3C,GAAG,OAAO;AAAA,QACZ;AAAA,QACA,MAAM,KAAK,UAAU;AAAA,UACnB,SAAS;AAAA,UACT,SAAS,OAAO;AAAA,UAChB,SAAS;AAAA,UACT,SAAS;AAAA,YACP,eAAe,OAAO;AAAA,YACtB,cAAc,OAAO;AAAA,YACrB,cAAc,OAAO;AAAA,YACrB,gBAAgB,OAAO;AAAA,UACzB;AAAA,QACF,CAAC;AAAA,MACH,CAAC;AAED,YAAM,UAAU,MAAM,cAAc,QAAQ;AAE5C,UAAI,CAAC,QAAQ,IAAI;AACf,cAAM,eAA0D;AAAA,UAC9D,MAAM,QAAQ,MAAM;AAAA,UAGpB,SAAS,QAAQ,MAAM;AAAA,UACvB,YAAY,QAAQ,MAAM;AAAA,QAC5B;AAEA,YAAI,QAAQ,MAAM,YAAY,QAAW;AACvC,uBAAa,UAAU,QAAQ,MAAM;AAAA,QACvC;AAEA,YAAI,QAAQ,MAAM,cAAc,QAAW;AACzC,uBAAa,YAAY,QAAQ,MAAM;AAAA,QACzC;AAEA,cAAM,oBAAoB,YAAY;AAAA,MACxC;AAEA,aAAO,QAAQ;AAAA,IACjB;AAAA,IACA,aAAa,CAAC,UAAU;AACtB,aACE,iBAAiB,aAChB,iBAAiB,iBACf,MAAM,aAAa,MAAM,eAAe,OAAO,MAAM,cAAc;AAAA,IAE1E;AAAA,EACF,CAAC;AACL;AAEA,eAAe,cAAc,UAAoB;AAC/C,QAAM,UAAU,MAAM,SAAS,KAAK;AACpC,MAAI,SAAkB;AAEtB,MAAI,QAAQ,SAAS,GAAG;AACtB,QAAI;AACF,eAAS,KAAK,MAAM,OAAO;AAAA,IAC7B,QAAQ;AACN,eAAS;AAAA,IACX;AAAA,EACF;AAEA,MAAI,CAAC,SAAS,MAAM,WAAW,MA
AM;AACnC,UAAM,IAAI,aAAa;AAAA,MACrB,MAAM,SAAS,WAAW,MAAM,0BAA0B;AAAA,MAC1D,SACE,SAAS,WAAW,MAChB,iCACA,gDAAgD,SAAS,MAAM;AAAA,MACrE,YAAY,SAAS;AAAA,MACrB,WAAW,SAAS,WAAW,OAAO,SAAS,UAAU;AAAA,MACzD,SAAS,EAAE,MAAM,WAAW,OAAU;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,QAAM,YAAY,oBAAoB,UAAU,MAAM;AACtD,MAAI,CAAC,UAAU,SAAS;AACtB,UAAM,IAAI,aAAa;AAAA,MACrB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,YAAY,SAAS,UAAU;AAAA,MAC/B,SAAS,EAAE,QAAQ,UAAU,MAAM,OAAO;AAAA,IAC5C,CAAC;AAAA,EACH;AAEA,MAAI,CAAC,SAAS,MAAM,UAAU,KAAK,IAAI;AACrC,UAAM,IAAI,aAAa;AAAA,MACrB,MAAM;AAAA,MACN,SAAS,gDAAgD,SAAS,MAAM;AAAA,MACxE,YAAY,SAAS;AAAA,MACrB,WAAW,SAAS,WAAW,OAAO,SAAS,UAAU;AAAA,IAC3D,CAAC;AAAA,EACH;AAEA,SAAO,UAAU;AACnB;;;AC1HA,SAAS,YAAY;;;ACEd,SAAS,cAAc,QAAmC;AAC/D,UAAQ,OAAO,SAAS;AAAA,IACtB,KAAK,QAAQ;AACX,YAAM,SAAS,UAAU,OAAO,QAAQ,MAAM,QAAQ,OAAO,QAAQ,WAAW,IAAI,MAAM,KAAK,OAAO,OAAO,IAAI;AACjH,YAAM,OACJ,OAAO,QAAQ,WAAW,IACtB,wBACA,OAAO,QACJ,IAAI,CAAC,UAAU;AACd,cAAM,QAAQ,CAAC,IAAI,MAAM,IAAI,KAAK,MAAM,IAAI;AAC5C,YAAI,MAAM,SAAS,QAAQ;AACzB,gBAAM,KAAK,GAAG,MAAM,IAAI,IAAI;AAAA,QAC9B;AACA,YAAI,MAAM,OAAO;AACf,gBAAM,KAAK,SAAS,MAAM,KAAK,EAAE;AAAA,QACnC;AACA,eAAO,MAAM,KAAK,GAAG;AAAA,MACvB,CAAC,EACA,KAAK,IAAI;AAElB,YAAM,SAAS,OAAO,YAClB;AAAA,sBAAyB,OAAO,KAAK,cACrC;AAEJ,aAAO,GAAG,MAAM;AAAA,EAAK,IAAI,GAAG,MAAM;AAAA,IACpC;AAAA,IAEA,KAAK,QAAQ;AACX,YAAM,QAAQ,WAAW,OAAO,IAAI,UAAU,OAAO,SAAS,IAAI,OAAO,OAAO,OAAO,OAAO,UAAU;AACxG,YAAM,aAAa,OAAO,YACtB;AAAA,WAAc,OAAO,mBAAmB,mBAAmB,EAAE,GAAG,OAAO,mBAAmB,mBAAmB,EAAE,MAC/G;AACJ,aAAO,GAAG,KAAK;AAAA,EAAK,OAAO,OAAO,GAAG,UAAU;AAAA,IACjD;AAAA,IAEA,KAAK;AACH,aAAO,GAAG,OAAO,WAAW,aAAa,OAAO,IAAI,OAAO,YAAY,aAAa,OAAO,IAAI;AAAA,IAEjG,KAAK;AACH,aAAO,qBAAqB,OAAO,IAAI;AAAA,IAEzC,KAAK;AACH,aAAO,WAAW,OAAO,IAAI;AAAA,IAE/B,KAAK,QAAQ;AACX,YAAM,QAAQ,OAAO;AACrB,YAAM,QAAQ;AAAA,QACZ,GAAG,MAAM,IAAI,SAAS,MAAM,IAAI;AAAA,QAChC,GAAG,MAAM,IAAI;AAAA,QACb,MAAM,QAAQ,SAAS,MAAM,KAAK,KAAK;AAAA,QACvC,MAAM,SAAS,SAAY,QAAQ,MAAM,KAAK,SAAS,CAAC,CAAC,KAAK;AAAA,MAChE,EAAE,OAAO,OAAO;AAEhB,aAAO,MAAM,KAAK,IAAI;AAAA,IACxB;AAAA,IAEA,KAAK;AACH,aAAO,YAAY,OAAO,YAAY,SA
AS,OAAO,iBAAiB,IAAI,KAAK,IAAI,OAAO,OAAO,IAAI,cAAc,OAAO,YAAY;AAAA,EAC3I;AACF;;;ADhDO,SAAS,uBACd,SACyB;AACzB,QAAM,iBAAiB,KAA0C;AAAA,IAC/D,aACE,QAAQ,eACR;AAAA,IACF,aAAa;AAAA,IACb,cAAc;AAAA,IACd,SAAS,OAAO,UAAU,QAAQ,QAAQ,KAAK;AAAA,IAC/C,eAAe,CAAC,EAAE,OAAO,OAAO;AAAA,MAC9B,MAAM;AAAA,MACN,OAAO,cAAc,MAAM;AAAA,IAC7B;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL,MAAM,QAAQ;AAAA,IACd,MAAM;AAAA,IACN,OAAO;AAAA,MACL,CAAC,QAAQ,QAAQ,GAAG;AAAA,IACtB;AAAA,IACA,WAAW;AAAA,EACb;AACF;","names":[]}
|