@dexto/tools-filesystem 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +44 -0
- package/dist/directory-approval.integration.test.cjs +467 -0
- package/dist/directory-approval.integration.test.d.cts +2 -0
- package/dist/directory-approval.integration.test.d.ts +2 -0
- package/dist/directory-approval.integration.test.js +444 -0
- package/dist/edit-file-tool.cjs +181 -0
- package/dist/edit-file-tool.d.cts +17 -0
- package/dist/edit-file-tool.d.ts +17 -0
- package/dist/edit-file-tool.js +147 -0
- package/dist/error-codes.cjs +53 -0
- package/dist/error-codes.d.cts +32 -0
- package/dist/error-codes.d.ts +32 -0
- package/dist/error-codes.js +29 -0
- package/dist/errors.cjs +302 -0
- package/dist/errors.d.cts +112 -0
- package/dist/errors.d.ts +112 -0
- package/dist/errors.js +278 -0
- package/dist/file-tool-types.cjs +16 -0
- package/dist/file-tool-types.d.cts +46 -0
- package/dist/file-tool-types.d.ts +46 -0
- package/dist/file-tool-types.js +0 -0
- package/dist/filesystem-service.cjs +526 -0
- package/dist/filesystem-service.d.cts +107 -0
- package/dist/filesystem-service.d.ts +107 -0
- package/dist/filesystem-service.js +492 -0
- package/dist/glob-files-tool.cjs +70 -0
- package/dist/glob-files-tool.d.cts +16 -0
- package/dist/glob-files-tool.d.ts +16 -0
- package/dist/glob-files-tool.js +46 -0
- package/dist/grep-content-tool.cjs +86 -0
- package/dist/grep-content-tool.d.cts +16 -0
- package/dist/grep-content-tool.d.ts +16 -0
- package/dist/grep-content-tool.js +62 -0
- package/dist/index.cjs +55 -0
- package/dist/index.d.cts +14 -0
- package/dist/index.d.ts +14 -0
- package/dist/index.js +22 -0
- package/dist/path-validator.cjs +232 -0
- package/dist/path-validator.d.cts +90 -0
- package/dist/path-validator.d.ts +90 -0
- package/dist/path-validator.js +198 -0
- package/dist/path-validator.test.cjs +444 -0
- package/dist/path-validator.test.d.cts +2 -0
- package/dist/path-validator.test.d.ts +2 -0
- package/dist/path-validator.test.js +443 -0
- package/dist/read-file-tool.cjs +117 -0
- package/dist/read-file-tool.d.cts +17 -0
- package/dist/read-file-tool.d.ts +17 -0
- package/dist/read-file-tool.js +83 -0
- package/dist/tool-provider.cjs +108 -0
- package/dist/tool-provider.d.cts +74 -0
- package/dist/tool-provider.d.ts +74 -0
- package/dist/tool-provider.js +84 -0
- package/dist/types.cjs +16 -0
- package/dist/types.d.cts +172 -0
- package/dist/types.d.ts +172 -0
- package/dist/types.js +0 -0
- package/dist/write-file-tool.cjs +177 -0
- package/dist/write-file-tool.d.cts +17 -0
- package/dist/write-file-tool.d.ts +17 -0
- package/dist/write-file-tool.js +143 -0
- package/package.json +42 -0
|
@@ -0,0 +1,443 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, vi } from "vitest";
import { PathValidator } from "./path-validator.js";

/**
 * Build a logger stub exposing the methods PathValidator logs through.
 * createChild returns the same stub so child loggers share the spies.
 */
const createMockLogger = () => ({
  debug: vi.fn(),
  info: vi.fn(),
  warn: vi.fn(),
  error: vi.fn(),
  createChild: vi.fn().mockReturnThis()
});

describe("PathValidator", () => {
  let mockLogger;

  // Baseline configuration shared by every test. Individual tests override
  // only the fields they care about, keeping each case focused on its subject
  // instead of repeating the full 8-field config literal.
  const baseConfig = {
    allowedPaths: ["/home/user/project"],
    blockedPaths: [],
    blockedExtensions: [],
    maxFileSize: 10 * 1024 * 1024,
    enableBackups: false,
    backupRetentionDays: 7,
    workingDirectory: "/home/user/project"
  };

  /** Construct a PathValidator from baseConfig plus per-test overrides. */
  const makeValidator = (overrides = {}) =>
    new PathValidator({ ...baseConfig, ...overrides }, mockLogger);

  beforeEach(() => {
    mockLogger = createMockLogger();
    vi.clearAllMocks();
  });

  describe("validatePath", () => {
    describe("Empty and Invalid Paths", () => {
      it("should reject empty path", () => {
        const result = makeValidator().validatePath("");
        expect(result.isValid).toBe(false);
        expect(result.error).toBe("Path cannot be empty");
      });

      it("should reject whitespace-only path", () => {
        const result = makeValidator().validatePath("   ");
        expect(result.isValid).toBe(false);
        expect(result.error).toBe("Path cannot be empty");
      });
    });

    describe("Allowed Paths", () => {
      it("should allow paths within allowed directories", () => {
        const result = makeValidator().validatePath("/home/user/project/src/file.ts");
        expect(result.isValid).toBe(true);
        expect(result.normalizedPath).toBeDefined();
      });

      it("should allow relative paths within working directory", () => {
        const result = makeValidator().validatePath("src/file.ts");
        expect(result.isValid).toBe(true);
      });

      it("should reject paths outside allowed directories", () => {
        const result = makeValidator().validatePath("/external/project/file.ts");
        expect(result.isValid).toBe(false);
        expect(result.error).toContain("not within allowed paths");
      });

      it("should allow all paths when allowedPaths is empty", () => {
        const result = makeValidator({ allowedPaths: [] }).validatePath("/anywhere/file.ts");
        expect(result.isValid).toBe(true);
      });
    });

    describe("Path Traversal Detection", () => {
      it("should reject path traversal attempts", () => {
        const result = makeValidator().validatePath(
          "/home/user/project/../../../etc/passwd"
        );
        expect(result.isValid).toBe(false);
        expect(result.error).toBe("Path traversal detected");
      });
    });

    describe("Blocked Paths", () => {
      it("should reject paths in blocked directories", () => {
        const validator = makeValidator({ blockedPaths: [".git", "node_modules"] });
        const result = validator.validatePath("/home/user/project/.git/config");
        expect(result.isValid).toBe(false);
        expect(result.error).toContain("blocked");
      });

      it("should reject paths in node_modules", () => {
        const validator = makeValidator({ blockedPaths: ["node_modules"] });
        const result = validator.validatePath(
          "/home/user/project/node_modules/lodash/index.js"
        );
        expect(result.isValid).toBe(false);
        expect(result.error).toContain("blocked");
      });
    });

    describe("Blocked Extensions", () => {
      it("should reject files with blocked extensions", () => {
        const validator = makeValidator({ blockedExtensions: [".exe", ".dll"] });
        const result = validator.validatePath("/home/user/project/malware.exe");
        expect(result.isValid).toBe(false);
        expect(result.error).toContain(".exe is not allowed");
      });

      it("should handle extensions without leading dot", () => {
        // No leading dot on the configured extensions.
        const validator = makeValidator({ blockedExtensions: ["exe", "dll"] });
        const result = validator.validatePath("/home/user/project/file.exe");
        expect(result.isValid).toBe(false);
      });

      it("should be case-insensitive for extensions", () => {
        const validator = makeValidator({ blockedExtensions: [".exe"] });
        const result = validator.validatePath("/home/user/project/file.EXE");
        expect(result.isValid).toBe(false);
      });
    });

    describe("Directory Approval Checker Integration", () => {
      it("should consult approval checker for external paths", () => {
        const validator = makeValidator();
        // External path is rejected before any approval checker is installed.
        let result = validator.validatePath("/external/project/file.ts");
        expect(result.isValid).toBe(false);
        const approvalChecker = (filePath) => {
          return filePath.startsWith("/external/project");
        };
        validator.setDirectoryApprovalChecker(approvalChecker);
        result = validator.validatePath("/external/project/file.ts");
        expect(result.isValid).toBe(true);
      });

      it("should not use approval checker for config-allowed paths", () => {
        const approvalChecker = vi.fn().mockReturnValue(false);
        const validator = makeValidator();
        validator.setDirectoryApprovalChecker(approvalChecker);
        const result = validator.validatePath("/home/user/project/src/file.ts");
        expect(result.isValid).toBe(true);
        expect(approvalChecker).not.toHaveBeenCalled();
      });
    });
  });

  describe("isPathWithinAllowed", () => {
    it("should return true for paths within config-allowed directories", () => {
      const validator = makeValidator();
      expect(validator.isPathWithinAllowed("/home/user/project/src/file.ts")).toBe(true);
      expect(validator.isPathWithinAllowed("/home/user/project/deep/nested/file.ts")).toBe(
        true
      );
    });

    it("should return false for paths outside config-allowed directories", () => {
      const validator = makeValidator();
      expect(validator.isPathWithinAllowed("/external/project/file.ts")).toBe(false);
      expect(validator.isPathWithinAllowed("/home/user/other/file.ts")).toBe(false);
    });

    it("should NOT consult approval checker (used for prompting decisions)", () => {
      const approvalChecker = vi.fn().mockReturnValue(true);
      const validator = makeValidator();
      validator.setDirectoryApprovalChecker(approvalChecker);
      expect(validator.isPathWithinAllowed("/external/project/file.ts")).toBe(false);
      expect(approvalChecker).not.toHaveBeenCalled();
    });

    it("should return false for empty path", () => {
      const validator = makeValidator();
      expect(validator.isPathWithinAllowed("")).toBe(false);
      expect(validator.isPathWithinAllowed("   ")).toBe(false);
    });

    it("should return true when allowedPaths is empty (all paths allowed)", () => {
      const validator = makeValidator({ allowedPaths: [] });
      expect(validator.isPathWithinAllowed("/anywhere/file.ts")).toBe(true);
    });
  });

  describe("Path Containment (Parent Directory Coverage)", () => {
    it("should recognize that approving parent covers child paths", () => {
      const validator = makeValidator({ allowedPaths: ["/external/sub"] });
      expect(validator.isPathWithinAllowed("/external/sub/deep/nested/file.ts")).toBe(true);
    });

    it("should not allow sibling directories", () => {
      const validator = makeValidator({ allowedPaths: ["/external/sub"] });
      expect(validator.isPathWithinAllowed("/external/other/file.ts")).toBe(false);
    });

    it("should not allow parent directories when child is approved", () => {
      const validator = makeValidator({ allowedPaths: ["/external/sub/deep"] });
      expect(validator.isPathWithinAllowed("/external/sub/file.ts")).toBe(false);
    });
  });

  describe("getAllowedPaths and getBlockedPaths", () => {
    it("should return normalized allowed paths", () => {
      // Relative entries are resolved against workingDirectory.
      const validator = makeValidator({ allowedPaths: [".", "./src"] });
      const allowedPaths = validator.getAllowedPaths();
      expect(allowedPaths).toHaveLength(2);
      expect(allowedPaths[0]).toBe("/home/user/project");
      expect(allowedPaths[1]).toBe("/home/user/project/src");
    });

    it("should return blocked paths", () => {
      const validator = makeValidator({ blockedPaths: [".git", "node_modules"] });
      const blockedPaths = validator.getBlockedPaths();
      expect(blockedPaths).toContain(".git");
      expect(blockedPaths).toContain("node_modules");
    });
  });
});
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
var read_file_tool_exports = {};
|
|
30
|
+
__export(read_file_tool_exports, {
|
|
31
|
+
createReadFileTool: () => createReadFileTool
|
|
32
|
+
});
|
|
33
|
+
module.exports = __toCommonJS(read_file_tool_exports);
|
|
34
|
+
var path = __toESM(require("node:path"), 1);
|
|
35
|
+
var import_zod = require("zod");
|
|
36
|
+
var import_core = require("@dexto/core");
|
|
37
|
+
const ReadFileInputSchema = import_zod.z.object({
|
|
38
|
+
file_path: import_zod.z.string().describe("Absolute path to the file to read"),
|
|
39
|
+
limit: import_zod.z.number().int().positive().optional().describe("Maximum number of lines to read (optional)"),
|
|
40
|
+
offset: import_zod.z.number().int().min(1).optional().describe("Starting line number (1-based, optional)")
|
|
41
|
+
}).strict();
|
|
42
|
+
function createReadFileTool(options) {
|
|
43
|
+
const { fileSystemService, directoryApproval } = options;
|
|
44
|
+
let pendingApprovalParentDir;
|
|
45
|
+
return {
|
|
46
|
+
id: "read_file",
|
|
47
|
+
description: "Read the contents of a file with optional pagination. Returns file content, line count, encoding, and whether the output was truncated. Use limit and offset parameters for large files to read specific sections. This tool is for reading files within allowed paths only.",
|
|
48
|
+
inputSchema: ReadFileInputSchema,
|
|
49
|
+
/**
|
|
50
|
+
* Check if this read operation needs directory access approval.
|
|
51
|
+
* Returns custom approval request if the file is outside allowed paths.
|
|
52
|
+
*/
|
|
53
|
+
getApprovalOverride: (args) => {
|
|
54
|
+
const { file_path } = args;
|
|
55
|
+
if (!file_path) return null;
|
|
56
|
+
const isAllowed = fileSystemService.isPathWithinConfigAllowed(file_path);
|
|
57
|
+
if (isAllowed) {
|
|
58
|
+
return null;
|
|
59
|
+
}
|
|
60
|
+
if (directoryApproval?.isSessionApproved(file_path)) {
|
|
61
|
+
return null;
|
|
62
|
+
}
|
|
63
|
+
const absolutePath = path.resolve(file_path);
|
|
64
|
+
const parentDir = path.dirname(absolutePath);
|
|
65
|
+
pendingApprovalParentDir = parentDir;
|
|
66
|
+
return {
|
|
67
|
+
type: import_core.ApprovalType.DIRECTORY_ACCESS,
|
|
68
|
+
metadata: {
|
|
69
|
+
path: absolutePath,
|
|
70
|
+
parentDir,
|
|
71
|
+
operation: "read",
|
|
72
|
+
toolName: "read_file"
|
|
73
|
+
}
|
|
74
|
+
};
|
|
75
|
+
},
|
|
76
|
+
/**
|
|
77
|
+
* Handle approved directory access - remember the directory for session
|
|
78
|
+
*/
|
|
79
|
+
onApprovalGranted: (response) => {
|
|
80
|
+
if (!directoryApproval || !pendingApprovalParentDir) return;
|
|
81
|
+
const data = response.data;
|
|
82
|
+
const rememberDirectory = data?.rememberDirectory ?? false;
|
|
83
|
+
directoryApproval.addApproved(
|
|
84
|
+
pendingApprovalParentDir,
|
|
85
|
+
rememberDirectory ? "session" : "once"
|
|
86
|
+
);
|
|
87
|
+
pendingApprovalParentDir = void 0;
|
|
88
|
+
},
|
|
89
|
+
execute: async (input, _context) => {
|
|
90
|
+
const { file_path, limit, offset } = input;
|
|
91
|
+
const result = await fileSystemService.readFile(file_path, {
|
|
92
|
+
limit,
|
|
93
|
+
offset
|
|
94
|
+
});
|
|
95
|
+
const _display = {
|
|
96
|
+
type: "file",
|
|
97
|
+
path: file_path,
|
|
98
|
+
operation: "read",
|
|
99
|
+
size: result.size,
|
|
100
|
+
lineCount: result.lines
|
|
101
|
+
};
|
|
102
|
+
return {
|
|
103
|
+
content: result.content,
|
|
104
|
+
lines: result.lines,
|
|
105
|
+
encoding: result.encoding,
|
|
106
|
+
truncated: result.truncated,
|
|
107
|
+
size: result.size,
|
|
108
|
+
...result.mimeType && { mimeType: result.mimeType },
|
|
109
|
+
_display
|
|
110
|
+
};
|
|
111
|
+
}
|
|
112
|
+
};
|
|
113
|
+
}
|
|
114
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
115
|
+
0 && (module.exports = {
|
|
116
|
+
createReadFileTool
|
|
117
|
+
});
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { InternalTool } from '@dexto/core';
import { FileToolOptions } from './file-tool-types.cjs';
import './filesystem-service.cjs';
import './types.cjs';

/**
 * Read File Tool
 *
 * Internal tool for reading file contents with size limits and pagination
 */

/**
 * Create the read_file internal tool with directory approval support
 *
 * @param options - Filesystem service plus an optional directory-approval
 *   tracker used to gate reads outside the configured allowed paths.
 * @returns The `read_file` InternalTool definition.
 */
declare function createReadFileTool(options: FileToolOptions): InternalTool;

export { createReadFileTool };
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { InternalTool } from '@dexto/core';
import { FileToolOptions } from './file-tool-types.js';
import './filesystem-service.js';
import './types.js';

/**
 * Read File Tool
 *
 * Internal tool for reading file contents with size limits and pagination
 */

/**
 * Create the read_file internal tool with directory approval support
 *
 * @param options - Filesystem service plus an optional directory-approval
 *   tracker used to gate reads outside the configured allowed paths.
 * @returns The `read_file` InternalTool definition.
 */
declare function createReadFileTool(options: FileToolOptions): InternalTool;

export { createReadFileTool };
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import * as path from "node:path";
import { z } from "zod";
import { ApprovalType } from "@dexto/core";
// Input schema: absolute file path plus an optional pagination window.
// .strict() rejects any unexpected extra keys in the tool arguments.
const ReadFileInputSchema = z.object({
  file_path: z.string().describe("Absolute path to the file to read"),
  limit: z.number().int().positive().optional().describe("Maximum number of lines to read (optional)"),
  offset: z.number().int().min(1).optional().describe("Starting line number (1-based, optional)")
}).strict();
/**
 * Build the `read_file` internal tool.
 *
 * @param options - Provides `fileSystemService` (path policy + file reads)
 *   and an optional `directoryApproval` tracker used to remember approved
 *   external directories.
 * @returns The tool definition consumed by the tool provider.
 */
function createReadFileTool(options) {
  const { fileSystemService, directoryApproval } = options;
  // Parent directory of the most recent path that triggered an approval
  // request; consumed by onApprovalGranted. NOTE(review): per-instance
  // mutable state — if two approval flows overlap, the second request
  // overwrites the first before onApprovalGranted runs. Confirm the host
  // serializes approval prompts per tool instance.
  let pendingApprovalParentDir;
  return {
    id: "read_file",
    description: "Read the contents of a file with optional pagination. Returns file content, line count, encoding, and whether the output was truncated. Use limit and offset parameters for large files to read specific sections. This tool is for reading files within allowed paths only.",
    inputSchema: ReadFileInputSchema,
    /**
     * Check if this read operation needs directory access approval.
     * Returns custom approval request if the file is outside allowed paths.
     */
    getApprovalOverride: (args) => {
      const { file_path } = args;
      if (!file_path) return null;
      // Paths inside the configured allow-list never need approval.
      const isAllowed = fileSystemService.isPathWithinConfigAllowed(file_path);
      if (isAllowed) {
        return null;
      }
      // Already approved earlier in this session.
      if (directoryApproval?.isSessionApproved(file_path)) {
        return null;
      }
      // NOTE(review): path.resolve uses the process CWD for relative input,
      // even though the schema documents file_path as absolute.
      const absolutePath = path.resolve(file_path);
      const parentDir = path.dirname(absolutePath);
      pendingApprovalParentDir = parentDir;
      return {
        type: ApprovalType.DIRECTORY_ACCESS,
        metadata: {
          path: absolutePath,
          parentDir,
          operation: "read",
          toolName: "read_file"
        }
      };
    },
    /**
     * Handle approved directory access - remember the directory for session
     */
    onApprovalGranted: (response) => {
      if (!directoryApproval || !pendingApprovalParentDir) return;
      const data = response.data;
      const rememberDirectory = data?.rememberDirectory ?? false;
      // "session" persists the approval for the rest of the session;
      // "once" applies to the current call only.
      directoryApproval.addApproved(
        pendingApprovalParentDir,
        rememberDirectory ? "session" : "once"
      );
      pendingApprovalParentDir = void 0;
    },
    execute: async (input, _context) => {
      const { file_path, limit, offset } = input;
      const result = await fileSystemService.readFile(file_path, {
        limit,
        offset
      });
      // _display carries UI-facing metadata alongside the model-facing
      // result fields. NOTE(review): presumed convention — confirm against
      // the tool-result rendering layer.
      const _display = {
        type: "file",
        path: file_path,
        operation: "read",
        size: result.size,
        lineCount: result.lines
      };
      return {
        content: result.content,
        lines: result.lines,
        encoding: result.encoding,
        truncated: result.truncated,
        size: result.size,
        ...result.mimeType && { mimeType: result.mimeType },
        _display
      };
    }
  };
}
export {
  createReadFileTool
};
|