@valentinkolb/filegate 2.0.0 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -1
- package/package.json +1 -1
- package/src/client.ts +6 -0
- package/src/handlers/files.ts +57 -21
- package/src/schemas.ts +228 -158
package/README.md
CHANGED
@@ -172,6 +172,14 @@ await client.transfer({
   gid: 1000,
   fileMode: "644",
 });
+
+// Allow overwriting existing files (default: false)
+await client.transfer({
+  from: "/data/new-file.txt",
+  to: "/data/existing-file.txt",
+  mode: "copy",
+  ensureUniqueName: false, // Overwrite if target exists
+});
 ```
 
 **Rules:**
@@ -179,6 +187,8 @@ await client.transfer({
 - `mode: "copy"` without ownership - Only within the same base path
 - `mode: "copy"` with ownership - Allows cross-base copying (ownership is applied recursively)
 - Both operations work recursively on directories
+- `ensureUniqueName: true` (default) - Appends `-01`, `-02`, etc. if target exists
+- `ensureUniqueName: false` - Overwrites existing target file
 
 ### Chunked Uploads
 
@@ -301,6 +311,9 @@ const client = new Filegate({
 // Get file or directory info
 await client.info({ path: "/data/file.txt", showHidden: false });
 
+// Get directory info with recursive sizes (slower)
+await client.info({ path: "/data/uploads", computeSizes: true });
+
 // Download file (returns streaming Response)
 await client.download({ path: "/data/file.txt" });
 
@@ -346,6 +359,7 @@ await client.transfer({
   gid: 1000,
   fileMode: "644",
   dirMode: "755",
+  ensureUniqueName: true, // default: append -01, -02 if target exists
 });
 
 // Search files with glob patterns
@@ -439,7 +453,7 @@ All `/files/*` endpoints require `Authorization: Bearer <token>`.
 | GET | `/docs` | OpenAPI documentation (Scalar UI) |
 | GET | `/openapi.json` | OpenAPI specification |
 | GET | `/llms.txt` | LLM-friendly markdown documentation |
-| GET | `/files/info` | Get file or directory info |
+| GET | `/files/info` | Get file or directory info. Use `?computeSizes=true` for recursive dir sizes |
 | GET | `/files/content` | Download file or directory (TAR). Use `?inline=true` to view in browser |
 | PUT | `/files/content` | Upload file |
 | POST | `/files/mkdir` | Create directory |
package/package.json
CHANGED
package/src/client.ts
CHANGED
@@ -33,6 +33,8 @@ export interface ClientOptions {
 export interface InfoOptions {
   path: string;
   showHidden?: boolean;
+  /** If true, compute recursive sizes for directories (slower, default: false) */
+  computeSizes?: boolean;
 }
 
 // --- Download ---
@@ -94,6 +96,8 @@ export interface TransferOptions {
   from: string;
   to: string;
   mode: "move" | "copy";
+  /** If true (default), appends -01, -02, etc. to avoid overwriting existing files */
+  ensureUniqueName?: boolean;
   /** Owner UID - required for cross-base copy */
   uid?: number;
   /** Owner GID - required for cross-base copy */
@@ -241,6 +245,7 @@ export class Filegate {
       path: opts.path,
       showHidden: String(opts.showHidden ?? false),
     });
+    if (opts.computeSizes) params.set("computeSizes", "true");
     const res = await this._fetch(`${this.url}/files/info?${params}`, { headers: this.hdrs() });
     return this.handleResponse(res);
   }
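Note (not part of the diff): with the new option set, an `info` call ends up on the wire roughly as sketched below. The host is a placeholder, and the encoding assumes `params` is a `URLSearchParams`, as `params.set` suggests.

```ts
// Sketch only — host is a placeholder, not from the package.
await client.info({ path: "/data/uploads", computeSizes: true });
// → GET http://localhost:3000/files/info?path=%2Fdata%2Fuploads&showHidden=false&computeSizes=true
//   (computeSizes is only appended when the option is truthy)
```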
@@ -305,6 +310,7 @@ export class Filegate {
       to: opts.to,
       mode: opts.mode,
     };
+    if (opts.ensureUniqueName !== undefined) body.ensureUniqueName = opts.ensureUniqueName;
     if (opts.uid !== undefined) body.ownerUid = opts.uid;
     if (opts.gid !== undefined) body.ownerGid = opts.gid;
     if (opts.fileMode) body.fileMode = opts.fileMode;
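Note (not part of the diff): `ensureUniqueName` is only serialized when the caller sets it, so omitting it lets the server-side schema default (`true`) apply. A rough sketch of the resulting payload, with example paths:

```ts
await client.transfer({
  from: "/data/new-file.txt",
  to: "/data/existing-file.txt",
  mode: "copy",
  ensureUniqueName: false,
});
// JSON body sent to the transfer endpoint (sketch):
// { "from": "/data/new-file.txt", "to": "/data/existing-file.txt", "mode": "copy", "ensureUniqueName": false }
```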
package/src/handlers/files.ts
CHANGED
@@ -1,7 +1,7 @@
 import { Hono } from "hono";
 import { describeRoute } from "hono-openapi";
-import { readdir, mkdir, rm, rename, cp, stat } from "node:fs/promises";
-import { join, basename, relative } from "node:path";
+import { readdir, mkdir, rm, rename, cp, stat, access } from "node:fs/promises";
+import { join, basename, relative, dirname, extname } from "node:path";
 import sanitizeFilename from "sanitize-filename";
 import { validatePath, validateSameBase } from "../lib/path";
 import { parseOwnershipBody, applyOwnership, applyOwnershipRecursive } from "../lib/ownership";
@@ -23,6 +23,35 @@ import { config } from "../config";
 
 const app = new Hono();
 
+// Generate a unique path by appending -01, -02, etc. if target exists
+const getUniquePath = async (targetPath: string): Promise<string> => {
+  // Check if target exists
+  try {
+    await access(targetPath);
+  } catch {
+    // Doesn't exist, use as-is
+    return targetPath;
+  }
+
+  const dir = dirname(targetPath);
+  const ext = extname(targetPath);
+  const base = basename(targetPath, ext);
+
+  for (let i = 1; i <= 99; i++) {
+    const suffix = i.toString().padStart(2, "0");
+    const newPath = join(dir, `${base}-${suffix}${ext}`);
+    try {
+      await access(newPath);
+    } catch {
+      return newPath;
+    }
+  }
+
+  // Fallback: use timestamp if all 99 are taken
+  const timestamp = Date.now();
+  return join(dir, `${base}-${timestamp}${ext}`);
+};
+
 // Cross-platform directory size using `du` command
 const getDirSize = async (dirPath: string): Promise<number> => {
   const isMac = process.platform === "darwin";
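Illustration (not part of the diff): because `getUniquePath` splits the path with `extname`/`basename`, the numeric suffix lands before the extension. The file names below are made-up examples of the expected behavior.

```ts
// Expected results (illustrative):
//
//   target             already on disk                  result
//   /data/report.pdf   (nothing)                        /data/report.pdf
//   /data/report.pdf   report.pdf                       /data/report-01.pdf
//   /data/report.pdf   report.pdf, report-01.pdf        /data/report-02.pdf
//   /data/report.pdf   report.pdf ... report-99.pdf     /data/report-<Date.now()>.pdf
```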
@@ -79,7 +108,7 @@ app.get(
   }),
   v("query", InfoQuerySchema),
   async (c) => {
-    const { path, showHidden } = c.req.valid("query");
+    const { path, showHidden, computeSizes } = c.req.valid("query");
 
     const result = await validatePath(path, { allowBasePath: true });
     if (!result.ok) return c.json({ error: result.error }, result.status);
@@ -97,17 +126,18 @@ app.get(
 
     const entries = await readdir(result.realPath, { withFileTypes: true });
 
-    // Parallel file info retrieval
+    // Parallel file info retrieval (computeSizes only when requested)
     const items = (
       await Promise.all(
         entries
           .filter((e) => showHidden || !e.name.startsWith("."))
-          .map((e) => getFileInfo(join(result.realPath, e.name), result.realPath,
+          .map((e) => getFileInfo(join(result.realPath, e.name), result.realPath, computeSizes).catch(() => null)),
       )
     ).filter((item): item is FileInfo => item !== null);
 
     const info = await getFileInfo(result.realPath);
-
+    const totalSize = computeSizes ? items.reduce((sum, item) => sum + item.size, 0) : 0;
+    return c.json({ ...info, size: totalSize, items, total: items.length });
   },
 );
 
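At the HTTP level the new flag behaves as sketched below; the host and token are placeholders, and the response shape follows the handler's `c.json({ ...info, size, items, total })`.

```ts
// Sketch — host and token are placeholders.
const res = await fetch(
  "http://localhost:3000/files/info?path=/data/uploads&computeSizes=true",
  { headers: { Authorization: "Bearer <token>" } },
);
const dir = await res.json();
// With computeSizes=true, `size` is the sum of the listed items' sizes;
// without the flag the handler reports size: 0 for directory listings.
console.log(dir.size, dir.total, dir.items.length);
```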
@@ -376,7 +406,7 @@ app.post(
   }),
   v("json", TransferBodySchema),
   async (c) => {
-    const { from, to, mode, ownerUid, ownerGid, fileMode, dirMode } = c.req.valid("json");
+    const { from, to, mode, ensureUniqueName, ownerUid, ownerGid, fileMode, dirMode } = c.req.valid("json");
 
     // Build ownership if provided
     const ownership =
@@ -400,18 +430,20 @@ app.post(
        return c.json({ error: "source not found" }, 404);
      }
 
-      await
-
+      const targetPath = ensureUniqueName ? await getUniquePath(result.realTo) : result.realTo;
+
+      await mkdir(join(targetPath, ".."), { recursive: true });
+      await rename(result.realPath, targetPath);
 
      // Apply ownership if provided (for move within same base)
      if (ownership) {
-        const ownershipError = await applyOwnershipRecursive(
+        const ownershipError = await applyOwnershipRecursive(targetPath, ownership);
        if (ownershipError) {
          return c.json({ error: ownershipError }, 500);
        }
      }
 
-      return c.json(await getFileInfo(
+      return c.json(await getFileInfo(targetPath));
    }
 
    // Copy: check if same base or cross-base with ownership
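Worth noting: the handler runs `mkdir(join(targetPath, ".."), { recursive: true })` before the `rename`, so missing parent directories under the destination are created automatically. A client-side sketch with example paths:

```ts
// "/data/archive/2024" does not have to exist beforehand; the handler
// creates the parent directory before renaming.
await client.transfer({
  from: "/data/incoming/report.pdf",
  to: "/data/archive/2024/report.pdf",
  mode: "move",
});
```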
@@ -425,19 +457,21 @@ app.post(
       return c.json({ error: "source not found" }, 404);
     }
 
-     await
-
+     const targetPath = ensureUniqueName ? await getUniquePath(sameBaseResult.realTo) : sameBaseResult.realTo;
+
+     await mkdir(join(targetPath, ".."), { recursive: true });
+     await cp(sameBaseResult.realPath, targetPath, { recursive: true });
 
     // Apply ownership if provided
     if (ownership) {
-      const ownershipError = await applyOwnershipRecursive(
+      const ownershipError = await applyOwnershipRecursive(targetPath, ownership);
      if (ownershipError) {
-        await rm(
+        await rm(targetPath, { recursive: true }).catch(() => {});
        return c.json({ error: ownershipError }, 500);
      }
    }
 
-    return c.json(await getFileInfo(
+    return c.json(await getFileInfo(targetPath));
   }
 
   // Cross-base copy - ownership is required
@@ -458,17 +492,19 @@ app.post(
      return c.json({ error: "source not found" }, 404);
    }
 
-    await
-
+    const targetPath = ensureUniqueName ? await getUniquePath(toResult.realPath) : toResult.realPath;
+
+    await mkdir(join(targetPath, ".."), { recursive: true });
+    await cp(fromResult.realPath, targetPath, { recursive: true });
 
    // Apply ownership recursively to copied content
-    const ownershipError = await applyOwnershipRecursive(
+    const ownershipError = await applyOwnershipRecursive(targetPath, ownership);
    if (ownershipError) {
-      await rm(
+      await rm(targetPath, { recursive: true }).catch(() => {});
      return c.json({ error: ownershipError }, 500);
    }
 
-    return c.json(await getFileInfo(
+    return c.json(await getFileInfo(targetPath));
  },
);
 
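All three branches respond with `getFileInfo(targetPath)`, so a caller can inspect the returned info to see which name the server actually picked when `ensureUniqueName` (default `true`) kicked in. Sketch, assuming `client.transfer` resolves to the parsed JSON body like the other client methods:

```ts
const copied = await client.transfer({
  from: "/data/tmp/report.pdf",
  to: "/data/archive/report.pdf", // may already exist
  mode: "copy",
});
// If the target existed, the server chose a free name such as "report-01.pdf".
console.log(copied.name, copied.path);
```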
package/src/schemas.ts
CHANGED
@@ -4,71 +4,94 @@ import { z } from "zod";
 // Common
 // ============================================================================
 
-export const ErrorSchema = z
+export const ErrorSchema = z
+  .object({
+    error: z.string().describe("Error message describing what went wrong"),
+  })
+  .describe("Error response returned when a request fails");
+
+export const FileTypeSchema = z.enum(["file", "directory"]).describe("Type of filesystem entry");
+
+export const FileInfoSchema = z
+  .object({
+    name: z.string().describe("Filename or directory name"),
+    path: z.string().describe("Relative path from the base directory"),
+    type: FileTypeSchema,
+    size: z.number().describe("File size in bytes, or total directory size for directories"),
+    mtime: z.iso.datetime().describe("Last modification time in ISO 8601 format"),
+    isHidden: z.boolean().describe("True if the name starts with a dot"),
+    mimeType: z.string().optional().describe("MIME type of the file (only for files)"),
+  })
+  .describe("Information about a file or directory");
 
 export const DirInfoSchema = FileInfoSchema.extend({
-  items: z.array(FileInfoSchema),
-  total: z.number(),
-});
+  items: z.array(FileInfoSchema).describe("List of files and directories in this directory"),
+  total: z.number().describe("Total number of items in the directory"),
+}).describe("Directory information including its contents");
 
 // ============================================================================
 // Query Params
 // ============================================================================
 
-export const PathQuerySchema = z
+export const PathQuerySchema = z
+  .object({
+    path: z.string().min(1).describe("Absolute path to the file or directory"),
+  })
+  .describe("Query parameters for path-based operations");
+
+export const ContentQuerySchema = z
+  .object({
+    path: z.string().min(1).describe("Absolute path to the file or directory to download"),
+    inline: z
+      .string()
+      .optional()
+      .transform((v) => v === "true")
+      .describe("If 'true', display in browser instead of downloading (Content-Disposition: inline)"),
+  })
+  .describe("Query parameters for content download");
+
+export const InfoQuerySchema = z
+  .object({
+    path: z.string().min(1).describe("Absolute path to the file or directory"),
+    showHidden: z
+      .string()
+      .optional()
+      .transform((v) => v === "true")
+      .describe("If 'true', include hidden files (starting with dot) in directory listings"),
+    computeSizes: z
+      .string()
+      .optional()
+      .transform((v) => v === "true")
+      .describe("If 'true', compute recursive sizes for directories (slower, default: false)"),
+  })
+  .describe("Query parameters for file/directory info");
+
+export const SearchQuerySchema = z
+  .object({
+    paths: z.string().min(1).describe("Comma-separated list of base paths to search in"),
+    pattern: z.string().min(1).max(500).describe("Glob pattern to match files (e.g., '*.txt', '**/*.pdf')"),
+    showHidden: z
+      .string()
+      .optional()
+      .transform((v) => v === "true")
+      .describe("If 'true', include hidden files in search results"),
+    limit: z
+      .string()
+      .optional()
+      .transform((v) => (v ? parseInt(v, 10) : undefined))
+      .describe("Maximum number of results to return"),
+    files: z
+      .string()
+      .optional()
+      .transform((v) => v !== "false")
+      .describe("If 'false', exclude files from results (default: true)"),
+    directories: z
+      .string()
+      .optional()
+      .transform((v) => v === "true")
+      .describe("If 'true', include directories in results (default: false)"),
+  })
+  .describe("Query parameters for glob-based file search");
 
 /** Count recursive wildcards (**) in a glob pattern */
 export const countRecursiveWildcards = (pattern: string): number => {
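Since query parameters arrive as strings, the schemas coerce them with `.transform()`. A minimal sketch of how `InfoQuerySchema` parses (the import path is an assumption):

```ts
import { InfoQuerySchema } from "./schemas"; // import path is an assumption

const q = InfoQuerySchema.parse({
  path: "/data/uploads",
  showHidden: "false",
  computeSizes: "true",
});
// q.showHidden === false, q.computeSizes === true — only the literal string
// "true" enables a flag; any other value (or a missing one) parses to false.
```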
@@ -79,116 +102,163 @@ export const countRecursiveWildcards = (pattern: string): number => {
 // Request Bodies
 // ============================================================================
 
-export const MkdirBodySchema = z
+export const MkdirBodySchema = z
+  .object({
+    path: z.string().min(1).describe("Absolute path of the directory to create"),
+    ownerUid: z.number().int().optional().describe("Unix user ID to set as owner"),
+    ownerGid: z.number().int().optional().describe("Unix group ID to set as owner"),
+    mode: z
+      .string()
+      .regex(/^[0-7]{3,4}$/)
+      .optional()
+      .describe("Unix permission mode (e.g., '755' or '0755')"),
+  })
+  .describe("Request body for creating a directory");
+
+export const TransferModeSchema = z
+  .enum(["move", "copy"])
+  .describe("Transfer operation type: 'move' (rename) or 'copy' (duplicate)");
+
+export const TransferBodySchema = z
+  .object({
+    from: z.string().min(1).describe("Source path of the file or directory"),
+    to: z.string().min(1).describe("Destination path for the file or directory"),
+    mode: TransferModeSchema,
+    ensureUniqueName: z
+      .boolean()
+      .default(true)
+      .describe("If true, append -01, -02, etc. to avoid overwriting existing files (default: true)"),
+    ownerUid: z.number().int().optional().describe("Unix user ID for ownership (required for cross-base copy)"),
+    ownerGid: z.number().int().optional().describe("Unix group ID for ownership (required for cross-base copy)"),
+    fileMode: z
+      .string()
+      .regex(/^[0-7]{3,4}$/)
+      .optional()
+      .describe("Unix permission mode for files (e.g., '644', required for cross-base copy)"),
+    dirMode: z
+      .string()
+      .regex(/^[0-7]{3,4}$/)
+      .optional()
+      .describe("Unix permission mode for directories (e.g., '755', defaults to fileMode if not set)"),
+  })
+  .describe("Request body for moving or copying files/directories");
+
+export const UploadStartBodySchema = z
+  .object({
+    path: z.string().min(1).describe("Directory path where the file will be uploaded"),
+    filename: z.string().min(1).describe("Name of the file to upload"),
+    size: z.number().int().positive().describe("Total size of the file in bytes"),
+    checksum: z
+      .string()
+      .regex(/^sha256:[a-f0-9]{64}$/)
+      .describe("SHA-256 checksum of the entire file (format: 'sha256:<64 hex chars>')"),
+    chunkSize: z.number().int().positive().describe("Size of each chunk in bytes"),
+    ownerUid: z.number().int().optional().describe("Unix user ID to set as owner"),
+    ownerGid: z.number().int().optional().describe("Unix group ID to set as owner"),
+    mode: z
+      .string()
+      .regex(/^[0-7]{3,4}$/)
+      .optional()
+      .describe("Unix permission mode for the uploaded file (e.g., '644')"),
+    dirMode: z
+      .string()
+      .regex(/^[0-7]{3,4}$/)
+      .optional()
+      .describe("Unix permission mode for auto-created parent directories (e.g., '755')"),
+  })
+  .describe("Request body to start or resume a chunked upload");
 
 // ============================================================================
 // Response Schemas
 // ============================================================================
 
-export const SearchResultSchema = z
+export const SearchResultSchema = z
+  .object({
+    basePath: z.string().describe("Base path that was searched"),
+    files: z.array(FileInfoSchema).describe("List of matching files and directories"),
+    total: z.number().describe("Number of matches found in this base path"),
+    hasMore: z.boolean().describe("True if there are more results beyond the limit"),
+  })
+  .describe("Search results for a single base path");
+
+export const SearchResponseSchema = z
+  .object({
+    results: z.array(SearchResultSchema).describe("Search results grouped by base path"),
+    totalFiles: z.number().describe("Total number of matches across all base paths"),
+  })
+  .describe("Complete search response with results from all searched paths");
+
+export const UploadStartResponseSchema = z
+  .object({
+    uploadId: z
+      .string()
+      .regex(/^[a-f0-9]{16}$/)
+      .describe("Unique identifier for this upload session"),
+    totalChunks: z.number().describe("Total number of chunks expected"),
+    chunkSize: z.number().describe("Size of each chunk in bytes"),
+    uploadedChunks: z.array(z.number()).describe("Indices of chunks already uploaded (for resume)"),
+    completed: z.literal(false).describe("Always false for start response"),
+  })
+  .describe("Response when starting or resuming a chunked upload");
+
+export const UploadChunkProgressSchema = z
+  .object({
+    chunkIndex: z.number().describe("Index of the chunk that was just uploaded"),
+    uploadedChunks: z.array(z.number()).describe("All chunk indices uploaded so far"),
+    completed: z.literal(false).describe("False while upload is still in progress"),
+  })
+  .describe("Response after uploading a chunk (upload not yet complete)");
+
+export const UploadChunkCompleteSchema = z
+  .object({
+    completed: z.literal(true).describe("True when all chunks have been uploaded"),
+    file: FileInfoSchema.extend({
+      checksum: z.string().describe("SHA-256 checksum of the assembled file"),
+    }).describe("Information about the completed file"),
+  })
+  .describe("Response after uploading the final chunk");
+
+export const UploadChunkResponseSchema = z
+  .union([UploadChunkProgressSchema, UploadChunkCompleteSchema])
+  .describe("Response after uploading a chunk (either progress or completion)");
 
 // ============================================================================
 // Header Schemas
 // ============================================================================
 
-export const UploadFileHeadersSchema = z
+export const UploadFileHeadersSchema = z
+  .object({
+    "x-file-path": z.string().min(1).describe("Directory path where the file will be uploaded"),
+    "x-file-name": z.string().min(1).describe("Name of the file to upload"),
+    "x-owner-uid": z.string().regex(/^\d+$/).transform(Number).optional().describe("Unix user ID to set as owner"),
+    "x-owner-gid": z.string().regex(/^\d+$/).transform(Number).optional().describe("Unix group ID to set as owner"),
+    "x-file-mode": z
+      .string()
+      .regex(/^[0-7]{3,4}$/)
+      .optional()
+      .describe("Unix permission mode for the file (e.g., '644')"),
+    "x-dir-mode": z
+      .string()
+      .regex(/^[0-7]{3,4}$/)
+      .optional()
+      .describe("Unix permission mode for auto-created directories (e.g., '755')"),
+  })
+  .describe("Headers for simple file upload");
+
+export const UploadChunkHeadersSchema = z
+  .object({
+    "x-upload-id": z
+      .string()
+      .regex(/^[a-f0-9]{16}$/)
+      .describe("Upload session ID from the start response"),
+    "x-chunk-index": z.string().regex(/^\d+$/).transform(Number).describe("Zero-based index of this chunk"),
+    "x-chunk-checksum": z
+      .string()
+      .regex(/^sha256:[a-f0-9]{64}$/)
+      .optional()
+      .describe("SHA-256 checksum of this chunk for verification (format: 'sha256:<64 hex chars>')"),
+  })
+  .describe("Headers for uploading a chunk");
 
 // ============================================================================
 // Types