pi-forge 0.0.0 → 1.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +48 -4
- package/bin/pi-forge.mjs +37 -0
- package/dist/client/assets/CodeMirrorEditor-BqaaP1EE.js +34 -0
- package/dist/client/assets/CodeMirrorEditor-BqaaP1EE.js.map +1 -0
- package/dist/client/assets/index-B-529kgJ.css +32 -0
- package/dist/client/assets/index-BzKzxXFs.js +392 -0
- package/dist/client/assets/index-BzKzxXFs.js.map +1 -0
- package/dist/client/assets/workbox-window.prod.es5-BBnX5xw4.js +3 -0
- package/dist/client/assets/workbox-window.prod.es5-BBnX5xw4.js.map +1 -0
- package/dist/client/icons/icon-192.png +0 -0
- package/dist/client/icons/icon-512.png +0 -0
- package/dist/client/icons/icon-maskable-512.png +0 -0
- package/dist/client/icons/icon.svg +9 -0
- package/dist/client/index.html +24 -0
- package/dist/client/manifest.webmanifest +1 -0
- package/dist/client/offline.html +142 -0
- package/dist/client/sw.js +3 -0
- package/dist/client/sw.js.map +1 -0
- package/dist/client/workbox-6d7155ed.js +3 -0
- package/dist/client/workbox-6d7155ed.js.map +1 -0
- package/dist/server/agent-resource-loader.js +126 -0
- package/dist/server/agent-resource-loader.js.map +1 -0
- package/dist/server/attachment-converters.js +96 -0
- package/dist/server/attachment-converters.js.map +1 -0
- package/dist/server/auth.js +209 -0
- package/dist/server/auth.js.map +1 -0
- package/dist/server/compaction-history.js +106 -0
- package/dist/server/compaction-history.js.map +1 -0
- package/dist/server/concurrency.js +49 -0
- package/dist/server/concurrency.js.map +1 -0
- package/dist/server/config-export.js +220 -0
- package/dist/server/config-export.js.map +1 -0
- package/dist/server/config-manager.js +528 -0
- package/dist/server/config-manager.js.map +1 -0
- package/dist/server/config.js +326 -0
- package/dist/server/config.js.map +1 -0
- package/dist/server/conversion-worker.mjs +90 -0
- package/dist/server/diagnostics.js +137 -0
- package/dist/server/diagnostics.js.map +1 -0
- package/dist/server/extensions-discovery.js +147 -0
- package/dist/server/extensions-discovery.js.map +1 -0
- package/dist/server/file-manager.js +734 -0
- package/dist/server/file-manager.js.map +1 -0
- package/dist/server/file-references.js +215 -0
- package/dist/server/file-references.js.map +1 -0
- package/dist/server/file-searcher.js +385 -0
- package/dist/server/file-searcher.js.map +1 -0
- package/dist/server/git-runner.js +684 -0
- package/dist/server/git-runner.js.map +1 -0
- package/dist/server/index.js +468 -0
- package/dist/server/index.js.map +1 -0
- package/dist/server/mcp/config.js +133 -0
- package/dist/server/mcp/config.js.map +1 -0
- package/dist/server/mcp/manager.js +351 -0
- package/dist/server/mcp/manager.js.map +1 -0
- package/dist/server/mcp/tool-bridge.js +173 -0
- package/dist/server/mcp/tool-bridge.js.map +1 -0
- package/dist/server/project-manager.js +301 -0
- package/dist/server/project-manager.js.map +1 -0
- package/dist/server/pty-manager.js +354 -0
- package/dist/server/pty-manager.js.map +1 -0
- package/dist/server/routes/_schemas.js +73 -0
- package/dist/server/routes/_schemas.js.map +1 -0
- package/dist/server/routes/auth.js +164 -0
- package/dist/server/routes/auth.js.map +1 -0
- package/dist/server/routes/config.js +1163 -0
- package/dist/server/routes/config.js.map +1 -0
- package/dist/server/routes/control.js +464 -0
- package/dist/server/routes/control.js.map +1 -0
- package/dist/server/routes/exec.js +217 -0
- package/dist/server/routes/exec.js.map +1 -0
- package/dist/server/routes/files.js +847 -0
- package/dist/server/routes/files.js.map +1 -0
- package/dist/server/routes/git.js +837 -0
- package/dist/server/routes/git.js.map +1 -0
- package/dist/server/routes/health.js +97 -0
- package/dist/server/routes/health.js.map +1 -0
- package/dist/server/routes/mcp.js +300 -0
- package/dist/server/routes/mcp.js.map +1 -0
- package/dist/server/routes/projects.js +259 -0
- package/dist/server/routes/projects.js.map +1 -0
- package/dist/server/routes/prompt.js +496 -0
- package/dist/server/routes/prompt.js.map +1 -0
- package/dist/server/routes/sessions.js +783 -0
- package/dist/server/routes/sessions.js.map +1 -0
- package/dist/server/routes/stream.js +69 -0
- package/dist/server/routes/stream.js.map +1 -0
- package/dist/server/routes/terminal.js +335 -0
- package/dist/server/routes/terminal.js.map +1 -0
- package/dist/server/session-registry.js +1197 -0
- package/dist/server/session-registry.js.map +1 -0
- package/dist/server/skill-overrides.js +151 -0
- package/dist/server/skill-overrides.js.map +1 -0
- package/dist/server/skills-export.js +257 -0
- package/dist/server/skills-export.js.map +1 -0
- package/dist/server/sse-bridge.js +220 -0
- package/dist/server/sse-bridge.js.map +1 -0
- package/dist/server/tool-overrides.js +277 -0
- package/dist/server/tool-overrides.js.map +1 -0
- package/dist/server/turn-diff-builder.js +280 -0
- package/dist/server/turn-diff-builder.js.map +1 -0
- package/package.json +53 -12
package/dist/server/routes/files.js
@@ -0,0 +1,847 @@
+import { ChecksumMismatchError, DirectoryNotEmptyError, FileTooLargeError, InvalidNameError, NotAFileError, NotFoundError, PathOutsideRootError, TargetExistsError, deleteEntry, downloadStream, getTree, listAllFiles, makeDirectory, moveEntry, readFile, renameEntry, writeFile, writeFileBytes, } from "../file-manager.js";
+import { config } from "../config.js";
+import { getProject } from "../project-manager.js";
+import { searchFiles, SearchEngineUnavailableError } from "../file-searcher.js";
+import { errorSchema } from "./_schemas.js";
+const MAX_UPLOAD_BYTES = 500 * 1024 * 1024;
+const MAX_UPLOAD_FILES = 16;
+// Aggregate cap across all files in a single upload request. The
+// per-file cap × file count gives 8 GB of theoretical headroom — the
+// aggregate cap puts a tighter ceiling on memory + disk pressure when
+// the user picks a folder full of medium files. Tracked in the parts
+// loop and surfaced as 413 with `aggregate_too_large` so the UI can
+// distinguish from per-file overflows.
+const MAX_TOTAL_UPLOAD_BYTES = 2 * 1024 * 1024 * 1024;
+class AggregateLimitError extends Error {
+    constructor(limit) {
+        super(`aggregate upload exceeds ${limit} bytes`);
+        this.name = "AggregateLimitError";
+    }
+}
+/**
+ * Wrap a multipart file stream so the running byte total is checked
+ * against {@link MAX_TOTAL_UPLOAD_BYTES} on every chunk. Throws
+ * {@link AggregateLimitError} the moment the aggregate crosses the
+ * cap; writeFileBytes catches the throw, unlinks its tmp file, and
+ * the route handler maps it to 413. We pass the running counter via
+ * getter/setter so the count is shared across files in the same
+ * request without leaking module state.
+ */
+function trackAggregate(source, getTotal, setTotal) {
+    return {
+        async *[Symbol.asyncIterator]() {
+            for await (const chunk of source) {
+                const next = getTotal() + chunk.byteLength;
+                if (next > MAX_TOTAL_UPLOAD_BYTES) {
+                    throw new AggregateLimitError(MAX_TOTAL_UPLOAD_BYTES);
+                }
+                setTotal(next);
+                yield chunk;
+            }
+        },
+    };
+}
+/* ----------------------------- schemas ----------------------------- */
+// `additionalProperties: true` on the recursive `children` so Fastify's
+// serializer doesn't drop fields if we add new ones in a future SDK
+// release.
+const treeNodeSchema = {
+    type: "object",
+    required: ["name", "path", "type"],
+    additionalProperties: true,
+    properties: {
+        name: { type: "string" },
+        path: { type: "string" },
+        type: { type: "string", enum: ["file", "directory"] },
+        children: { type: "array", items: { type: "object", additionalProperties: true } },
+        truncated: { type: "boolean" },
+    },
+};
+const readResponseSchema = {
+    type: "object",
+    required: ["path", "content", "size", "language", "binary"],
+    properties: {
+        path: { type: "string" },
+        content: { type: "string" },
+        size: { type: "integer", minimum: 0 },
+        language: { type: "string" },
+        binary: { type: "boolean" },
+    },
+};
+/* ----------------------------- helpers ----------------------------- */
+/**
+ * Parse + clamp the `limit` query param (string-typed because Fastify
+ * deserializes querystrings as strings). Defaults to 50, caps at 200.
+ * Used by the `@`-completion endpoint where unbounded results would
+ * blow up the popover render.
+ */
+function clampLimit(raw) {
+    if (raw === undefined)
+        return 50;
+    const n = Number.parseInt(raw, 10);
+    if (!Number.isFinite(n) || n <= 0)
+        return 50;
+    return Math.min(n, 200);
+}
+/* ----------------------------- error mapping ----------------------------- */
+/**
+ * Translate file-manager errors into wire-shape responses. Routes funnel
+ * everything through this so the mapping is centralised — a future error
+ * type lands in one place.
+ */
+function mapError(reply, err) {
+    if (err instanceof PathOutsideRootError) {
+        return reply
+            .code(403)
+            .send({ error: "path_not_allowed", message: "path is outside the project root" });
+    }
+    if (err instanceof InvalidNameError) {
+        return reply.code(400).send({ error: "invalid_name", message: err.message });
+    }
+    if (err instanceof NotFoundError) {
+        return reply.code(404).send({ error: "not_found", message: "file or directory not found" });
+    }
+    if (err instanceof NotAFileError) {
+        return reply.code(400).send({ error: "not_a_file", message: "target is not a regular file" });
+    }
+    if (err instanceof FileTooLargeError) {
+        return reply.code(413).send({ error: "file_too_large", message: `${err.size} > ${err.limit}` });
+    }
+    if (err instanceof DirectoryNotEmptyError) {
+        return reply.code(409).send({
+            error: "directory_not_empty",
+            message: "delete the contents first; recursive delete is not supported",
+        });
+    }
+    if (err instanceof TargetExistsError) {
+        return reply.code(409).send({ error: "target_exists", message: "destination already exists" });
+    }
+    if (err instanceof ChecksumMismatchError) {
+        return reply.code(422).send({
+            error: "checksum_mismatch",
+            message: `expected sha256 ${err.expected}, computed ${err.actual}`,
+        });
+    }
+    if (err instanceof SearchEngineUnavailableError) {
+        return reply.code(503).send({ error: "engine_unavailable", message: err.message });
+    }
+    // Raw NodeJS.ErrnoException fallback. Without this, an EACCES on a
+    // perms-restricted file in the project tree, an EISDIR from trying to
+    // read a directory as a file, or a vanished file (ENOENT) all collapsed
+    // to a generic 500 — the user got no actionable diagnostic and the
+    // operator had to grep logs to figure out what happened.
+    const code = err.code;
+    if (code === "ENOENT") {
+        return reply.code(404).send({ error: "not_found", message: "file or directory not found" });
+    }
+    if (code === "EACCES" || code === "EPERM") {
+        return reply
+            .code(403)
+            .send({ error: "permission_denied", message: "filesystem permission denied" });
+    }
+    if (code === "EISDIR") {
+        return reply
+            .code(400)
+            .send({ error: "not_a_file", message: "target is a directory, not a file" });
+    }
+    if (code === "ENOTDIR") {
+        return reply
+            .code(400)
+            .send({ error: "not_a_directory", message: "target is a file, not a directory" });
+    }
+    reply.log.error({ err }, "unmapped file-manager error");
+    return reply.code(500).send({ error: "internal_error" });
+}
+/**
+ * Resolve the project for a request and short-circuit with 404 when it
+ * doesn't exist. Returns the project on success; the route handler should
+ * return immediately if `undefined` comes back.
+ */
+/**
+ * Resolve the project for a request.
+ *
+ * Contract: returns the project on success. On miss, sends a 404
+ * via `reply` AND returns undefined — caller MUST `if (project ===
+ * undefined) return reply;` immediately. Returning bare `undefined`
+ * trips Fastify's `FST_ERR_REP_ALREADY_SENT` because the handler's
+ * resolved value is interpreted as "send this," racing the 404 the
+ * helper already sent. The 404 response is intentionally awaited
+ * (Fastify reply.send returns the reply object; awaiting it ensures
+ * any onSend hooks have run before the route handler proceeds).
+ */
+async function resolveProject(projectId, reply) {
+    const project = await getProject(projectId);
+    if (project === undefined) {
+        await reply.code(404).send({ error: "project_not_found", message: "no project with that id" });
+        return undefined;
+    }
+    return { id: project.id, path: project.path };
+}
+/* ----------------------------- routes ----------------------------- */
+export const fileRoutes = async (fastify) => {
+    // ---- @-completion (chat input file references) ----
+    // Polled on every keystroke inside an `@<query>` token; results
+    // shown in a popover above the input. Returns up to 50 paths
+    // matching the query as a path-substring; ranked so a basename
+    // hit beats a deep-path hit.
+    fastify.get("/files/complete", {
+        // Polled per keystroke — silence access logs to keep the
+        // stream readable. Errors still log at warn+.
+        logLevel: "warn",
+        schema: {
+            description: "Flat list of project files matching `query` (path-substring, " +
+                "case-insensitive). Used by the chat input's `@` autocomplete. " +
+                "Skips the same noisy directories as /files/tree. Returns up to " +
+                "`limit` (default 50) POSIX-style paths relative to the project " +
+                "root, ranked so a basename match beats a deep-path match and " +
+                "shorter paths beat longer ones.",
+            tags: ["files"],
+            querystring: {
+                type: "object",
+                required: ["projectId"],
+                properties: {
+                    projectId: { type: "string", minLength: 1 },
+                    query: { type: "string", maxLength: 256 },
+                    limit: { type: "string" },
+                },
+            },
+            response: {
+                200: {
+                    type: "object",
+                    required: ["paths"],
+                    properties: { paths: { type: "array", items: { type: "string" } } },
+                },
+                404: errorSchema,
+                500: errorSchema,
+            },
+        },
+    }, async (req, reply) => {
+        const project = await getProject(req.query.projectId);
+        if (project === undefined) {
+            return reply.code(404).send({ error: "project_not_found" });
+        }
+        const query = (req.query.query ?? "").toLowerCase();
+        const limit = clampLimit(req.query.limit);
+        try {
+            const all = await listAllFiles(project.path);
+            if (query.length === 0) {
+                // Empty query — return the first `limit` files (alphabetically),
+                // matches editor "Quick Open"-style empty-state behaviour.
+                return { paths: all.sort().slice(0, limit) };
+            }
+            const scored = [];
+            for (const p of all) {
+                const lower = p.toLowerCase();
+                const slash = lower.lastIndexOf("/");
+                const base = slash === -1 ? lower : lower.slice(slash + 1);
+                let score;
+                if (base === query)
+                    score = 0;
+                else if (base.startsWith(query))
+                    score = 1;
+                else if (base.includes(query))
+                    score = 2;
+                else if (lower.includes(query))
+                    score = 3;
+                else
+                    continue;
+                scored.push({ path: p, score });
+            }
+            scored.sort((a, b) => a.score !== b.score ? a.score - b.score : a.path.length - b.path.length);
+            return { paths: scored.slice(0, limit).map((s) => s.path) };
+        }
+        catch (err) {
+            return mapError(reply, err);
+        }
+    });
fastify.get("/files/tree", {
|
|
258
|
+
schema: {
|
|
259
|
+
description: "Recursive directory tree for the project. Skips noisy folders " +
|
|
260
|
+
"(node_modules, .git, dist, build, __pycache__, .next, .nuxt, " +
|
|
261
|
+
"coverage, .vite, .turbo, .cache). Default max depth 6 — deeper " +
|
|
262
|
+
"directories are returned with `truncated: true` so the UI can " +
|
|
263
|
+
"lazy-fetch them on demand.",
|
|
264
|
+
tags: ["files"],
|
|
265
|
+
querystring: {
|
|
266
|
+
type: "object",
|
|
267
|
+
required: ["projectId"],
|
|
268
|
+
properties: {
|
|
269
|
+
projectId: { type: "string", minLength: 1 },
|
|
270
|
+
maxDepth: { type: "string", pattern: "^[0-9]+$" },
|
|
271
|
+
},
|
|
272
|
+
},
|
|
273
|
+
response: { 200: treeNodeSchema, 404: errorSchema, 500: errorSchema },
|
|
274
|
+
},
|
|
275
|
+
}, async (req, reply) => {
|
|
276
|
+
const project = await resolveProject(req.query.projectId, reply);
|
|
277
|
+
if (project === undefined)
|
|
278
|
+
return reply;
|
|
279
|
+
try {
|
|
280
|
+
// Clamp client-supplied maxDepth to a sane window. The schema
|
|
281
|
+
// already gates on `^[0-9]+$`, so parseInt is safe; we cap at
|
|
282
|
+
// 32 because anything past that is either a misconfiguration
|
|
283
|
+
// or someone trying to force a deep recursion DoS.
|
|
284
|
+
let maxDepth;
|
|
285
|
+
if (req.query.maxDepth !== undefined) {
|
|
286
|
+
const n = Number.parseInt(req.query.maxDepth, 10);
|
|
287
|
+
maxDepth = Math.min(Math.max(n, 1), 32);
|
|
288
|
+
}
|
|
289
|
+
const tree = await getTree(project.path, maxDepth !== undefined ? { maxDepth } : {});
|
|
290
|
+
return tree;
|
|
291
|
+
}
|
|
292
|
+
catch (err) {
|
|
293
|
+
return mapError(reply, err);
|
|
294
|
+
}
|
|
295
|
+
});
|
|
296
|
+
fastify.get("/files/download", {
|
|
297
|
+
schema: {
|
|
298
|
+
description: "Download a file or directory from the project. Files stream " +
|
|
299
|
+
"verbatim with `Content-Disposition: attachment`; directories " +
|
|
300
|
+
"stream as a gzipped tar (`<dir>.tar.gz`) with the same exclusions " +
|
|
301
|
+
"as the file tree (node_modules, .git, dist, build, etc.). Omitting " +
|
|
302
|
+
"`path` downloads the whole project as a tar.gz.",
|
|
303
|
+
tags: ["files"],
|
|
304
|
+
querystring: {
|
|
305
|
+
type: "object",
|
|
306
|
+
required: ["projectId"],
|
|
307
|
+
properties: {
|
|
308
|
+
projectId: { type: "string", minLength: 1 },
|
|
309
|
+
path: { type: "string", minLength: 1 },
|
|
310
|
+
},
|
|
311
|
+
},
|
|
312
|
+
response: {
|
|
313
|
+
// Binary stream — OpenAPI describes it as `string` + `format: binary`.
|
|
314
|
+
200: { type: "string", format: "binary" },
|
|
315
|
+
400: errorSchema,
|
|
316
|
+
403: errorSchema,
|
|
317
|
+
404: errorSchema,
|
|
318
|
+
500: errorSchema,
|
|
319
|
+
},
|
|
320
|
+
},
|
|
321
|
+
}, async (req, reply) => {
|
|
322
|
+
const project = await resolveProject(req.query.projectId, reply);
|
|
323
|
+
if (project === undefined)
|
|
324
|
+
return reply;
|
|
325
|
+
const target = req.query.path ?? project.path;
|
|
326
|
+
try {
|
|
327
|
+
const result = await downloadStream(target, project.path);
|
|
328
|
+
// RFC 5987 filename* = UTF-8 + percent-encoded so non-ASCII
|
|
329
|
+
// names survive Chrome / Firefox / Safari. Keep the legacy
|
|
330
|
+
// `filename=` for older clients with the same name ASCII-
|
|
331
|
+
// sanitised — most filenames are ASCII anyway.
|
|
332
|
+
const asciiName = result.filename.replace(/[^\x20-\x7e]/g, "_");
|
|
333
|
+
const utfName = encodeURIComponent(result.filename);
|
|
334
|
+
reply.header("Content-Disposition", `attachment; filename="${asciiName}"; filename*=UTF-8''${utfName}`);
|
|
335
|
+
if (result.kind === "file") {
|
|
336
|
+
reply.header("Content-Type", "application/octet-stream");
|
|
337
|
+
reply.header("Content-Length", String(result.size));
|
|
338
|
+
}
|
|
339
|
+
else {
|
|
340
|
+
reply.header("Content-Type", "application/gzip");
|
|
341
|
+
// No Content-Length — we don't know the gzipped size up front.
|
|
342
|
+
}
|
|
343
|
+
return reply.send(result.stream);
|
|
344
|
+
}
|
|
345
|
+
catch (err) {
|
|
346
|
+
return mapError(reply, err);
|
|
347
|
+
}
|
|
348
|
+
});
|
|
349
|
+
fastify.get("/files/read", {
|
|
350
|
+
schema: {
|
|
351
|
+
description: "Read a UTF-8 file from the project. 5 MB cap (returns 413). " +
|
|
352
|
+
"Binary files return `{ binary: true, content: '' }` rather than a " +
|
|
353
|
+
"garbled UTF-8 decode — clients should not pass binary content " +
|
|
354
|
+
"to the editor.",
|
|
355
|
+
tags: ["files"],
|
|
356
|
+
querystring: {
|
|
357
|
+
type: "object",
|
|
358
|
+
required: ["projectId", "path"],
|
|
359
|
+
properties: {
|
|
360
|
+
projectId: { type: "string", minLength: 1 },
|
|
361
|
+
path: { type: "string", minLength: 1 },
|
|
362
|
+
},
|
|
363
|
+
},
|
|
364
|
+
response: {
|
|
365
|
+
200: readResponseSchema,
|
|
366
|
+
400: errorSchema,
|
|
367
|
+
403: errorSchema,
|
|
368
|
+
404: errorSchema,
|
|
369
|
+
413: errorSchema,
|
|
370
|
+
500: errorSchema,
|
|
371
|
+
},
|
|
372
|
+
},
|
|
373
|
+
}, async (req, reply) => {
|
|
374
|
+
const project = await resolveProject(req.query.projectId, reply);
|
|
375
|
+
if (project === undefined)
|
|
376
|
+
return reply;
|
|
377
|
+
try {
|
|
378
|
+
const result = await readFile(req.query.path, project.path);
|
|
379
|
+
return result;
|
|
380
|
+
}
|
|
381
|
+
catch (err) {
|
|
382
|
+
return mapError(reply, err);
|
|
383
|
+
}
|
|
384
|
+
});
|
|
385
|
+
fastify.put("/files/write", {
|
|
386
|
+
schema: {
|
|
387
|
+
description: "Atomic write (tmp + rename). Creates parent directories as " +
|
|
388
|
+
"needed. The body's `path` is required to be inside the project " +
|
|
389
|
+
"root — 403 otherwise.",
|
|
390
|
+
tags: ["files"],
|
|
391
|
+
body: {
|
|
392
|
+
type: "object",
|
|
393
|
+
required: ["projectId", "path", "content"],
|
|
394
|
+
additionalProperties: false,
|
|
395
|
+
properties: {
|
|
396
|
+
projectId: { type: "string", minLength: 1 },
|
|
397
|
+
path: { type: "string", minLength: 1 },
|
|
398
|
+
content: { type: "string" },
|
|
399
|
+
},
|
|
400
|
+
},
|
|
401
|
+
response: {
|
|
402
|
+
200: { type: "object", required: ["path"], properties: { path: { type: "string" } } },
|
|
403
|
+
400: errorSchema,
|
|
404
|
+
403: errorSchema,
|
|
405
|
+
404: errorSchema,
|
|
406
|
+
500: errorSchema,
|
|
407
|
+
},
|
|
408
|
+
},
|
|
409
|
+
}, async (req, reply) => {
|
|
410
|
+
const project = await resolveProject(req.body.projectId, reply);
|
|
411
|
+
if (project === undefined)
|
|
412
|
+
return reply;
|
|
413
|
+
try {
|
|
414
|
+
await writeFile(req.body.path, project.path, req.body.content);
|
|
415
|
+
return { path: req.body.path };
|
|
416
|
+
}
|
|
417
|
+
catch (err) {
|
|
418
|
+
return mapError(reply, err);
|
|
419
|
+
}
|
|
420
|
+
});
|
|
421
|
+
fastify.post("/files/mkdir", {
|
|
422
|
+
schema: {
|
|
423
|
+
description: "Create a single directory under `parentPath`.",
|
|
424
|
+
tags: ["files"],
|
|
425
|
+
body: {
|
|
426
|
+
type: "object",
|
|
427
|
+
required: ["projectId", "parentPath", "name"],
|
|
428
|
+
additionalProperties: false,
|
|
429
|
+
properties: {
|
|
430
|
+
projectId: { type: "string", minLength: 1 },
|
|
431
|
+
parentPath: { type: "string", minLength: 1 },
|
|
432
|
+
name: { type: "string", minLength: 1 },
|
|
433
|
+
},
|
|
434
|
+
},
|
|
435
|
+
response: {
|
|
436
|
+
200: { type: "object", required: ["path"], properties: { path: { type: "string" } } },
|
|
437
|
+
400: errorSchema,
|
|
438
|
+
403: errorSchema,
|
|
439
|
+
404: errorSchema,
|
|
440
|
+
409: errorSchema,
|
|
441
|
+
500: errorSchema,
|
|
442
|
+
},
|
|
443
|
+
},
|
|
444
|
+
}, async (req, reply) => {
|
|
445
|
+
const project = await resolveProject(req.body.projectId, reply);
|
|
446
|
+
if (project === undefined)
|
|
447
|
+
return reply;
|
|
448
|
+
try {
|
|
449
|
+
const created = await makeDirectory(req.body.parentPath, project.path, req.body.name);
|
|
450
|
+
return { path: created };
|
|
451
|
+
}
|
|
452
|
+
catch (err) {
|
|
453
|
+
return mapError(reply, err);
|
|
454
|
+
}
|
|
455
|
+
});
|
|
456
|
+
fastify.post("/files/rename", {
|
|
457
|
+
schema: {
|
|
458
|
+
description: "Rename a file or directory in place — `name` is the new basename. " +
|
|
459
|
+
"Use /files/move to relocate across directories.",
|
|
460
|
+
tags: ["files"],
|
|
461
|
+
body: {
|
|
462
|
+
type: "object",
|
|
463
|
+
required: ["projectId", "path", "name"],
|
|
464
|
+
additionalProperties: false,
|
|
465
|
+
properties: {
|
|
466
|
+
projectId: { type: "string", minLength: 1 },
|
|
467
|
+
path: { type: "string", minLength: 1 },
|
|
468
|
+
name: { type: "string", minLength: 1 },
|
|
469
|
+
},
|
|
470
|
+
},
|
|
471
|
+
response: {
|
|
472
|
+
200: { type: "object", required: ["path"], properties: { path: { type: "string" } } },
|
|
473
|
+
400: errorSchema,
|
|
474
|
+
403: errorSchema,
|
|
475
|
+
404: errorSchema,
|
|
476
|
+
409: errorSchema,
|
|
477
|
+
500: errorSchema,
|
|
478
|
+
},
|
|
479
|
+
},
|
|
480
|
+
}, async (req, reply) => {
|
|
481
|
+
const project = await resolveProject(req.body.projectId, reply);
|
|
482
|
+
if (project === undefined)
|
|
483
|
+
return reply;
|
|
484
|
+
try {
|
|
485
|
+
const renamed = await renameEntry(req.body.path, project.path, req.body.name);
|
|
486
|
+
return { path: renamed };
|
|
487
|
+
}
|
|
488
|
+
catch (err) {
|
|
489
|
+
return mapError(reply, err);
|
|
490
|
+
}
|
|
491
|
+
});
|
|
492
|
+
fastify.post("/files/move", {
|
|
493
|
+
schema: {
|
|
494
|
+
description: "Move a file or directory to `dest` (a full destination path). " +
|
|
495
|
+
"Refuses to move a directory under itself; refuses if `dest` " +
|
|
496
|
+
"already exists.",
|
|
497
|
+
tags: ["files"],
|
|
498
|
+
body: {
|
|
499
|
+
type: "object",
|
|
500
|
+
required: ["projectId", "src", "dest"],
|
|
501
|
+
additionalProperties: false,
|
|
502
|
+
properties: {
|
|
503
|
+
projectId: { type: "string", minLength: 1 },
|
|
504
|
+
src: { type: "string", minLength: 1 },
|
|
505
|
+
dest: { type: "string", minLength: 1 },
|
|
506
|
+
},
|
|
507
|
+
},
|
|
508
|
+
response: {
|
|
509
|
+
200: { type: "object", required: ["path"], properties: { path: { type: "string" } } },
|
|
510
|
+
400: errorSchema,
|
|
511
|
+
403: errorSchema,
|
|
512
|
+
404: errorSchema,
|
|
513
|
+
409: errorSchema,
|
|
514
|
+
500: errorSchema,
|
|
515
|
+
},
|
|
516
|
+
},
|
|
517
|
+
}, async (req, reply) => {
|
|
518
|
+
const project = await resolveProject(req.body.projectId, reply);
|
|
519
|
+
if (project === undefined)
|
|
520
|
+
return reply;
|
|
521
|
+
try {
|
|
522
|
+
const moved = await moveEntry(req.body.src, req.body.dest, project.path);
|
|
523
|
+
return { path: moved };
|
|
524
|
+
}
|
|
525
|
+
catch (err) {
|
|
526
|
+
return mapError(reply, err);
|
|
527
|
+
}
|
|
528
|
+
});
|
|
529
|
+
fastify.delete("/files/delete", {
|
|
530
|
+
schema: {
|
|
531
|
+
description: "Delete a file or directory. Empty directories delete unconditionally. " +
|
|
532
|
+
"Non-empty directories return 409 unless `?recursive=true` is set, in " +
|
|
533
|
+
"which case the entire subtree is removed. The UI prompts the user with " +
|
|
534
|
+
"a second confirmation before retrying with the recursive flag — single- " +
|
|
535
|
+
"user single-tenant, but `rm -rf` should still be an explicit choice.",
|
|
536
|
+
tags: ["files"],
|
|
537
|
+
querystring: {
|
|
538
|
+
type: "object",
|
|
539
|
+
required: ["projectId", "path"],
|
|
540
|
+
properties: {
|
|
541
|
+
projectId: { type: "string", minLength: 1 },
|
|
542
|
+
path: { type: "string", minLength: 1 },
|
|
543
|
+
recursive: { type: "string", enum: ["true", "false"] },
|
|
544
|
+
},
|
|
545
|
+
},
|
|
546
|
+
response: {
|
|
547
|
+
204: { type: "null" },
|
|
548
|
+
400: errorSchema,
|
|
549
|
+
403: errorSchema,
|
|
550
|
+
404: errorSchema,
|
|
551
|
+
409: errorSchema,
|
|
552
|
+
500: errorSchema,
|
|
553
|
+
},
|
|
554
|
+
},
|
|
555
|
+
}, async (req, reply) => {
|
|
556
|
+
const project = await resolveProject(req.query.projectId, reply);
|
|
557
|
+
if (project === undefined)
|
|
558
|
+
return reply;
|
|
559
|
+
try {
|
|
560
|
+
const recursive = req.query.recursive === "true";
|
|
561
|
+
await deleteEntry(req.query.path, project.path, { recursive });
|
|
562
|
+
return reply.code(204).send();
|
|
563
|
+
}
|
|
564
|
+
catch (err) {
|
|
565
|
+
return mapError(reply, err);
|
|
566
|
+
}
|
|
567
|
+
});
|
|
568
|
+
fastify.get("/files/search", {
|
|
569
|
+
config: {
|
|
570
|
+
rateLimit: {
|
|
571
|
+
max: config.rateLimits.searchMax,
|
|
572
|
+
timeWindow: config.rateLimits.searchWindowMs,
|
|
573
|
+
},
|
|
574
|
+
},
|
|
575
|
+
schema: {
|
|
576
|
+
description: "Cross-project text + regex search. Uses ripgrep when available " +
|
|
577
|
+
"(fast + gitignore-aware) and falls back to a Node walk on hosts " +
|
|
578
|
+
"without rg. Response includes `engine: 'ripgrep' | 'node'` so the " +
|
|
579
|
+
"UI can render a fallback-mode badge. Hard caps: 1000 matches max " +
|
|
580
|
+
"per request, 30s wall clock, 5 MB per file. Binary files are " +
|
|
581
|
+
"skipped via NUL-byte heuristic on the fallback path; ripgrep " +
|
|
582
|
+
"uses its own (better) binary detection.",
|
|
583
|
+
tags: ["files"],
|
|
584
|
+
querystring: {
|
|
585
|
+
type: "object",
|
|
586
|
+
required: ["projectId", "q"],
|
|
587
|
+
properties: {
|
|
588
|
+
projectId: { type: "string", minLength: 1 },
|
|
589
|
+
q: { type: "string", minLength: 1, maxLength: 1024 },
|
|
590
|
+
regex: { type: "string", enum: ["0", "1", "true", "false"] },
|
|
591
|
+
caseSensitive: { type: "string", enum: ["0", "1", "true", "false"] },
|
|
592
|
+
includeGitignored: { type: "string", enum: ["0", "1", "true", "false"] },
|
|
593
|
+
include: { type: "string", maxLength: 256 },
|
|
594
|
+
exclude: { type: "string", maxLength: 256 },
|
|
595
|
+
limit: { type: "string", pattern: "^[0-9]+$" },
|
|
596
|
+
},
|
|
597
|
+
},
|
|
598
|
+
response: {
|
|
599
|
+
200: {
|
|
600
|
+
type: "object",
|
|
601
|
+
required: ["engine", "matches", "truncated"],
|
|
602
|
+
properties: {
|
|
603
|
+
engine: { type: "string", enum: ["ripgrep", "node"] },
|
|
604
|
+
truncated: { type: "boolean" },
|
|
605
|
+
matches: {
|
|
606
|
+
type: "array",
|
|
607
|
+
items: {
|
|
608
|
+
type: "object",
|
|
609
|
+
required: ["path", "line", "column", "length", "lineSnippet"],
|
|
610
|
+
properties: {
|
|
611
|
+
path: { type: "string" },
|
|
612
|
+
line: { type: "integer", minimum: 1 },
|
|
613
|
+
column: { type: "integer", minimum: 1 },
|
|
614
|
+
length: { type: "integer", minimum: 0 },
|
|
615
|
+
lineSnippet: { type: "string" },
|
|
616
|
+
},
|
|
617
|
+
},
|
|
618
|
+
},
|
|
619
|
+
},
|
|
620
|
+
},
|
|
621
|
+
400: errorSchema,
|
|
622
|
+
404: errorSchema,
|
|
623
|
+
500: errorSchema,
|
|
624
|
+
},
|
|
625
|
+
},
|
|
626
|
+
}, async (req, reply) => {
|
|
627
|
+
const project = await resolveProject(req.query.projectId, reply);
|
|
628
|
+
if (project === undefined)
|
|
629
|
+
return reply;
|
|
630
|
+
const { q } = req.query;
|
|
631
|
+
const regex = req.query.regex === "1" || req.query.regex === "true";
|
|
632
|
+
const caseSensitive = req.query.caseSensitive === "1" || req.query.caseSensitive === "true";
|
|
633
|
+
const includeGitignored = req.query.includeGitignored === "1" || req.query.includeGitignored === "true";
|
|
634
|
+
const limit = req.query.limit !== undefined
|
|
635
|
+
? Math.min(1000, Math.max(1, Number.parseInt(req.query.limit, 10)))
|
|
636
|
+
: 200;
|
|
637
|
+
try {
|
|
638
|
+
const opts = {
|
|
639
|
+
query: q,
|
|
640
|
+
regex,
|
|
641
|
+
caseSensitive,
|
|
642
|
+
includeGitignored,
|
|
643
|
+
limit,
|
|
644
|
+
timeoutMs: 30_000,
|
|
645
|
+
};
|
|
646
|
+
if (req.query.include !== undefined && req.query.include.length > 0) {
|
|
647
|
+
opts.include = req.query.include;
|
|
648
|
+
}
|
|
649
|
+
if (req.query.exclude !== undefined && req.query.exclude.length > 0) {
|
|
650
|
+
opts.exclude = req.query.exclude;
|
|
651
|
+
}
|
|
652
|
+
const result = await searchFiles(project.path, opts);
|
|
653
|
+
return result;
|
|
654
|
+
}
|
|
655
|
+
catch (err) {
|
|
656
|
+
return mapError(reply, err);
|
|
657
|
+
}
|
|
658
|
+
});
|
|
659
|
+
+    // ----------------------------- upload -----------------------------
+    // Multipart upload of one or more files into a chosen folder under
+    // the project. Each file is streamed to a tmp path, hashed with
+    // SHA-256 as bytes flow, and atomically renamed into place IFF the
+    // computed digest matches the one the client supplied (or the client
+    // declined to supply one — we still return the computed value so the
+    // caller can verify out-of-band). Per-file cap and file-count cap
+    // are enforced via the per-call multipart `limits` override.
+    //
+    // Field shape (FormData order matters — fields BEFORE files so we
+    // know `parentPath`/`overwrite`/`sha256:<name>` by the time the file
+    // part is parsed):
+    // - projectId: string (required)
+    // - parentPath: string — absolute, inside project (required)
+    // - overwrite: "1"/"true" — replace existing files
+    // - sha256:<filename>: 64-char lowercase hex (optional, per file)
+    // - <any-field-name>: file part(s)
+    fastify.post("/files/upload", {
+        config: {
+            rateLimit: {
+                max: config.rateLimits.uploadMax,
+                timeWindow: config.rateLimits.uploadWindowMs,
+            },
+        },
+        schema: {
+            description: `Upload one or more files into a project folder via multipart/form-data. ` +
+                `Each file is streamed to disk, its SHA-256 is computed on the fly, and ` +
+                `the rename to the final name is performed only after a checksum match ` +
+                `(when the client supplied one via the \`sha256:<filename>\` text field). ` +
+                `Per-file cap: ${MAX_UPLOAD_BYTES / (1024 * 1024)} MB. Aggregate cap: ` +
+                `${MAX_TOTAL_UPLOAD_BYTES / (1024 * 1024)} MB across all parts. Max ` +
+                `${MAX_UPLOAD_FILES} files per request. Existing targets return 409 unless ` +
+                `\`overwrite=1\` is sent. Per-file overflows return 413 \`file_too_large\`; ` +
+                `aggregate overflows return 413 \`aggregate_too_large\`.`,
+            tags: ["files"],
+            consumes: ["multipart/form-data"],
+            response: {
+                200: {
+                    type: "object",
+                    required: ["files"],
+                    properties: {
+                        files: {
+                            type: "array",
+                            items: {
+                                type: "object",
+                                required: ["path", "size", "sha256"],
+                                properties: {
+                                    path: { type: "string" },
+                                    size: { type: "integer", minimum: 0 },
+                                    sha256: { type: "string" },
+                                },
+                            },
+                        },
+                    },
+                },
+                400: errorSchema,
+                403: errorSchema,
+                404: errorSchema,
+                409: errorSchema,
+                413: errorSchema,
+                415: errorSchema,
+                422: errorSchema,
+                500: errorSchema,
+            },
+        },
+    }, async (req, reply) => {
+        if (!req.isMultipart()) {
+            return reply.code(415).send({ error: "expected_multipart" });
+        }
+        let projectId;
+        let parentPath;
+        let overwrite = false;
+        let aggregateBytes = 0;
+        const expectedHashes = new Map();
+        const written = [];
+        try {
+            const parts = req.parts({
+                limits: {
+                    fileSize: MAX_UPLOAD_BYTES,
+                    files: MAX_UPLOAD_FILES,
+                    fields: 64,
+                },
+            });
+            for await (const part of parts) {
+                if (part.type === "field") {
+                    if (part.fieldname === "projectId" && typeof part.value === "string") {
+                        projectId = part.value;
+                    }
+                    else if (part.fieldname === "parentPath" && typeof part.value === "string") {
+                        parentPath = part.value;
+                    }
+                    else if (part.fieldname === "overwrite" && typeof part.value === "string") {
+                        overwrite = part.value === "1" || part.value === "true";
+                    }
+                    else if (part.fieldname.startsWith("sha256:") && typeof part.value === "string") {
+                        const name = part.fieldname.slice("sha256:".length);
+                        if (name.length > 0)
+                            expectedHashes.set(name, part.value.toLowerCase());
+                    }
+                    continue;
+                }
+                // File part. Project + parent must already be parsed — the
+                // FormData field-order contract is documented above.
+                const file = part;
+                if (projectId === undefined) {
+                    return reply.code(400).send({
+                        error: "missing_field",
+                        message: "projectId must precede file parts in the multipart body",
+                    });
+                }
+                if (parentPath === undefined) {
+                    return reply.code(400).send({
+                        error: "missing_field",
+                        message: "parentPath must precede file parts in the multipart body",
+                    });
+                }
+                const project = await getProject(projectId);
+                if (project === undefined) {
+                    return reply.code(404).send({ error: "project_not_found" });
+                }
+                const filename = file.filename;
+                if (filename === undefined || filename.length === 0) {
+                    return reply.code(400).send({ error: "missing_filename" });
+                }
+                const expected = expectedHashes.get(filename);
+                // Stream the part body straight through writeFileBytes so we
+                // never buffer the whole file in memory. We wrap the part
+                // stream in an aggregate-tracking iterator so the request
+                // aborts as soon as the running total crosses
+                // MAX_TOTAL_UPLOAD_BYTES — without this, a user could send
+                // 16 × 500 MB and burn 8 GB of disk before the route layer
+                // noticed.
+                const trackedSource = trackAggregate(file.file, () => aggregateBytes, (n) => {
+                    aggregateBytes = n;
+                });
+                let result;
+                try {
+                    result = await writeFileBytes(parentPath, filename, project.path, trackedSource, {
+                        ...(expected !== undefined ? { expectedSha256: expected } : {}),
+                        overwrite,
+                    });
+                }
+                catch (err) {
+                    if (err instanceof AggregateLimitError) {
+                        // Roll back every previously-written file in this same
+                        // request. Without this, a 3-file upload where the 3rd
+                        // trips the aggregate cap would leave the first two on
+                        // disk; the user sees a 413 and (reasonably) thinks
+                        // nothing was uploaded, then retries and gets confusing
+                        // 409 target_exists for the first two.
+                        for (const prior of written) {
+                            await deleteEntry(prior.path, project.path).catch(() => undefined);
+                        }
+                        return reply.code(413).send({
+                            error: "aggregate_too_large",
+                            message: `Total upload size exceeds the ${MAX_TOTAL_UPLOAD_BYTES / (1024 * 1024)} MB aggregate limit.`,
+                        });
+                    }
+                    throw err;
+                }
+                if (file.file.truncated) {
+                    // The file exceeded the per-file cap; writeFileBytes already
+                    // wrote whatever streamed through. Roll it back so we don't
+                    // leave a partial upload visible.
+                    await deleteEntry(result.path, project.path).catch(() => undefined);
+                    return reply.code(413).send({
+                        error: "file_too_large",
+                        message: `Upload "${filename}" exceeds the ${MAX_UPLOAD_BYTES / (1024 * 1024)} MB per-file limit.`,
+                    });
+                }
+                written.push({
+                    path: result.path,
+                    size: result.size,
+                    sha256: result.sha256,
+                });
+            }
+            if (written.length === 0) {
+                return reply
+                    .code(400)
+                    .send({ error: "no_files", message: "no file parts in the request" });
+            }
+            return { files: written };
+        }
+        catch (err) {
+            return mapError(reply, err);
+        }
+    });
+};
+//# sourceMappingURL=files.js.map
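
The FormData field-order contract documented in the upload route's comments is easiest to see from the client side. The sketch below is illustrative only and is not part of the package: it assumes a Node 18+ client (global `fetch`, `FormData`, `Blob`), a placeholder `baseUrl` pointing at wherever these routes are mounted, and no authentication handling. `uploadOne`, its parameters, and the field name `"file"` are made up for the example; only the `projectId`, `parentPath`, `overwrite`, and `sha256:<filename>` fields and the 200 response shape come from the route above.

```js
// Hypothetical client sketch for POST /files/upload (not shipped with pi-forge).
import { createHash } from "node:crypto";
import { readFile } from "node:fs/promises";

async function uploadOne(baseUrl, projectId, parentPath, localPath, remoteName) {
  const bytes = await readFile(localPath);
  // Compute the digest the server will verify before renaming the tmp file.
  const sha256 = createHash("sha256").update(bytes).digest("hex");

  const form = new FormData();
  // Text fields must come before any file part, so the server already knows
  // the destination, overwrite flag, and expected digest when the file streams in.
  form.append("projectId", projectId);
  form.append("parentPath", parentPath);
  form.append("overwrite", "1");
  form.append(`sha256:${remoteName}`, sha256);
  // File part last; the field name is arbitrary, the filename is what the
  // server matches against the sha256:<filename> field.
  form.append("file", new Blob([bytes]), remoteName);

  const res = await fetch(`${baseUrl}/files/upload`, { method: "POST", body: form });
  if (!res.ok) throw new Error(`upload failed: ${res.status} ${await res.text()}`);
  return res.json(); // { files: [{ path, size, sha256 }] }
}
```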