ctxbin 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,694 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ var __create = Object.create;
4
+ var __defProp = Object.defineProperty;
5
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
6
+ var __getOwnPropNames = Object.getOwnPropertyNames;
7
+ var __getProtoOf = Object.getPrototypeOf;
8
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
9
+ var __export = (target, all) => {
10
+ for (var name in all)
11
+ __defProp(target, name, { get: all[name], enumerable: true });
12
+ };
13
+ var __copyProps = (to, from, except, desc) => {
14
+ if (from && typeof from === "object" || typeof from === "function") {
15
+ for (let key of __getOwnPropNames(from))
16
+ if (!__hasOwnProp.call(to, key) && key !== except)
17
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
18
+ }
19
+ return to;
20
+ };
21
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
22
+ // If the importer is in node compatibility mode or this is not an ESM
23
+ // file that has been converted to a CommonJS file using a Babel-
24
+ // compatible transform (i.e. "__esModule" has not been set), then set
25
+ // "default" to the CommonJS "module.exports" for node compatibility.
26
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
27
+ mod
28
+ ));
29
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
30
+
31
// src/index.ts
// Public API surface of the package; each entry is exposed lazily through
// a getter installed by __export onto the CommonJS exports object.
var index_exports = {};
__export(index_exports, {
  CtxbinError: () => CtxbinError,
  SKILLPACK_HEADER: () => SKILLPACK_HEADER,
  SKILLREF_HEADER: () => SKILLREF_HEADER,
  createSkillpackFromDir: () => createSkillpackFromDir,
  createSkillrefValue: () => createSkillrefValue,
  detectSkillValueType: () => detectSkillValueType,
  formatError: () => formatError,
  loadSkillrefToDir: () => loadSkillrefToDir,
  normalizeGithubUrl: () => normalizeGithubUrl,
  normalizeSkillPath: () => normalizeSkillPath,
  parseSkillrefValue: () => parseSkillrefValue,
  resolveSaveInput: () => resolveSaveInput,
  safeChmod: () => safeChmod,
  validateCommitSha: () => validateCommitSha
});
module.exports = __toCommonJS(index_exports);
50
+
51
+ // src/validators.ts
52
+ var import_node_path = __toESM(require("path"));
53
+
54
// src/errors.ts
// Typed error carrying a short machine-readable code alongside the message.
var CtxbinError = class extends Error {
  code;
  constructor(code, message) {
    super(message);
    this.code = code;
  }
};
// Throw a CtxbinError. Written so callers can `return fail(...)` and keep
// linear control flow even though the call never returns.
function fail(code, message) {
  throw new CtxbinError(code, message);
}
// Render any thrown value as a single-line `CTXBIN_ERR <CODE>: <msg>`;
// values that are not CtxbinError are reported under the generic IO code.
function formatError(err) {
  if (err instanceof CtxbinError) {
    return `CTXBIN_ERR ${err.code}: ${err.message}`;
  }
  return `CTXBIN_ERR IO: ${err instanceof Error ? err.message : String(err)}`;
}

// src/validators.ts
// Validate and canonicalize a GitHub repository URL. Accepts only
// https://github.com/<owner>/<repo> (an optional trailing ".git" is
// stripped); anything else throws CtxbinError("INVALID_URL").
function normalizeGithubUrl(input) {
  let parsedUrl;
  try {
    parsedUrl = new URL(input);
  } catch {
    return fail("INVALID_URL", "invalid URL");
  }
  if (parsedUrl.protocol !== "https:") {
    return fail("INVALID_URL", "URL must use https");
  }
  if (parsedUrl.hostname !== "github.com") {
    return fail("INVALID_URL", "only github.com is supported");
  }
  if (parsedUrl.search || parsedUrl.hash) {
    return fail("INVALID_URL", "URL must not include query or hash");
  }
  const segments = parsedUrl.pathname.split("/").filter(Boolean);
  if (segments.length !== 2) {
    return fail("INVALID_URL", "URL must be https://github.com/<owner>/<repo>");
  }
  const [owner, rawRepo] = segments;
  const repo = rawRepo.endsWith(".git") ? rawRepo.slice(0, -4) : rawRepo;
  if (!owner || !repo) {
    return fail("INVALID_URL", "URL must be https://github.com/<owner>/<repo>");
  }
  return `https://github.com/${owner}/${repo}`;
}
// A ref must be a full 40-character lowercase hex commit SHA.
function validateCommitSha(ref) {
  if (!/^[0-9a-f]{40}$/.test(ref)) {
    return fail("INVALID_REF", "ref must be a 40-hex commit SHA");
  }
  return ref;
}
// Canonicalize a user-supplied skill directory path: backslashes become
// slashes, "./" prefixes and a trailing "/" are removed. Absolute paths,
// empty results, and ".." traversal are rejected with INVALID_PATH.
function normalizeSkillPath(input) {
  const trimmed = input.trim();
  if (!trimmed) {
    return fail("INVALID_PATH", "path must be a non-empty directory path");
  }
  const slashed = trimmed.replace(/\\/g, "/");
  if (slashed.startsWith("/")) {
    return fail("INVALID_PATH", "path must be relative, not absolute");
  }
  const normalized = import_node_path.default.posix.normalize(slashed).replace(/^\.\//, "");
  if (normalized === "" || normalized === ".") {
    return fail("INVALID_PATH", "path must be a non-empty directory path");
  }
  if (normalized === ".." || normalized.startsWith("../") || normalized.includes("/../")) {
    return fail("INVALID_PATH", "path must not include .. segments");
  }
  return normalized.endsWith("/") ? normalized.slice(0, -1) : normalized;
}
// Guard applied to every tar entry before extraction: reject absolute
// paths and any form of ".." traversal. Returns nothing on success.
function assertSafeTarPath(entryPath) {
  const slashed = entryPath.replace(/\\/g, "/");
  if (slashed.startsWith("/")) {
    return fail("INVALID_PATH", `tar entry path must be relative: ${entryPath}`);
  }
  const normalized = import_node_path.default.posix.normalize(slashed);
  if (normalized === ".." || normalized.startsWith("../") || normalized.includes("/../")) {
    return fail("INVALID_PATH", `tar entry path contains traversal: ${entryPath}`);
  }
}
141
+
142
// src/constants.ts
// Magic first lines distinguishing encoded skill values from plain strings.
var SKILLPACK_HEADER = "ctxbin-skillpack@1\n";
var SKILLREF_HEADER = "ctxbin-skillref@1\n";
// Size/quantity caps applied when packing and unpacking skills.
var MAX_SKILLPACK_BYTES = 7 * 1024 * 1024;           // 7 MiB packed tar.gz
var MAX_SKILLREF_DOWNLOAD_BYTES = 20 * 1024 * 1024;  // 20 MiB archive download
var MAX_SKILLREF_EXTRACT_BYTES = 100 * 1024 * 1024;  // 100 MiB extracted
var MAX_SKILLREF_FILES = 5e3;
// Network timeouts for skillref lookups/downloads.
var SKILLREF_CONNECT_TIMEOUT_MS = 5e3;
var SKILLREF_DOWNLOAD_TIMEOUT_MS = 3e4;
// Names never packed into a skillpack (see collectEntries for the rules).
var DEFAULT_EXCLUDES = [".git", "node_modules", ".DS_Store"];

// src/value.ts
// Classify a stored value by its magic header line.
function detectSkillValueType(value) {
  const kinds = [
    [SKILLPACK_HEADER, "skillpack"],
    [SKILLREF_HEADER, "skillref"]
  ];
  for (const [header, kind] of kinds) {
    if (value.startsWith(header)) return kind;
  }
  return "string";
}
159
+
160
+ // src/skillpack.ts
161
+ var import_promises4 = __toESM(require("fs/promises"));
162
+ var import_node_fs = require("fs");
163
+ var import_node_path4 = __toESM(require("path"));
164
+ var import_node_os = __toESM(require("os"));
165
+ var import_node_zlib = __toESM(require("zlib"));
166
+ var import_promises5 = require("stream/promises");
167
+ var import_tar2 = __toESM(require("tar"));
168
+
169
+ // src/fs-ops.ts
170
+ var import_promises2 = __toESM(require("fs/promises"));
171
+ var import_node_path2 = __toESM(require("path"));
172
+
173
+ // src/chmod.ts
174
+ var import_promises = __toESM(require("fs/promises"));
175
// chmod wrapper that tolerates Windows: fs chmod support is limited on
// win32, so any failure there is swallowed; on every other platform the
// original error is rethrown.
async function safeChmod(filePath, mode) {
  try {
    await import_promises.default.chmod(filePath, mode);
  } catch (err) {
    if (process.platform !== "win32") {
      throw err;
    }
  }
}
185
+
186
+ // src/fs-ops.ts
187
// src/fs-ops.ts
// mkdir -p equivalent.
async function ensureDir(dir) {
  await import_promises2.default.mkdir(dir, { recursive: true });
}
// Convert a platform-specific path to forward-slash (posix) form.
function toPosix(p) {
  return p.split(import_node_path2.default.sep).join("/");
}
// Recursively copy the contents of `src` into `dest`, re-applying each
// source entry's permission bits (mode & 0o777) on the copy. Anything
// that is neither a regular file nor a directory aborts with IO.
async function copyDirContents(src, dest) {
  await ensureDir(dest);
  for (const entry of await import_promises2.default.readdir(src, { withFileTypes: true })) {
    const srcPath = import_node_path2.default.join(src, entry.name);
    const destPath = import_node_path2.default.join(dest, entry.name);
    if (entry.isDirectory()) {
      await copyDirContents(srcPath, destPath);
    } else if (entry.isFile()) {
      await ensureDir(import_node_path2.default.dirname(destPath));
      await import_promises2.default.copyFile(srcPath, destPath);
    } else {
      return fail("IO", `unsupported file type during copy: ${srcPath}`);
    }
    const srcStat = await import_promises2.default.stat(srcPath);
    await safeChmod(destPath, srcStat.mode & 511);
  }
}
215
+
216
+ // src/perm.ts
217
+ var import_promises3 = __toESM(require("fs/promises"));
218
+ var import_node_path3 = __toESM(require("path"));
219
// src/perm.ts
// Walk `root` and force deterministic permissions: directories get 0o755
// (493); files get 0o755 when their root-relative posix path is in
// `execSet` and 0o644 (420) otherwise. Non-file/dir entries abort with IO.
async function applyNormalizedPermissions(root, execSet) {
  async function walk(absDir) {
    for (const entry of await import_promises3.default.readdir(absDir, { withFileTypes: true })) {
      const absPath = import_node_path3.default.join(absDir, entry.name);
      if (entry.isDirectory()) {
        await safeChmod(absPath, 493);
        await walk(absPath);
      } else if (entry.isFile()) {
        const rel = toPosix(import_node_path3.default.relative(root, absPath));
        await safeChmod(absPath, execSet.has(rel) ? 493 : 420);
      } else {
        return fail("IO", `unsupported file type after extract: ${absPath}`);
      }
    }
  }
  await walk(root);
}
240
+
241
+ // src/tar-utils.ts
242
+ var import_tar = __toESM(require("tar"));
243
// List the entries of a tar(.gz) archive without extracting it.
// Returns { path, type, size, mode } per entry; size and mode default
// to 0 when the tar header omits them.
async function listTarEntries(file) {
  const collected = [];
  await import_tar.default.t({
    file,
    onentry(entry) {
      collected.push({
        path: entry.path,
        type: entry.type,
        size: entry.size ?? 0,
        mode: entry.mode ?? 0
      });
    }
  });
  return collected;
}
258
+
259
+ // src/skillpack.ts
260
// src/skillpack.ts
// Pack a directory into the textual skillpack format: a deterministic
// (sorted entries, portable headers, zeroed mtimes) tar.gz, base64-encoded
// and prefixed with SKILLPACK_HEADER. Enforces MAX_SKILLPACK_BYTES on the
// compressed archive; all temp state lives in a mkdtemp dir removed in
// `finally`.
async function createSkillpackFromDir(dirPath) {
  const dirStat = await import_promises4.default.stat(dirPath).catch(() => null);
  if (!dirStat || !dirStat.isDirectory()) {
    return fail("INVALID_INPUT", `--dir is not a directory: ${dirPath}`);
  }
  const entries = await collectEntries(dirPath);
  const tmpDir = await import_promises4.default.mkdtemp(
    import_node_path4.default.join(import_node_os.default.tmpdir(), "ctxbin-skillpack-")
  );
  const tarPath = import_node_path4.default.join(tmpDir, "skillpack.tar.gz");
  try {
    // portable + fixed mtime (tar and gzip) keeps the output byte-stable.
    const tarStream = import_tar2.default.c(
      { cwd: dirPath, portable: true, mtime: new Date(0) },
      entries
    );
    const gzip = import_node_zlib.default.createGzip({ mtime: 0 });
    await (0, import_promises5.pipeline)(tarStream, gzip, (0, import_node_fs.createWriteStream)(tarPath));
    const packed = await import_promises4.default.stat(tarPath);
    if (packed.size > MAX_SKILLPACK_BYTES) {
      return fail(
        "SIZE_LIMIT",
        `skillpack tar.gz size ${packed.size} bytes exceeds ${MAX_SKILLPACK_BYTES} bytes`
      );
    }
    const data = await import_promises4.default.readFile(tarPath);
    return SKILLPACK_HEADER + data.toString("base64");
  } finally {
    await import_promises4.default.rm(tmpDir, { recursive: true, force: true });
  }
}
293
// Build the sorted list of tar entry paths (posix, relative to `root`)
// for a skillpack. Directories named in DEFAULT_EXCLUDES are skipped, as
// are .DS_Store files; symlinks and special files abort with IO.
// NOTE(review): per the original logic, a regular FILE named ".git" or
// "node_modules" is NOT excluded (only directories with those names are);
// confirm this asymmetry is intentional.
async function collectEntries(root) {
  const results = [];
  async function walk(absDir, relDir) {
    const dirents = await import_promises4.default.readdir(absDir, { withFileTypes: true });
    dirents.sort((a, b) => a.name.localeCompare(b.name));
    for (const dirent of dirents) {
      if (DEFAULT_EXCLUDES.includes(dirent.name)) {
        if (dirent.isDirectory()) continue;
        if (dirent.isFile() && dirent.name === ".DS_Store") continue;
      }
      const absPath = import_node_path4.default.join(absDir, dirent.name);
      const relPath = relDir ? import_node_path4.default.posix.join(relDir, dirent.name) : dirent.name;
      const stat = await import_promises4.default.lstat(absPath);
      if (stat.isSymbolicLink()) {
        return fail("IO", `symlink not allowed in skillpack: ${absPath}`);
      }
      if (dirent.isDirectory()) {
        results.push(relPath);
        await walk(absPath, relPath);
      } else if (dirent.isFile()) {
        if (dirent.name === ".DS_Store") continue;
        results.push(relPath);
      } else {
        return fail("IO", `unsupported file type in skillpack: ${absPath}`);
      }
    }
  }
  await walk(root, "");
  results.sort();
  return results;
}
332
+
333
+ // src/skillref.ts
334
+ var import_promises6 = __toESM(require("fs/promises"));
335
+ var import_node_path5 = __toESM(require("path"));
336
+ var import_node_os2 = __toESM(require("os"));
337
+ var import_node_fs2 = require("fs");
338
+ var import_tar3 = __toESM(require("tar"));
339
// Tar entry types we are willing to extract from GitHub tarballs.
var ALLOWED_TYPES = new Set(["File", "Directory"]);
// Build a skillref value: SKILLREF_HEADER followed by a JSON payload.
// With `ref` the payload pins a 40-hex commit SHA; without it the payload
// tracks the repository's default branch.
function createSkillrefValue(url, skillPath, ref) {
  const normalizedUrl = normalizeGithubUrl(url);
  const normalizedPath = normalizeSkillPath(skillPath);
  const body = ref
    ? { url: normalizedUrl, path: normalizedPath, ref: validateCommitSha(ref) }
    : { url: normalizedUrl, path: normalizedPath, track: "default" };
  return SKILLREF_HEADER + JSON.stringify(body);
}
346
// Parse and re-validate a skillref value produced by createSkillrefValue.
// Returns { url, path } plus either { ref } (pinned commit) or
// { track: "default" }. Throws TYPE_MISMATCH for non-skillref values and
// IO for malformed payloads.
function parseSkillrefValue(value) {
  if (!value.startsWith(SKILLREF_HEADER)) {
    return fail("TYPE_MISMATCH", "value is not a skillref");
  }
  let parsed;
  try {
    parsed = JSON.parse(value.slice(SKILLREF_HEADER.length));
  } catch {
    return fail("IO", "invalid skillref payload JSON");
  }
  if (!parsed || typeof parsed.url !== "string" || typeof parsed.path !== "string") {
    return fail("IO", "invalid skillref payload fields");
  }
  // Re-normalize so stored values are held to the same rules as new input.
  const url = normalizeGithubUrl(parsed.url);
  const path = normalizeSkillPath(parsed.path);
  if (typeof parsed.ref === "string") {
    return { url, path, ref: validateCommitSha(parsed.ref) };
  }
  if (parsed.track === "default") {
    return { url, path, track: "default" };
  }
  return fail("IO", "invalid skillref payload fields");
}
372
// Resolve a skillref value, download the pinned (or default-branch)
// GitHub tarball, extract only the requested subdirectory with normalized
// permissions, and copy the result into `targetDir`. All temp state lives
// in a mkdtemp dir removed in `finally`.
async function loadSkillrefToDir(value, targetDir) {
  const skillref = parseSkillrefValue(value);
  const resolvedRef = skillref.ref ?? await fetchDefaultBranch(skillref.url);
  const tmpRoot = await import_promises6.default.mkdtemp(
    import_node_path5.default.join(import_node_os2.default.tmpdir(), "ctxbin-skillref-")
  );
  const tarPath = import_node_path5.default.join(tmpRoot, "skillref.tar.gz");
  try {
    await downloadArchive(skillref.url, resolvedRef, tarPath);
    let entries;
    try {
      entries = await listTarEntries(tarPath);
    } catch {
      return fail("IO", "failed to parse tar archive");
    }
    const analysis = analyzeEntries(entries, skillref.path);
    const extractDir = import_node_path5.default.join(tmpRoot, "extract");
    await ensureDir(extractDir);
    // Strip "<archive prefix>/<requested path>/" from every kept entry.
    const stripCount = 1 + skillref.path.split("/").length;
    await import_tar3.default.x({
      file: tarPath,
      cwd: extractDir,
      preserveOwner: false,
      noMtime: true,
      strip: stripCount,
      filter: (p, entry) => isUnderPath(entry?.path ?? p, analysis.prefix, skillref.path)
    });
    await applyNormalizedPermissions(extractDir, analysis.execSet);
    await ensureDir(targetDir);
    await copyDirContents(extractDir, targetDir);
  } finally {
    await import_promises6.default.rm(tmpRoot, { recursive: true, force: true });
  }
}
402
// Stream the codeload tarball for <owner>/<repo>@<ref> to `outPath`,
// enforcing an overall download timeout, a gzip magic-number check on the
// first two bytes (0x1f 0x8b), and MAX_SKILLREF_DOWNLOAD_BYTES in total.
async function downloadArchive(repoUrl, ref, outPath) {
  const { owner, repo } = splitGithubUrl(repoUrl);
  const archiveUrl = `https://codeload.github.com/${owner}/${repo}/tar.gz/${ref}`;
  const controller = new AbortController();
  const totalTimer = setTimeout(() => controller.abort(), SKILLREF_DOWNLOAD_TIMEOUT_MS);
  let res;
  try {
    res = await fetchWithRedirect(archiveUrl, 1, controller, ["github.com", "codeload.github.com"]);
  } catch (err) {
    clearTimeout(totalTimer);
    return fail("NETWORK", `download failed: ${err instanceof Error ? err.message : String(err)}`);
  }
  if (!res.ok) {
    clearTimeout(totalTimer);
    const text = await res.text();
    return fail("NETWORK", `download failed (${res.status}): ${text}`);
  }
  if (!res.body) {
    clearTimeout(totalTimer);
    return fail("NETWORK", "download failed: empty response body");
  }
  const fileStream = (0, import_node_fs2.createWriteStream)(outPath);
  let bytesReceived = 0;
  let head = Buffer.alloc(0);
  try {
    for await (const chunk of res.body) {
      // Accumulate the first two bytes and verify the gzip magic number.
      if (head.length < 2) {
        head = Buffer.concat([head, chunk.subarray(0, 2 - head.length)]);
        if (head.length === 2 && (head[0] !== 31 || head[1] !== 139)) {
          fileStream.close();
          controller.abort();
          return fail("IO", "downloaded file is not gzip data");
        }
      }
      bytesReceived += chunk.length;
      if (bytesReceived > MAX_SKILLREF_DOWNLOAD_BYTES) {
        fileStream.close();
        controller.abort();
        return fail(
          "SIZE_LIMIT",
          `downloaded archive size ${bytesReceived} exceeds ${MAX_SKILLREF_DOWNLOAD_BYTES} bytes`
        );
      }
      fileStream.write(chunk);
    }
  } catch (err) {
    fileStream.close();
    clearTimeout(totalTimer);
    // Our own limit/validation errors pass through untouched.
    if (err instanceof CtxbinError) {
      throw err;
    }
    return fail("NETWORK", `download failed: ${err instanceof Error ? err.message : String(err)}`);
  } finally {
    clearTimeout(totalTimer);
  }
  if (head.length < 2) {
    fileStream.close();
    return fail("IO", "downloaded file is incomplete");
  }
  // Flush and close the file before returning.
  await new Promise((resolve, reject) => {
    fileStream.end(() => resolve());
    fileStream.on("error", reject);
  });
}
469
// fetch with `redirect: "manual"`, following at most `redirectsLeft`
// redirects and only to hosts listed in `allowedHosts`. Each hop arms a
// connect timeout that aborts the shared `controller`.
//
// Fix: the connect timer is now cleared in `finally`, so a rejected fetch
// no longer leaves a stray pending timer that would later abort the shared
// controller (and keep the process alive). The unused body of a redirect
// response is also released before following it.
async function fetchWithRedirect(url, redirectsLeft, controller, allowedHosts, init) {
  const connectTimer = setTimeout(() => controller.abort(), SKILLREF_CONNECT_TIMEOUT_MS);
  let res;
  try {
    res = await fetch(url, {
      ...init,
      signal: controller.signal,
      redirect: "manual"
    });
  } finally {
    clearTimeout(connectTimer);
  }
  if (isRedirect(res.status)) {
    if (redirectsLeft <= 0) {
      return fail("NETWORK", "too many redirects");
    }
    const location = res.headers.get("location");
    if (!location) {
      return fail("NETWORK", "redirect without location header");
    }
    // Release the redirect response's (unused) body; best-effort.
    void res.body?.cancel().catch(() => {});
    const nextUrl = new URL(location, url).toString();
    const host = new URL(nextUrl).hostname;
    if (!allowedHosts.includes(host)) {
      return fail("NETWORK", `redirected to unsupported host: ${host}`);
    }
    return fetchWithRedirect(nextUrl, redirectsLeft - 1, controller, allowedHosts, init);
  }
  return res;
}
494
// True for the HTTP status codes that denote a followable redirect.
function isRedirect(status) {
  switch (status) {
    case 301:
    case 302:
    case 303:
    case 307:
    case 308:
      return true;
    default:
      return false;
  }
}
497
// Split an already-normalized GitHub repo URL into { owner, repo }.
// Throws INVALID_URL when the path does not have exactly two segments.
function splitGithubUrl(repoUrl) {
  const segments = new URL(repoUrl).pathname.split("/").filter(Boolean);
  if (segments.length !== 2) {
    return fail("INVALID_URL", "URL must be https://github.com/<owner>/<repo>");
  }
  const [owner, repo] = segments;
  return { owner, repo };
}
505
// Query the GitHub REST API for a repository's default branch name,
// reusing the redirect/timeout machinery used for downloads. All failure
// modes surface as CtxbinError("NETWORK").
async function fetchDefaultBranch(repoUrl) {
  const { owner, repo } = splitGithubUrl(repoUrl);
  const apiUrl = `https://api.github.com/repos/${owner}/${repo}`;
  const controller = new AbortController();
  const totalTimer = setTimeout(() => controller.abort(), SKILLREF_DOWNLOAD_TIMEOUT_MS);
  let response;
  try {
    response = await fetchWithRedirect(apiUrl, 1, controller, ["github.com", "api.github.com"], {
      headers: {
        "User-Agent": "ctxbin",
        Accept: "application/vnd.github+json"
      }
    });
  } catch (err) {
    clearTimeout(totalTimer);
    return fail("NETWORK", `default branch lookup failed: ${err instanceof Error ? err.message : String(err)}`);
  }
  if (!response.ok) {
    clearTimeout(totalTimer);
    const text = await response.text();
    return fail("NETWORK", `default branch lookup failed (${response.status}): ${text}`);
  }
  let payload;
  try {
    payload = await response.json();
  } catch {
    clearTimeout(totalTimer);
    return fail("NETWORK", "default branch lookup returned invalid JSON");
  }
  clearTimeout(totalTimer);
  if (!payload || typeof payload.default_branch !== "string" || payload.default_branch.length === 0) {
    return fail("NETWORK", "default branch lookup returned no default_branch");
  }
  return payload.default_branch;
}
540
// Validate the archive listing and compute what will be extracted for
// `requestedPath`: the archive's single top-level prefix directory plus
// the set of requested-relative file paths carrying an execute bit.
// Enforces entry-type whitelist, path-traversal safety, entry-count and
// total-size limits; NOT_FOUND when nothing matches the requested path.
function analyzeEntries(entries, requestedPath) {
  if (entries.length === 0) {
    return fail("NOT_FOUND", "archive contained no entries");
  }
  // GitHub tarballs wrap everything in a single "<repo>-<ref>/" directory.
  const prefix = entries[0].path.split("/")[0];
  if (!prefix) {
    return fail("IO", "unable to determine archive prefix");
  }
  const execSet = new Set();
  let entryCount = 0;
  let totalSize = 0;
  let matched = false;
  for (const entry of entries) {
    assertSafeTarPath(entry.path);
    if (!ALLOWED_TYPES.has(entry.type)) {
      return fail("IO", `unsupported entry type in archive: ${entry.path}`);
    }
    if (entry.path === prefix) continue;
    if (!entry.path.startsWith(`${prefix}/`)) {
      return fail("IO", "archive has unexpected top-level layout");
    }
    const rel = entry.path.slice(prefix.length + 1);
    if (!rel) continue;
    const relToReq = stripRequestedPath(rel, requestedPath);
    if (relToReq === null) continue;
    matched = true;
    entryCount += 1;
    if (rel === requestedPath && entry.type === "File") {
      return fail("INVALID_PATH", "requested path is not a directory");
    }
    if (entry.type === "File") {
      totalSize += entry.size ?? 0;
      // 73 === 0o111: keep track of files with any execute bit set.
      if (entry.mode & 73) {
        execSet.add(relToReq);
      }
    }
  }
  if (!matched) {
    return fail("NOT_FOUND", "requested path not found in archive");
  }
  if (entryCount > MAX_SKILLREF_FILES) {
    return fail("SIZE_LIMIT", `extracted entry count ${entryCount} exceeds ${MAX_SKILLREF_FILES}`);
  }
  if (totalSize > MAX_SKILLREF_EXTRACT_BYTES) {
    return fail("SIZE_LIMIT", `extracted size ${totalSize} exceeds ${MAX_SKILLREF_EXTRACT_BYTES}`);
  }
  return { prefix, execSet };
}
594
// Relate `rel` (an archive path already stripped of the tarball prefix)
// to the requested directory: "" for the directory entry itself, the
// remainder for entries inside it, null for unrelated entries.
function stripRequestedPath(rel, requestedPath) {
  if (rel === requestedPath) return "";
  const dirPrefix = `${requestedPath}/`;
  return rel.startsWith(dirPrefix) ? rel.slice(dirPrefix.length) : null;
}
604
// Extraction filter: keep only entries located inside
// "<prefix>/<requestedPath>" (including the requested directory itself).
function isUnderPath(entryPath, prefix, requestedPath) {
  const topPrefix = `${prefix}/`;
  if (!entryPath.startsWith(topPrefix)) {
    // Covers both the bare prefix entry and foreign top-level layouts.
    return false;
  }
  const rel = entryPath.slice(topPrefix.length);
  if (!rel) {
    return false;
  }
  return rel === requestedPath || rel.startsWith(`${requestedPath}/`);
}
620
+
621
+ // src/input.ts
622
+ var import_promises7 = __toESM(require("fs/promises"));
623
+ var import_node_process = __toESM(require("process"));
624
// src/input.ts
// Decide which single input method a `save` invocation uses (--file,
// --value, --dir, --url/--path, or piped stdin) and produce the value.
// Exactly one method must be used; the --dir and --url forms are only
// valid for the "skill" resource and are incompatible with --append.
async function resolveSaveInput(resource, opts, stdinIsTTY = Boolean(import_node_process.default.stdin.isTTY)) {
  const hasFile = typeof opts.file === "string";
  const hasValue = typeof opts.value === "string";
  const hasDir = typeof opts.dir === "string";
  const urlFlagsUsed = Boolean(opts.url || opts.ref || opts.path);
  const hasUrl = Boolean(opts.url && opts.path);
  const explicitCount = [hasFile, hasValue, hasDir, hasUrl].filter(Boolean).length;
  // stdin only counts as an input method when nothing explicit was given
  // and stdin is actually piped (not an interactive terminal).
  const hasStdin = !stdinIsTTY && explicitCount === 0;
  if (urlFlagsUsed && !hasUrl) {
    return fail("INVALID_INPUT", "--url and --path must be provided together");
  }
  if (explicitCount + (hasStdin ? 1 : 0) !== 1) {
    return fail("INVALID_INPUT", "exactly one input method must be used");
  }
  if (hasDir && resource !== "skill") {
    return fail("INVALID_INPUT", "--dir is only valid for skill save");
  }
  if (hasUrl && resource !== "skill") {
    return fail("INVALID_INPUT", "--url/--ref/--path are only valid for skill save");
  }
  if (opts.append && (hasDir || hasUrl)) {
    return fail("INVALID_INPUT", "--append cannot be used with --dir or --url");
  }
  if (hasDir) {
    return { kind: "skillpack", value: await createSkillpackFromDir(opts.dir) };
  }
  if (hasUrl) {
    return { kind: "skillref", value: createSkillrefValue(opts.url, opts.path, opts.ref) };
  }
  if (hasFile) {
    return { kind: "string", value: await import_promises7.default.readFile(opts.file, "utf8") };
  }
  if (hasValue) {
    return { kind: "string", value: opts.value };
  }
  return { kind: "string", value: await readStdin() };
}
666
// Read all of stdin as a single UTF-8 string, resolving at end-of-stream
// and rejecting on stream error.
async function readStdin() {
  return new Promise((resolve, reject) => {
    const stdin = import_node_process.default.stdin;
    let buffered = "";
    stdin.setEncoding("utf8");
    stdin.on("data", (chunk) => {
      buffered += chunk;
    });
    stdin.on("end", () => resolve(buffered));
    stdin.on("error", reject);
  });
}
677
+ // Annotate the CommonJS export names for ESM import in node:
678
+ 0 && (module.exports = {
679
+ CtxbinError,
680
+ SKILLPACK_HEADER,
681
+ SKILLREF_HEADER,
682
+ createSkillpackFromDir,
683
+ createSkillrefValue,
684
+ detectSkillValueType,
685
+ formatError,
686
+ loadSkillrefToDir,
687
+ normalizeGithubUrl,
688
+ normalizeSkillPath,
689
+ parseSkillrefValue,
690
+ resolveSaveInput,
691
+ safeChmod,
692
+ validateCommitSha
693
+ });
694
+ //# sourceMappingURL=index.js.map