ctxbin 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js ADDED
@@ -0,0 +1,1066 @@
1
#!/usr/bin/env node
"use strict";
// esbuild-generated CommonJS bundle preamble: cached references to the
// Object intrinsics used by the module-interop helpers below.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copies every own property of `from` onto `to` as a live getter, skipping
// `except` and any property `to` already has; used for module re-export.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a require()d CommonJS module so it can be consumed like an ES module
// namespace (adding a synthetic `default` binding where needed).
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
25
+
26
+ // src/cli.ts
27
+ var import_node_util2 = require("util");
28
+ var import_node_process2 = __toESM(require("process"));
29
+ var import_node_path8 = __toESM(require("path"));
30
+ var import_promises9 = __toESM(require("fs/promises"));
31
+ var import_node_fs3 = require("fs");
32
+
33
+ // src/errors.ts
34
// Domain error carrying a machine-readable code (e.g. "NETWORK", "IO")
// alongside the human-readable message.
var CtxbinError = class extends Error {
  constructor(code, message) {
    super(message);
    // Expose the code as an own property so callers can branch on it.
    this.code = code;
  }
};
41
// Abort the current operation by throwing a CtxbinError with the given code.
function fail(code, message) {
  const error = new CtxbinError(code, message);
  throw error;
}
44
// Render any thrown value as a single "CTXBIN_ERR <code>: <message>" line for
// CLI output; errors without a ctxbin code fall under the generic IO code.
function formatError(err) {
  if (err instanceof CtxbinError) {
    return `CTXBIN_ERR ${err.code}: ${err.message}`;
  }
  const text = err instanceof Error ? err.message : String(err);
  return `CTXBIN_ERR IO: ${text}`;
}
51
+
52
+ // src/config.ts
53
+ var import_promises = __toESM(require("fs/promises"));
54
+ var import_node_path = __toESM(require("path"));
55
+ var import_node_os = __toESM(require("os"));
56
+ var ENV_URL = "CTXBIN_STORE_URL";
57
+ var ENV_TOKEN = "CTXBIN_STORE_TOKEN";
58
// Resolve store credentials. Environment variables win; otherwise fall back
// to ~/.ctxbin/config.json. Throws INVALID_INPUT when neither source yields a
// complete { url, token } pair.
async function loadConfig() {
  const envUrl = process.env[ENV_URL];
  const envToken = process.env[ENV_TOKEN];
  // A partially-set environment is an error, not a silent fallback.
  if (envUrl || envToken) {
    if (envUrl && envToken) {
      return { url: envUrl, token: envToken };
    }
    return fail("INVALID_INPUT", "both CTXBIN_STORE_URL and CTXBIN_STORE_TOKEN must be set");
  }
  const configPath = import_node_path.default.join(import_node_os.default.homedir(), ".ctxbin", "config.json");
  let raw;
  try {
    raw = await import_promises.default.readFile(configPath, "utf8");
  } catch {
    return fail("INVALID_INPUT", "missing CTXBIN_STORE_URL/CTXBIN_STORE_TOKEN and no ~/.ctxbin/config.json");
  }
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch {
    return fail("INVALID_INPUT", "invalid JSON in ~/.ctxbin/config.json");
  }
  // Accept several historical key spellings for compatibility.
  const url = parsed.store_url || parsed.storeUrl || parsed.url;
  const token = parsed.store_token || parsed.storeToken || parsed.token;
  if (url && token) {
    return { url, token };
  }
  return fail("INVALID_INPUT", "config.json must include store_url and store_token");
}
87
// Persist store credentials to ~/.ctxbin/config.json.
// The directory is created with mode 0o700 (448) and the file is written with
// mode 0o600 (384) because it contains the bearer token.
async function writeConfig(config) {
  const dir = import_node_path.default.join(import_node_os.default.homedir(), ".ctxbin");
  const configPath = import_node_path.default.join(dir, "config.json");
  await import_promises.default.mkdir(dir, { recursive: true, mode: 448 });
  const payload = {
    store_url: config.url,
    store_token: config.token
  };
  // Fix: previously written without an explicit mode, leaving the token file
  // at the default 0o644 (world-readable).
  await import_promises.default.writeFile(configPath, JSON.stringify(payload, null, 2), { encoding: "utf8", mode: 384 });
}
97
+
98
+ // src/store.ts
99
// Build a minimal client for the ctxbin HTTP store. Every operation POSTs a
// JSON command array (Redis-style) to the store URL with a bearer token and
// expects a JSON object body of the form { result } or { error }.
function createStore(url, token) {
  const baseUrl = url.replace(/\/$/, "");
  async function command(cmd, ...args) {
    let res;
    try {
      res = await fetch(baseUrl, {
        method: "POST",
        headers: {
          Authorization: `Bearer ${token}`,
          "Content-Type": "application/json"
        },
        body: JSON.stringify([cmd, ...args])
      });
    } catch (err) {
      return fail("NETWORK", `store request failed: ${err instanceof Error ? err.message : String(err)}`);
    }
    if (!res.ok) {
      const text = await res.text();
      return fail("NETWORK", `store request failed (${res.status}): ${text}`);
    }
    let data;
    try {
      data = await res.json();
    } catch {
      return fail("NETWORK", "store response was not valid JSON");
    }
    // Fix: a valid-JSON body of `null` (or any non-object) previously made
    // the `data.error` / `data.result` accesses throw a raw TypeError
    // instead of a clean CTXBIN_ERR NETWORK failure.
    if (data === null || typeof data !== "object") {
      return fail("NETWORK", "store response was not a JSON object");
    }
    if (data.error) {
      return fail("NETWORK", data.error);
    }
    return data.result;
  }
  return {
    // HGET — returns the stored string, or null when the field is absent.
    async get(hash, field) {
      const result = await command("HGET", hash, field);
      return result ?? null;
    },
    // HSET — stores `value` under `field`.
    async set(hash, field, value) {
      await command("HSET", hash, field, value);
    },
    // HDEL — removes `field` if present.
    async delete(hash, field) {
      await command("HDEL", hash, field);
    },
    // HGETALL — flat [field, value, ...] array folded into { field, value }
    // pairs, sorted by field name for stable output.
    async list(hash) {
      const result = await command("HGETALL", hash);
      if (!result) return [];
      if (!Array.isArray(result)) {
        return fail("NETWORK", "store response for HGETALL was not an array");
      }
      const pairs = [];
      for (let i = 0; i < result.length; i += 2) {
        const field = result[i];
        const value = result[i + 1];
        if (typeof field !== "string" || typeof value !== "string") {
          return fail("NETWORK", "store response for HGETALL contained invalid entries");
        }
        pairs.push({ field, value });
      }
      pairs.sort((a, b) => a.field.localeCompare(b.field));
      return pairs;
    }
  };
}
161
+
162
+ // src/git.ts
163
+ var import_node_child_process = require("child_process");
164
+ var import_node_path2 = __toESM(require("path"));
165
+ var import_node_util = require("util");
166
+ var execFileAsync = (0, import_node_util.promisify)(import_node_child_process.execFile);
167
// Run a git command and return its trimmed stdout. Any failure (git missing,
// not a repository, bad ref) is reported uniformly as NOT_IN_GIT.
async function git(args) {
  try {
    const result = await execFileAsync("git", args, { encoding: "utf8" });
    return result.stdout.trim();
  } catch {
    return fail("NOT_IN_GIT", "not inside a git repository");
  }
}
175
// Derive the default ctx key "<project>/<branch>" from the current git
// repository, where project is the basename of the worktree root.
async function inferCtxKey() {
  const root = await git(["rev-parse", "--show-toplevel"]);
  const branch = await git(["rev-parse", "--abbrev-ref", "HEAD"]);
  const project = import_node_path2.default.basename(root);
  if (project && branch) {
    return `${project}/${branch}`;
  }
  return fail("NOT_IN_GIT", "unable to infer ctx key from git repository");
}
184
+
185
+ // src/skillpack.ts
186
+ var import_promises5 = __toESM(require("fs/promises"));
187
+ var import_node_fs = require("fs");
188
+ var import_node_path6 = __toESM(require("path"));
189
+ var import_node_os2 = __toESM(require("os"));
190
+ var import_node_zlib = __toESM(require("zlib"));
191
+ var import_promises6 = require("stream/promises");
192
+ var import_tar2 = __toESM(require("tar"));
193
+
194
+ // src/constants.ts
195
// Magic first lines that tag stored skill values by format.
var SKILLPACK_HEADER = "ctxbin-skillpack@1\n";
var SKILLREF_HEADER = "ctxbin-skillref@1\n";
// Hard cap on an inline skillpack (base64 tar.gz) stored in the hash.
var MAX_SKILLPACK_BYTES = 7 * 1024 * 1024;
// Limits applied when resolving a skillref from GitHub: raw download size,
// post-extraction total size, and number of extracted entries.
var MAX_SKILLREF_DOWNLOAD_BYTES = 20 * 1024 * 1024;
var MAX_SKILLREF_EXTRACT_BYTES = 100 * 1024 * 1024;
var MAX_SKILLREF_FILES = 5e3;
// Network timeouts (ms): per-hop connect and whole-transfer totals.
var SKILLREF_CONNECT_TIMEOUT_MS = 5e3;
var SKILLREF_DOWNLOAD_TIMEOUT_MS = 3e4;
// Entry names skipped when packing a directory into a skillpack.
var DEFAULT_EXCLUDES = [".git", "node_modules", ".DS_Store"];
204
+
205
+ // src/fs-ops.ts
206
+ var import_promises3 = __toESM(require("fs/promises"));
207
+ var import_node_path3 = __toESM(require("path"));
208
+
209
+ // src/chmod.ts
210
+ var import_promises2 = __toESM(require("fs/promises"));
211
// chmod that tolerates failure on Windows (where POSIX permission bits are
// frequently unsupported); on every other platform the error propagates.
async function safeChmod(path9, mode) {
  try {
    await import_promises2.default.chmod(path9, mode);
  } catch (err) {
    if (process.platform !== "win32") {
      throw err;
    }
  }
}
221
+
222
+ // src/fs-ops.ts
223
// mkdir -p equivalent: creates the directory (and parents) if missing and
// succeeds silently when it already exists.
async function ensureDir(dir) {
  await import_promises3.default.mkdir(dir, { recursive: true });
}
226
// Convert a platform-specific path to forward-slash (POSIX) form.
function toPosix(p) {
  const segments = p.split(import_node_path3.default.sep);
  return segments.join("/");
}
229
// Recursively copy the contents of `src` into `dest`, carrying over the
// permission bits (mode & 0o777) of every file and directory. Anything that
// is not a plain file or directory aborts the copy with an IO error.
async function copyDirContents(src, dest) {
  await ensureDir(dest);
  const entries = await import_promises3.default.readdir(src, { withFileTypes: true });
  for (const entry of entries) {
    const srcPath = import_node_path3.default.join(src, entry.name);
    const destPath = import_node_path3.default.join(dest, entry.name);
    if (entry.isDirectory()) {
      // Copy the subtree first, then stamp the directory's own mode.
      await copyDirContents(srcPath, destPath);
      const stat = await import_promises3.default.stat(srcPath);
      await safeChmod(destPath, stat.mode & 511);
    } else if (entry.isFile()) {
      await ensureDir(import_node_path3.default.dirname(destPath));
      await import_promises3.default.copyFile(srcPath, destPath);
      const stat = await import_promises3.default.stat(srcPath);
      await safeChmod(destPath, stat.mode & 511);
    } else {
      return fail("IO", `unsupported file type during copy: ${srcPath}`);
    }
  }
}
251
+
252
+ // src/perm.ts
253
+ var import_promises4 = __toESM(require("fs/promises"));
254
+ var import_node_path4 = __toESM(require("path"));
255
// Normalize permissions under `root`: directories become 0o755 (493); files
// become 0o755 when their root-relative POSIX path is in `execSet`, else
// 0o644 (420). Any other entry type aborts with an IO error.
async function applyNormalizedPermissions(root, execSet) {
  async function walk(absDir) {
    for (const entry of await import_promises4.default.readdir(absDir, { withFileTypes: true })) {
      const absPath = import_node_path4.default.join(absDir, entry.name);
      if (entry.isDirectory()) {
        await safeChmod(absPath, 493);
        await walk(absPath);
      } else if (entry.isFile()) {
        const rel = toPosix(import_node_path4.default.relative(root, absPath));
        await safeChmod(absPath, execSet.has(rel) ? 493 : 420);
      } else {
        return fail("IO", `unsupported file type after extract: ${absPath}`);
      }
    }
  }
  await walk(root);
}
276
+
277
+ // src/validators.ts
278
+ var import_node_path5 = __toESM(require("path"));
279
// Validate and canonicalize a GitHub repository URL to the exact form
// "https://github.com/<owner>/<repo>" (a trailing ".git" is stripped).
// Rejects non-https schemes, other hosts, explicit ports, embedded
// credentials, and any query string or fragment.
function normalizeGithubUrl(input) {
  let url;
  try {
    url = new URL(input);
  } catch {
    return fail("INVALID_URL", "invalid URL");
  }
  if (url.protocol !== "https:") {
    return fail("INVALID_URL", "URL must use https");
  }
  if (url.hostname !== "github.com") {
    return fail("INVALID_URL", "only github.com is supported");
  }
  // Hardening: the original check only inspected hostname, which let
  // "https://user:pass@github.com/..." and explicit ports slip through.
  if (url.username || url.password) {
    return fail("INVALID_URL", "URL must not include credentials");
  }
  if (url.port) {
    return fail("INVALID_URL", "URL must not include a port");
  }
  if (url.search || url.hash) {
    return fail("INVALID_URL", "URL must not include query or hash");
  }
  const parts = url.pathname.split("/").filter(Boolean);
  if (parts.length !== 2) {
    return fail("INVALID_URL", "URL must be https://github.com/<owner>/<repo>");
  }
  const owner = parts[0];
  let repo = parts[1];
  if (repo.endsWith(".git")) {
    repo = repo.slice(0, -4);
  }
  if (!owner || !repo) {
    return fail("INVALID_URL", "URL must be https://github.com/<owner>/<repo>");
  }
  return `https://github.com/${owner}/${repo}`;
}
309
// Accept only a full 40-character lowercase hex commit SHA.
function validateCommitSha(ref) {
  if (/^[0-9a-f]{40}$/.test(ref)) {
    return ref;
  }
  return fail("INVALID_REF", "ref must be a 40-hex commit SHA");
}
315
// Canonicalize a user-supplied skill directory path: backslashes become
// slashes, "./" prefixes and trailing slashes are dropped. Empty, absolute,
// and traversal (..) paths are rejected.
function normalizeSkillPath(input) {
  const trimmed = input.trim();
  if (!trimmed) {
    return fail("INVALID_PATH", "path must be a non-empty directory path");
  }
  const cleaned = trimmed.replace(/\\/g, "/");
  if (cleaned.startsWith("/")) {
    return fail("INVALID_PATH", "path must be relative, not absolute");
  }
  const normalized = import_node_path5.default.posix.normalize(cleaned).replace(/^\.\//, "");
  if (normalized === "" || normalized === ".") {
    return fail("INVALID_PATH", "path must be a non-empty directory path");
  }
  // Traversal surviving normalization means the path escapes its root.
  if (normalized === ".." || normalized.startsWith("../") || normalized.includes("/../")) {
    return fail("INVALID_PATH", "path must not include .. segments");
  }
  return normalized.endsWith("/") ? normalized.slice(0, -1) : normalized;
}
336
// Reject tar entry paths that could escape the extraction directory:
// absolute paths, or any ".." traversal remaining after normalization.
function assertSafeTarPath(entryPath) {
  const cleaned = entryPath.replace(/\\/g, "/");
  if (cleaned.startsWith("/")) {
    return fail("INVALID_PATH", `tar entry path must be relative: ${entryPath}`);
  }
  const normalized = import_node_path5.default.posix.normalize(cleaned);
  const escapes = normalized === ".." || normalized.startsWith("../") || normalized.includes("/../");
  if (escapes) {
    return fail("INVALID_PATH", `tar entry path contains traversal: ${entryPath}`);
  }
}
346
+
347
+ // src/tar-utils.ts
348
+ var import_tar = __toESM(require("tar"));
349
// Read a tar archive's table of contents without extracting anything:
// returns { path, type, size, mode } per entry (size/mode default to 0).
async function listTarEntries(file) {
  const entries = [];
  await import_tar.default.t({
    file,
    onentry(entry) {
      const record = {
        path: entry.path,
        type: entry.type,
        size: entry.size ?? 0,
        mode: entry.mode ?? 0
      };
      entries.push(record);
    }
  });
  return entries;
}
364
+
365
+ // src/skillpack.ts
366
+ var ALLOWED_TYPES = /* @__PURE__ */ new Set(["File", "Directory"]);
367
// Pack a directory into the inline "skillpack" wire format: a deterministic
// tar.gz (sorted entries, portable mode, zeroed mtimes) encoded as base64
// and prefixed with SKILLPACK_HEADER. Fails with SIZE_LIMIT when the
// compressed archive exceeds MAX_SKILLPACK_BYTES.
async function createSkillpackFromDir(dirPath) {
  const stats = await import_promises5.default.stat(dirPath).catch(() => null);
  if (!stats || !stats.isDirectory()) {
    return fail("INVALID_INPUT", `--dir is not a directory: ${dirPath}`);
  }
  // collectEntries returns a sorted entry list and rejects symlinks and
  // other special files.
  const entries = await collectEntries(dirPath);
  const tmpDir = await import_promises5.default.mkdtemp(import_node_path6.default.join(import_node_os2.default.tmpdir(), "ctxbin-skillpack-"));
  const tarPath = import_node_path6.default.join(tmpDir, "skillpack.tar.gz");
  try {
    // portable:true plus a fixed mtime keeps the tar bytes stable across
    // machines and runs.
    const tarStream = import_tar2.default.c(
      {
        cwd: dirPath,
        portable: true,
        mtime: /* @__PURE__ */ new Date(0)
      },
      entries
    );
    // gzip mtime:0 makes the gzip header deterministic as well.
    const gzip = import_node_zlib.default.createGzip({ mtime: 0 });
    await (0, import_promises6.pipeline)(tarStream, gzip, (0, import_node_fs.createWriteStream)(tarPath));
    const stat = await import_promises5.default.stat(tarPath);
    if (stat.size > MAX_SKILLPACK_BYTES) {
      return fail(
        "SIZE_LIMIT",
        `skillpack tar.gz size ${stat.size} bytes exceeds ${MAX_SKILLPACK_BYTES} bytes`
      );
    }
    const data = await import_promises5.default.readFile(tarPath);
    const b64 = data.toString("base64");
    return SKILLPACK_HEADER + b64;
  } finally {
    // Always remove the temp directory, including on failure paths.
    await import_promises5.default.rm(tmpDir, { recursive: true, force: true });
  }
}
400
// Unpack a skillpack value (SKILLPACK_HEADER + base64 tar.gz) into
// `targetDir`. Extraction is staged through a temp directory: entry paths
// and types are validated before tar.x runs, permissions are normalized to
// 0o755/0o644, and only then are files copied into the target. The temp
// state is always removed.
async function extractSkillpackToDir(value, targetDir) {
  const base64 = value.slice(SKILLPACK_HEADER.length);
  let buffer;
  try {
    // NOTE(review): Buffer.from(b64, "base64") silently ignores invalid
    // characters rather than throwing, so this catch is best-effort only.
    buffer = Buffer.from(base64, "base64");
  } catch {
    return fail("IO", "invalid skillpack base64 data");
  }
  const tmpRoot = await import_promises5.default.mkdtemp(import_node_path6.default.join(import_node_os2.default.tmpdir(), "ctxbin-skillpack-"));
  const tarPath = import_node_path6.default.join(tmpRoot, "skillpack.tar.gz");
  await import_promises5.default.writeFile(tarPath, buffer);
  try {
    // Pre-scan: validateTarEntries rejects unsafe paths/types and collects
    // which files should keep their execute bit.
    const entries = await listTarEntries(tarPath);
    const execSet = validateTarEntries(entries);
    const extractDir = import_node_path6.default.join(tmpRoot, "extract");
    await ensureDir(extractDir);
    await import_tar2.default.x({
      file: tarPath,
      cwd: extractDir,
      preserveOwner: false,
      noMtime: true
    });
    await applyNormalizedPermissions(extractDir, execSet);
    await ensureDir(targetDir);
    await copyDirContents(extractDir, targetDir);
  } finally {
    // Clean up the staging area even when validation/extraction fails.
    await import_promises5.default.rm(tmpRoot, { recursive: true, force: true });
  }
}
429
// Walk `root` and return a sorted list of tar entry paths (POSIX-relative):
// directories and plain files only. Names in DEFAULT_EXCLUDES are skipped
// entirely; symlinks and special files abort with an IO error.
async function collectEntries(root) {
  const results = [];
  async function walk(absDir, relDir) {
    const entries = await import_promises5.default.readdir(absDir, { withFileTypes: true });
    entries.sort((a, b) => a.name.localeCompare(b.name));
    for (const entry of entries) {
      // Fix: skip every excluded name regardless of entry type. Previously
      // only excluded *directories* (and ".DS_Store" files) were skipped, so
      // a ".git" or "node_modules" *file* fell through and was packed.
      if (DEFAULT_EXCLUDES.includes(entry.name)) {
        continue;
      }
      const absPath = import_node_path6.default.join(absDir, entry.name);
      const relPath = relDir ? import_node_path6.default.posix.join(relDir, entry.name) : entry.name;
      // lstat (not stat) so symlinks are detected instead of followed.
      const stat = await import_promises5.default.lstat(absPath);
      if (stat.isSymbolicLink()) {
        return fail("IO", `symlink not allowed in skillpack: ${absPath}`);
      }
      if (entry.isDirectory()) {
        results.push(relPath);
        await walk(absPath, relPath);
        continue;
      }
      if (entry.isFile()) {
        results.push(relPath);
        continue;
      }
      return fail("IO", `unsupported file type in skillpack: ${absPath}`);
    }
  }
  await walk(root, "");
  results.sort();
  return results;
}
468
// Check every skillpack tar entry for safety (path traversal, entry type)
// and return the set of file paths that carry any execute bit (0o111 = 73).
function validateTarEntries(entries) {
  const execSet = /* @__PURE__ */ new Set();
  for (const entry of entries) {
    assertSafeTarPath(entry.path);
    if (!ALLOWED_TYPES.has(entry.type)) {
      return fail("IO", `unsupported entry type in skillpack: ${entry.path}`);
    }
    const isExecutableFile = entry.type === "File" && (entry.mode & 73) !== 0;
    if (isExecutableFile) {
      execSet.add(entry.path);
    }
  }
  return execSet;
}
481
+
482
+ // src/skillref.ts
483
+ var import_promises7 = __toESM(require("fs/promises"));
484
+ var import_node_path7 = __toESM(require("path"));
485
+ var import_node_os3 = __toESM(require("os"));
486
+ var import_node_fs2 = require("fs");
487
+ var import_tar3 = __toESM(require("tar"));
488
+ var ALLOWED_TYPES2 = /* @__PURE__ */ new Set(["File", "Directory"]);
489
// Build a skillref value: SKILLREF_HEADER plus a JSON payload pinning either
// an exact commit ({url, path, ref}) or the repository's default branch
// ({url, path, track: "default"}).
function createSkillrefValue(url, skillPath, ref) {
  const normalizedUrl = normalizeGithubUrl(url);
  const normalizedPath = normalizeSkillPath(skillPath);
  let payload;
  if (ref) {
    payload = JSON.stringify({ url: normalizedUrl, path: normalizedPath, ref: validateCommitSha(ref) });
  } else {
    payload = JSON.stringify({ url: normalizedUrl, path: normalizedPath, track: "default" });
  }
  return SKILLREF_HEADER + payload;
}
495
// Parse and re-validate a stored skillref value. Returns {url, path, ref}
// for commit-pinned refs or {url, path, track: "default"}; any other shape
// fails (TYPE_MISMATCH for a wrong header, IO for a bad payload).
function parseSkillrefValue(value) {
  if (!value.startsWith(SKILLREF_HEADER)) {
    return fail("TYPE_MISMATCH", "value is not a skillref");
  }
  const raw = value.slice(SKILLREF_HEADER.length);
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch {
    return fail("IO", "invalid skillref payload JSON");
  }
  const hasShape = parsed && typeof parsed.url === "string" && typeof parsed.path === "string";
  if (!hasShape) {
    return fail("IO", "invalid skillref payload fields");
  }
  // Re-run the validators so a tampered stored value cannot smuggle in a
  // bad URL, path, or ref.
  const url = normalizeGithubUrl(parsed.url);
  const path = normalizeSkillPath(parsed.path);
  if (typeof parsed.ref === "string") {
    return { url, path, ref: validateCommitSha(parsed.ref) };
  }
  if (parsed.track === "default") {
    return { url, path, track: "default" };
  }
  return fail("IO", "invalid skillref payload fields");
}
521
// Materialize a skillref into `targetDir`: resolve the ref (pinned SHA or
// the repo's current default branch), download the GitHub tarball, validate
// its entries, extract only the requested subdirectory into a temp dir with
// normalized permissions, then copy the result into place.
async function loadSkillrefToDir(value, targetDir) {
  const skillref = parseSkillrefValue(value);
  const resolvedRef = skillref.ref ?? await fetchDefaultBranch(skillref.url);
  const tmpRoot = await import_promises7.default.mkdtemp(import_node_path7.default.join(import_node_os3.default.tmpdir(), "ctxbin-skillref-"));
  const tarPath = import_node_path7.default.join(tmpRoot, "skillref.tar.gz");
  try {
    await downloadArchive(skillref.url, resolvedRef, tarPath);
    const entries = await listTarEntries(tarPath).catch(() => fail("IO", "failed to parse tar archive"));
    // analyzeEntries enforces path safety, entry types, and size limits
    // before anything is written to disk.
    const analysis = analyzeEntries(entries, skillref.path);
    const extractDir = import_node_path7.default.join(tmpRoot, "extract");
    await ensureDir(extractDir);
    // Strip the "<repo>-<ref>/" prefix plus every segment of the requested
    // path so the skill's own files land at the top of extractDir.
    const stripCount = 1 + skillref.path.split("/").length;
    await import_tar3.default.x({
      file: tarPath,
      cwd: extractDir,
      preserveOwner: false,
      noMtime: true,
      strip: stripCount,
      filter: (p, entry) => {
        const entryPath = entry?.path ?? p;
        // Only extract entries inside <prefix>/<requested path>.
        return isUnderPath(entryPath, analysis.prefix, skillref.path);
      }
    });
    await applyNormalizedPermissions(extractDir, analysis.execSet);
    await ensureDir(targetDir);
    await copyDirContents(extractDir, targetDir);
  } finally {
    // Download and staging state is removed even on failure.
    await import_promises7.default.rm(tmpRoot, { recursive: true, force: true });
  }
}
551
// Stream a GitHub codeload tarball for `ref` to `outPath`, enforcing an
// overall download timeout, a gzip magic-number check on the first two
// bytes, and the MAX_SKILLREF_DOWNLOAD_BYTES cap (aborting the transfer as
// soon as it is exceeded).
async function downloadArchive(repoUrl, ref, outPath) {
  const { owner, repo } = splitGithubUrl(repoUrl);
  const url = `https://codeload.github.com/${owner}/${repo}/tar.gz/${ref}`;
  const controller = new AbortController();
  // A single timer bounds the whole transfer; aborting the controller
  // cancels both the fetch and the body iteration below.
  const totalTimer = setTimeout(() => controller.abort(), SKILLREF_DOWNLOAD_TIMEOUT_MS);
  let res;
  try {
    res = await fetchWithRedirect(url, 1, controller, ["github.com", "codeload.github.com"]);
  } catch (err) {
    clearTimeout(totalTimer);
    return fail("NETWORK", `download failed: ${err instanceof Error ? err.message : String(err)}`);
  }
  if (!res.ok) {
    clearTimeout(totalTimer);
    const text = await res.text();
    return fail("NETWORK", `download failed (${res.status}): ${text}`);
  }
  if (!res.body) {
    clearTimeout(totalTimer);
    return fail("NETWORK", "download failed: empty response body");
  }
  const fileStream = (0, import_node_fs2.createWriteStream)(outPath);
  let total = 0;
  // Accumulates the first two bytes to verify the gzip magic (0x1f 0x8b).
  let magic = Buffer.alloc(0);
  try {
    for await (const chunk of res.body) {
      if (magic.length < 2) {
        const needed = 2 - magic.length;
        magic = Buffer.concat([magic, chunk.subarray(0, needed)]);
        if (magic.length === 2) {
          if (magic[0] !== 31 || magic[1] !== 139) {
            fileStream.close();
            controller.abort();
            return fail("IO", "downloaded file is not gzip data");
          }
        }
      }
      total += chunk.length;
      if (total > MAX_SKILLREF_DOWNLOAD_BYTES) {
        fileStream.close();
        controller.abort();
        return fail(
          "SIZE_LIMIT",
          `downloaded archive size ${total} exceeds ${MAX_SKILLREF_DOWNLOAD_BYTES} bytes`
        );
      }
      // NOTE(review): write() is not awaited, so backpressure is not applied;
      // the size cap above bounds memory growth in practice — confirm.
      fileStream.write(chunk);
    }
  } catch (err) {
    fileStream.close();
    clearTimeout(totalTimer);
    // fail() calls inside the loop throw CtxbinError; re-throw those
    // unchanged so IO/SIZE_LIMIT codes are not rewrapped as NETWORK.
    if (err instanceof CtxbinError) {
      throw err;
    }
    return fail("NETWORK", `download failed: ${err instanceof Error ? err.message : String(err)}`);
  } finally {
    clearTimeout(totalTimer);
  }
  // Fewer than two bytes total means the body ended before the gzip header.
  if (magic.length < 2) {
    fileStream.close();
    return fail("IO", "downloaded file is incomplete");
  }
  // Flush and close the file before returning so the caller can read it.
  await new Promise((resolve, reject) => {
    fileStream.end(() => resolve());
    fileStream.on("error", reject);
  });
}
618
// fetch with manual, host-restricted redirect handling. Each hop arms a
// connect timeout on the shared AbortController; redirects are followed at
// most `redirectsLeft` times and only to hosts in `allowedHosts`.
async function fetchWithRedirect(url, redirectsLeft, controller, allowedHosts, init) {
  const connectTimer = setTimeout(() => controller.abort(), SKILLREF_CONNECT_TIMEOUT_MS);
  let res;
  try {
    res = await fetch(url, {
      ...init,
      signal: controller.signal,
      redirect: "manual"
    });
  } finally {
    // Fix: previously a rejected fetch skipped clearTimeout, leaving the
    // timer armed to fire later and abort the caller's shared controller.
    clearTimeout(connectTimer);
  }
  if (isRedirect(res.status)) {
    if (redirectsLeft <= 0) {
      return fail("NETWORK", "too many redirects");
    }
    const location = res.headers.get("location");
    if (!location) {
      return fail("NETWORK", "redirect without location header");
    }
    const nextUrl = new URL(location, url).toString();
    const host = new URL(nextUrl).hostname;
    if (!allowedHosts.includes(host)) {
      return fail("NETWORK", `redirected to unsupported host: ${host}`);
    }
    return fetchWithRedirect(nextUrl, redirectsLeft - 1, controller, allowedHosts, init);
  }
  return res;
}
643
// True for the HTTP statuses that denote a followable redirect.
function isRedirect(status) {
  switch (status) {
    case 301:
    case 302:
    case 303:
    case 307:
    case 308:
      return true;
    default:
      return false;
  }
}
646
// Split a canonical GitHub repo URL into its owner and repo path segments.
function splitGithubUrl(repoUrl) {
  const segments = new URL(repoUrl).pathname.split("/").filter(Boolean);
  if (segments.length === 2) {
    const [owner, repo] = segments;
    return { owner, repo };
  }
  return fail("INVALID_URL", "URL must be https://github.com/<owner>/<repo>");
}
654
// Query the GitHub REST API for a repository's default branch name.
// Every failure mode (network, HTTP status, bad JSON, missing field) is
// reported as a NETWORK error; the whole lookup is time-boxed.
async function fetchDefaultBranch(repoUrl) {
  const { owner, repo } = splitGithubUrl(repoUrl);
  const url = `https://api.github.com/repos/${owner}/${repo}`;
  const controller = new AbortController();
  const totalTimer = setTimeout(() => controller.abort(), SKILLREF_DOWNLOAD_TIMEOUT_MS);
  try {
    let res;
    try {
      res = await fetchWithRedirect(url, 1, controller, ["github.com", "api.github.com"], {
        headers: {
          "User-Agent": "ctxbin",
          Accept: "application/vnd.github+json"
        }
      });
    } catch (err) {
      return fail("NETWORK", `default branch lookup failed: ${err instanceof Error ? err.message : String(err)}`);
    }
    if (!res.ok) {
      const text = await res.text();
      return fail("NETWORK", `default branch lookup failed (${res.status}): ${text}`);
    }
    let data;
    try {
      data = await res.json();
    } catch {
      return fail("NETWORK", "default branch lookup returned invalid JSON");
    }
    if (data && typeof data.default_branch === "string" && data.default_branch.length > 0) {
      return data.default_branch;
    }
    return fail("NETWORK", "default branch lookup returned no default_branch");
  } finally {
    // Disarm the timeout on every exit path.
    clearTimeout(totalTimer);
  }
}
689
// Pre-scan codeload archive entries before extraction. Determines the
// archive's single top-level prefix ("<repo>-<ref>"), verifies every entry
// is a safe File/Directory under it, collects executable file paths
// relative to `requestedPath`, and enforces the entry-count and total-size
// limits. Returns { prefix, execSet }.
function analyzeEntries(entries, requestedPath) {
  if (!entries.length) {
    return fail("NOT_FOUND", "archive contained no entries");
  }
  // GitHub tarballs wrap everything in one "<repo>-<ref>/" directory.
  const [prefix] = entries[0].path.split("/");
  if (!prefix) {
    return fail("IO", "unable to determine archive prefix");
  }
  const execSet = /* @__PURE__ */ new Set();
  let entryCount = 0;
  let totalSize = 0;
  let matched = false;
  for (const entry of entries) {
    assertSafeTarPath(entry.path);
    if (!ALLOWED_TYPES2.has(entry.type)) {
      return fail("IO", `unsupported entry type in archive: ${entry.path}`);
    }
    if (entry.path === prefix) {
      continue;
    }
    if (!entry.path.startsWith(`${prefix}/`)) {
      return fail("IO", "archive has unexpected top-level layout");
    }
    const rel = entry.path.slice(prefix.length + 1);
    if (!rel) {
      continue;
    }
    // null means the entry lies outside the requested subdirectory.
    const relToReq = stripRequestedPath(rel, requestedPath);
    if (relToReq === null) {
      continue;
    }
    matched = true;
    entryCount += 1;
    const isFile = entry.type === "File";
    if (isFile && rel === requestedPath) {
      return fail("INVALID_PATH", "requested path is not a directory");
    }
    if (isFile) {
      totalSize += entry.size ?? 0;
      if (entry.mode & 73) {
        execSet.add(relToReq);
      }
    }
  }
  if (!matched) {
    return fail("NOT_FOUND", "requested path not found in archive");
  }
  if (entryCount > MAX_SKILLREF_FILES) {
    return fail("SIZE_LIMIT", `extracted entry count ${entryCount} exceeds ${MAX_SKILLREF_FILES}`);
  }
  if (totalSize > MAX_SKILLREF_EXTRACT_BYTES) {
    return fail("SIZE_LIMIT", `extracted size ${totalSize} exceeds ${MAX_SKILLREF_EXTRACT_BYTES}`);
  }
  return { prefix, execSet };
}
743
// Rebase `rel` against `requestedPath`: "" for the requested path itself,
// the remainder for descendants, null for anything outside it.
function stripRequestedPath(rel, requestedPath) {
  if (rel === requestedPath) {
    return "";
  }
  const dirPrefix = `${requestedPath}/`;
  return rel.startsWith(dirPrefix) ? rel.slice(dirPrefix.length) : null;
}
753
// tar.x filter predicate: keep only entries at or below
// "<prefix>/<requestedPath>" inside the archive.
function isUnderPath(entryPath, prefix, requestedPath) {
  const dirPrefix = `${prefix}/`;
  if (entryPath === prefix || !entryPath.startsWith(dirPrefix)) {
    return false;
  }
  const rel = entryPath.slice(dirPrefix.length);
  if (!rel) {
    return false;
  }
  return rel === requestedPath || rel.startsWith(`${requestedPath}/`);
}
769
+
770
+ // src/value.ts
771
// Classify a stored skill value by its magic header line.
function detectSkillValueType(value) {
  if (value.startsWith(SKILLPACK_HEADER)) {
    return "skillpack";
  }
  return value.startsWith(SKILLREF_HEADER) ? "skillref" : "string";
}
776
+
777
+ // src/input.ts
778
+ var import_promises8 = __toESM(require("fs/promises"));
779
+ var import_node_process = __toESM(require("process"));
780
// Determine the value to save from exactly one input source: --dir (packs a
// skillpack), --url/--path[/--ref] (builds a skillref), --file, --value, or
// piped stdin. Skill-only flags are rejected for other resources, --append
// is incompatible with --dir/--url, and using zero or multiple sources is an
// INVALID_INPUT error.
async function resolveSaveInput(resource, opts, stdinIsTTY = Boolean(import_node_process.default.stdin.isTTY)) {
  const hasFile = typeof opts.file === "string";
  const hasValue = typeof opts.value === "string";
  const hasDir = typeof opts.dir === "string";
  const urlFlagsUsed = Boolean(opts.url || opts.ref || opts.path);
  const hasUrl = Boolean(opts.url && opts.path);
  const explicitCount = [hasFile, hasValue, hasDir, hasUrl].filter(Boolean).length;
  // stdin only counts when it is piped and no explicit flag was given.
  const hasStdin = !stdinIsTTY && explicitCount === 0;
  if (urlFlagsUsed && !hasUrl) {
    return fail("INVALID_INPUT", "--url and --path must be provided together");
  }
  if (explicitCount + (hasStdin ? 1 : 0) !== 1) {
    return fail("INVALID_INPUT", "exactly one input method must be used");
  }
  if (resource !== "skill") {
    if (hasDir) {
      return fail("INVALID_INPUT", "--dir is only valid for skill save");
    }
    if (hasUrl) {
      return fail("INVALID_INPUT", "--url/--ref/--path are only valid for skill save");
    }
  }
  if (opts.append && (hasDir || hasUrl)) {
    return fail("INVALID_INPUT", "--append cannot be used with --dir or --url");
  }
  if (hasDir) {
    return { kind: "skillpack", value: await createSkillpackFromDir(opts.dir) };
  }
  if (hasUrl) {
    return { kind: "skillref", value: createSkillrefValue(opts.url, opts.path, opts.ref) };
  }
  if (hasFile) {
    return { kind: "string", value: await import_promises8.default.readFile(opts.file, "utf8") };
  }
  if (hasValue) {
    return { kind: "string", value: opts.value };
  }
  return { kind: "string", value: await readStdin() };
}
822
// Collect the entirety of stdin as a single UTF-8 string.
function readStdin() {
  return new Promise((resolve, reject) => {
    const chunks = [];
    const stdin = import_node_process.default.stdin;
    stdin.setEncoding("utf8");
    stdin.on("data", (chunk) => chunks.push(chunk));
    stdin.on("end", () => resolve(chunks.join("")));
    stdin.on("error", reject);
  });
}
833
+
834
+ // src/cli.ts
835
// CLI entry point: parse argv, validate the invocation shape, then dispatch
// to the resource/command handlers. Invalid invocations are routed through
// fail() (see src/errors.ts); anything thrown here is caught by the
// top-level main().catch below.
async function main() {
  let positionals;
  let values;
  try {
    ({ positionals, values } = (0, import_node_util2.parseArgs)({
      args: import_node_process2.default.argv.slice(2),
      options: {
        append: { type: "boolean" },
        version: { type: "boolean", short: "v" },
        file: { type: "string" },
        value: { type: "string" },
        dir: { type: "string" },
        url: { type: "string" },
        ref: { type: "string" },
        path: { type: "string" }
      },
      allowPositionals: true
    }));
  } catch (err) {
    // parseArgs throws on unknown flags / malformed values; report as input error.
    return fail("INVALID_INPUT", err instanceof Error ? err.message : "invalid arguments");
  }
  // Expected shape: <resource> <command> [key]
  const [resource, command, keyArg, ...extra] = positionals;
  // --version/-v short-circuits before any resource validation.
  if (values.version) {
    import_node_process2.default.stdout.write(getVersion() + "\n");
    return;
  }
  if (!resource) {
    return fail("INVALID_INPUT", "missing resource");
  }
  // `init` is a bare subcommand: no command, key, or extra positionals allowed.
  if (resource === "init") {
    if (command || keyArg || extra.length) {
      return fail("INVALID_INPUT", "init does not accept additional arguments");
    }
    await runInit();
    return;
  }
  if (!command) {
    return fail("INVALID_INPUT", "missing command");
  }
  if (extra.length > 0) {
    return fail("INVALID_INPUT", "too many positional arguments");
  }
  // Normalized flag bag handed to every handler.
  const opts = {
    append: Boolean(values.append),
    file: values.file,
    value: values.value,
    dir: values.dir,
    url: values.url,
    ref: values.ref,
    path: values.path
  };
  let store;
  try {
    const storeConfig = await loadConfig();
    store = createStore(storeConfig.url, storeConfig.token);
  } catch (err) {
    // Special case: `skill load ctxbin` still works without any store config
    // by falling back to the skill bundled inside the package, if present.
    if (resource === "skill" && command === "load" && keyArg === "ctxbin") {
      const fallback = await loadBundledSkill();
      if (fallback) {
        import_node_process2.default.stdout.write(fallback);
        return;
      }
    }
    throw err;
  }
  const hash = resolveHash(resource);
  // `list` takes neither a key nor any input flags.
  if (command === "list") {
    if (keyArg) {
      return fail("INVALID_INPUT", "list does not accept a key");
    }
    ensureNoListFlags(opts);
    await handleList(store, resource, hash);
    return;
  }
  const key = await resolveKey(resource, keyArg);
  switch (command) {
    case "load":
      await handleLoad(store, resource, hash, key, opts);
      return;
    case "save":
      await handleSave(store, resource, hash, key, opts);
      return;
    case "delete":
      await handleDelete(store, resource, hash, key, opts);
      return;
    default:
      return fail("INVALID_INPUT", `unknown command: ${command}`);
  }
}
924
// Interactively prompt for the store URL and token, then persist them via
// writeConfig. Fix: the readline interface is now closed in a `finally`
// block, so it is released even when a prompt rejects (e.g. stdin closes
// mid-question); previously a rejected question() leaked the interface and
// could keep the process alive.
async function runInit() {
  const readline = await import("readline/promises");
  const rl = readline.createInterface({ input: import_node_process2.default.stdin, output: import_node_process2.default.stdout });
  let url;
  let token;
  try {
    url = (await rl.question("CTXBIN_STORE_URL: ")).trim();
    token = (await rl.question("CTXBIN_STORE_TOKEN: ")).trim();
  } finally {
    rl.close();
  }
  if (!url || !token) {
    return fail("INVALID_INPUT", "both URL and token are required");
  }
  await writeConfig({ url, token });
}
935
// Map a CLI resource name to its store hash name. The two are currently the
// same string, but only these three resources are recognized.
function resolveHash(resource) {
  const knownResources = ["ctx", "agent", "skill"];
  if (knownResources.includes(resource)) {
    return resource;
  }
  return fail("INVALID_INPUT", `unknown resource: ${resource}`);
}
941
// Resolve the storage key for a command. A `ctx` key may be inferred from
// the environment when omitted; every other resource requires an explicit key.
async function resolveKey(resource, keyArg) {
  if (resource === "ctx") {
    return keyArg ? keyArg : inferCtxKey();
  }
  if (keyArg) {
    return keyArg;
  }
  return fail("MISSING_KEY", "key is required");
}
951
// Load a value and emit it. For the `skill` resource the stored value may be
// a plain string, a packed directory ("skillpack"), or a reference
// ("skillref"); the latter two are materialized into the --dir path instead
// of being written to stdout.
async function handleLoad(store, resource, hash, key, opts) {
  ensureNoSaveInput(opts, "load");
  const value = await store.get(hash, key);
  if (value === null) {
    // `skill load ctxbin` falls back to the skill bundled with the package.
    if (resource === "skill" && key === "ctxbin") {
      const fallback = await loadBundledSkill();
      if (fallback) {
        import_node_process2.default.stdout.write(fallback);
        return;
      }
    }
    return fail("NOT_FOUND", `no value for ${hash}:${key}`);
  }
  if (resource === "skill") {
    const kind = detectSkillValueType(value);
    if (kind === "string") {
      if (opts.dir) {
        return fail("TYPE_MISMATCH", "--dir cannot be used with string values");
      }
      import_node_process2.default.stdout.write(value);
      return;
    }
    // Non-string skill values can only be materialized into a directory.
    if (!opts.dir) {
      return fail("TYPE_MISMATCH", "--dir is required for skillpack/skillref load");
    }
    if (kind === "skillpack") {
      await extractSkillpackToDir(value, opts.dir);
      return;
    }
    if (kind === "skillref") {
      await loadSkillrefToDir(value, opts.dir);
      return;
    }
    // NOTE: any unrecognized kind falls through to the generic path below.
  }
  if (opts.dir) {
    return fail("TYPE_MISMATCH", "--dir is only valid for skill values");
  }
  // Raw value written without a trailing newline so output pipes cleanly.
  import_node_process2.default.stdout.write(value);
}
990
// Save (or append) a value. Append only applies to plain-string inputs and
// refuses to concatenate onto packed/reference skill values.
async function handleSave(store, resource, hash, key, opts) {
  const input = await resolveSaveInput(resource, opts);
  if (!opts.append) {
    // Plain save: packed/reference inputs are reserved for the skill resource.
    if (input.kind !== "string" && resource !== "skill") {
      return fail("TYPE_MISMATCH", "non-string inputs are only valid for skill");
    }
    await store.set(hash, key, input.value);
    return;
  }
  if (input.kind !== "string") {
    return fail("INVALID_INPUT", "--append only applies to string inputs");
  }
  const existing = await store.get(hash, key);
  if (resource === "skill" && existing && detectSkillValueType(existing) !== "string") {
    return fail("TYPE_MISMATCH", "cannot append to skillpack/skillref values");
  }
  // Separate the old and new content with a blank line.
  const merged = existing ? existing + "\n\n" + input.value : input.value;
  await store.set(hash, key, merged);
}
1011
// Delete the stored value for hash:key. Delete accepts no input flags;
// save-style flags are rejected first, then --dir separately (it is a valid
// load flag, so ensureNoSaveInput does not cover it).
async function handleDelete(store, resource, hash, key, opts) {
  ensureNoSaveInput(opts, "delete");
  if (opts.dir) {
    return fail("INVALID_INPUT", "--dir is not valid for delete");
  }
  await store.delete(hash, key);
}
1018
// Print one "field <flag>" line per stored entry, where <flag> indicates how
// the value would be supplied on save (--value, --dir for skillpacks, or
// --url for skillrefs). Empty stores print nothing.
async function handleList(store, resource, hash) {
  const entries = await store.list(hash);
  if (entries.length === 0) {
    return;
  }
  const describe = (value) => {
    if (resource === "skill") {
      const kind = detectSkillValueType(value);
      if (kind === "skillpack") return "--dir";
      if (kind === "skillref") return "--url";
    }
    return "--value";
  };
  const lines = entries.map((entry) => `${entry.field} ${describe(entry.value)}`);
  import_node_process2.default.stdout.write(lines.join("\n"));
}
1032
// Reject input-producing flags on commands (load/delete) that take none.
function ensureNoSaveInput(opts, command) {
  const inputFlags = [opts.append, opts.file, opts.value, opts.url, opts.ref, opts.path];
  if (inputFlags.some(Boolean)) {
    return fail("INVALID_INPUT", `${command} does not accept input flags`);
  }
}
1037
// `list` takes no flags at all, including --dir (unlike load).
function ensureNoListFlags(opts) {
  const allFlags = [opts.append, opts.file, opts.value, opts.dir, opts.url, opts.ref, opts.path];
  if (allFlags.some(Boolean)) {
    return fail("INVALID_INPUT", "list does not accept input flags");
  }
}
1042
// Top-level runner: any error escaping main() is formatted to stderr and the
// process exits with status 1 so shell callers can detect failure.
main().catch((err) => {
  import_node_process2.default.stderr.write(formatError(err) + "\n");
  import_node_process2.default.exit(1);
});
1046
// Read the SKILL.md shipped inside the package (next to this bundle).
// Returns null when the packaged copy is missing or unreadable — this is a
// best-effort fallback, so errors are deliberately swallowed.
async function loadBundledSkill() {
  const skillPath = import_node_path8.default.resolve(__dirname, "skills", "ctxbin", "SKILL.md");
  try {
    return await import_promises9.default.readFile(skillPath, "utf8");
  } catch {
    return null;
  }
}
1054
// Best-effort version lookup from the package.json one level above this
// bundle; returns "0.0.0" when the file is missing or malformed.
function getVersion() {
  const fallbackVersion = "0.0.0";
  try {
    const pkgPath = import_node_path8.default.resolve(__dirname, "..", "package.json");
    const pkg = JSON.parse((0, import_node_fs3.readFileSync)(pkgPath, "utf8"));
    if (pkg && typeof pkg.version === "string") {
      return pkg.version;
    }
  } catch {
    // fall through to the default below
  }
  return fallbackVersion;
}
1066
+ //# sourceMappingURL=cli.js.map