@modexagents/core 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,1979 @@
1
+ // src/schema.ts
2
+ import { z } from "zod";
3
+ var SCHEMA_VERSION = 0;
4
+ var SLUG_PATTERN = /^[a-z0-9]+(-[a-z0-9]+)*$/;
5
+ var SkillSchema = z.object({
6
+ slug: z.string().min(1).max(64).regex(SLUG_PATTERN, "slug must be kebab-case [a-z0-9-]"),
7
+ name: z.string().min(1).max(120),
8
+ description: z.string().min(1).max(2e3),
9
+ tags: z.array(z.string().regex(SLUG_PATTERN, "tag must be kebab-case [a-z0-9-]").max(40)).min(1).max(6),
10
+ source: z.string().min(1).max(255)
11
+ });
12
+ var SkillsDocSchema = z.object({
13
+ schema_version: z.literal(SCHEMA_VERSION),
14
+ skills: z.array(SkillSchema)
15
+ });
16
+ var ExtractedSkillSchema = SkillSchema.omit({ source: true });
17
+ var ExtractionResultSchema = z.object({
18
+ skills: z.array(ExtractedSkillSchema)
19
+ });
20
+
21
+ // src/serialize.ts
22
+ function compareCodepoints(a, b) {
23
+ if (a < b) return -1;
24
+ if (a > b) return 1;
25
+ return 0;
26
+ }
27
+ function normalizeSkill(skill) {
28
+ return {
29
+ slug: skill.slug,
30
+ name: skill.name.trim(),
31
+ description: skill.description.trim(),
32
+ tags: [...skill.tags].sort(compareCodepoints),
33
+ source: skill.source
34
+ };
35
+ }
36
+ function renderSkill(skill) {
37
+ return [
38
+ `## ${skill.slug}`,
39
+ "",
40
+ `**Name:** ${skill.name}`,
41
+ "",
42
+ `**Description:** ${skill.description}`,
43
+ "",
44
+ `**Tags:** ${skill.tags.join(", ")}`,
45
+ "",
46
+ `**Source:** ${skill.source}`,
47
+ "",
48
+ "---"
49
+ ].join("\n");
50
+ }
51
+ function serialize(doc) {
52
+ const skills = doc.skills.map(normalizeSkill).sort((a, b) => compareCodepoints(a.slug, b.slug));
53
+ const frontmatter = ["---", `schema_version: ${SCHEMA_VERSION}`, "---"].join("\n");
54
+ if (skills.length === 0) {
55
+ return `${frontmatter}
56
+ `;
57
+ }
58
+ const body = skills.map(renderSkill).join("\n\n");
59
+ return `${frontmatter}
60
+
61
+ ${body}
62
+ `;
63
+ }
64
+ function buildDoc(skills) {
65
+ return { schema_version: SCHEMA_VERSION, skills };
66
+ }
67
+
68
+ // src/extract.ts
69
+ import Anthropic from "@anthropic-ai/sdk";
70
+
71
+ // src/prompt.ts
72
+ var MODEL_ID = "claude-haiku-4-5-20251001";
73
+ var EMIT_SKILLS_TOOL = {
74
+ name: "emit_skills",
75
+ description: "Emit the structured set of skills extracted from the user-provided corpus.",
76
+ input_schema: {
77
+ type: "object",
78
+ properties: {
79
+ skills: {
80
+ type: "array",
81
+ items: {
82
+ type: "object",
83
+ properties: {
84
+ slug: {
85
+ type: "string",
86
+ pattern: "^[a-z0-9]+(-[a-z0-9]+)*$",
87
+ maxLength: 64,
88
+ description: "Kebab-case identifier, unique within this output. No underscores, spaces, or capitals."
89
+ },
90
+ name: {
91
+ type: "string",
92
+ minLength: 1,
93
+ maxLength: 120,
94
+ description: "Short human-readable title (3-8 words)."
95
+ },
96
+ description: {
97
+ type: "string",
98
+ minLength: 1,
99
+ maxLength: 2e3,
100
+ description: "One paragraph (1-4 sentences), self-contained, written so a reader who has not seen the corpus can apply it."
101
+ },
102
+ tags: {
103
+ type: "array",
104
+ minItems: 1,
105
+ maxItems: 6,
106
+ items: {
107
+ type: "string",
108
+ pattern: "^[a-z0-9]+(-[a-z0-9]+)*$",
109
+ maxLength: 40
110
+ },
111
+ description: "1-6 kebab-case topical labels."
112
+ }
113
+ },
114
+ required: ["slug", "name", "description", "tags"],
115
+ additionalProperties: false
116
+ }
117
+ }
118
+ },
119
+ required: ["skills"],
120
+ additionalProperties: false
121
+ }
122
+ };
123
+ var SYSTEM_PROMPT = `You are an extraction engine that reads a corpus and produces a structured set of distinct skills.
124
+
125
+ A "skill" is a transferable, reusable practice or technique that a person could apply across contexts \u2014 not a fact, definition, story, or one-off opinion.
126
+
127
+ For every skill you identify, emit:
128
+ - slug: kebab-case identifier matching ^[a-z0-9]+(-[a-z0-9]+)*$, unique within this output
129
+ - name: short human-readable title (3-8 words; Title Case acceptable)
130
+ - description: one paragraph (1-4 sentences), self-contained, written as a directly applicable instruction
131
+ - tags: 1-6 kebab-case topical labels matching ^[a-z0-9]+(-[a-z0-9]+)*$
132
+
133
+ Quality rules:
134
+ - Only emit skills that are clearly supported by the corpus.
135
+ - Do not invent skills to pad the list. Returning fewer high-quality skills is preferred to many vague ones.
136
+ - Avoid near-duplicates. If two passages describe the same underlying technique, emit one skill.
137
+ - Do not include source citations, page numbers, or quotes in the description.
138
+ - The description must stand on its own; a reader who has not seen the corpus must be able to apply the skill.
139
+
140
+ Always respond by calling the emit_skills tool exactly once with the full result. Do not include any text outside the tool call.`;
141
+
142
+ // src/extract.ts
143
+ var MAX_CORPUS_BYTES = 5e5;
144
+ var ExtractionError = class extends Error {
145
+ cause;
146
+ constructor(message, cause) {
147
+ super(message);
148
+ this.name = "ExtractionError";
149
+ this.cause = cause;
150
+ }
151
+ };
152
+ async function extractSkills(corpus, opts) {
153
+ const bytes = Buffer.byteLength(corpus, "utf8");
154
+ if (bytes > MAX_CORPUS_BYTES) {
155
+ throw new ExtractionError(
156
+ `Corpus is ${bytes} bytes, which exceeds the Phase A limit of ${MAX_CORPUS_BYTES}. Chunking arrives in Phase C.`
157
+ );
158
+ }
159
+ if (corpus.trim().length === 0) {
160
+ throw new ExtractionError("Corpus is empty.");
161
+ }
162
+ const apiKey = opts.apiKey ?? process.env["ANTHROPIC_API_KEY"];
163
+ if (!opts.client && !apiKey) {
164
+ throw new ExtractionError(
165
+ "ANTHROPIC_API_KEY is not set. Export it in your environment, or copy .env.example to .env."
166
+ );
167
+ }
168
+ const client = opts.client ?? new Anthropic({ apiKey });
169
+ const model = opts.model ?? MODEL_ID;
170
+ let response;
171
+ try {
172
+ response = await client.messages.create({
173
+ model,
174
+ max_tokens: opts.maxTokens ?? 4096,
175
+ system: [
176
+ {
177
+ type: "text",
178
+ text: SYSTEM_PROMPT,
179
+ cache_control: { type: "ephemeral" }
180
+ }
181
+ ],
182
+ tools: [EMIT_SKILLS_TOOL],
183
+ tool_choice: { type: "tool", name: "emit_skills" },
184
+ messages: [
185
+ {
186
+ role: "user",
187
+ content: corpus
188
+ }
189
+ ]
190
+ });
191
+ } catch (err) {
192
+ throw new ExtractionError("Anthropic API call failed", err);
193
+ }
194
+ const toolUse = response.content.find(
195
+ (b) => b.type === "tool_use" && b.name === "emit_skills"
196
+ );
197
+ if (!toolUse || toolUse.type !== "tool_use") {
198
+ throw new ExtractionError(
199
+ `Model did not emit the emit_skills tool call (stop_reason=${response.stop_reason}).`
200
+ );
201
+ }
202
+ const parsed = ExtractionResultSchema.safeParse(toolUse.input);
203
+ if (!parsed.success) {
204
+ const detail = parsed.error.issues.map((i) => `${i.path.join(".") || "<root>"}: ${i.message}`).join("; ");
205
+ throw new ExtractionError(`Model output failed validation: ${detail}`);
206
+ }
207
+ return parsed.data.skills.map((s) => ({ ...s, source: opts.source }));
208
+ }
209
+
210
+ // src/sources/index.ts
211
+ import { extname as extname2 } from "path";
212
+
213
+ // src/sources/epub.ts
214
+ import { basename } from "path";
215
+
216
+ // src/sources/types.ts
217
+ var SourceError = class extends Error {
218
+ constructor(message) {
219
+ super(message);
220
+ this.name = "SourceError";
221
+ }
222
+ };
223
+
224
+ // src/sources/epub.ts
225
+ function htmlToText(html) {
226
+ return html.replace(/<\/(p|div|h[1-6]|li|br|tr|hr)>/gi, "\n").replace(/<br\s*\/?>/gi, "\n").replace(/<[^>]+>/g, "").replace(/&nbsp;/g, " ").replace(/&amp;/g, "&").replace(/&lt;/g, "<").replace(/&gt;/g, ">").replace(/&quot;/g, '"').replace(/&#39;/g, "'").replace(/[ \t]+\n/g, "\n").replace(/\n{3,}/g, "\n\n").trim();
227
+ }
228
+ async function loadEpubSource(path) {
229
+ const { EPub } = await import("epub2");
230
+ let book;
231
+ try {
232
+ book = await EPub.createAsync(path);
233
+ } catch (err) {
234
+ throw new SourceError(`Could not open EPUB ${path}: ${err.message}`);
235
+ }
236
+ const chapters = [];
237
+ for (const item of book.flow) {
238
+ if (!item.id) continue;
239
+ let html;
240
+ try {
241
+ html = await book.getChapterAsync(item.id);
242
+ } catch (err) {
243
+ throw new SourceError(
244
+ `Could not read chapter '${item.id}' from ${path}: ${err.message}`
245
+ );
246
+ }
247
+ const text = htmlToText(html);
248
+ if (text.length > 0) chapters.push(text);
249
+ }
250
+ const content = chapters.join("\n\n");
251
+ if (content.trim().length === 0) {
252
+ throw new SourceError(`EPUB ${path} extracted to empty text (no readable chapters).`);
253
+ }
254
+ return {
255
+ source: basename(path),
256
+ source_url: null,
257
+ source_kind: "epub",
258
+ content
259
+ };
260
+ }
261
+
262
+ // src/sources/pdf.ts
263
+ import { readFile } from "fs/promises";
264
+ import { basename as basename2 } from "path";
265
+ async function loadPdfSource(path) {
266
+ let bytes;
267
+ try {
268
+ bytes = await readFile(path);
269
+ } catch (err) {
270
+ throw new SourceError(`Could not read ${path}: ${err.message}`);
271
+ }
272
+ const { extractText } = await import("unpdf");
273
+ let result;
274
+ try {
275
+ result = await extractText(new Uint8Array(bytes), { mergePages: true });
276
+ } catch (err) {
277
+ throw new SourceError(`Could not parse PDF ${path}: ${err.message}`);
278
+ }
279
+ const text = Array.isArray(result.text) ? result.text.join("\n") : result.text;
280
+ if (text.trim().length === 0) {
281
+ throw new SourceError(
282
+ `PDF ${path} extracted to empty text (${result.totalPages} pages). Likely an image-only/scanned PDF; OCR is out of scope for Phase C.`
283
+ );
284
+ }
285
+ return {
286
+ source: basename2(path),
287
+ source_url: null,
288
+ source_kind: "pdf",
289
+ content: text
290
+ };
291
+ }
292
+
293
+ // src/sources/text.ts
294
+ import { readFile as readFile2 } from "fs/promises";
295
+ import { basename as basename3, extname } from "path";
296
+ var KIND_BY_EXT = {
297
+ ".txt": "text",
298
+ ".md": "markdown"
299
+ };
300
+ var TEXT_EXTENSIONS = Object.keys(KIND_BY_EXT);
301
+ async function loadTextSource(path) {
302
+ const ext = extname(path).toLowerCase();
303
+ const kind = KIND_BY_EXT[ext];
304
+ if (kind === void 0) {
305
+ throw new SourceError(
306
+ `Not a text source: ${path} (extension ${ext || "(none)"} is not .txt or .md).`
307
+ );
308
+ }
309
+ let content;
310
+ try {
311
+ content = await readFile2(path, "utf8");
312
+ } catch (err) {
313
+ throw new SourceError(`Could not read ${path}: ${err.message}`);
314
+ }
315
+ return {
316
+ source: basename3(path),
317
+ source_url: null,
318
+ source_kind: kind,
319
+ content
320
+ };
321
+ }
322
+
323
+ // src/sources/web.ts
324
+ import { lookup as dnsLookup } from "dns/promises";
325
+ import { isIP } from "net";
326
+ var DEFAULT_TIMEOUT_MS = 3e4;
327
+ var DEFAULT_MAX_BYTES = 10 * 1024 * 1024;
328
+ var DEFAULT_USER_AGENT = `modex-cli/${"0.3.1"} (+https://modex.md)`;
329
+ var MAX_REDIRECTS = 10;
330
+ var ALLOWED_CONTENT_TYPES = [
331
+ "text/html",
332
+ "application/xhtml+xml",
333
+ "text/plain"
334
+ ];
335
+ function normalizeUrl(input) {
336
+ let u;
337
+ try {
338
+ u = new URL(input);
339
+ } catch {
340
+ throw new SourceError(`Invalid URL: ${input}`);
341
+ }
342
+ if (u.protocol !== "http:" && u.protocol !== "https:") {
343
+ throw new SourceError(`Unsupported URL scheme: ${u.protocol} (only http and https are allowed).`);
344
+ }
345
+ u.hash = "";
346
+ u.hostname = u.hostname.toLowerCase();
347
+ if (u.protocol === "http:" && u.port === "80" || u.protocol === "https:" && u.port === "443") {
348
+ u.port = "";
349
+ }
350
+ return u.toString();
351
+ }
352
+ function isPrivateIPv4(ip) {
353
+ const parts = ip.split(".").map(Number);
354
+ if (parts.length !== 4 || parts.some((p) => Number.isNaN(p) || p < 0 || p > 255)) {
355
+ return true;
356
+ }
357
+ const [a, b] = parts;
358
+ if (a === 0) return true;
359
+ if (a === 10) return true;
360
+ if (a === 127) return true;
361
+ if (a === 169 && b === 254) return true;
362
+ if (a === 172 && b >= 16 && b <= 31) return true;
363
+ if (a === 192 && b === 168) return true;
364
+ if (a === 100 && b >= 64 && b <= 127) return true;
365
+ return false;
366
+ }
367
+ function parseHextets(ip) {
368
+ let work = ip.toLowerCase();
369
+ let ipv4Tail = null;
370
+ const lastColon = work.lastIndexOf(":");
371
+ const tail = lastColon >= 0 ? work.slice(lastColon + 1) : work;
372
+ if (tail.includes(".")) {
373
+ const octets = tail.split(".").map(Number);
374
+ if (octets.length !== 4 || octets.some((o) => Number.isNaN(o) || o < 0 || o > 255)) {
375
+ return null;
376
+ }
377
+ ipv4Tail = [
378
+ octets[0] << 8 | octets[1],
379
+ octets[2] << 8 | octets[3]
380
+ ];
381
+ work = work.slice(0, lastColon + 1) + "0:0";
382
+ }
383
+ const halves = work.split("::");
384
+ if (halves.length > 2) return null;
385
+ const toGroups = (s) => {
386
+ if (s === "") return [];
387
+ const parts = s.split(":");
388
+ const groups2 = [];
389
+ for (const p of parts) {
390
+ if (!/^[0-9a-f]{1,4}$/.test(p)) return null;
391
+ groups2.push(parseInt(p, 16));
392
+ }
393
+ return groups2;
394
+ };
395
+ const head = toGroups(halves[0]);
396
+ const back = halves.length === 2 ? toGroups(halves[1]) : [];
397
+ if (head === null || back === null) return null;
398
+ let groups;
399
+ if (halves.length === 2) {
400
+ const fill = 8 - head.length - back.length;
401
+ if (fill < 1) return null;
402
+ groups = [...head, ...new Array(fill).fill(0), ...back];
403
+ } else {
404
+ groups = head;
405
+ }
406
+ if (ipv4Tail) {
407
+ groups = [...groups.slice(0, 6), ...ipv4Tail];
408
+ }
409
+ return groups.length === 8 ? groups : null;
410
+ }
411
+ function isPrivateIPv6(ip) {
412
+ const h = parseHextets(ip);
413
+ if (h === null) return true;
414
+ if (h.every((g) => g === 0)) return true;
415
+ if (h.slice(0, 7).every((g) => g === 0) && h[7] === 1) return true;
416
+ if (h.slice(0, 5).every((g) => g === 0) && h[5] === 65535) {
417
+ const a = h[6] >> 8 & 255;
418
+ const b = h[6] & 255;
419
+ const c = h[7] >> 8 & 255;
420
+ const d = h[7] & 255;
421
+ return isPrivateIPv4(`${a}.${b}.${c}.${d}`);
422
+ }
423
+ const first = h[0];
424
+ if (first >= 65152 && first <= 65215) return true;
425
+ if (first >= 64512 && first <= 65023) return true;
426
+ return false;
427
+ }
428
+ function isPrivateIP(ip, family) {
429
+ return family === 4 ? isPrivateIPv4(ip) : isPrivateIPv6(ip);
430
+ }
431
+ async function assertPublicHost(hostname, lookup) {
432
+ const unbracketed = hostname.startsWith("[") && hostname.endsWith("]") ? hostname.slice(1, -1) : hostname;
433
+ if (isIP(unbracketed) !== 0) {
434
+ throw new SourceError(
435
+ `Refusing to fetch from an IP literal (${hostname}). Use a hostname.`
436
+ );
437
+ }
438
+ let result;
439
+ try {
440
+ result = await lookup(hostname);
441
+ } catch (err) {
442
+ throw new SourceError(`Could not resolve hostname ${hostname}: ${err.message}`);
443
+ }
444
+ if (isPrivateIP(result.address, result.family)) {
445
+ throw new SourceError(
446
+ `Refusing to fetch ${hostname}: resolves to a private/loopback address (${result.address}).`
447
+ );
448
+ }
449
+ }
450
+ async function readCappedBody(response, maxBytes) {
451
+ const ct = response.headers.get("content-type") ?? "";
452
+ const charset = (/charset=([^;]+)/i.exec(ct)?.[1] ?? "utf-8").trim().toLowerCase();
453
+ const lengthHeader = response.headers.get("content-length");
454
+ if (lengthHeader && Number(lengthHeader) > maxBytes) {
455
+ throw new SourceError(
456
+ `Response is ${lengthHeader} bytes (Content-Length); exceeds limit of ${maxBytes}.`
457
+ );
458
+ }
459
+ if (!response.body) {
460
+ const text = await response.text();
461
+ if (Buffer.byteLength(text) > maxBytes) {
462
+ throw new SourceError(`Response body exceeded ${maxBytes} bytes.`);
463
+ }
464
+ return { buffer: Buffer.from(text, "utf8"), charset };
465
+ }
466
+ const reader = response.body.getReader();
467
+ const chunks = [];
468
+ let total = 0;
469
+ while (true) {
470
+ const { done, value } = await reader.read();
471
+ if (done) break;
472
+ total += value.byteLength;
473
+ if (total > maxBytes) {
474
+ await reader.cancel();
475
+ throw new SourceError(`Response body exceeded ${maxBytes} bytes (streamed).`);
476
+ }
477
+ chunks.push(value);
478
+ }
479
+ return { buffer: Buffer.concat(chunks), charset };
480
+ }
481
+ async function fetchWithGuards(url, opts) {
482
+ let current = url;
483
+ for (let hop = 0; hop <= MAX_REDIRECTS; hop++) {
484
+ const u = new URL(current);
485
+ await assertPublicHost(u.hostname, opts.lookup);
486
+ const ctrl = new AbortController();
487
+ const timer = setTimeout(() => ctrl.abort(), opts.timeoutMs);
488
+ let response;
489
+ try {
490
+ response = await opts.fetch(current, {
491
+ method: "GET",
492
+ redirect: "manual",
493
+ signal: ctrl.signal,
494
+ headers: {
495
+ "user-agent": opts.userAgent,
496
+ accept: "text/html, application/xhtml+xml, text/plain;q=0.5"
497
+ }
498
+ });
499
+ } catch (err) {
500
+ const e = err;
501
+ if (e.name === "AbortError") {
502
+ throw new SourceError(`Fetch timed out after ${opts.timeoutMs}ms: ${current}`);
503
+ }
504
+ throw new SourceError(`Fetch failed for ${current}: ${e.message}`);
505
+ } finally {
506
+ clearTimeout(timer);
507
+ }
508
+ if (response.status >= 300 && response.status < 400) {
509
+ const location = response.headers.get("location");
510
+ if (!location) {
511
+ throw new SourceError(`Redirect from ${current} without a Location header.`);
512
+ }
513
+ const next = new URL(location, current).toString();
514
+ const nextProto = new URL(next).protocol;
515
+ if (nextProto !== "http:" && nextProto !== "https:") {
516
+ throw new SourceError(`Redirect from ${current} to non-http(s) target ${next}.`);
517
+ }
518
+ current = next;
519
+ continue;
520
+ }
521
+ if (!response.ok) {
522
+ throw new SourceError(`Fetch ${current} returned HTTP ${response.status}.`);
523
+ }
524
+ const ctype = (response.headers.get("content-type") ?? "").toLowerCase();
525
+ const allowed = ALLOWED_CONTENT_TYPES.some((t) => ctype.startsWith(t));
526
+ if (!allowed) {
527
+ throw new SourceError(
528
+ `Fetch ${current} returned unsupported content-type: ${ctype || "(none)"}. Phase C web fetching accepts ${ALLOWED_CONTENT_TYPES.join(", ")}.`
529
+ );
530
+ }
531
+ const { buffer, charset } = await readCappedBody(response, opts.maxBytes);
532
+ let html;
533
+ try {
534
+ html = new TextDecoder(charset).decode(buffer);
535
+ } catch {
536
+ html = new TextDecoder("utf-8").decode(buffer);
537
+ }
538
+ return { finalUrl: current, html };
539
+ }
540
+ throw new SourceError(`Exceeded ${MAX_REDIRECTS} redirects starting from ${url}.`);
541
+ }
542
+ async function htmlToArticleText(html, url) {
543
+ const { Readability } = await import("@mozilla/readability");
544
+ const { parseHTML } = await import("linkedom");
545
+ const dom = parseHTML(html);
546
+ const doc = dom.document;
547
+ try {
548
+ doc.documentURI = url;
549
+ } catch {
550
+ }
551
+ const article = new Readability(doc).parse();
552
+ if (article && article.textContent && article.textContent.trim().length > 0) {
553
+ return article.textContent.trim();
554
+ }
555
+ const body = doc.querySelector("body");
556
+ const fallback = (body?.textContent ?? "").trim();
557
+ if (fallback.length === 0) {
558
+ throw new SourceError(`Readability and body fallback both extracted empty text for ${url}.`);
559
+ }
560
+ return fallback;
561
+ }
562
+ async function loadWebSource(target, opts = {}) {
563
+ const url = normalizeUrl(target);
564
+ const fetchImpl = opts.fetch ?? globalThis.fetch;
565
+ const lookup = opts.dnsLookup ?? (async (host) => {
566
+ const r = await dnsLookup(host);
567
+ return { address: r.address, family: r.family === 6 ? 6 : 4 };
568
+ });
569
+ const { html } = await fetchWithGuards(url, {
570
+ fetch: fetchImpl,
571
+ lookup,
572
+ timeoutMs: opts.timeoutMs ?? DEFAULT_TIMEOUT_MS,
573
+ maxBytes: opts.maxBytes ?? DEFAULT_MAX_BYTES,
574
+ userAgent: opts.userAgent ?? DEFAULT_USER_AGENT
575
+ });
576
+ const text = await htmlToArticleText(html, url);
577
+ return {
578
+ source: url,
579
+ source_url: url,
580
+ source_kind: "web",
581
+ content: text
582
+ };
583
+ }
584
+
585
+ // src/sources/index.ts
586
+ function isWebUrl(target) {
587
+ return target.startsWith("http://") || target.startsWith("https://");
588
+ }
589
+ var EXT_LOADERS = {
590
+ ".txt": loadTextSource,
591
+ ".md": loadTextSource,
592
+ ".pdf": loadPdfSource,
593
+ ".epub": loadEpubSource
594
+ };
595
+ async function readSource(target) {
596
+ if (isWebUrl(target)) {
597
+ return loadWebSource(target);
598
+ }
599
+ const ext = extname2(target).toLowerCase();
600
+ const loader = EXT_LOADERS[ext];
601
+ if (loader === void 0) {
602
+ const shown = ext || "(no extension)";
603
+ throw new SourceError(
604
+ `Unsupported source type: ${shown}. Supported: .txt, .md, .pdf, .epub, http(s)://\u2026 URLs.`
605
+ );
606
+ }
607
+ return loader(target);
608
+ }
609
+
610
+ // src/glob.ts
611
+ import { stat } from "fs/promises";
612
+ import { isAbsolute, resolve } from "path";
613
+ import { glob } from "tinyglobby";
614
+ var GLOB_MAGIC = /[*?[\]{}!]/;
615
+ function isGlobLike(pattern) {
616
+ return GLOB_MAGIC.test(pattern);
617
+ }
618
+ async function expandPatterns(patterns, opts = {}) {
619
+ if (patterns.length === 0) {
620
+ throw new SourceError("No patterns supplied.");
621
+ }
622
+ const cwd = opts.cwd ?? process.cwd();
623
+ const out = [];
624
+ for (const pattern of patterns) {
625
+ if (isWebUrl(pattern)) {
626
+ out.push(pattern);
627
+ continue;
628
+ }
629
+ if (isGlobLike(pattern)) {
630
+ const matches = await glob(pattern, { cwd, onlyFiles: true, absolute: true });
631
+ if (matches.length === 0) {
632
+ throw new SourceError(`Glob pattern matched zero files: ${pattern}`);
633
+ }
634
+ matches.sort();
635
+ out.push(...matches);
636
+ continue;
637
+ }
638
+ const abs = isAbsolute(pattern) ? pattern : resolve(cwd, pattern);
639
+ let s;
640
+ try {
641
+ s = await stat(abs);
642
+ } catch (err) {
643
+ throw new SourceError(`Path does not exist: ${pattern} (${err.message})`);
644
+ }
645
+ if (!s.isFile()) {
646
+ throw new SourceError(`Path is not a regular file: ${pattern}`);
647
+ }
648
+ out.push(abs);
649
+ }
650
+ return out;
651
+ }
652
+
653
+ // src/parseSkills.ts
654
+ var ParseSkillsError = class extends Error {
655
+ constructor(message) {
656
+ super(message);
657
+ this.name = "ParseSkillsError";
658
+ }
659
+ };
660
+ var FIELD_PATTERN = /^\*\*([A-Z][A-Za-z]*):\*\* (.*)$/;
661
+ function parseSkills(input) {
662
+ if (input.includes("\r")) {
663
+ throw new ParseSkillsError("CR characters are not allowed; expected LF line endings.");
664
+ }
665
+ const lines = input.split("\n");
666
+ if (lines[0] !== "---" || lines[1] !== `schema_version: ${SCHEMA_VERSION}` || lines[2] !== "---") {
667
+ throw new ParseSkillsError(
668
+ `Missing or unrecognized frontmatter (expected schema_version: ${SCHEMA_VERSION}).`
669
+ );
670
+ }
671
+ const skills = [];
672
+ let i = 3;
673
+ while (i < lines.length) {
674
+ if (lines[i] === "" || lines[i] === void 0) {
675
+ i++;
676
+ continue;
677
+ }
678
+ const heading = lines[i];
679
+ const slugMatch = heading.match(/^## (.+)$/);
680
+ if (!slugMatch) {
681
+ throw new ParseSkillsError(`Expected '## <slug>' heading, got: ${JSON.stringify(heading)}`);
682
+ }
683
+ const slug = slugMatch[1];
684
+ i++;
685
+ const raw = { slug };
686
+ while (i < lines.length) {
687
+ if (lines[i] === "") {
688
+ i++;
689
+ continue;
690
+ }
691
+ const line = lines[i];
692
+ if (line === "---") {
693
+ i++;
694
+ break;
695
+ }
696
+ const fieldMatch = line.match(FIELD_PATTERN);
697
+ if (!fieldMatch) {
698
+ throw new ParseSkillsError(
699
+ `Expected '**Field:** value' or '---' inside skill '${slug}', got: ${JSON.stringify(line)}`
700
+ );
701
+ }
702
+ const [, field, value] = fieldMatch;
703
+ switch (field) {
704
+ case "Name":
705
+ raw.name = value;
706
+ break;
707
+ case "Description":
708
+ raw.description = value;
709
+ break;
710
+ case "Tags":
711
+ raw.tags = value.split(",").map((t) => t.trim()).filter((t) => t.length > 0);
712
+ break;
713
+ case "Source":
714
+ raw.source = value;
715
+ break;
716
+ default:
717
+ throw new ParseSkillsError(`Unknown field '${field}' inside skill '${slug}'.`);
718
+ }
719
+ i++;
720
+ }
721
+ if (raw.name === void 0 || raw.description === void 0 || raw.tags === void 0 || raw.source === void 0) {
722
+ throw new ParseSkillsError(
723
+ `Skill '${slug}' is missing required fields (Name, Description, Tags, Source).`
724
+ );
725
+ }
726
+ skills.push({
727
+ slug,
728
+ name: raw.name,
729
+ description: raw.description,
730
+ tags: raw.tags,
731
+ source: raw.source
732
+ });
733
+ }
734
+ return SkillsDocSchema.parse({ schema_version: SCHEMA_VERSION, skills });
735
+ }
736
+
737
+ // src/merge.ts
738
+ function normalizeTags(tags) {
739
+ return [...new Set(tags)].sort();
740
+ }
741
+ function tagsEquivalent(a, b) {
742
+ const na = normalizeTags(a);
743
+ const nb = normalizeTags(b);
744
+ if (na.length !== nb.length) return false;
745
+ for (let i = 0; i < na.length; i++) if (na[i] !== nb[i]) return false;
746
+ return true;
747
+ }
748
+ function skillsEqual(a, b) {
749
+ return a.name === b.name && a.description === b.description && a.source === b.source && tagsEquivalent(a.tags, b.tags);
750
+ }
751
+ function mergeSkills(existing, incoming) {
752
+ const bySlug = /* @__PURE__ */ new Map();
753
+ for (const s of existing) bySlug.set(s.slug, s);
754
+ const added = [];
755
+ const updated = [];
756
+ for (const s of incoming) {
757
+ const prev = bySlug.get(s.slug);
758
+ if (prev === void 0) {
759
+ added.push(s.slug);
760
+ } else if (!skillsEqual(prev, s)) {
761
+ updated.push(s.slug);
762
+ }
763
+ bySlug.set(s.slug, s);
764
+ }
765
+ added.sort();
766
+ updated.sort();
767
+ return {
768
+ merged: [...bySlug.values()],
769
+ added,
770
+ updated
771
+ };
772
+ }
773
+
774
+ // src/canonicalJson.ts
775
+ var CanonicalJsonError = class extends Error {
776
+ constructor(message) {
777
+ super(message);
778
+ this.name = "CanonicalJsonError";
779
+ }
780
+ };
781
+ function compareCodepoints2(a, b) {
782
+ if (a < b) return -1;
783
+ if (a > b) return 1;
784
+ return 0;
785
+ }
786
+ function canonicalize(value) {
787
+ if (value === null) return "null";
788
+ if (typeof value === "boolean") return value ? "true" : "false";
789
+ if (typeof value === "string") return JSON.stringify(value);
790
+ if (typeof value === "number") {
791
+ if (!Number.isInteger(value)) {
792
+ throw new CanonicalJsonError(
793
+ `non-integer number not allowed in canonical JSON: ${value}`
794
+ );
795
+ }
796
+ return String(value);
797
+ }
798
+ if (Array.isArray(value)) {
799
+ return `[${value.map(canonicalize).join(",")}]`;
800
+ }
801
+ if (typeof value === "object") {
802
+ const keys = Object.keys(value).sort(compareCodepoints2);
803
+ const parts = keys.map((k) => `${JSON.stringify(k)}:${canonicalize(value[k])}`);
804
+ return `{${parts.join(",")}}`;
805
+ }
806
+ throw new CanonicalJsonError(`unsupported value type: ${typeof value}`);
807
+ }
808
+
809
+ // src/fileLock.ts
810
+ import { open, readFile as readFile3, unlink } from "fs/promises";
811
+ var DEFAULT_LOCK_TIMEOUT_MS = 1e4;
812
+ var DEFAULT_STALE_MS = 3e4;
813
+ var RETRY_BACKOFF_MS = 50;
814
+ var FileLockError = class extends Error {
815
+ constructor(message) {
816
+ super(message);
817
+ this.name = "FileLockError";
818
+ }
819
+ };
820
+ function sleep(ms) {
821
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
822
+ }
823
+ function pidAlive(pid) {
824
+ try {
825
+ process.kill(pid, 0);
826
+ return true;
827
+ } catch (err) {
828
+ return err.code === "EPERM";
829
+ }
830
+ }
831
+ async function readHolder(lockPath) {
832
+ try {
833
+ const raw = await readFile3(lockPath, "utf8");
834
+ const parsed = JSON.parse(raw);
835
+ if (typeof parsed.pid === "number" && typeof parsed.acquired_at === "number") {
836
+ return { pid: parsed.pid, acquired_at: parsed.acquired_at };
837
+ }
838
+ return null;
839
+ } catch {
840
+ return null;
841
+ }
842
+ }
843
+ async function withFileLock(targetPath, fn, opts = {}) {
844
+ const lockPath = `${targetPath}.lock`;
845
+ const timeoutMs = opts.timeoutMs ?? DEFAULT_LOCK_TIMEOUT_MS;
846
+ const staleMs = opts.staleMs ?? DEFAULT_STALE_MS;
847
+ const now = opts.now ?? Date.now;
848
+ const deadline = now() + timeoutMs;
849
+ for (; ; ) {
850
+ try {
851
+ const handle = await open(lockPath, "wx");
852
+ try {
853
+ await handle.writeFile(
854
+ JSON.stringify({ pid: process.pid, acquired_at: now() }),
855
+ "utf8"
856
+ );
857
+ } finally {
858
+ await handle.close();
859
+ }
860
+ try {
861
+ return await fn();
862
+ } finally {
863
+ await unlink(lockPath).catch(() => {
864
+ });
865
+ }
866
+ } catch (err) {
867
+ if (err.code !== "EEXIST") throw err;
868
+ const holder = await readHolder(lockPath);
869
+ const stale = holder !== null && (!pidAlive(holder.pid) || now() - holder.acquired_at > staleMs);
870
+ if (stale) {
871
+ await unlink(lockPath).catch(() => {
872
+ });
873
+ continue;
874
+ }
875
+ if (now() >= deadline) {
876
+ throw new FileLockError(
877
+ `Timed out after ${timeoutMs}ms acquiring ${lockPath}` + (holder ? ` (held by pid ${holder.pid})` : "")
878
+ );
879
+ }
880
+ await sleep(RETRY_BACKOFF_MS);
881
+ }
882
+ }
883
+ }
884
+
885
+ // src/operations/feed.ts
886
+ import { createHash as createHash2 } from "crypto";
887
+
888
+ // src/agent.ts
889
+ import { mkdir, readFile as readFile5, readdir, rename, stat as stat2, writeFile } from "fs/promises";
890
+ import { join } from "path";
891
+ import { v7 as uuidv7 } from "uuid";
892
+ import { z as z3 } from "zod";
893
+
894
+ // src/provenance.ts
895
+ import { createHash } from "crypto";
896
+ import { appendFile, readFile as readFile4 } from "fs/promises";
897
+ import { z as z2 } from "zod";
898
// Provenance log format version written and accepted by this build.
var PROVENANCE_SCHEMA_VERSION = 1;
// Lowercase hex SHA-256 digest (exactly 64 chars).
var SHA256_HEX = /^[0-9a-f]{64}$/;
// Accepted media kinds for a fed source document.
var SOURCE_KINDS = ["text", "markdown", "pdf", "epub", "web"];
// Input recorded for a `feed` entry: where the content came from,
// its digest/size, and the model that processed it.
var FeedInputSchema = z2.object({
  source: z2.string().min(1),
  source_url: z2.union([z2.string().url(), z2.null()]),
  source_kind: z2.enum(SOURCE_KINDS),
  source_sha256: z2.string().regex(SHA256_HEX),
  source_bytes: z2.number().int().nonnegative(),
  model: z2.string().min(1)
});
// Output recorded for a `feed` entry: skill deltas plus the resulting
// skills.md digest and size.
var FeedOutputSchema = z2.object({
  skills_added: z2.array(z2.string()),
  skills_updated: z2.array(z2.string()),
  skills_removed: z2.array(z2.string()),
  skills_md_sha256: z2.string().regex(SHA256_HEX),
  skills_md_bytes: z2.number().int().nonnegative()
});
// `agent_created` entry payloads.
var AgentCreatedInputSchema = z2.object({ name: z2.string() });
var AgentCreatedOutputSchema = z2.object({ agent_id: z2.string().min(1) });
// `bound` entry payloads: what was pushed to the registry and where.
var BoundInputSchema = z2.object({
  skills_md_sha256: z2.string().regex(SHA256_HEX),
  aspiration_sha256s: z2.array(z2.string().regex(SHA256_HEX))
});
var BoundOutputSchema = z2.object({
  registry_url: z2.string().url(),
  bound_at: z2.string().min(1)
});
// `aspiration_added` entry payloads.
var AspirationAddedInputSchema = z2.object({
  aspiration_sha256: z2.string().regex(SHA256_HEX),
  aspiration_bytes: z2.number().int().nonnegative(),
  source: z2.string().min(1)
});
var AspirationAddedOutputSchema = z2.object({
  registry_url: z2.string().url()
});
// Fields shared by every chain entry. `prev` is null only for the first
// entry; `entry_sha256` is the digest of the entry minus this field
// (see computeEntryHash / verifyChain).
var BaseEntryFieldsSchema = {
  schema_version: z2.literal(PROVENANCE_SCHEMA_VERSION),
  seq: z2.number().int().positive(),
  ts: z2.string().min(1),
  prev: z2.union([z2.string().regex(SHA256_HEX), z2.null()]),
  entry_sha256: z2.string().regex(SHA256_HEX)
};
var FeedEntrySchema = z2.object({
  ...BaseEntryFieldsSchema,
  kind: z2.literal("feed"),
  input: FeedInputSchema,
  output: FeedOutputSchema
});
var AgentCreatedEntrySchema = z2.object({
  ...BaseEntryFieldsSchema,
  kind: z2.literal("agent_created"),
  input: AgentCreatedInputSchema,
  output: AgentCreatedOutputSchema
});
var BoundEntrySchema = z2.object({
  ...BaseEntryFieldsSchema,
  kind: z2.literal("bound"),
  input: BoundInputSchema,
  output: BoundOutputSchema
});
var AspirationAddedEntrySchema = z2.object({
  ...BaseEntryFieldsSchema,
  kind: z2.literal("aspiration_added"),
  input: AspirationAddedInputSchema,
  output: AspirationAddedOutputSchema
});
// Any valid provenance entry, discriminated on `kind`.
var ProvenanceEntrySchema = z2.discriminatedUnion("kind", [
  FeedEntrySchema,
  AgentCreatedEntrySchema,
  BoundEntrySchema,
  AspirationAddedEntrySchema
]);
// Kinds this build understands; used by readChain to produce a friendlier
// "upgrade modex-cli" error for entries written by newer versions.
var KNOWN_ENTRY_KINDS = [
  "feed",
  "agent_created",
  "bound",
  "aspiration_added"
];
977
// Error type raised for provenance chain read/validation/append failures.
var ProvenanceError = class extends Error {
  constructor(msg) {
    super(msg);
    this.name = "ProvenanceError";
  }
};
983
// SHA-256 digest of a UTF-8 string, returned as lowercase hex.
function sha256Hex(input) {
  const hasher = createHash("sha256");
  hasher.update(input, "utf8");
  return hasher.digest("hex");
}
986
// Digest of a provenance entry (without its entry_sha256 field),
// computed over its canonical JSON form.
function computeEntryHash(entry) {
  const canonical = canonicalize(entry);
  return sha256Hex(canonical);
}
989
// Read and parse a provenance JSONL chain from disk.
// Returns [] when the file does not exist or is empty. Throws ProvenanceError
// on malformed JSON, a legacy schema_version=0 entry, an unrecognized entry
// kind, or a schema mismatch. Does NOT verify hash linkage — see verifyChain.
async function readChain(path) {
  let raw;
  try {
    raw = await readFile4(path, "utf8");
  } catch (err) {
    const e = err;
    // A missing file means "no chain yet", not an error.
    if (e.code === "ENOENT") return [];
    throw err;
  }
  if (raw.length === 0) return [];
  // Tolerate trailing newlines / blank lines by dropping empty splits.
  const lines = raw.split("\n").filter((l) => l.length > 0);
  const entries = [];
  for (let i = 0; i < lines.length; i++) {
    let parsedJson;
    try {
      parsedJson = JSON.parse(lines[i]);
    } catch (err) {
      throw new ProvenanceError(`Line ${i + 1}: invalid JSON (${err.message})`);
    }
    // Legacy v0 chains are rejected with migration guidance before schema validation.
    if (parsedJson !== null && typeof parsedJson === "object" && "schema_version" in parsedJson && parsedJson.schema_version === 0) {
      throw new ProvenanceError(
        `Line ${i + 1}: provenance entry has schema_version=0 (modex-cli@0.1.x, Phase B). v1 (Phase C) is not backward-compatible: the feed entry shape gained source_kind and source_url. Create a fresh agent under .modex/, or pin to @modexagents/cli@0.1.x for legacy agents.`
      );
    }
    // Entry kinds written by a newer CLI get an "upgrade" message rather than a raw schema failure.
    if (parsedJson !== null && typeof parsedJson === "object" && "kind" in parsedJson && typeof parsedJson.kind === "string" && !KNOWN_ENTRY_KINDS.includes(parsedJson.kind)) {
      throw new ProvenanceError(
        `Line ${i + 1}: provenance entry kind '${parsedJson.kind}' is not recognized by this build of modex-cli. It was likely written by a newer version \u2014 upgrade modex-cli to read this chain.`
      );
    }
    const result = ProvenanceEntrySchema.safeParse(parsedJson);
    if (!result.success) {
      throw new ProvenanceError(
        `Line ${i + 1}: schema mismatch (${result.error.issues.map((iss) => `${iss.path.join(".")}: ${iss.message}`).join("; ")})`
      );
    }
    entries.push(result.data);
  }
  return entries;
}
1028
// Verify structural integrity of a parsed provenance chain:
//  - seq values must be contiguous starting at 1;
//  - each entry's `prev` must equal the previous entry's entry_sha256
//    (null for the first entry);
//  - each stored entry_sha256 must match a recomputation over the entry
//    minus that field.
// Throws ProvenanceError on the first violation; returns nothing on success.
function verifyChain(entries) {
  let expectedPrev = null;
  for (let i = 0; i < entries.length; i++) {
    const e = entries[i];
    if (e.seq !== i + 1) {
      throw new ProvenanceError(
        `Entry ${i}: expected seq=${i + 1}, got seq=${e.seq}`
      );
    }
    if (e.prev !== expectedPrev) {
      throw new ProvenanceError(
        `Entry ${i} (seq=${e.seq}): prev=${e.prev ?? "null"} does not match previous entry_sha256=${expectedPrev ?? "null"}`
      );
    }
    // Recompute the digest with entry_sha256 itself excluded.
    const { entry_sha256, ...rest } = e;
    const recomputed = computeEntryHash(rest);
    if (recomputed !== entry_sha256) {
      throw new ProvenanceError(
        `Entry ${i} (seq=${e.seq}): entry_sha256 mismatch (stored=${entry_sha256}, recomputed=${recomputed})`
      );
    }
    expectedPrev = entry_sha256;
  }
}
1052
// Append a new entry to the provenance chain at opts.path, under a file lock.
// seq/prev default to "next after the current chain tail"; ts defaults to
// opts.ts, then the draft's ts, then now. The entry is schema-validated,
// hashed (digest over the entry minus entry_sha256), and appended as one
// canonical-JSON line. Returns the full entry as written.
async function recordEntry(opts) {
  return withFileLock(
    opts.path,
    async () => {
      let seq = opts.seq;
      let prev = opts.prev;
      // Only re-read the chain when the caller did not pin seq/prev.
      if (seq === void 0 || prev === void 0) {
        const existing = await readChain(opts.path);
        seq = seq ?? existing.length + 1;
        prev = prev ?? (existing.length === 0 ? null : existing[existing.length - 1].entry_sha256);
      }
      const ts = opts.ts ?? opts.draft.ts ?? (/* @__PURE__ */ new Date()).toISOString();
      const skeleton = {
        schema_version: PROVENANCE_SCHEMA_VERSION,
        seq,
        ts,
        kind: opts.draft.kind,
        input: opts.draft.input,
        output: opts.draft.output,
        prev
      };
      // Validate with a placeholder digest; the real digest is computed below.
      const validation = ProvenanceEntrySchema.safeParse({
        ...skeleton,
        entry_sha256: "0".repeat(64)
      });
      if (!validation.success) {
        throw new ProvenanceError(
          `Invalid entry: ${validation.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ")}`
        );
      }
      const entry_sha256 = computeEntryHash(skeleton);
      const full = { ...skeleton, entry_sha256 };
      const line = canonicalize(full) + "\n";
      await appendFile(opts.path, line, "utf8");
      return full;
    },
    opts.lock
  );
}
1091
+
1092
+ // src/agent.ts
1093
// config.json format version.
var AGENT_CONFIG_SCHEMA_VERSION = 0;
// Warn when skills.md is estimated at or above this many tokens (see runFeed).
var DEFAULT_TOKEN_CAP_WARN_AT = 32e3;
// Directory that holds all agents beneath a base directory.
var MODEX_DIR_NAME = ".modex";
// UUIDv7 shape (version nibble 7, RFC 4122 variant nibble 8-b); case-insensitive.
var UUID7_PATTERN = /^[0-9a-f]{8}-[0-9a-f]{4}-7[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
// Shape of an agent's config.json.
var AgentConfigSchema = z3.object({
  schema_version: z3.literal(AGENT_CONFIG_SCHEMA_VERSION),
  id: z3.string().regex(UUID7_PATTERN, "agent id must be a UUIDv7"),
  name: z3.string(),
  created_at: z3.string().min(1),
  token_cap_warn_at: z3.number().int().positive()
});
1104
// Error type raised for agent load/validation failures.
var AgentError = class extends Error {
  constructor(msg) {
    super(msg);
    this.name = "AgentError";
  }
};
1110
// Path of the .modex root inside a base directory.
function modexDir(baseDir) {
  return join(baseDir, MODEX_DIR_NAME);
}
1113
// Canonical on-disk locations for one agent: <baseDir>/.modex/<id>/…
function agentPaths(baseDir, id) {
  const dir = join(modexDir(baseDir), id);
  const inDir = (file) => join(dir, file);
  return {
    dir,
    configFile: inDir("config.json"),
    skillsFile: inDir("skills.md"),
    provenanceFile: inDir("provenance.jsonl"),
    registryFile: inDir("registry.json")
  };
}
1123
// Atomically persist `value` as pretty-printed JSON with a trailing newline:
// write to a unique temp file, then rename over the destination.
async function writeJsonAtomic(path, value) {
  const body = JSON.stringify(value, null, 2) + "\n";
  const tmpPath = `${path}.tmp.${process.pid}.${Date.now()}`;
  await writeFile(tmpPath, body, "utf8");
  await rename(tmpPath, path);
}
1128
// Atomically write skills.md content (temp file + rename).
async function writeSkillsAtomic(path, content) {
  const tmpPath = `${path}.tmp.${process.pid}.${Date.now()}`;
  await writeFile(tmpPath, content, "utf8");
  await rename(tmpPath, path);
}
1133
// Create .modex/.gitignore (ignoring *.lock files) unless it already exists.
async function ensureModexGitignore(baseDir) {
  const target = join(modexDir(baseDir), ".gitignore");
  try {
    // Flag "wx" fails with EEXIST when the file is already present, which we accept.
    await writeFile(target, "*.lock\n", { flag: "wx" });
  } catch (err) {
    if (err.code !== "EEXIST") throw err;
  }
}
1141
// Create a new agent under <baseDir>/.modex/<uuid7>/: make the directory,
// write config.json atomically, and seed the provenance chain with an
// `agent_created` entry whose timestamp matches config.created_at.
// All fields can be overridden via opts (id/ts/name/tokenCapWarnAt/baseDir).
async function createAgent(opts = {}) {
  const baseDir = opts.baseDir ?? process.cwd();
  const id = opts.id ?? uuidv7();
  const created_at = opts.ts ?? (/* @__PURE__ */ new Date()).toISOString();
  const name = opts.name ?? "";
  const token_cap_warn_at = opts.tokenCapWarnAt ?? DEFAULT_TOKEN_CAP_WARN_AT;
  const paths = agentPaths(baseDir, id);
  await mkdir(paths.dir, { recursive: true });
  // Keep *.lock files out of version control for the whole .modex tree.
  await ensureModexGitignore(baseDir);
  const config = {
    schema_version: AGENT_CONFIG_SCHEMA_VERSION,
    id,
    name,
    created_at,
    token_cap_warn_at
  };
  await writeJsonAtomic(paths.configFile, config);
  await recordEntry({
    path: paths.provenanceFile,
    draft: {
      kind: "agent_created",
      input: { name },
      output: { agent_id: id }
    },
    ts: created_at
  });
  return { config, paths };
}
1169
// Load an agent's config.json and derived paths.
// Throws AgentError when the agent directory/config is missing, the config is
// not valid JSON, or it fails schema validation; other filesystem errors are
// rethrown unchanged.
async function loadAgent(id, baseDir) {
  const root = baseDir ?? process.cwd();
  const paths = agentPaths(root, id);
  let raw;
  try {
    raw = await readFile5(paths.configFile, "utf8");
  } catch (err) {
    const e = err;
    if (e.code === "ENOENT") {
      throw new AgentError(
        `Agent '${id}' not found at ${paths.configFile}. Run 'modex agents create' first, or 'modex agents list' to see existing agents.`
      );
    }
    throw err;
  }
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (err) {
    throw new AgentError(`Agent '${id}' has invalid config.json: ${err.message}`);
  }
  const result = AgentConfigSchema.safeParse(parsed);
  if (!result.success) {
    throw new AgentError(
      `Agent '${id}' config.json failed validation: ${result.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ")}`
    );
  }
  return { config: result.data, paths };
}
1198
// List all agents under <baseDir>/.modex, oldest first by created_at.
// Entries that are not UUIDv7-named directories, or whose config fails to
// load/validate, are silently skipped (best-effort listing).
async function listAgents(baseDir) {
  const root = baseDir ?? process.cwd();
  const dir = modexDir(root);
  let entries;
  try {
    entries = await readdir(dir);
  } catch (err) {
    const e = err;
    // No .modex directory yet means "no agents", not an error.
    if (e.code === "ENOENT") return [];
    throw err;
  }
  const records = [];
  for (const name of entries) {
    if (!UUID7_PATTERN.test(name)) continue;
    const candidate = join(dir, name);
    let s;
    try {
      s = await stat2(candidate);
    } catch {
      continue;
    }
    if (!s.isDirectory()) continue;
    try {
      records.push(await loadAgent(name, root));
    } catch {
      continue;
    }
  }
  // Fix: the previous comparator returned 1 for equal created_at values and
  // never 0, making it inconsistent per the ECMAScript sort contract
  // (implementation-defined ordering). Use a proper three-way compare;
  // ISO-8601 timestamps order correctly as strings.
  records.sort((a, b) => {
    if (a.config.created_at < b.config.created_at) return -1;
    if (a.config.created_at > b.config.created_at) return 1;
    return 0;
  });
  return records;
}
1229
// Load and parse an agent's skills.md; a missing file yields an empty list.
async function readAgentSkills(skillsFile) {
  let raw;
  try {
    raw = await readFile5(skillsFile, "utf8");
  } catch (err) {
    if (err.code === "ENOENT") return [];
    throw err;
  }
  const doc = parseSkills(raw);
  return doc.skills;
}
1240
+
1241
+ // src/tokenEstimate.ts
1242
// Rough token estimate: one token per 4 Unicode code points, rounded up.
function estimateTokens(text) {
  const codePoints = [...text].length;
  return Math.ceil(codePoints / 4);
}
1247
+
1248
+ // src/operations/feed.ts
1249
// SHA-256 digest of a UTF-8 string, returned as lowercase hex.
function sha256Hex2(text) {
  const hasher = createHash2("sha256");
  hasher.update(text, "utf8");
  return hasher.digest("hex");
}
1252
// Resolve an optional timestamp: undefined -> now (ISO 8601),
// function -> its return value, anything else -> used as-is.
function resolveTs(ts) {
  switch (typeof ts) {
    case "undefined":
      return new Date().toISOString();
    case "function":
      return ts();
    default:
      return ts;
  }
}
1257
// Feed one or more sources into an agent: for each expanded target, load the
// content, extract skills with the model, merge into the existing skills.md,
// write it atomically, and append a `feed` provenance entry. Prints per-source
// progress to stdout and a token-cap warning to stderr when the estimated
// skills.md size reaches the agent's token_cap_warn_at.
// Returns { agentId, perSource } with per-source deltas and digests.
async function runFeed(agentId, patterns, opts = {}) {
  const stdout = opts.stdout ?? process.stdout;
  const stderr = opts.stderr ?? process.stderr;
  const agent = await loadAgent(agentId, opts.baseDir);
  const targets = await expandPatterns(patterns, { cwd: opts.baseDir });
  const loader = opts.loadSource ?? readSource;
  const perSource = [];
  for (let i = 0; i < targets.length; i++) {
    const target = targets[i];
    const loaded = await loader(target);
    const incoming = await extractSkills(loaded.content, {
      source: loaded.source,
      model: opts.model,
      apiKey: opts.apiKey,
      client: opts.client
    });
    // Re-read skills each iteration so earlier targets' merges accumulate.
    const existing = await readAgentSkills(agent.paths.skillsFile);
    const { merged, added, updated } = mergeSkills(existing, incoming);
    const skillsMd = serialize(buildDoc(merged));
    await writeSkillsAtomic(agent.paths.skillsFile, skillsMd);
    const skillsMdSha256 = sha256Hex2(skillsMd);
    const skillsMdBytes = Buffer.byteLength(skillsMd, "utf8");
    const sourceSha256 = sha256Hex2(loaded.content);
    const sourceBytes = Buffer.byteLength(loaded.content, "utf8");
    // One provenance entry per source, recording input digest and resulting deltas.
    await recordEntry({
      path: agent.paths.provenanceFile,
      draft: {
        kind: "feed",
        input: {
          source: loaded.source,
          source_url: loaded.source_url,
          source_kind: loaded.source_kind,
          source_sha256: sourceSha256,
          source_bytes: sourceBytes,
          model: opts.model ?? MODEL_ID
        },
        output: {
          skills_added: added,
          skills_updated: updated,
          skills_removed: [],
          skills_md_sha256: skillsMdSha256,
          skills_md_bytes: skillsMdBytes
        }
      },
      ts: resolveTs(opts.ts)
    });
    const estimatedTokens = estimateTokens(skillsMd);
    const warnedOverCap = estimatedTokens >= agent.config.token_cap_warn_at;
    if (warnedOverCap) {
      stderr.write(
        `warning: skills.md is ~${estimatedTokens} tokens (cap: ${agent.config.token_cap_warn_at}). Consider splitting into multiple agents.
`
      );
    }
    stdout.write(
      `[${i + 1}/${targets.length}] ${loaded.source}: +${added.length} added, ${updated.length} updated, sha256=${skillsMdSha256.slice(0, 12)}
`
    );
    perSource.push({
      target,
      source: loaded.source,
      added,
      updated,
      skillsMdSha256,
      skillsMdBytes,
      estimatedTokens,
      warnedOverCap
    });
  }
  const totalAdded = perSource.reduce((n, r) => n + r.added.length, 0);
  const totalUpdated = perSource.reduce((n, r) => n + r.updated.length, 0);
  stdout.write(
    `done: ${perSource.length} source${perSource.length === 1 ? "" : "s"}, ${totalAdded} added, ${totalUpdated} updated total
`
  );
  return { agentId: agent.config.id, perSource };
}
1334
// True when `err` is one of the domain error types runFeed can raise.
function isFeedError(err) {
  const feedErrorTypes = [ExtractionError, SourceError, AgentError, ParseSkillsError, ProvenanceError];
  return feedErrorTypes.some((T) => err instanceof T);
}
1337
+
1338
+ // src/operations/agents.ts
1339
// Create a new agent and print its id (one line) to stdout; returns the id.
async function runAgentsCreate(opts = {}) {
  const out = opts.stdout ?? process.stdout;
  const agent = await createAgent({ name: opts.name, baseDir: opts.baseDir });
  out.write(`${agent.config.id}\n`);
  return agent.config.id;
}
1346
// Print one line per agent: id, name (or "(unnamed)"), created_at, and the
// number of skills currently in its skills.md. Prints a hint when no agents
// exist in this directory.
async function runAgentsList(opts = {}) {
  const stdout = opts.stdout ?? process.stdout;
  const agents = await listAgents(opts.baseDir);
  if (agents.length === 0) {
    stdout.write("(no agents in this directory \u2014 run `modex agents create` to make one)\n");
    return;
  }
  for (const a of agents) {
    const skills = await readAgentSkills(a.paths.skillsFile);
    const name = a.config.name.length > 0 ? a.config.name : "(unnamed)";
    stdout.write(`${a.config.id} ${name} ${a.config.created_at} ${skills.length} skills
`);
  }
}
1360
+
1361
+ // src/credentials.ts
1362
+ import { chmod, mkdir as mkdir2, readFile as readFile6, rename as rename2, unlink as unlink2, writeFile as writeFile2 } from "fs/promises";
1363
+ import { homedir } from "os";
1364
+ import { join as join2 } from "path";
1365
+ import { z as z4 } from "zod";
1366
// credentials.json format version.
var CREDENTIALS_SCHEMA_VERSION = 1;
// Registry used when none is specified at login.
var DEFAULT_REGISTRY_URL = "https://registry.modex.md";
// Shape of the saved credentials file.
var CredentialsSchema = z4.object({
  schema_version: z4.literal(CREDENTIALS_SCHEMA_VERSION),
  access_token: z4.string().min(1),
  registry_url: z4.string().url()
});
1373
// Error type raised for credential read/write/validation failures.
var CredentialsError = class extends Error {
  constructor(msg) {
    super(msg);
    this.name = "CredentialsError";
  }
};
1379
// CLI config directory: $XDG_CONFIG_HOME/modex when set and non-empty,
// otherwise ~/.config/modex.
function configDir() {
  const xdg = process.env["XDG_CONFIG_HOME"];
  if (xdg && xdg.length > 0) return join2(xdg, "modex");
  return join2(homedir(), ".config", "modex");
}
1384
// Absolute path of credentials.json inside the given (or default) config dir.
function credentialsPath(dir = configDir()) {
  const fileName = "credentials.json";
  return join2(dir, fileName);
}
1387
// Load saved credentials; returns null when not logged in (file absent).
// Throws CredentialsError when the file is unreadable, not JSON, or invalid.
async function loadCredentials(dir = configDir()) {
  const path = credentialsPath(dir);
  let raw;
  try {
    raw = await readFile6(path, "utf8");
  } catch (err) {
    const e = err;
    // Absent file = not logged in; any other read failure is an error.
    if (e.code === "ENOENT") return null;
    throw new CredentialsError(`Could not read ${path}: ${e.message}`);
  }
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (err) {
    throw new CredentialsError(`${path} is not valid JSON: ${err.message}`);
  }
  const result = CredentialsSchema.safeParse(parsed);
  if (!result.success) {
    throw new CredentialsError(
      `${path} failed validation: ${result.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ")}`
    );
  }
  return result.data;
}
1411
// Persist credentials atomically with owner-only permissions:
// dir mode 448 (0o700), file mode 384 (0o600). The file is chmod'd both
// before and after the rename so a pre-existing destination with looser
// permissions is also tightened.
async function saveCredentials(credentials, dir = configDir()) {
  await mkdir2(dir, { recursive: true, mode: 448 });
  const body = JSON.stringify(credentials, null, 2) + "\n";
  const target = credentialsPath(dir);
  const tmpPath = `${target}.tmp.${process.pid}.${Date.now()}`;
  await writeFile2(tmpPath, body, { mode: 384, encoding: "utf8" });
  await chmod(tmpPath, 384);
  await rename2(tmpPath, target);
  await chmod(target, 384);
}
1421
// Delete credentials.json; a missing file is treated as already cleared.
async function clearCredentials(dir = configDir()) {
  try {
    await unlink2(credentialsPath(dir));
  } catch (err) {
    if (err.code === "ENOENT") return;
    throw new CredentialsError(`Could not clear credentials: ${err.message}`);
  }
}
1430
+
1431
+ // src/registry/types.ts
1432
+ import { z as z5 } from "zod";
1433
// Error for registry HTTP interactions.
// `status`: HTTP status code, or null for transport-level failures
// (DNS, connection reset). `code`: the `error` field from a structured
// error body, when the registry sent one.
var RegistryError = class extends Error {
  status;
  code;
  constructor(message, opts = {}) {
    super(message);
    this.name = "RegistryError";
    const { status, code } = opts;
    this.status = status ?? null;
    this.code = code ?? null;
  }
};
1445
// Response to POST /v1/auth/cli/device-code: codes, verification URL,
// expiry, and the suggested polling interval (seconds).
var DeviceCodeStartSchema = z5.object({
  device_code: z5.string().min(1),
  user_code: z5.string().min(1),
  verify_url: z5.string().url(),
  expires_in: z5.number().int().positive(),
  interval: z5.number().int().positive()
});
// Token polling either yields an access token or an OAuth-style error code.
var TokenResponseSchema = z5.union([
  z5.object({ access_token: z5.string().min(1) }),
  z5.object({ error: z5.string().min(1) })
]);
// Bind response; passthrough tolerates extra fields from newer servers.
var BindResponseSchema = z5.object({
  skills_md_sha256: z5.string().min(1),
  bound_at: z5.string().min(1)
}).passthrough();
// Aspiration upload response; all fields optional / passthrough.
var AspirationResponseSchema = z5.object({
  created_at: z5.string().min(1).optional()
}).passthrough();
1463
+
1464
+ // src/registry/client.ts
1465
// Seconds added to the polling interval when the registry answers `slow_down`.
var SLOW_DOWN_INCREMENT_S = 5;
// Fill in default implementations for injectable dependencies
// (fetch / sleep / now) used by the registry client.
function resolveDeps(deps = {}) {
  const defaultSleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  return {
    fetch: deps.fetch ?? globalThis.fetch,
    sleep: deps.sleep ?? defaultSleep,
    now: deps.now ?? Date.now
  };
}
1473
// Join an API path onto the registry base URL, stripping trailing slashes
// from the base first so double slashes never appear.
function endpoint(registryUrl, path) {
  const base = registryUrl.replace(/\/+$/, "");
  return `${base}${path}`;
}
1476
// POST a JSON body to `url`, optionally with a bearer token.
// Transport-level failures (DNS, refused connection) become RegistryError
// with status null. HTTP error statuses are NOT thrown here — callers
// inspect the returned { status, json }. The response body is parsed as
// JSON when possible; otherwise json is null.
async function postJson(fetchImpl, url, body, token) {
  const headers = { "content-type": "application/json" };
  if (token) headers["authorization"] = `Bearer ${token}`;
  let response;
  try {
    response = await fetchImpl(url, {
      method: "POST",
      headers,
      body: JSON.stringify(body)
    });
  } catch (err) {
    throw new RegistryError(
      `Could not reach the registry at ${url}: ${err.message}`,
      { status: null }
    );
  }
  let json = null;
  const text = await response.text();
  if (text.length > 0) {
    try {
      json = JSON.parse(text);
    } catch {
      // Non-JSON bodies are tolerated; callers see json === null.
    }
  }
  return { status: response.status, json };
}
1502
// Extract a string `error` code from a parsed JSON body; null when the body
// is missing, not an object, lacks the field, or the field is not a string.
function bodyErrorCode(json) {
  if (json === null || typeof json !== "object") return null;
  if (!("error" in json)) return null;
  const code = json.error;
  if (typeof code === "string") return code;
  return null;
}
1509
// Begin the device-code login flow. Returns the device/user codes, the
// verification URL, and polling hints; throws RegistryError on non-200
// status or a malformed response body.
async function startDeviceCode(registryUrl, deps) {
  const { fetch } = resolveDeps(deps);
  const { status, json } = await postJson(
    fetch,
    endpoint(registryUrl, "/v1/auth/cli/device-code"),
    {}
  );
  if (status !== 200) {
    throw new RegistryError(
      `Registry rejected the device-code request (HTTP ${status}).`,
      { status, code: bodyErrorCode(json) }
    );
  }
  const parsed = DeviceCodeStartSchema.safeParse(json);
  if (!parsed.success) {
    throw new RegistryError(
      `Registry device-code response was malformed: ${parsed.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ")}`,
      { status }
    );
  }
  return parsed.data;
}
1531
// Poll the token endpoint until the user approves, the code expires, or the
// registry returns a hard error. Follows the RFC 8628 device-grant protocol:
// `authorization_pending` keeps waiting, `slow_down` grows the interval by
// SLOW_DOWN_INCREMENT_S. `deps.onPending` (if provided) is invoked on each
// pending/slow-down poll for progress feedback.
async function pollForToken(registryUrl, start, deps) {
  const { fetch, sleep: sleep2, now } = resolveDeps(deps);
  const onPending = deps?.onPending;
  const deadline = now() + start.expires_in * 1e3;
  let intervalS = start.interval;
  const url = endpoint(registryUrl, "/v1/auth/cli/token");
  for (; ; ) {
    if (now() >= deadline) {
      throw new RegistryError(
        "The login code expired before it was approved. Run `modex login` again.",
        { code: "expired_token" }
      );
    }
    // Wait first: the server's interval applies before the initial poll too.
    await sleep2(intervalS * 1e3);
    const { status, json } = await postJson(fetch, url, { device_code: start.device_code });
    const parsed = TokenResponseSchema.safeParse(json);
    if (parsed.success && "access_token" in parsed.data) {
      return parsed.data.access_token;
    }
    // Prefer the schema-validated error code; fall back to a raw body scan.
    const code = parsed.success && "error" in parsed.data ? parsed.data.error : bodyErrorCode(json);
    if (code === "authorization_pending") {
      onPending?.();
      continue;
    }
    if (code === "slow_down") {
      intervalS += SLOW_DOWN_INCREMENT_S;
      onPending?.();
      continue;
    }
    throw new RegistryError(
      code ? `Registry refused the login: ${code}.` : `Registry token endpoint returned an unexpected response (HTTP ${status}).`,
      { status, code: code ?? null }
    );
  }
}
1566
// Push an agent's skills.md and provenance head to the registry bind endpoint.
// 401 -> token no longer valid; 409 -> agent already claimed by another user;
// any other non-2xx -> generic rejection. The response shape is validated
// before being returned.
async function bindAgent(registryUrl, agentId, token, body, deps) {
  const { fetch } = resolveDeps(deps);
  const { status, json } = await postJson(
    fetch,
    endpoint(registryUrl, `/v1/agents/${encodeURIComponent(agentId)}/bind`),
    body,
    token
  );
  if (status === 401) {
    throw new RegistryError("Your registry token is no longer valid.", { status, code: bodyErrorCode(json) });
  }
  if (status === 409) {
    throw new RegistryError(
      `Agent ${agentId} is already bound to a different user. Re-bind is only allowed by the user who first claimed it.`,
      { status, code: bodyErrorCode(json) }
    );
  }
  if (status < 200 || status >= 300) {
    throw new RegistryError(
      `Registry rejected the bind (HTTP ${status}).`,
      { status, code: bodyErrorCode(json) }
    );
  }
  const parsed = BindResponseSchema.safeParse(json);
  if (!parsed.success) {
    throw new RegistryError(
      `Registry bind response was malformed: ${parsed.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ")}`,
      { status }
    );
  }
  return parsed.data;
}
1598
// Upload an aspiration document for a bound agent.
// 401 -> token no longer valid; 404 -> agent not bound on the registry;
// any other non-2xx -> generic rejection. An empty response body is
// tolerated (validated as {}).
async function addAspiration(registryUrl, agentId, token, body, deps) {
  const { fetch } = resolveDeps(deps);
  const { status, json } = await postJson(
    fetch,
    endpoint(registryUrl, `/v1/agents/${encodeURIComponent(agentId)}/aspirations`),
    body,
    token
  );
  if (status === 401) {
    throw new RegistryError("Your registry token is no longer valid.", { status, code: bodyErrorCode(json) });
  }
  if (status === 404) {
    throw new RegistryError(
      `Agent ${agentId} is not bound on the registry. Run \`modex bind ${agentId}\` first.`,
      { status, code: bodyErrorCode(json) }
    );
  }
  if (status < 200 || status >= 300) {
    throw new RegistryError(
      `Registry rejected the aspiration (HTTP ${status}).`,
      { status, code: bodyErrorCode(json) }
    );
  }
  const parsed = AspirationResponseSchema.safeParse(json ?? {});
  if (!parsed.success) {
    throw new RegistryError(
      `Registry aspiration response was malformed: ${parsed.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ")}`,
      { status }
    );
  }
  return parsed.data;
}
1630
+
1631
+ // src/operations/login.ts
1632
// Interactive device-code login: print the verification URL and user code,
// poll until approved (writing a dot per pending poll), then persist the
// credentials locally. Returns the registry URL that was logged in to.
async function runLogin(opts = {}) {
  const stdout = opts.stdout ?? process.stdout;
  const registryUrl = opts.registry ?? DEFAULT_REGISTRY_URL;
  const deps = { fetch: opts.fetch, sleep: opts.sleep, now: opts.now };
  const start = await startDeviceCode(registryUrl, deps);
  stdout.write(
    `To authorize this CLI, open:
${start.verify_url}
and enter the code:
${start.user_code}

Waiting for approval\u2026
`
  );
  const accessToken = await pollForToken(registryUrl, start, {
    ...deps,
    onPending: () => stdout.write(".")
  });
  stdout.write("\n");
  await saveCredentials(
    { schema_version: 1, access_token: accessToken, registry_url: registryUrl },
    opts.configDir
  );
  stdout.write(`Logged in to ${registryUrl}.
`);
  return { registryUrl };
}
1659
// Clear stored credentials and confirm on stdout.
async function runLogout(opts = {}) {
  const out = opts.stdout ?? process.stdout;
  await clearCredentials(opts.configDir);
  out.write("Logged out. Local credentials cleared.\n");
}
1664
// True when `err` is one of the domain error types runLogin/runLogout can raise.
function isLoginError(err) {
  const loginErrorTypes = [RegistryError, CredentialsError];
  return loginErrorTypes.some((T) => err instanceof T);
}
1667
+
1668
+ // src/operations/bind.ts
1669
+ import { createHash as createHash3 } from "crypto";
1670
+ import { readFile as readFile8 } from "fs/promises";
1671
+
1672
+ // src/registryState.ts
1673
+ import { readFile as readFile7, rename as rename3, writeFile as writeFile3 } from "fs/promises";
1674
+ import { z as z6 } from "zod";
1675
// registry.json format version.
var REGISTRY_STATE_SCHEMA_VERSION = 1;
// Lowercase hex SHA-256 digest (exactly 64 chars).
var SHA256_HEX2 = /^[0-9a-f]{64}$/;
// Local record of the last successful bind: which registry, when, and the
// skills.md digest the server acknowledged.
var RegistryStateSchema = z6.object({
  schema_version: z6.literal(REGISTRY_STATE_SCHEMA_VERSION),
  registry_url: z6.string().url(),
  agent_id: z6.string().min(1),
  bound_at: z6.string().min(1),
  last_server_skills_md_sha256: z6.string().regex(SHA256_HEX2)
});
1684
// Error type raised for registry.json read/validation failures.
var RegistryStateError = class extends Error {
  constructor(msg) {
    super(msg);
    this.name = "RegistryStateError";
  }
};
1690
// Read and validate registry.json; returns null when the agent has never
// been bound (file absent). Throws RegistryStateError when the file is
// unreadable, not JSON, or fails schema validation.
async function readRegistryState(path) {
  let raw;
  try {
    raw = await readFile7(path, "utf8");
  } catch (err) {
    const e = err;
    // Absent file = never bound; any other read failure is an error.
    if (e.code === "ENOENT") return null;
    throw new RegistryStateError(`Could not read ${path}: ${e.message}`);
  }
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (err) {
    throw new RegistryStateError(`${path} is not valid JSON: ${err.message}`);
  }
  const result = RegistryStateSchema.safeParse(parsed);
  if (!result.success) {
    throw new RegistryStateError(
      `${path} failed validation: ${result.error.issues.map((i) => `${i.path.join(".")}: ${i.message}`).join("; ")}`
    );
  }
  return result.data;
}
1713
// Atomically persist registry state as pretty-printed JSON (temp file + rename).
async function writeRegistryState(path, state) {
  const body = JSON.stringify(state, null, 2) + "\n";
  const tmpPath = `${path}.tmp.${process.pid}.${Date.now()}`;
  await writeFile3(tmpPath, body, "utf8");
  await rename3(tmpPath, path);
}
1718
+
1719
+ // src/operations/bind.ts
1720
// Type guard: true for `aspiration_added` provenance entries.
var isAspirationAdded = (entry) => entry.kind === "aspiration_added";
1721
// SHA-256 digest of a UTF-8 string, returned as lowercase hex.
function sha256Hex3(text) {
  const hasher = createHash3("sha256");
  hasher.update(text, "utf8");
  return hasher.digest("hex");
}
1724
// Bind a local agent to the registry. Requires a login and a non-empty
// provenance chain; pushes skills.md, its digest, the chain head, and all
// aspiration hashes. On success, records a `bound` provenance entry and
// persists registry.json. A 401 clears stored credentials before rethrowing.
async function runBind(agentId, opts = {}) {
  const stdout = opts.stdout ?? process.stdout;
  const credentials = await loadCredentials(opts.configDir);
  if (credentials === null) {
    throw new CredentialsError("Not logged in. Run `modex login` first.");
  }
  const agent = await loadAgent(agentId, opts.baseDir);
  let skillsMd;
  try {
    skillsMd = await readFile8(agent.paths.skillsFile, "utf8");
  } catch (err) {
    const e = err;
    if (e.code !== "ENOENT") throw err;
    // No skills.md yet: serialize an empty doc so the bind body is well-formed.
    skillsMd = serialize(buildDoc(await readAgentSkills(agent.paths.skillsFile)));
  }
  const skillsMdSha256 = sha256Hex3(skillsMd);
  const chain = await readChain(agent.paths.provenanceFile);
  if (chain.length === 0) {
    throw new AgentError(
      `Agent ${agentId} has an empty provenance chain \u2014 cannot bind. The chain should always contain at least the agent_created entry.`
    );
  }
  const provenanceHead = chain[chain.length - 1].entry_sha256;
  const aspirationSha256s = chain.filter(isAspirationAdded).map((e) => e.input.aspiration_sha256);
  let response;
  try {
    response = await bindAgent(
      credentials.registry_url,
      agent.config.id,
      credentials.access_token,
      {
        skills_md: skillsMd,
        skills_md_sha256: skillsMdSha256,
        provenance_head_sha256: provenanceHead,
        aspiration_sha256s: aspirationSha256s
      },
      { fetch: opts.fetch }
    );
  } catch (err) {
    // Invalid token: drop stale credentials so the next run prompts a login.
    if (err instanceof RegistryError && err.status === 401) {
      await clearCredentials(opts.configDir);
      throw new RegistryError(
        "Your registry token is no longer valid and has been cleared. Run `modex login` again.",
        { status: 401 }
      );
    }
    throw err;
  }
  await recordEntry({
    path: agent.paths.provenanceFile,
    draft: {
      kind: "bound",
      input: {
        skills_md_sha256: skillsMdSha256,
        aspiration_sha256s: aspirationSha256s
      },
      output: {
        registry_url: credentials.registry_url,
        bound_at: response.bound_at
      }
    },
    ts: opts.ts
  });
  await writeRegistryState(agent.paths.registryFile, {
    schema_version: 1,
    registry_url: credentials.registry_url,
    agent_id: agent.config.id,
    bound_at: response.bound_at,
    last_server_skills_md_sha256: response.skills_md_sha256
  });
  stdout.write(
    `Bound ${agent.config.id} to ${credentials.registry_url}
skills.md sha256: ${skillsMdSha256.slice(0, 12)}\u2026
bound_at: ${response.bound_at}
`
  );
  return {
    agentId: agent.config.id,
    registryUrl: credentials.registry_url,
    skillsMdSha256,
    boundAt: response.bound_at
  };
}
1807
// True when `err` is one of the domain error types runBind can raise.
function isBindError(err) {
  const bindErrorTypes = [RegistryError, CredentialsError, AgentError, ProvenanceError, RegistryStateError];
  return bindErrorTypes.some((T) => err instanceof T);
}
1810
+
1811
+ // src/operations/aspirations.ts
1812
+ import { createHash as createHash4 } from "crypto";
1813
+ import { readFile as readFile9 } from "fs/promises";
1814
+ import { basename as basename4 } from "path";
1815
// Returns the hex-encoded SHA-256 digest of `text`, treating the string as
// UTF-8 bytes.
function sha256Hex4(text) {
  const hasher = createHash4("sha256");
  hasher.update(text, "utf8");
  return hasher.digest("hex");
}
1818
// Uploads a local markdown "aspiration" file to the registry the agent is
// bound to, then appends an `aspiration_added` entry to the agent's local
// provenance chain and prints a short confirmation.
//
// Side-effect order is deliberate: the registry upload happens first, and the
// provenance entry is recorded only after the upload succeeds.
//
// @param {string} agentId - id/slug of a locally configured agent.
// @param {string} mdFile - path to the markdown aspiration file to upload.
// @param {object} [opts] - optional overrides: stdout, configDir, baseDir,
//   fetch (injected fetch for the registry client), ts (timestamp override).
// @returns {Promise<{agentId, registryUrl, aspirationSha256, source}>}
// @throws {CredentialsError} when no stored login credentials exist.
// @throws {AgentError} when the agent is unbound, or the file is unreadable
//   or empty.
// @throws {RegistryError} on registry failures (401 clears stored credentials).
async function runAspirationsAdd(agentId, mdFile, opts = {}) {
  const stdout = opts.stdout ?? process.stdout;
  // Require a login before doing any other work.
  const credentials = await loadCredentials(opts.configDir);
  if (credentials === null) {
    throw new CredentialsError("Not logged in. Run `modex login` first.");
  }
  const agent = await loadAgent(agentId, opts.baseDir);
  // The agent must already be bound (`modex bind`) so we know which
  // registry_url to talk to.
  const registryState = await readRegistryState(agent.paths.registryFile);
  if (registryState === null) {
    throw new AgentError(
      `Agent ${agentId} is not bound to a registry. Run \`modex bind ${agentId}\` first.`
    );
  }
  let content;
  try {
    content = await readFile9(mdFile, "utf8");
  } catch (err) {
    // Wrap the raw fs error in a user-facing AgentError that names the file.
    throw new AgentError(`Could not read aspiration file ${mdFile}: ${err.message}`);
  }
  if (content.trim().length === 0) {
    throw new AgentError(`Aspiration file ${mdFile} is empty.`);
  }
  // Content-address the aspiration; the registry receives both the hash and
  // the raw markdown content.
  const aspirationSha256 = sha256Hex4(content);
  const source = basename4(mdFile);
  try {
    await addAspiration(
      registryState.registry_url,
      agent.config.id,
      credentials.access_token,
      { sha256: aspirationSha256, content },
      { fetch: opts.fetch }
    );
  } catch (err) {
    // A 401 means the stored token is stale: clear it so the next command
    // forces a fresh login, and surface an actionable message. All other
    // errors propagate unchanged.
    if (err instanceof RegistryError && err.status === 401) {
      await clearCredentials(opts.configDir);
      throw new RegistryError(
        "Your registry token is no longer valid and has been cleared. Run `modex login` again.",
        { status: 401 }
      );
    }
    throw err;
  }
  // Record provenance only after a successful upload, so the chain never
  // claims an aspiration the registry did not accept.
  await recordEntry({
    path: agent.paths.provenanceFile,
    draft: {
      kind: "aspiration_added",
      input: {
        aspiration_sha256: aspirationSha256,
        aspiration_bytes: Buffer.byteLength(content, "utf8"),
        source
      },
      output: {
        registry_url: registryState.registry_url
      }
    },
    ts: opts.ts
  });
  stdout.write(
    `Added aspiration to ${agent.config.id}
  source: ${source}
  sha256: ${aspirationSha256.slice(0, 12)}\u2026
`
  );
  return {
    agentId: agent.config.id,
    registryUrl: registryState.registry_url,
    aspirationSha256,
    source
  };
}
1888
// Predicate used by the CLI layer (see isUserFacingError) to decide whether
// an error thrown by the aspirations operation is one of the known, expected
// error classes rather than an unexpected programming error.
function isAspirationsError(err) {
  for (const ErrorClass of [
    RegistryError,
    CredentialsError,
    AgentError,
    ProvenanceError,
    RegistryStateError
  ]) {
    if (err instanceof ErrorClass) {
      return true;
    }
  }
  return false;
}
1891
+
1892
+ // src/operations/index.ts
1893
// True when any per-operation predicate recognizes `err` as one of this
// package's expected error classes — presumably so callers can print a
// message instead of a stack trace (verify against the CLI entry point).
function isUserFacingError(err) {
  const predicates = [isFeedError, isLoginError, isBindError, isAspirationsError];
  return predicates.some((matches) => matches(err));
}
1896
// Public API surface of the bundle: schemas and schema-version constants,
// error classes, registry/credential helpers, provenance utilities, and the
// command-level run* operations, all re-exported from the bundled source
// modules above.
export {
  AGENT_CONFIG_SCHEMA_VERSION,
  AgentConfigSchema,
  AgentCreatedEntrySchema,
  AgentError,
  AspirationAddedEntrySchema,
  BoundEntrySchema,
  CREDENTIALS_SCHEMA_VERSION,
  CanonicalJsonError,
  CredentialsError,
  CredentialsSchema,
  DEFAULT_LOCK_TIMEOUT_MS,
  DEFAULT_REGISTRY_URL,
  DEFAULT_STALE_MS,
  DEFAULT_TOKEN_CAP_WARN_AT,
  EMIT_SKILLS_TOOL,
  ExtractedSkillSchema,
  ExtractionError,
  ExtractionResultSchema,
  FeedEntrySchema,
  FileLockError,
  KNOWN_ENTRY_KINDS,
  MODEL_ID,
  MODEX_DIR_NAME,
  PROVENANCE_SCHEMA_VERSION,
  ParseSkillsError,
  ProvenanceEntrySchema,
  ProvenanceError,
  REGISTRY_STATE_SCHEMA_VERSION,
  RegistryError,
  RegistryStateError,
  RegistryStateSchema,
  SCHEMA_VERSION,
  SOURCE_KINDS,
  SYSTEM_PROMPT,
  SkillSchema,
  SkillsDocSchema,
  SourceError,
  addAspiration,
  bindAgent,
  buildDoc,
  canonicalize,
  clearCredentials,
  computeEntryHash,
  configDir,
  createAgent,
  credentialsPath,
  estimateTokens,
  expandPatterns,
  extractSkills,
  isAspirationsError,
  isBindError,
  isFeedError,
  isLoginError,
  isUserFacingError,
  isWebUrl,
  listAgents,
  loadAgent,
  loadCredentials,
  mergeSkills,
  normalizeUrl,
  parseSkills,
  pollForToken,
  readAgentSkills,
  readChain,
  readRegistryState,
  readSource,
  recordEntry,
  runAgentsCreate,
  runAgentsList,
  runAspirationsAdd,
  runBind,
  runFeed,
  runLogin,
  runLogout,
  saveCredentials,
  serialize,
  startDeviceCode,
  verifyChain,
  withFileLock,
  writeRegistryState,
  writeSkillsAtomic
};
1979
+ //# sourceMappingURL=index.js.map